Class/Object

org.platanios.tensorflow.api.ops.training.optimizers

Adam

Related Docs: object Adam | package optimizers

Permalink

case class Adam(learningRate: Double = 0.001, decay: Decay = NoDecay, beta1: Double = 0.9, beta2: Double = 0.999, useNesterov: Boolean = false, epsilon: Double = 1e-8, useLocking: Boolean = false, learningRateSummaryTag: String = null, name: String = "Adam") extends Optimizer with Product with Serializable

Linear Supertypes
Serializable, Serializable, Product, Equals, Optimizer, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. Adam
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. Optimizer
  7. AnyRef
  8. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new Adam(learningRate: Double = 0.001, decay: Decay = NoDecay, beta1: Double = 0.9, beta2: Double = 0.999, useNesterov: Boolean = false, epsilon: Double = 1e-8, useLocking: Boolean = false, learningRateSummaryTag: String = null, name: String = "Adam")

    Permalink

Value Members

  1. final def !=(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int

    Permalink
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  4. def applyDense(gradient: Output, variable: variables.Variable, iteration: Option[variables.Variable]): Op

    Permalink
    Definition Classes
    Adam → Optimizer
  5. def applyGradients(gradientsAndVariables: Seq[(OutputLike, variables.Variable)], iteration: Option[variables.Variable] = None, name: String = this.name): Op

    Permalink
    Definition Classes
    Optimizer
  6. def applySparse(gradient: OutputIndexedSlices, variable: variables.Variable, iteration: Option[variables.Variable]): Op

    Permalink
    Definition Classes
    Adam → Optimizer
  7. def applySparseDuplicateIndices(gradient: OutputIndexedSlices, variable: variables.Variable, iteration: Option[variables.Variable]): Op

    Permalink
    Attributes
    protected
    Definition Classes
    Optimizer
  8. final def asInstanceOf[T0]: T0

    Permalink
    Definition Classes
    Any
  9. val beta1: Double

    Permalink
  10. val beta2: Double

    Permalink
  11. def clone(): AnyRef

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  12. def computeGradients(loss: Output, lossGradients: Seq[OutputLike] = null, variables: Set[variables.Variable] = null, gradientsGatingMethod: GatingMethod = Gradients.OpGating, gradientsAggregationMethod: AggregationMethod = Gradients.AddAggregationMethod, colocateGradientsWithOps: Boolean = false): Seq[(OutputLike, variables.Variable)]

    Permalink
    Definition Classes
    Optimizer
  13. def createSlots(variables: Seq[variables.Variable]): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Adam → Optimizer
  14. val decay: Decay

    Permalink
  15. val epsilon: Double

    Permalink
  16. final def eq(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  17. def finalize(): Unit

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  18. def finish(updateOps: Set[Op], nameScope: String): Op

    Permalink
    Attributes
    protected
    Definition Classes
    Adam → Optimizer
  19. final def getClass(): Class[_]

    Permalink
    Definition Classes
    AnyRef → Any
  20. def getSlot(name: String, variable: variables.Variable): variables.Variable

    Permalink
    Attributes
    protected
    Definition Classes
    Optimizer
  21. def getSlot(name: String, variable: variables.Variable, initializer: Initializer, shape: core.Shape, dataType: types.DataType, variableScope: String): variables.Variable

    Permalink
    Attributes
    protected
    Definition Classes
    Optimizer
  22. final def isInstanceOf[T0]: Boolean

    Permalink
    Definition Classes
    Any
  23. val learningRate: Double

    Permalink
  24. val learningRateSummaryTag: String

    Permalink
  25. def minimize(loss: Output, lossGradients: Seq[OutputLike] = null, variables: Set[variables.Variable] = null, gradientsGatingMethod: GatingMethod = Gradients.OpGating, gradientsAggregationMethod: AggregationMethod = Gradients.AddAggregationMethod, colocateGradientsWithOps: Boolean = false, iteration: Option[variables.Variable] = None, name: String = "Minimize"): Op

    Permalink
    Definition Classes
    Optimizer
  26. val name: String

    Permalink
    Definition Classes
    Adam → Optimizer
  27. final def ne(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  28. final def notify(): Unit

    Permalink
    Definition Classes
    AnyRef
  29. final def notifyAll(): Unit

    Permalink
    Definition Classes
    AnyRef
  30. def prepare(iteration: Option[variables.Variable]): Unit

    Permalink
    Definition Classes
    Adam → Optimizer
  31. def slotNames: Set[String]

    Permalink
    Attributes
    protected
    Definition Classes
    Optimizer
  32. val slots: Map[String, Map[variables.Variable, variables.Variable]]

    Permalink
    Attributes
    protected
    Definition Classes
    Optimizer
  33. val supportedDataTypes: Set[types.DataType]

    Permalink
    Attributes
    protected
    Definition Classes
    Optimizer
  34. final def synchronized[T0](arg0: ⇒ T0): T0

    Permalink
    Definition Classes
    AnyRef
  35. val useLocking: Boolean

    Permalink
    Definition Classes
    Adam → Optimizer
  36. val useNesterov: Boolean

    Permalink
  37. final def wait(): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  38. final def wait(arg0: Long, arg1: Int): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  39. final def wait(arg0: Long): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  40. def zerosSlot(name: String, variable: variables.Variable, variableScope: String): variables.Variable

    Permalink
    Attributes
    protected
    Definition Classes
    Optimizer

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from Optimizer

Inherited from AnyRef

Inherited from Any

Ungrouped