object DifferentiableFloat
A namespace of common operators for Float layers.
Author:
杨博 (Yang Bo) <[email protected]>
Type Members
- final class FloatLayerOps[Input <: Tape] extends AnyRef
- implicit final class NativeFloatOps extends AnyRef
- trait OptimizerFactory extends AnyRef
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- implicit def Float*Float[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts two Float Layers. The returned Case is used by the polymorphic function *, which is called in MathOps.
  Example:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      inputFloatLayer * anotherFloatLayer
    }
- implicit def Float+Float[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts two Float Layers. The returned Case is used by the polymorphic function +, which is called in MathOps.
  Example:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathMethods.+(inputFloatLayer, anotherFloatLayer)
    }
- implicit def Float-Float[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts two Float Layers. The returned Case is used by the polymorphic function -, which is called in MathOps.
  Example:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathMethods.-(inputFloatLayer, anotherFloatLayer)
    }
- implicit def Float/Float[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts two Float Layers. The returned Case is used by the polymorphic function /, which is called in MathOps.
  Example:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathMethods./(inputFloatLayer, anotherFloatLayer)
    }
- implicit def abs(Float)[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts a Float Layer for the polymorphic function abs.
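  A usage sketch (not in the original page, modeled on the max and min examples; assumes abs is exposed through Poly.MathFunctions):
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    // Apply the polymorphic abs to a Float layer.
    def myNetwork(implicit inputFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.abs(inputFloatLayer)
    }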
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def clone(): AnyRef
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- implicit def exp(Float)[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts a Float Layer for the polymorphic function exp.
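  A usage sketch (not in the original page, modeled on the max and min examples; assumes exp is exposed through Poly.MathFunctions):
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    // Apply the polymorphic exp to a Float layer.
    def myNetwork(implicit inputFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.exp(inputFloatLayer)
    }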
- def finalize(): Unit
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- implicit def floatToLiteral: Aux[Float, Float, Float]
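  A hypothetical sketch (not in the original page): assuming floatToLiteral lifts a native Float into a literal Float layer, a raw constant can appear where a layer is expected:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    // 1.0f is lifted to a literal Float layer via floatToLiteral, then combined through Float+Float.
    def myNetwork(implicit inputFloatLayer: Float @Symbolic) = {
      inputFloatLayer + 1.0f
    }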
- implicit def floatTrainable: Trainable[Float, Float]
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- implicit def log(Float)[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts a Float Layer for the polymorphic function log.
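  A usage sketch (not in the original page, modeled on the max and min examples; assumes log is exposed through Poly.MathFunctions):
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    // Apply the polymorphic log to a Float layer.
    def myNetwork(implicit inputFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.log(inputFloatLayer)
    }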
- implicit def max(Float,Float)[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts two Float Layers for the polymorphic function max.
  Example:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.max(inputFloatLayer, anotherFloatLayer)
    }
- implicit def min(Float,Float)[Input <: Tape]: Aux[Aux[Input, Tape], Aux[Input, Tape], Aux[Input, Tape]]
  Returns a Case that accepts two Float Layers for the polymorphic function min.
  Example:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic
    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.min(inputFloatLayer, anotherFloatLayer)
    }
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- implicit def toFloatLayerOps[From, Input <: Tape](from: From)(implicit toLayer: OfPlaceholder[From, Input, FloatPlaceholder]): FloatLayerOps[Input]
  Implicitly converts any layer to FloatLayerOps, which enables common methods for Float layers.
  Example:
    import com.thoughtworks.deeplearning.DifferentiableFloat._
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- object Layers
- object OptimizerFactory
- object Optimizers
  Optimizers of Float.
  Example:
    implicit val optimizerFactory = new DifferentiableFloat.OptimizerFactory {
      override def floatOptimizer(weight: Weight): Optimizer = {
        new LearningRate with L2Regularization {
          var learningRate = 0.00003f
          override protected def l2Regularization: Float = 0.003f
          override protected def currentLearningRate(): Float = {
            // Decay the learning rate before each update.
            learningRate *= 0.75f
            learningRate
          }
        }
      }
    }