object DifferentiableFloat
Author:
杨博 (Yang Bo) <[email protected]>
- Alphabetic
- By Inheritance
- DifferentiableFloat
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Type Members
- final class FloatLayerOps [Input <: Batch] extends AnyRef
- implicit final class NativeFloatOps extends AnyRef
- trait OptimizerFactory extends AnyRef
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
implicit
def
Float*Float[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathMethods.*.Case that accepts two Float Layers for the polymorphic function Poly.MathMethods.*

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathMethods.*(inputFloatLayer, anotherFloatLayer)
    }
Example: -
implicit
def
Float+Float[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathMethods.+.Case that accepts two Float Layers for the polymorphic function Poly.MathMethods.+

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathMethods.+(inputFloatLayer, anotherFloatLayer)
    }
Example: -
implicit
def
Float-Float[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathMethods.-.Case that accepts two Float Layers for the polymorphic function Poly.MathMethods.-

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathMethods.-(inputFloatLayer, anotherFloatLayer)
    }
Example: -
implicit
def
Float/Float[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathMethods./.Case that accepts two Float Layers for the polymorphic function Poly.MathMethods./

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathMethods./(inputFloatLayer, anotherFloatLayer)
    }
Example: -
implicit
def
abs(Float)[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathFunctions.abs.Case that accepts a Float Layer for the polymorphic function Poly.MathFunctions.abs

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.abs(inputFloatLayer)
    }
Example: -
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
implicit
def
exp(Float)[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathFunctions.exp.Case that accepts a Float Layer for the polymorphic function Poly.MathFunctions.exp

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.exp(inputFloatLayer)
    }
Example: -
def
finalize(): Unit
- Attributes
- protected[java.lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
- implicit def floatToLiteral: Aux[Float, Float, Float]
- implicit def floatTrainable: Trainable[Float, Float]
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
implicit
def
log(Float)[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathFunctions.log.Case that accepts a Float Layer for the polymorphic function Poly.MathFunctions.log

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.log(inputFloatLayer)
    }
Example: -
implicit
def
max(Float,Float)[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathFunctions.max.Case that accepts two Float Layers for the polymorphic function Poly.MathFunctions.max

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.max(inputFloatLayer, anotherFloatLayer)
    }
Example: -
implicit
def
min(Float,Float)[Input <: Batch]: Aux[Aux[Input, Batch], Aux[Input, Batch], Aux[Input, Batch]]
Returns a Poly.MathFunctions.min.Case that accepts two Float Layers for the polymorphic function Poly.MathFunctions.min

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
    import com.thoughtworks.deeplearning.Symbolic

    def myNetwork(implicit inputFloatLayer: Float @Symbolic)(anotherFloatLayer: Float @Symbolic) = {
      Poly.MathFunctions.min(inputFloatLayer, anotherFloatLayer)
    }
Example: -
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
implicit
def
toFloatLayerOps[From, Input <: Batch](from: From)(implicit toLayer: OfPlaceholder[From, Input, FloatPlaceholder]): FloatLayerOps[Input]
A helper that contains common boilerplate code for all Float layers.

Example:

    import com.thoughtworks.deeplearning.DifferentiableFloat._
Example: -
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
- object Layers
- object OptimizerFactory
-
object
Optimizers
Optimizers of Float