object STen extends Serializable
Companion object of lamp.STen
- The STen.fromDoubleArray, STen.fromLongArray and STen.fromFloatArray factory methods copy data from JVM arrays into off-heap memory and create an STen instance (see the sketch below)
- There are similar factories which take Saddle data structures (Mat, Vec)
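A minimal sketch of the array- and Saddle-based factories, assuming the usual lamp.Scope.root idiom and that CPU, DoublePrecision and the org.saddle.Mat constructor are available as shown; the values and shapes are illustrative:

```scala
import lamp._       // Scope, STen, CPU, DoublePrecision
import org.saddle._ // Mat, for the Saddle-based factories

Scope.root { implicit scope =>
  // copy a JVM array into off-heap memory as a 2x2 double tensor on the CPU
  val fromArray =
    STen.fromDoubleArray(Array(1d, 2d, 3d, 4d), List(2L, 2L), CPU, DoublePrecision)

  // the Saddle-based factory does the same from a Mat[Double]
  val fromSaddle =
    STen.fromMat(Mat(2, 2, Array(1d, 2d, 3d, 4d)), CPU, DoublePrecision)
}
```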
Type Members
- implicit class OwnedSyntax extends AnyRef
Value Members
- def addOut(out: STen, self: STen, other: STen, alpha: Double): Unit
- def addcmulOut(out: STen, self: STen, tensor1: STen, tensor2: STen, alpha: Double): Unit
- def addmmOut(out: STen, self: STen, mat1: STen, mat2: STen, beta: Double, alpha: Double): Unit
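The *Out variants write their result into a preallocated out tensor instead of allocating a new one, presumably following the usual ATen convention (for addOut, out = self + alpha * other). A minimal sketch; the tensor names and shapes are illustrative:

```scala
import lamp._

Scope.root { implicit scope =>
  val a   = STen.ones(List(2L, 2L))
  val b   = STen.rand(List(2L, 2L))
  val out = STen.zerosLike(a)

  // out receives a + 1.0 * b, written into the preallocated tensor
  STen.addOut(out, a, b, 1.0)

  // out receives a * b (elementwise), reusing the same buffer
  STen.mulOut(out, a, b)
}
```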
- def apply[S](vs: Double*)(implicit arg0: Sc[S]): STen
Returns a 1D tensor containing the given values.
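For example (a minimal sketch):

```scala
import lamp._

Scope.root { implicit scope =>
  // 1D tensor holding three double values
  val t = STen(1d, 2d, 3d)
}
```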
- def arange[S](start: Double, end: Double, step: Double, tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def bmmOut(out: STen, self: STen, other: STen): Unit
- def cat[S](tensors: Seq[STen], dim: Long)(implicit arg0: Sc[S]): STen
- def catOut(out: STen, tensors: Seq[STen], dim: Int): Unit
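A minimal sketch of concatenating along an existing dimension with cat, and stacking along a new leading dimension with stack (documented further down); shapes are illustrative:

```scala
import lamp._

Scope.root { implicit scope =>
  val a = STen.ones(List(2L, 3L))
  val b = STen.zeros(List(2L, 3L))

  // concatenate along dim 0: result is 4 x 3
  val c = STen.cat(List(a, b), dim = 0L)

  // stack along a new leading dimension: result is 2 x 2 x 3
  val s = STen.stack(List(a, b), dim = 0L)
}
```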
- val dOptions: STenOptions
A tensor options value specifying the CPU device and double precision.
- def divOut(out: STen, self: STen, other: STen): Unit
- def eye[S](n: Int, m: Int, tensorOptions: STenOptions)(implicit arg0: Sc[S]): STen
- def eye[S](n: Int, tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- val fOptions: STenOptions
A tensor options value specifying the CPU device and float (single) precision.
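The preset dOptions, fOptions and lOptions values (all defined on this object) can be passed to any factory that takes an STenOptions argument; a minimal sketch with illustrative shapes:

```scala
import lamp._

Scope.root { implicit scope =>
  val d = STen.ones(List(2L, 2L))                         // defaults to STen.dOptions (CPU, double)
  val f = STen.zeros(List(2L, 2L), STen.fOptions)         // CPU, float
  val l = STen.randint(10L, List(2L, 2L), STen.lOptions)  // CPU, long
}
```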
- def free(value: Tensor): STen
Wraps a tensor without registering it to any scope. Memory may leak.
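A minimal sketch of the two ways to wrap a raw tensor, assuming the underlying binding type is aten.Tensor; rawTensor is a placeholder for a tensor obtained elsewhere from the ATen bindings:

```scala
import lamp._
import aten.Tensor

// placeholder: a raw tensor obtained elsewhere (e.g. from a native ATen call)
def rawTensor: Tensor = ???

Scope.root { implicit scope =>
  // owned: registered to the scope, released when the scope closes
  val managed: STen = STen.owned(rawTensor)
}

// free: not registered to any scope; the caller must release it, otherwise memory leaks
val unmanaged: STen = STen.free(rawTensor)
```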
- def fromDoubleArray[S](ar: Array[Double], dim: Seq[Long], device: Device, precision: FloatingPointPrecision)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromFile[S](path: String, offset: Long, length: Long, scalarTypeByte: Byte, pin: Boolean)(implicit arg0: Sc[S]): STen
Create a tensor directly from a file. Memory maps the file into host memory. Data is not passed through the JVM. The returned tensor is always on the CPU device.
- path: file path
- offset: byte offset into the file. Must be page aligned (usually a multiple of 4096)
- length: byte length of the data
- scalarTypeByte: scalar type (long=4, half=5, float=6, double=7)
- pin: if true the mapped segment will be page locked with mlock(2)
- returns: tensor on CPU
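For example, a minimal sketch mapping 100 doubles from the start of a file; the file name and sizes are illustrative:

```scala
import lamp._

Scope.root { implicit scope =>
  // memory-map 100 doubles (scalar type 7) starting at byte 0, without pinning
  val t = STen.fromFile(
    path = "data.bin",
    offset = 0L,
    length = 100L * 8L,
    scalarTypeByte = 7.toByte,
    pin = false
  )
}
```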
- def fromFloatArray[S](ar: Array[Float], dim: Seq[Long], device: Device)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromFloatMat[S](m: Mat[Float], device: Device)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromLongArray[S](ar: Array[Long], dim: Seq[Long], device: Device)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromLongMat[S](m: Mat[Long], cuda: Boolean = false)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape, on the CUDA device if cuda is true, otherwise on the CPU.
- def fromLongMat[S](m: Mat[Long], device: Device)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromLongVec[S](m: Vec[Long], cuda: Boolean = false)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape, on the CUDA device if cuda is true, otherwise on the CPU.
- def fromLongVec[S](m: Vec[Long], device: Device)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromMat[S](m: Mat[Double], device: Device, precision: FloatingPointPrecision)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromMat[S](m: Mat[Double], cuda: Boolean = false)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape, on the CUDA device if cuda is true, otherwise on the CPU.
- def fromVec[S](m: Vec[Double], device: Device, precision: FloatingPointPrecision)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape on the given device.
- def fromVec[S](m: Vec[Double], cuda: Boolean = false)(implicit arg0: Sc[S]): STen
Returns a tensor with the given content and shape, on the CUDA device if cuda is true, otherwise on the CPU.
- def indexSelectOut(out: STen, self: STen, dim: Int, index: STen): Unit
- def l1_loss_backward[S](gradOutput: STen, self: STen, target: STen, reduction: Long)(implicit arg0: Sc[S]): STen
- val lOptions: STenOptions
A tensor options value specifying the CPU device and 64-bit integer (long) values.
- def linspace[S](start: Double, end: Double, steps: Long, tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def meanOut(out: STen, self: STen, dim: Seq[Int], keepDim: Boolean): Unit
- def mmOut(out: STen, self: STen, other: STen): Unit
- def mse_loss[S](self: STen, target: STen, reduction: Long)(implicit arg0: Sc[S]): STen
- def mse_loss_backward[S](gradOutput: STen, self: STen, target: STen, reduction: Long)(implicit arg0: Sc[S]): STen
- def mulOut(out: STen, self: STen, other: STen): Unit
- def normal[S](mean: Double, std: Double, size: Seq[Long], options: STenOptions)(implicit arg0: Sc[S]): STen
- def ones[S](size: Seq[Long], tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def onesLike[S](tensor: STen)(implicit arg0: Sc[S]): STen
- def onesLike[S](tensor: Tensor)(implicit arg0: Sc[S]): STen
- def owned(value: Tensor)(implicit scope: Scope): STen
Wraps an aten.Tensor and registers it to the given scope.
- def powOut(out: STen, self: STen, other: STen): Unit
- def powOut(out: STen, self: STen, other: Double): Unit
- def rand[S](size: Seq[Long], tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def randint[S](low: Long, high: Long, size: Seq[Long], tensorOptions: STenOptions)(implicit arg0: Sc[S]): STen
- def randint[S](high: Long, size: Seq[Long], tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def randn[S](size: Seq[Long], tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def randperm[S](n: Long, tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
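A minimal sketch of the random factories (shapes are illustrative; all but normal default to STen.dOptions):

```scala
import lamp._

Scope.root { implicit scope =>
  val u = STen.rand(List(3L, 3L))          // uniform samples on [0, 1)
  val g = STen.randn(List(3L, 3L))         // standard normal samples
  val i = STen.randint(10L, List(3L, 3L))  // integers in [0, 10)
  val p = STen.randperm(10L)               // a random permutation of 0 until 10
  val n = STen.normal(1.0, 2.0, List(3L), STen.dOptions) // mean 1, std 2
}
```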
- def remainderOut(out: STen, self: STen, other: Double): Unit
- def remainderOut(out: STen, self: STen, other: STen): Unit
- def scalarDouble[S](value: Double, options: STenOptions)(implicit arg0: Sc[S]): STen
- def scalarLong(value: Long, options: STenOptions)(implicit scope: Scope): STen
- def softplus_backward[S](gradOutput: STen, self: STen, beta: Double, threshold: Double, output: STen)(implicit arg0: Sc[S]): STen
- def sparse_coo[S](indices: STen, values: STen, dim: Seq[Long], tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def stack[S](tensors: Seq[STen], dim: Long)(implicit arg0: Sc[S]): STen
- def subOut(out: STen, self: STen, other: STen, alpha: Double): Unit
- def sumOut(out: STen, self: STen, dim: Seq[Int], keepDim: Boolean): Unit
- def tanh_backward[S](gradOutput: STen, output: STen)(implicit arg0: Sc[S]): STen
- def tensorsFromFile[S](path: String, offset: Long, length: Long, pin: Boolean, tensors: List[(Byte, Long, Long)])(implicit arg0: Sc[S]): Vector[STen]
Create tensors directly from a file. Memory maps the file into host memory. Data is not passed through the JVM. The returned tensors are always on the CPU device.
- path: file path
- offset: byte offset into the file. Must be page aligned (usually a multiple of 4096)
- length: byte length of the data (all tensors in total)
- pin: if true the mapped segment will be page locked with mlock(2)
- tensors: list of (scalarType, byte offset, byte length) descriptors, one per tensor; each byte offset must be aligned to 8
- returns: tensors on CPU
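A minimal sketch reading two tensors from the same mapped region; the file name, offsets and lengths are illustrative (scalar type 7 = double, 6 = float):

```scala
import lamp._

Scope.root { implicit scope =>
  val ts: Vector[STen] = STen.tensorsFromFile(
    path = "data.bin",
    offset = 0L,
    length = 1200L,
    pin = false,
    tensors = List(
      (7.toByte, 0L, 800L),  // 100 doubles starting at byte 0
      (6.toByte, 800L, 400L) // 100 floats starting at byte 800
    )
  )
  val doubles = ts(0)
  val floats  = ts(1)
}
```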
- def to_dense_backward[S](gradOutput: STen, input: STen)(implicit arg0: Sc[S]): STen
- def where[S](condition: Tensor, self: STen, other: STen)(implicit arg0: Sc[S]): STen
- def where[S](condition: STen, self: STen, other: STen)(implicit arg0: Sc[S]): STen
- def zeros[S](size: Seq[Long], tensorOptions: STenOptions = STen.dOptions)(implicit arg0: Sc[S]): STen
- def zerosLike[S](tensor: STen)(implicit arg0: Sc[S]): STen
- def zerosLike[S](tensor: Tensor)(implicit arg0: Sc[S]): STen