trait Implementation extends AnyRef
Type Members
- abstract type Cache
- abstract type Dataset
- abstract type Row
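
These abstract types are what a concrete implementation pins down; every member below is written against them. A minimal, purely illustrative sketch of one plausible choice of representations (not necessarily what the library's concrete implementations actually use):

    // Illustrative assumption only: one way a concrete subtype might fix the
    // abstract type members. Real implementations may choose other encodings.
    trait ArrayBackedShape {
      type Row     = Array[Int]   // a single dataset row
      type Cache   = Array[Row]   // the cache from which rows are computed (cf. calcDatasetRow)
      type Dataset = Array[Row]   // the full dataset (DAG), indexed by row (cf. extractDatasetRow)
    }
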
Abstract Value Members
- abstract def calcDatasetRow(cache: Cache, i: Int): Row
- abstract def dumpDatasetBytes(os: OutputStream, dataset: Dataset): Unit
- abstract def extractDatasetRow(dataset: Dataset, i: Int): Row
- abstract def hashimoto(seedBytes: Array[Byte], fullSize: Long, datasetAccessor: (Int) ⇒ Row): Hashimoto
- abstract def mkCache(cacheSize: Long, seed: Array[Byte]): Cache
- abstract def readDatasetBytes(is: InputStream, mbInitSize: Option[Long]): Dataset
- implicit abstract val rowClassTag: ClassTag[Row]
- abstract def toDataset(array: Array[Row]): Dataset
- abstract def writeRow(row: Row): Array[Byte]
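
Taken together, these members describe both ways of evaluating the proof-of-work function: build a Cache from a seed with mkCache, then either materialize rows into a full Dataset (toDataset, with dumpDatasetBytes / readDatasetBytes for serialization) or recompute rows on demand with calcDatasetRow and feed them to hashimoto. A hedged sketch of the on-demand ("light") path; the object and parameter names are illustrative assumptions, and imports from the enclosing package are omitted:

    object LightEvaluationSketch {
      // Evaluate hashimoto without a precomputed full Dataset: each Row the
      // algorithm requests is recomputed from the Cache via calcDatasetRow.
      def lightHashimoto(impl: Implementation)(
        hashimotoSeed: Array[Byte],  // the seedBytes argument to hashimoto
        epochSeed:     Array[Byte],  // the seed from which the cache is derived
        cacheSize:     Long,
        fullSize:      Long
      ): Hashimoto = {
        val cache = impl.mkCache(cacheSize, epochSeed)
        impl.hashimoto(hashimotoSeed, fullSize, i => impl.calcDatasetRow(cache, i))
      }
    }
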
Concrete Value Members
- final def !=(arg0: AnyRef): Boolean
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: AnyRef): Boolean
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def blocksRemainingInEpoch(blockNumber: Long): Long
- def cacheDataset(seed: Array[Byte], dataset: Dataset): Failable[Unit]
- def calcDataset(cache: Cache, fullSize: Long)(implicit mf: Factory): Dataset
- def calcDatasetForBlock(blockNumber: Long)(implicit mf: Factory): Dataset
- def calcDatasetForEpoch(epochNumber: Long)(implicit mf: Factory): Dataset
- final def calcDatasetParallel(cache: Cache, fullSize: Long)(mf: Factory): Dataset
- final def calcDatasetSequential(cache: Cache, fullSize: Long)(mf: Factory): Dataset
- def clone(): AnyRef
- final def datasetLen(fullSize: Long): Int
- def doCalcDataset(cache: Cache, fullSize: Long)(mf: Factory): Dataset
- def epochFromBlock(blockNumber: Long): Long
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def getCacheSizeForBlock(blockNumber: Long): Long
- def getCacheSizeForEpoch(epochNumber: Long): Long
- final def getClass(): Class[_]
- def getFullSizeForBlock(blockNumber: Long): Long
- def getFullSizeForEpoch(epochNumber: Long): Long
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- val isParallel: Boolean
- def loadDagFile(seed: Array[Byte]): Failable[Dataset]
- def mkCacheForBlock(blockNumber: Long): Cache
- def mkCacheForEpoch(epochNumber: Long): Cache
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def precomputeCacheDatasetForBlockNumber(blockNumber: Long)(implicit mf: Factory): Failable[Unit]
- def precomputeCacheDatasetForEpochNumber(epochNumber: Long)(implicit mf: Factory): Failable[Unit]
- def readDagFile(is: InputStream, mbFileLength: Option[Long]): Dataset
- def requireValidInt(l: Long): Int
- def requireValidLong(bi: BigInt): Long
- def streamDagFileForBlockNumber(blockNumber: Long, file: Option[File])(implicit mf: Factory): Failable[Unit]
- def streamDagFileForBlockNumber(blockNumber: Long)(implicit mf: Factory): Failable[Unit]
- def streamDagFileForEpochNumber(epochNumber: Long, mbSeed: Option[Array[Byte]], mbCache: Option[Cache], mbFile: Option[File])(implicit mf: Factory): Failable[Unit]
- def streamDagFileForEpochNumber(epochNumber: Long, mbFile: Option[File])(implicit mf: Factory): Failable[Unit]
- def streamDagFileForEpochNumber(epochNumber: Long)(implicit mf: Factory): Failable[Unit]
- def streamDatasetAsDagFile(os: OutputStream, cache: Cache, fullSize: Long)(implicit mf: Factory): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- def truncatedHeaderHash(header: Header): Keccak256
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- final def wait(): Unit
- def writeDagFile(os: OutputStream, dataset: Dataset): Unit
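
Many of the concrete members are conveniences over the abstract primitives: epoch arithmetic (epochFromBlock, blocksRemainingInEpoch), per-epoch sizing (getCacheSizeForEpoch, getFullSizeForEpoch), and DAG generation and persistence (the streamDagFile* and *DagFile members). A hedged usage sketch; the object and parameter names are illustrative assumptions, the Factory must be supplied by the caller, and imports from the enclosing package are omitted:

    import java.io.File

    object DagPrecomputeSketch {
      // Map a block to its epoch, report that epoch's cache and full-dataset sizes,
      // then generate the epoch's DAG and stream it to the given file.
      def precomputeDagFor(impl: Implementation, blockNumber: Long, dagFile: File)(implicit mf: Factory): Failable[Unit] = {
        val epoch = impl.epochFromBlock(blockNumber)
        println(
          s"Block $blockNumber falls in epoch $epoch " +
          s"(cache size ${impl.getCacheSizeForEpoch(epoch)}, full dataset size ${impl.getFullSizeForEpoch(epoch)})."
        )
        impl.streamDagFileForBlockNumber(blockNumber, Some(dagFile))  // failures surface via Failable
      }
    }
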
Deprecated Value Members
- def finalize(): Unit
Inherited from AnyRef
Inherited from Any