Packages

  • package root
    Definition Classes
    root
  • package io
    Definition Classes
    root
  • package tarantool
    Definition Classes
    io
  • package spark
    Definition Classes
    tarantool
  • package connector

    Tarantool connector for Apache Spark.

    Call tarantoolSpace method on the SparkContext object to create a TarantoolRDD exposing Tarantool space as a Spark RDD.

    Example:

    Execute the following on a Cartridge router node (the tarantool/crud module must be installed):

    local crud = require('crud')
    
    crud.insert('test_space', {1, nil, 'a1', 'Don Quixote', 'Miguel de Cervantes', 1605})
    crud.insert('test_space', {2, nil, 'a2', 'The Great Gatsby', 'F. Scott Fitzgerald', 1925})
    crud.insert('test_space', {3, nil, 'a3', 'War and Peace', 'Leo Tolstoy', 1869})

    Write the following in your Scala client code:

    import io.tarantool.spark.connector._
    
    val sparkMasterHost = "127.0.0.1"
    val tarantoolRouterAddress = "127.0.0.1:3301"
    val space = "test_space"
    
    // Populate the Spark config with the address of a Cartridge router node and credentials:
    val conf = new SparkConf(true)
    conf.set ("tarantool.username", "admin")
    conf.set ("tarantool.password", "testapp-cluster-cookie")
    conf.set ("tarantool.hosts", tarantoolRouterAddress)
    
    // Connect to the Spark cluster:
    val sc = new SparkContext("spark://" + sparkMasterHost + ":7077", "example", conf)
    
    // Read the space and print its contents:
    val rdd = sc.tarantoolSpace(space)
    rdd.collect().foreach(println)
    
    sc.stop()
    Definition Classes
    spark
  • package rdd
    Definition Classes
    connector
  • package converter
    Definition Classes
    rdd
  • TarantoolBaseRDD
  • TarantoolJavaRDD
  • TarantoolReadRDD
  • TarantoolWriteRDD

class TarantoolReadRDD[R] extends RDD[R] with TarantoolBaseRDD

Tarantool RDD implementation for read operations

R

target POJO type

Linear Supertypes
TarantoolBaseRDD, RDD[R], org.apache.spark.internal.Logging, Serializable, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. TarantoolReadRDD
  2. TarantoolBaseRDD
  3. RDD
  4. Logging
  5. Serializable
  6. AnyRef
  7. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. Protected

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. def ++(other: RDD[R]): RDD[R]
    Definition Classes
    RDD
  4. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  5. def aggregate[U](zeroValue: U)(seqOp: (U, R) => U, combOp: (U, U) => U)(implicit arg0: ClassTag[U]): U
    Definition Classes
    RDD
  6. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  7. def barrier(): RDDBarrier[R]
    Definition Classes
    RDD
    Annotations
    @Experimental() @Since("2.4.0")
  8. def cache(): TarantoolReadRDD.this.type
    Definition Classes
    RDD
  9. def cartesian[U](other: RDD[U])(implicit arg0: ClassTag[U]): RDD[(R, U)]
    Definition Classes
    RDD
  10. def checkpoint(): Unit
    Definition Classes
    RDD
  11. def cleanShuffleDependencies(blocking: Boolean): Unit
    Definition Classes
    RDD
    Annotations
    @DeveloperApi() @Since("3.1.0")
  12. def clearDependencies(): Unit
    Attributes
    protected
    Definition Classes
    RDD
  13. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @native()
  14. def coalesce(numPartitions: Int, shuffle: Boolean, partitionCoalescer: Option[PartitionCoalescer])(implicit ord: Ordering[R]): RDD[R]
    Definition Classes
    RDD
  15. def collect[U](f: PartialFunction[R, U])(implicit arg0: ClassTag[U]): RDD[U]
    Definition Classes
    RDD
  16. def collect(): Array[R]
    Definition Classes
    RDD
  17. def compute(split: Partition, context: TaskContext): Iterator[R]
    Definition Classes
    TarantoolReadRDD → RDD
  18. val conditions: Conditions
  19. def context: SparkContext
    Definition Classes
    RDD
  20. def count(): Long
    Definition Classes
    RDD
  21. def countApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble]
    Definition Classes
    RDD
  22. def countApproxDistinct(relativeSD: Double): Long
    Definition Classes
    RDD
  23. def countApproxDistinct(p: Int, sp: Int): Long
    Definition Classes
    RDD
  24. def countByValue()(implicit ord: Ordering[R]): Map[R, Long]
    Definition Classes
    RDD
  25. def countByValueApprox(timeout: Long, confidence: Double)(implicit ord: Ordering[R]): PartialResult[Map[R, BoundedDouble]]
    Definition Classes
    RDD
  26. final def dependencies: Seq[Dependency[_]]
    Definition Classes
    RDD
  27. def distinct(): RDD[R]
    Definition Classes
    RDD
  28. def distinct(numPartitions: Int)(implicit ord: Ordering[R]): RDD[R]
    Definition Classes
    RDD
  29. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  30. def equals(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef → Any
  31. def filter(f: (R) => Boolean): RDD[R]
    Definition Classes
    RDD
  32. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable])
  33. def first(): R
    Definition Classes
    RDD
  34. def firstParent[U](implicit arg0: ClassTag[U]): RDD[U]
    Attributes
    protected[org.apache.spark]
    Definition Classes
    RDD
  35. def flatMap[U](f: (R) => TraversableOnce[U])(implicit arg0: ClassTag[U]): RDD[U]
    Definition Classes
    RDD
  36. def fold(zeroValue: R)(op: (R, R) => R): R
    Definition Classes
    RDD
  37. def foreach(f: (R) => Unit): Unit
    Definition Classes
    RDD
  38. def foreachPartition(f: (Iterator[R]) => Unit): Unit
    Definition Classes
    RDD
  39. def getCheckpointFile: Option[String]
    Definition Classes
    RDD
  40. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  41. def getDependencies: Seq[Dependency[_]]
    Attributes
    protected
    Definition Classes
    RDD
  42. final def getNumPartitions: Int
    Definition Classes
    RDD
    Annotations
    @Since("1.6.0")
  43. def getOutputDeterministicLevel: org.apache.spark.rdd.DeterministicLevel.Value
    Attributes
    protected
    Definition Classes
    RDD
    Annotations
    @DeveloperApi()
  44. def getPartitions: Array[Partition]
    Attributes
    protected
    Definition Classes
    TarantoolReadRDD → RDD
  45. def getPreferredLocations(split: Partition): Seq[String]
    Attributes
    protected
    Definition Classes
    RDD
  46. def getResourceProfile(): ResourceProfile
    Definition Classes
    RDD
    Annotations
    @Experimental() @Since("3.1.0")
  47. def getStorageLevel: StorageLevel
    Definition Classes
    RDD
  48. def glom(): RDD[Array[R]]
    Definition Classes
    RDD
  49. def groupBy[K](f: (R) => K, p: Partitioner)(implicit kt: ClassTag[K], ord: Ordering[K]): RDD[(K, Iterable[R])]
    Definition Classes
    RDD
  50. def groupBy[K](f: (R) => K, numPartitions: Int)(implicit kt: ClassTag[K]): RDD[(K, Iterable[R])]
    Definition Classes
    RDD
  51. def groupBy[K](f: (R) => K)(implicit kt: ClassTag[K]): RDD[(K, Iterable[R])]
    Definition Classes
    RDD
  52. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  53. val id: Int
    Definition Classes
    RDD
  54. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  55. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  56. def intersection(other: RDD[R], numPartitions: Int): RDD[R]
    Definition Classes
    RDD
  57. def intersection(other: RDD[R], partitioner: Partitioner)(implicit ord: Ordering[R]): RDD[R]
    Definition Classes
    RDD
  58. def intersection(other: RDD[R]): RDD[R]
    Definition Classes
    RDD
  59. lazy val isBarrier_: Boolean
    Attributes
    protected
    Definition Classes
    RDD
    Annotations
    @transient()
  60. def isCheckpointed: Boolean
    Definition Classes
    RDD
  61. def isEmpty(): Boolean
    Definition Classes
    RDD
  62. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  63. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  64. final def iterator(split: Partition, context: TaskContext): Iterator[R]
    Definition Classes
    RDD
  65. def keyBy[K](f: (R) => K): RDD[(K, R)]
    Definition Classes
    RDD
  66. def localCheckpoint(): TarantoolReadRDD.this.type
    Definition Classes
    RDD
  67. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  68. def logDebug(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  69. def logDebug(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  70. def logError(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  71. def logError(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  72. def logInfo(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  73. def logInfo(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  74. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  75. def logTrace(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  76. def logTrace(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  77. def logWarning(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  78. def logWarning(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  79. def map[U](f: (R) => U)(implicit arg0: ClassTag[U]): RDD[U]
    Definition Classes
    RDD
  80. def mapPartitions[U](f: (Iterator[R]) => Iterator[U], preservesPartitioning: Boolean)(implicit arg0: ClassTag[U]): RDD[U]
    Definition Classes
    RDD
  81. def mapPartitionsWithIndex[U](f: (Int, Iterator[R]) => Iterator[U], preservesPartitioning: Boolean)(implicit arg0: ClassTag[U]): RDD[U]
    Definition Classes
    RDD
  82. def max()(implicit ord: Ordering[R]): R
    Definition Classes
    RDD
  83. implicit val messagePackMapper: MessagePackMapper
  84. def min()(implicit ord: Ordering[R]): R
    Definition Classes
    RDD
  85. var name: String
    Definition Classes
    RDD
  86. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  87. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  88. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  89. def parent[U](j: Int)(implicit arg0: ClassTag[U]): RDD[U]
    Attributes
    protected[org.apache.spark]
    Definition Classes
    RDD
  90. val partitioner: Option[Partitioner]
    Definition Classes
    RDD
  91. final def partitions: Array[Partition]
    Definition Classes
    RDD
  92. def persist(): TarantoolReadRDD.this.type
    Definition Classes
    RDD
  93. def persist(newLevel: StorageLevel): TarantoolReadRDD.this.type
    Definition Classes
    RDD
  94. def pipe(command: Seq[String], env: Map[String, String], printPipeContext: ((String) => Unit) => Unit, printRDDElement: (R, (String) => Unit) => Unit, separateWorkingDir: Boolean, bufferSize: Int, encoding: String): RDD[String]
    Definition Classes
    RDD
  95. def pipe(command: String, env: Map[String, String]): RDD[String]
    Definition Classes
    RDD
  96. def pipe(command: String): RDD[String]
    Definition Classes
    RDD
  97. final def preferredLocations(split: Partition): Seq[String]
    Definition Classes
    RDD
  98. def randomSplit(weights: Array[Double], seed: Long): Array[RDD[R]]
    Definition Classes
    RDD
  99. val readConfig: ReadConfig
  100. def reduce(f: (R, R) => R): R
    Definition Classes
    RDD
  101. def repartition(numPartitions: Int)(implicit ord: Ordering[R]): RDD[R]
    Definition Classes
    RDD
  102. def sample(withReplacement: Boolean, fraction: Double, seed: Long): RDD[R]
    Definition Classes
    RDD
  103. def saveAsObjectFile(path: String): Unit
    Definition Classes
    RDD
  104. def saveAsTextFile(path: String, codec: Class[_ <: CompressionCodec]): Unit
    Definition Classes
    RDD
  105. def saveAsTextFile(path: String): Unit
    Definition Classes
    RDD
  106. val sc: SparkContext
  107. def setName(_name: String): TarantoolReadRDD.this.type
    Definition Classes
    RDD
  108. def sortBy[K](f: (R) => K, ascending: Boolean, numPartitions: Int)(implicit ord: Ordering[K], ctag: ClassTag[K]): RDD[R]
    Definition Classes
    RDD
  109. val space: String

    Tarantool space name.

    Definition Classes
    TarantoolReadRDD → TarantoolBaseRDD
  110. def sparkContext: SparkContext
    Definition Classes
    RDD
  111. def subtract(other: RDD[R], p: Partitioner)(implicit ord: Ordering[R]): RDD[R]
    Definition Classes
    RDD
  112. def subtract(other: RDD[R], numPartitions: Int): RDD[R]
    Definition Classes
    RDD
  113. def subtract(other: RDD[R]): RDD[R]
    Definition Classes
    RDD
  114. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  115. def take(num: Int): Array[R]
    Definition Classes
    RDD
  116. def takeOrdered(num: Int)(implicit ord: Ordering[R]): Array[R]
    Definition Classes
    RDD
  117. def takeSample(withReplacement: Boolean, num: Int, seed: Long): Array[R]
    Definition Classes
    RDD
  118. def toDebugString: String
    Definition Classes
    RDD
  119. def toJavaRDD(): JavaRDD[R]
    Definition Classes
    RDD
  120. def toLocalIterator: Iterator[R]
    Definition Classes
    RDD
  121. def toString(): String
    Definition Classes
    RDD → AnyRef → Any
  122. def top(num: Int)(implicit ord: Ordering[R]): Array[R]
    Definition Classes
    RDD
  123. def treeAggregate[U](zeroValue: U, seqOp: (U, R) => U, combOp: (U, U) => U, depth: Int, finalAggregateOnExecutor: Boolean)(implicit arg0: ClassTag[U]): U
    Definition Classes
    RDD
  124. def treeAggregate[U](zeroValue: U)(seqOp: (U, R) => U, combOp: (U, U) => U, depth: Int)(implicit arg0: ClassTag[U]): U
    Definition Classes
    RDD
  125. def treeReduce(f: (R, R) => R, depth: Int): R
    Definition Classes
    RDD
  126. val tupleConverter: TupleConverter[R]
  127. def union(other: RDD[R]): RDD[R]
    Definition Classes
    RDD
  128. def unpersist(blocking: Boolean): TarantoolReadRDD.this.type
    Definition Classes
    RDD
  129. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  130. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  131. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  132. def withResources(rp: ResourceProfile): TarantoolReadRDD.this.type
    Definition Classes
    RDD
    Annotations
    @Experimental() @Since("3.1.0")
  133. def zip[U](other: RDD[U])(implicit arg0: ClassTag[U]): RDD[(R, U)]
    Definition Classes
    RDD
  134. def zipPartitions[B, C, D, V](rdd2: RDD[B], rdd3: RDD[C], rdd4: RDD[D])(f: (Iterator[R], Iterator[B], Iterator[C], Iterator[D]) => Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[D], arg3: ClassTag[V]): RDD[V]
    Definition Classes
    RDD
  135. def zipPartitions[B, C, D, V](rdd2: RDD[B], rdd3: RDD[C], rdd4: RDD[D], preservesPartitioning: Boolean)(f: (Iterator[R], Iterator[B], Iterator[C], Iterator[D]) => Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[D], arg3: ClassTag[V]): RDD[V]
    Definition Classes
    RDD
  136. def zipPartitions[B, C, V](rdd2: RDD[B], rdd3: RDD[C])(f: (Iterator[R], Iterator[B], Iterator[C]) => Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[V]): RDD[V]
    Definition Classes
    RDD
  137. def zipPartitions[B, C, V](rdd2: RDD[B], rdd3: RDD[C], preservesPartitioning: Boolean)(f: (Iterator[R], Iterator[B], Iterator[C]) => Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[C], arg2: ClassTag[V]): RDD[V]
    Definition Classes
    RDD
  138. def zipPartitions[B, V](rdd2: RDD[B])(f: (Iterator[R], Iterator[B]) => Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[V]): RDD[V]
    Definition Classes
    RDD
  139. def zipPartitions[B, V](rdd2: RDD[B], preservesPartitioning: Boolean)(f: (Iterator[R], Iterator[B]) => Iterator[V])(implicit arg0: ClassTag[B], arg1: ClassTag[V]): RDD[V]
    Definition Classes
    RDD
  140. def zipWithIndex(): RDD[(R, Long)]
    Definition Classes
    RDD
  141. def zipWithUniqueId(): RDD[(R, Long)]
    Definition Classes
    RDD

Inherited from TarantoolBaseRDD

Inherited from RDD[R]

Inherited from org.apache.spark.internal.Logging

Inherited from Serializable

Inherited from AnyRef

Inherited from Any

Ungrouped