class SparkContextFunctions extends Serializable
Spark API for Tarantool. Provides Tarantool-specific methods on SparkContext.
Linear Supertypes
Serializable, AnyRef, Any
Instance Constructors
- new SparkContextFunctions(sc: SparkContext)
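You normally do not construct this class yourself: importing io.tarantool.spark._ enriches an existing SparkContext with these methods through an implicit conversion (see tarantoolSpace below). A minimal sketch of both entry points, assuming an already-configured SparkContext named sc:

import org.apache.spark.SparkContext
import io.tarantool.spark._ // implicit enrichment: Tarantool methods appear on SparkContext

val sc: SparkContext = ??? // an existing, configured context

// usual route, via the implicit conversion:
// sc.tarantoolSpace(...)

// equivalent explicit construction of the wrapper:
val functions = new SparkContextFunctions(sc)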
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- val sc: SparkContext
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def tarantoolSpace[R](space: String, conditions: Conditions)(implicit ct: ClassTag[R], sparkContext: SparkContext = sc, readConfig: ReadConfig = ReadConfig(space).withConditions(conditions), tupleConverterFactory: TupleConverterFactory[R]): TarantoolReadRDD[R]
Load data from a Tarantool space as a
TarantoolRDD
, filtering it with conditions.

This method is made available on SparkContext by importing io.tarantool.spark._
Example:
local crud = require('crud')

crud.insert('test_space', {1, nil, 'a1', 'Don Quixote', 'Miguel de Cervantes', 1605})
crud.insert('test_space', {2, nil, 'a2', 'The Great Gatsby', 'F. Scott Fitzgerald', 1925})
crud.insert('test_space', {3, nil, 'a3', 'War and Peace', 'Leo Tolstoy', 1869})
...

val rdd = sc.tarantoolSpace("test_space", Conditions.indexGreaterThan("id", Collections.singletonList(1)))
rdd.first().getInteger("id")    // 2 -- indexGreaterThan is strict, so id = 1 is excluded
rdd.first().getString("author") // "F. Scott Fitzgerald"
- space
space name
- conditions
filtering conditions
- returns
TarantoolReadRDD
with tuples from the space
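Putting it together, a self-contained read job might look like the sketch below. The tarantool.hosts configuration key, the local master setting, and the driver import path for Conditions are assumptions for illustration, not taken from this page:

import java.util.Collections

import org.apache.spark.{SparkConf, SparkContext}
import io.tarantool.driver.api.conditions.Conditions // driver import path assumed
import io.tarantool.spark._ // makes tarantoolSpace available on SparkContext

object TarantoolReadExample {
  def main(args: Array[String]): Unit = {
    // "tarantool.hosts" is an assumed connector setting pointing at the cluster router
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("tarantool-read-example")
      .set("tarantool.hosts", "127.0.0.1:3301")
    val sc = new SparkContext(conf)

    // Select tuples whose "id" index value is strictly greater than 1
    val rdd = sc.tarantoolSpace(
      "test_space",
      Conditions.indexGreaterThan("id", Collections.singletonList(1)))

    // Elements are Tarantool tuples; fields are read by name,
    // as in the example above
    rdd.collect().foreach(t => println(t.getString("author")))

    sc.stop()
  }
}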
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()