ammonite.spark.Spark

SparkContext

class SparkContext extends org.apache.spark.SparkContext

Linear Supertypes
org.apache.spark.SparkContext, Logging, AnyRef, Any

Instance Constructors

  1. new SparkContext(sparkConf: SparkConf)
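
A minimal construction sketch, assuming the class can be instantiated directly with a SparkConf as the single listed constructor suggests, and that ammonite.spark.Spark (the package path above) is the enclosing object; the master URL and application name are placeholders.

    import org.apache.spark.SparkConf

    // Placeholder configuration; any valid master URL and app name will do.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("ammonite-spark-demo")

    // Matches the single listed constructor: new SparkContext(sparkConf: SparkConf).
    val sc = new ammonite.spark.Spark.SparkContext(sparkConf)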

Value Members
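
Usage sketches for the most commonly used members (RDD construction, shared variables, Hadoop input, and job control) follow this list.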

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def accumulable[T, R](initialValue: T, name: String)(implicit param: AccumulableParam[T, R]): Accumulable[T, R]

    Definition Classes
    SparkContext
  7. def accumulable[T, R](initialValue: T)(implicit param: AccumulableParam[T, R]): Accumulable[T, R]

    Definition Classes
    SparkContext
  8. def accumulableCollection[R, T](initialValue: R)(implicit arg0: (R) ⇒ Growable[T] with TraversableOnce[T] with Serializable, arg1: ClassTag[R]): Accumulable[R, T]

    Definition Classes
    SparkContext
  9. def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
  10. def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
  11. def addFile(path: String): Unit

    Definition Classes
    SparkContext
  12. def addJar(path: String): Unit

    Definition Classes
    SparkContext
  13. def addSparkListener(listener: SparkListener): Unit

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  14. val appName: String

    Definition Classes
    SparkContext
  15. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  16. def broadcast[T](value: T)(implicit arg0: ClassTag[T]): Broadcast[T]

    Definition Classes
    SparkContext
  17. def cancelAllJobs(): Unit

    Definition Classes
    SparkContext
  18. def cancelJobGroup(groupId: String): Unit

    Definition Classes
    SparkContext
  19. def checkpointFile[T](path: String)(implicit arg0: ClassTag[T]): RDD[T]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  20. def clearCallSite(): Unit

    Definition Classes
    SparkContext
  21. def clearJobGroup(): Unit

    Definition Classes
    SparkContext
  22. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  23. def defaultMinPartitions: Int

    Definition Classes
    SparkContext
  24. def defaultParallelism: Int

    Definition Classes
    SparkContext
  25. def emptyRDD[T](implicit arg0: ClassTag[T]): EmptyRDD[T]

    Definition Classes
    SparkContext
  26. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  27. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  28. val files: Seq[String]

    Definition Classes
    SparkContext
  29. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  30. def getAllPools: Seq[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  31. def getCheckpointDir: Option[String]

    Definition Classes
    SparkContext
  32. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  33. def getConf: SparkConf

    Definition Classes
    SparkContext
  34. def getExecutorMemoryStatus: Map[String, (Long, Long)]

    Definition Classes
    SparkContext
  35. def getExecutorStorageStatus: Array[StorageStatus]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  36. def getLocalProperty(key: String): String

    Definition Classes
    SparkContext
  37. def getPersistentRDDs: Map[Int, RDD[_]]

    Definition Classes
    SparkContext
  38. def getPoolForName(pool: String): Option[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  39. def getRDDStorageInfo: Array[RDDInfo]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  40. def getSchedulingMode: SchedulingMode

    Definition Classes
    SparkContext
  41. val hadoopConfiguration: Configuration

    Definition Classes
    SparkContext
  42. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  43. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  44. def hadoopFile[K, V](path: String, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  45. def hadoopRDD[K, V](conf: JobConf, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  46. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  47. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  48. val isLocal: Boolean

    Definition Classes
    SparkContext
  49. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  50. val jars: Seq[String]

    Definition Classes
    SparkContext
  51. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  52. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  53. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  54. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  55. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  56. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  57. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  58. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  59. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  60. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  61. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  62. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  63. def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  64. def makeRDD[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  65. val master: String

    Definition Classes
    SparkContext
  66. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  67. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration): RDD[(K, V)]

    Definition Classes
    SparkContext
  68. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  69. def newAPIHadoopRDD[K, V, F <: InputFormat[K, V]](conf: Configuration, fClass: Class[F], kClass: Class[K], vClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  70. final def notify(): Unit

    Definition Classes
    AnyRef
  71. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  72. def objectFile[T](path: String, minPartitions: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  73. def parallelize[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  74. def runApproximateJob[T, U, R](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, evaluator: ApproximateEvaluator[U, R], timeout: Long): PartialResult[R]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  75. def runJob[T, U](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  76. def runJob[T, U](rdd: RDD[T], processPartition: (TaskContext, Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  77. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  78. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  79. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  80. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  81. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  82. def sequenceFile[K, V](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], kcf: () ⇒ WritableConverter[K], vcf: () ⇒ WritableConverter[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  83. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  84. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  85. def setCallSite(shortCallSite: String): Unit

    Definition Classes
    SparkContext
  86. def setCheckpointDir(directory: String): Unit

    Definition Classes
    SparkContext
  87. def setJobGroup(groupId: String, description: String, interruptOnCancel: Boolean): Unit

    Definition Classes
    SparkContext
  88. def setLocalProperty(key: String, value: String): Unit

    Definition Classes
    SparkContext
  89. val sparkUser: String

    Definition Classes
    SparkContext
  90. val startTime: Long

    Definition Classes
    SparkContext
  91. def stop(): Unit

    Definition Classes
    SparkContext
  92. def submitJob[T, U, R](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit, resultFunc: ⇒ R): SimpleFutureAction[R]

    Definition Classes
    SparkContext
    Annotations
    @Experimental()
  93. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  94. val tachyonFolderName: String

    Definition Classes
    SparkContext
  95. def textFile(path: String, minPartitions: Int): RDD[String]

    Definition Classes
    SparkContext
  96. def toString(): String

    Definition Classes
    SparkContext → AnyRef → Any
  97. def union[T](first: RDD[T], rest: RDD[T]*)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  98. def union[T](rdds: Seq[RDD[T]])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  99. def version: String

    Definition Classes
    SparkContext
  100. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  101. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  102. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  103. def wholeTextFiles(path: String, minPartitions: Int): RDD[(String, String)]

    Definition Classes
    SparkContext
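
All of the members above are inherited unchanged from org.apache.spark.SparkContext, so ordinary Spark usage applies. The sketches below assume sc is an instance of this class; all paths are placeholders. First, the basic RDD constructors (parallelize, textFile, wholeTextFiles, objectFile) and union:

    // Assuming `sc` is an instance of this SparkContext.
    val nums  = sc.parallelize(1 to 100, numSlices = 4)     // distribute a local Seq
    val lines = sc.textFile("hdfs:///data/input.txt", 2)    // one String record per line
    val files = sc.wholeTextFiles("hdfs:///data/dir", 2)    // (fileName, fileContent) pairs
    val objs  = sc.objectFile[Int]("hdfs:///data/objs", 2)  // reads rdd.saveAsObjectFile output

    // union concatenates RDDs of the same element type.
    val all = sc.union(Seq(nums, sc.parallelize(Seq(101, 102), 1)))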
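
broadcast ships a read-only value to every executor once, accumulator creates a counter that tasks may only add to and the driver reads back, and addFile distributes a side file that tasks can locate with SparkFiles.get. A sketch under the same assumptions (the side-data path is hypothetical):

    import org.apache.spark.SparkContext._ // AccumulatorParam instances for Int, Double, ...
    import org.apache.spark.SparkFiles

    val lookup = sc.broadcast(Map("a" -> 1, "b" -> 2)) // one read-only copy per executor
    val errors = sc.accumulator(0, "errors")           // named accumulators show in the web UI

    sc.addFile("/local/path/side-data.txt")            // shipped to every node

    sc.parallelize(Seq("a", "x", "b"), 2).foreach { k =>
      if (!lookup.value.contains(k)) errors += 1       // tasks may only add
      val sidePath = SparkFiles.get("side-data.txt")   // executor-local path of the added file
    }

    println(errors.value) // only the driver may read the accumulated value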
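
The hadoopFile, newAPIHadoopFile, and sequenceFile variants cover the old (org.apache.hadoop.mapred) and new (org.apache.hadoop.mapreduce) Hadoop input APIs. A sketch, again with placeholder paths:

    import org.apache.hadoop.io.{IntWritable, LongWritable, Text}
    import org.apache.hadoop.mapred.{TextInputFormat => OldTextInputFormat}
    import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}

    // Old (mapred) API: keys are byte offsets, values are lines.
    val oldApi = sc.hadoopFile[LongWritable, Text, OldTextInputFormat]("hdfs:///data/in", 2)

    // New (mapreduce) API equivalent.
    val newApi = sc.newAPIHadoopFile[LongWritable, Text, NewTextInputFormat]("hdfs:///data/in")

    // Hadoop SequenceFile with explicit key/value classes.
    val seq = sc.sequenceFile("hdfs:///data/seq", classOf[Text], classOf[IntWritable])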
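
Finally, job control and lifecycle: runJob runs a function over an RDD's partitions and gathers the results on the driver, setJobGroup tags subsequently submitted jobs so cancelJobGroup can cancel them as a unit, setCheckpointDir must be called before any RDD is checkpointed, and stop releases the context's resources. Group and directory names below are placeholders:

    val data = sc.parallelize(1 to 1000, 8)

    // One result per partition, collected into an Array on the driver.
    val partitionSums: Array[Int] = sc.runJob(data, (it: Iterator[Int]) => it.sum)

    // Tag jobs so they can be cancelled together (e.g. from another thread).
    sc.setJobGroup("nightly-etl", "Nightly ETL run", interruptOnCancel = true)
    // ... run jobs ...
    sc.cancelJobGroup("nightly-etl")

    // A checkpoint directory is required before the first checkpoint() call.
    sc.setCheckpointDir("hdfs:///tmp/checkpoints")
    data.checkpoint()

    sc.stop() // shut down the context and release cluster resources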

Deprecated Value Members

  1. def clearFiles(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding files no longer creates local copies that need to be deleted

  2. def clearJars(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding jars no longer creates local copies that need to be deleted

  3. def defaultMinSplits: Int

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) use defaultMinPartitions

  4. def initLocalProperties(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) Properties no longer need to be explicitly initialized.

  5. def setJobDescription(value: String): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 0.8.1) use setJobGroup
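
A short sketch of the replacements named in the deprecation notices above:

    // defaultMinSplits -> defaultMinPartitions
    val parts = sc.defaultMinPartitions

    // setJobDescription -> setJobGroup
    sc.setJobGroup("my-group", "My job description", interruptOnCancel = false)

    // clearFiles(), clearJars() and initLocalProperties() are now no-ops
    // and calls to them can simply be removed.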
