ammonite.spark.Spark

SparkContext

class SparkContext extends org.apache.spark.SparkContext

Linear Supertypes
org.apache.spark.SparkContext, ExecutorAllocationClient, Logging, AnyRef, Any

Instance Constructors

  1. new SparkContext(sparkConf: SparkConf)
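
    Example: constructing the context from a SparkConf (a minimal sketch; assumes this class is in scope, and the master URL and app name are placeholders)

      import org.apache.spark.SparkConf

      val conf = new SparkConf()
        .setMaster("local[*]")
        .setAppName("ammonite-session")
      val sc = new SparkContext(conf)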

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
  7. def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
  8. def accumulableCollection[R, T](initialValue: R)(implicit arg0: (R) ⇒ Growable[T] with TraversableOnce[T] with Serializable, arg1: ClassTag[R]): Accumulable[R, T]

    Definition Classes
    SparkContext
  9. def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
  10. def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
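
    Example: counting matching records with a named accumulator (a minimal sketch; sc is an assumed existing instance of this class)

      val errors = sc.accumulator(0, "errors")
      sc.parallelize(1 to 100).foreach { n =>
        if (n % 10 == 0) errors += 1  // runs on executors
      }
      errors.value  // read back on the driver: 10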
  11. def addFile(path: String, recursive: Boolean): Unit

    Definition Classes
    SparkContext
  12. def addFile(path: String): Unit

    Definition Classes
    SparkContext
  13. def addJar(path: String): Unit

    Definition Classes
    SparkContext
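
    Example: shipping a side file and a jar to executors (a minimal sketch; sc is an assumed existing instance and the paths are placeholders)

      // Executors can retrieve the file via
      // org.apache.spark.SparkFiles.get("lookup.txt"):
      sc.addFile("/tmp/lookup.txt")
      // Make extra classes available to tasks:
      sc.addJar("/tmp/extra-udfs.jar")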
  14. def addSparkListener(listener: SparkListener): Unit

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  15. val appName: String

    Definition Classes
    SparkContext
  16. val applicationId: String

    Definition Classes
    SparkContext
  17. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  18. def binaryFiles(path: String, minPartitions: Int): RDD[(String, PortableDataStream)]

    Definition Classes
    SparkContext
    Annotations
    @Experimental()
  19. def binaryRecords(path: String, recordLength: Int, conf: Configuration): RDD[Array[Byte]]

    Definition Classes
    SparkContext
    Annotations
    @Experimental()
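
    Example: reading fixed-length binary records (a minimal sketch; sc is an assumed existing instance, and the path and record length are placeholders)

      // Each RDD element is one 16-byte record:
      val records = sc.binaryRecords("hdfs:///data/fixed.bin", 16, sc.hadoopConfiguration)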
  20. def broadcast[T](value: T)(implicit arg0: ClassTag[T]): Broadcast[T]

    Definition Classes
    SparkContext
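
    Example: sharing a read-only lookup table via a broadcast variable instead of capturing it in every task closure (a minimal sketch; sc is an assumed existing instance)

      val table = sc.broadcast(Map(1 -> "one", 2 -> "two"))
      sc.parallelize(Seq(1, 2, 1))
        .map(k => table.value.getOrElse(k, "?"))
        .collect()  // Array(one, two, one)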
  21. def cancelAllJobs(): Unit

    Definition Classes
    SparkContext
  22. def cancelJobGroup(groupId: String): Unit

    Definition Classes
    SparkContext
  23. def checkpointFile[T](path: String)(implicit arg0: ClassTag[T]): RDD[T]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  24. def clearCallSite(): Unit

    Definition Classes
    SparkContext
  25. def clearJobGroup(): Unit

    Definition Classes
    SparkContext
  26. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  27. def defaultMinPartitions: Int

    Definition Classes
    SparkContext
  28. def defaultParallelism: Int

    Definition Classes
    SparkContext
  29. def emptyRDD[T](implicit arg0: ClassTag[T]): EmptyRDD[T]

    Definition Classes
    SparkContext
  30. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  31. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  32. val files: Seq[String]

    Definition Classes
    SparkContext
  33. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  34. def getAllPools: Seq[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  35. def getCheckpointDir: Option[String]

    Definition Classes
    SparkContext
  36. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  37. def getConf: SparkConf

    Definition Classes
    SparkContext
  38. def getExecutorMemoryStatus: Map[String, (Long, Long)]

    Definition Classes
    SparkContext
  39. def getExecutorStorageStatus: Array[StorageStatus]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  40. def getLocalProperty(key: String): String

    Definition Classes
    SparkContext
  41. def getPersistentRDDs: Map[Int, RDD[_]]

    Definition Classes
    SparkContext
  42. def getPoolForName(pool: String): Option[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  43. def getRDDStorageInfo: Array[RDDInfo]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  44. def getSchedulingMode: SchedulingMode

    Definition Classes
    SparkContext
  45. val hadoopConfiguration: Configuration

    Definition Classes
    SparkContext
  46. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  47. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  48. def hadoopFile[K, V](path: String, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  49. def hadoopRDD[K, V](conf: JobConf, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
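
    Example: reading text with the old mapred API (a minimal sketch; sc is an assumed existing instance and the path is a placeholder)

      import org.apache.hadoop.io.{LongWritable, Text}
      import org.apache.hadoop.mapred.TextInputFormat

      // Keys are byte offsets, values are lines:
      val lines = sc.hadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/logs")
      lines.map(_._2.toString).take(5)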
  50. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  51. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  52. val isLocal: Boolean

    Definition Classes
    SparkContext
  53. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  54. val jars: Seq[String]

    Definition Classes
    SparkContext
  55. def killExecutor(executorId: String): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  56. def killExecutors(executorIds: Seq[String]): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  57. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  58. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  59. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  60. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  61. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  62. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  63. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  64. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  65. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  66. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  67. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  68. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  69. def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  70. def makeRDD[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  71. val master: String

    Definition Classes
    SparkContext
  72. val metricsSystem: MetricsSystem

    Definition Classes
    SparkContext
  73. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  74. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration): RDD[(K, V)]

    Definition Classes
    SparkContext
  75. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  76. def newAPIHadoopRDD[K, V, F <: InputFormat[K, V]](conf: Configuration, fClass: Class[F], kClass: Class[K], vClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
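
    Example: the same read with the new mapreduce API (a minimal sketch; sc is an assumed existing instance and the path is a placeholder)

      import org.apache.hadoop.io.{LongWritable, Text}
      import org.apache.hadoop.mapreduce.lib.input.TextInputFormat

      val lines = sc.newAPIHadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/logs")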
  77. final def notify(): Unit

    Definition Classes
    AnyRef
  78. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  79. def objectFile[T](path: String, minPartitions: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  80. def parallelize[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
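
    Example: distributing a local collection (a minimal sketch; sc is an assumed existing instance; 4 is an arbitrary slice count)

      val nums = sc.parallelize(1 to 1000, 4)
      nums.map(_ * 2).reduce(_ + _)  // 1001000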
  81. def requestExecutors(numAdditionalExecutors: Int): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  82. def runApproximateJob[T, U, R](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, evaluator: ApproximateEvaluator[U, R], timeout: Long): PartialResult[R]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  83. def runJob[T, U](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  84. def runJob[T, U](rdd: RDD[T], processPartition: (TaskContext, Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  85. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  86. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  87. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  88. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  89. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
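
    Example: running a function over selected partitions only (a minimal sketch; sc is an assumed existing instance)

      val data = sc.parallelize(1 to 100, 10)
      // One result per requested partition; allowLocal = false:
      val sums: Array[Int] =
        sc.runJob(data, (it: Iterator[Int]) => it.sum, Seq(0, 1), false)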
  90. def sequenceFile[K, V](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], kcf: () ⇒ WritableConverter[K], vcf: () ⇒ WritableConverter[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  91. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  92. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
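
    Example: reading a SequenceFile of (Text, IntWritable) pairs (a minimal sketch; sc is an assumed existing instance and the path is a placeholder)

      import org.apache.hadoop.io.{IntWritable, Text}

      val pairs = sc.sequenceFile("hdfs:///data/counts.seq", classOf[Text], classOf[IntWritable])
      pairs.map { case (k, v) => (k.toString, v.get) }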
  93. def setCallSite(shortCallSite: String): Unit

    Definition Classes
    SparkContext
  94. def setCheckpointDir(directory: String): Unit

    Definition Classes
    SparkContext
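
    Example: enabling RDD checkpointing (a minimal sketch; sc is an assumed existing instance and the directory is a placeholder)

      sc.setCheckpointDir("hdfs:///tmp/checkpoints")
      val r = sc.parallelize(1 to 10)
      r.checkpoint()  // marked now, materialized by the next action
      r.count()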
  95. def setJobDescription(value: String): Unit

    Definition Classes
    SparkContext
  96. def setJobGroup(groupId: String, description: String, interruptOnCancel: Boolean): Unit

    Definition Classes
    SparkContext
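
    Example: tagging jobs so they can be cancelled together (a minimal sketch; sc is an assumed existing instance and the group id is a placeholder)

      sc.setJobGroup("nightly-etl", "nightly ETL jobs", interruptOnCancel = true)
      // ... actions triggered from this thread belong to the group ...
      sc.cancelJobGroup("nightly-etl")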
  97. def setLocalProperty(key: String, value: String): Unit

    Definition Classes
    SparkContext
  98. val sparkUser: String

    Definition Classes
    SparkContext
  99. val startTime: Long

    Definition Classes
    SparkContext
  100. val statusTracker: SparkStatusTracker

    Definition Classes
    SparkContext
  101. def stop(): Unit

    Definition Classes
    SparkContext
  102. def submitJob[T, U, R](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit, resultFunc: ⇒ R): SimpleFutureAction[R]

    Definition Classes
    SparkContext
    Annotations
    @Experimental()
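
    Example: submitting a job asynchronously and collecting per-partition results (a minimal sketch; sc is an assumed existing instance)

      import scala.concurrent.Await
      import scala.concurrent.duration._

      val data = sc.parallelize(1 to 100, 4)
      val partSums = new Array[Int](data.partitions.length)
      val fut = sc.submitJob[Int, Int, Int](
        data,
        (it: Iterator[Int]) => it.sum,            // processPartition
        0 until data.partitions.length,           // all partitions
        (idx: Int, s: Int) => partSums(idx) = s,  // resultHandler, on the driver
        partSums.sum)                             // resultFunc, evaluated on completion
      Await.result(fut, 1.minute)  // 5050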
  103. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  104. val tachyonFolderName: String

    Definition Classes
    SparkContext
  105. def textFile(path: String, minPartitions: Int): RDD[String]

    Definition Classes
    SparkContext
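
    Example: the classic word count (a minimal sketch; sc is an assumed existing instance and the path is a placeholder)

      import org.apache.spark.SparkContext._  // pair-RDD implicits on older 1.x

      val counts = sc.textFile("hdfs:///data/book.txt", 8)
        .flatMap(_.split("\\s+"))
        .map(w => (w, 1))
        .reduceByKey(_ + _)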
  106. def toString(): String

    Definition Classes
    SparkContext → AnyRef → Any
  107. def union[T](first: RDD[T], rest: RDD[T]*)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  108. def union[T](rdds: Seq[RDD[T]])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
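
    Example: unioning RDDs of the same element type (a minimal sketch; sc is an assumed existing instance)

      val a = sc.parallelize(Seq(1, 2))
      val b = sc.parallelize(Seq(3, 4))
      sc.union(a, b).collect()       // Array(1, 2, 3, 4)
      sc.union(Seq(a, b)).collect()  // same result via the Seq overload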
  109. def version: String

    Definition Classes
    SparkContext
  110. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  111. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  112. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  113. def wholeTextFiles(path: String, minPartitions: Int): RDD[(String, String)]

    Definition Classes
    SparkContext
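
    Example: reading many small files as (path, contents) pairs (a minimal sketch; sc is an assumed existing instance and the path is a placeholder)

      val files = sc.wholeTextFiles("hdfs:///data/small-files", 4)
      files.map { case (path, body) => (path, body.split("\n").length) }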

Deprecated Value Members

  1. def clearFiles(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding files no longer creates local copies that need to be deleted

  2. def clearJars(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding jars no longer creates local copies that need to be deleted

  3. def defaultMinSplits: Int

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) use defaultMinPartitions

  4. def initLocalProperties(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) Properties no longer need to be explicitly initialized.
