ammonite.spark.Spark

SparkContext

class SparkContext extends org.apache.spark.SparkContext

Linear Supertypes
org.apache.spark.SparkContext, ExecutorAllocationClient, Logging, AnyRef, Any

Instance Constructors

  1. new SparkContext(sparkConf: SparkConf)
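
    Example (a minimal sketch; assumes local mode and Spark 1.x):

      import org.apache.spark.SparkConf

      val conf = new SparkConf()
        .setMaster("local[2]")   // two local worker threads
        .setAppName("demo")      // hypothetical application name
      val sc = new SparkContext(conf)

    The examples under the members below assume this sc is in scope.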

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
  7. def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Definition Classes
    SparkContext
  8. def accumulableCollection[R, T](initialValue: R)(implicit arg0: (R) ⇒ Growable[T] with TraversableOnce[T] with Serializable, arg1: ClassTag[R]): Accumulable[R, T]

    Definition Classes
    SparkContext
  9. def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
  10. def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Definition Classes
    SparkContext
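
    Example (a minimal sketch; sc as constructed above):

      // A named accumulator shows up in the web UI; Int has a built-in AccumulatorParam.
      val errorCount = sc.accumulator(0, "errors")
      sc.parallelize(1 to 100, 4).foreach { i =>
        if (i % 10 == 0) errorCount += 1   // workers may only add
      }
      errorCount.value                     // 10, readable only on the driver
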
  11. def addFile(path: String): Unit

    Definition Classes
    SparkContext
  12. def addJar(path: String): Unit

    Definition Classes
    SparkContext
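
    Example (a minimal sketch; the path is hypothetical):

      import org.apache.spark.SparkFiles

      sc.addFile("/tmp/lookup.txt")        // shipped to every node
      sc.parallelize(1 to 4).map { _ =>
        // SparkFiles.get resolves the local copy on each executor
        scala.io.Source.fromFile(SparkFiles.get("lookup.txt")).getLines().size
      }.collect()
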
  13. def addSparkListener(listener: SparkListener): Unit

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  14. val appName: String

    Definition Classes
    SparkContext
  15. val applicationId: String

    Definition Classes
    SparkContext
  16. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  17. def binaryFiles(path: String, minPartitions: Int): RDD[(String, PortableDataStream)]

    Definition Classes
    SparkContext
    Annotations
    @Experimental()
  18. def binaryRecords(path: String, recordLength: Int, conf: Configuration): RDD[Array[Byte]]

    Definition Classes
    SparkContext
    Annotations
    @Experimental()
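
    Example (a minimal sketch; the directory is hypothetical):

      val blobs = sc.binaryFiles("hdfs:///data/images", minPartitions = 4)
      // Each element pairs a file path with a lazily-read PortableDataStream.
      blobs.map { case (path, stream) => (path, stream.toArray.length) }.take(3)
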
  19. def broadcast[T](value: T)(implicit arg0: ClassTag[T]): Broadcast[T]

    Definition Classes
    SparkContext
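
    Example (a minimal sketch):

      // Ship a read-only lookup table once per executor instead of once per task.
      val lookup = sc.broadcast(Map("a" -> 1, "b" -> 2))
      sc.parallelize(Seq("a", "b", "a"))
        .map(k => lookup.value.getOrElse(k, 0))
        .reduce(_ + _)                     // 4
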
  20. def cancelAllJobs(): Unit

    Definition Classes
    SparkContext
  21. def cancelJobGroup(groupId: String): Unit

    Definition Classes
    SparkContext
  22. def checkpointFile[T](path: String)(implicit arg0: ClassTag[T]): RDD[T]

    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  23. def clearCallSite(): Unit

    Definition Classes
    SparkContext
  24. def clearJobGroup(): Unit

    Definition Classes
    SparkContext
  25. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  26. def defaultMinPartitions: Int

    Definition Classes
    SparkContext
  27. def defaultParallelism: Int

    Definition Classes
    SparkContext
  28. def emptyRDD[T](implicit arg0: ClassTag[T]): EmptyRDD[T]

    Definition Classes
    SparkContext
  29. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  30. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  31. val files: Seq[String]

    Definition Classes
    SparkContext
  32. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  33. def getAllPools: Seq[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  34. def getCheckpointDir: Option[String]

    Definition Classes
    SparkContext
  35. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  36. def getConf: SparkConf

    Definition Classes
    SparkContext
  37. def getExecutorMemoryStatus: Map[String, (Long, Long)]

    Definition Classes
    SparkContext
  38. def getExecutorStorageStatus: Array[StorageStatus]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  39. def getLocalProperty(key: String): String

    Definition Classes
    SparkContext
  40. def getPersistentRDDs: Map[Int, RDD[_]]

    Definition Classes
    SparkContext
  41. def getPoolForName(pool: String): Option[Schedulable]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  42. def getRDDStorageInfo: Array[RDDInfo]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  43. def getSchedulingMode: SchedulingMode

    Definition Classes
    SparkContext
  44. val hadoopConfiguration: Configuration

    Definition Classes
    SparkContext
  45. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  46. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  47. def hadoopFile[K, V](path: String, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
  48. def hadoopRDD[K, V](conf: JobConf, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
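
    Example (a minimal sketch of the old-API overload; the path is hypothetical):

      import org.apache.hadoop.io.{LongWritable, Text}
      import org.apache.hadoop.mapred.TextInputFormat

      val lines = sc.hadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/input", 8)
      // Writables are reused per record, so convert before caching or collecting.
      lines.map { case (_, text) => text.toString }.take(5)
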
  49. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  50. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  51. val isLocal: Boolean

    Definition Classes
    SparkContext
  52. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  53. val jars: Seq[String]

    Definition Classes
    SparkContext
  54. def killExecutor(executorId: String): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  55. def killExecutors(executorIds: Seq[String]): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  56. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  57. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  58. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  59. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  60. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  61. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  62. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  63. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  64. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  65. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  66. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  67. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  68. def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  69. def makeRDD[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
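
    Example (a minimal sketch; hostnames are hypothetical):

      // numSlices controls the partition count, as with parallelize.
      val r1 = sc.makeRDD(Seq("a", "b", "c"), 2)
      // The (T, Seq[String]) overload also records a preferred location per element.
      val r2 = sc.makeRDD(Seq((1, Seq("host1")), (2, Seq("host2"))))
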
  70. val master: String

    Definition Classes
    SparkContext
  71. val metricsSystem: MetricsSystem

    Definition Classes
    SparkContext
  72. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  73. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration): RDD[(K, V)]

    Definition Classes
    SparkContext
  74. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Definition Classes
    SparkContext
  75. def newAPIHadoopRDD[K, V, F <: InputFormat[K, V]](conf: Configuration, fClass: Class[F], kClass: Class[K], vClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
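
    Example (a minimal sketch of the new-API overload; the path is hypothetical):

      import org.apache.hadoop.conf.Configuration
      import org.apache.hadoop.io.{LongWritable, Text}
      import org.apache.hadoop.mapreduce.lib.input.TextInputFormat

      val rdd = sc.newAPIHadoopFile("hdfs:///data/input",
        classOf[TextInputFormat], classOf[LongWritable], classOf[Text], new Configuration())
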
  76. final def notify(): Unit

    Definition Classes
    AnyRef
  77. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  78. def objectFile[T](path: String, minPartitions: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
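
    Example (a minimal sketch; the path is hypothetical):

      // Round trip with RDD.saveAsObjectFile, which writes Java-serialized objects.
      sc.parallelize(1 to 100).saveAsObjectFile("/tmp/ints")
      sc.objectFile[Int]("/tmp/ints", minPartitions = 2).count()   // 100
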
  79. def parallelize[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
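
    Example (a minimal sketch):

      val nums = sc.parallelize(1 to 1000, 8)   // 8 partitions
      nums.filter(_ % 3 == 0).count()           // 333
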
  80. def requestExecutors(numAdditionalExecutors: Int): Boolean

    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  81. def runApproximateJob[T, U, R](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, evaluator: ApproximateEvaluator[U, R], timeout: Long): PartialResult[R]

    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  82. def runJob[T, U](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  83. def runJob[T, U](rdd: RDD[T], processPartition: (TaskContext, Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
  84. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  85. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  86. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  87. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Definition Classes
    SparkContext
  88. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Definition Classes
    SparkContext
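
    Example (a minimal sketch of the simplest overload):

      val rdd = sc.parallelize(1 to 100, 4)
      // One result per partition, in partition order.
      val perPartition: Array[Int] = sc.runJob(rdd, (it: Iterator[Int]) => it.sum)
      perPartition.sum                          // 5050
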
  89. def sequenceFile[K, V](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], kcf: () ⇒ WritableConverter[K], vcf: () ⇒ WritableConverter[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  90. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]): RDD[(K, V)]

    Definition Classes
    SparkContext
  91. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Definition Classes
    SparkContext
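
    Example (a minimal sketch; the path is hypothetical):

      import org.apache.hadoop.io.{IntWritable, Text}

      val pairs = sc.sequenceFile("hdfs:///data/pairs", classOf[IntWritable], classOf[Text])
      // Convert the reused Writables to plain values before collecting.
      pairs.map { case (k, v) => (k.get, v.toString) }.take(3)
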
  92. def setCallSite(shortCallSite: String): Unit

    Definition Classes
    SparkContext
  93. def setCheckpointDir(directory: String): Unit

    Definition Classes
    SparkContext
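
    Example (a minimal sketch; the directory is hypothetical):

      sc.setCheckpointDir("hdfs:///tmp/checkpoints")
      val rdd = sc.parallelize(1 to 10).map(_ * 2)
      rdd.checkpoint()   // written out on the next action, truncating the lineage
      rdd.count()
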
  94. def setJobGroup(groupId: String, description: String, interruptOnCancel: Boolean): Unit

    Definition Classes
    SparkContext
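
    Example (a minimal sketch; the group id is hypothetical):

      sc.setJobGroup("nightly-etl", "nightly ETL jobs", interruptOnCancel = true)
      // Jobs started from this thread now carry the group id,
      // and can all be cancelled at once, e.g. from another thread:
      sc.cancelJobGroup("nightly-etl")
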
  95. def setLocalProperty(key: String, value: String): Unit

    Definition Classes
    SparkContext
  96. val sparkUser: String

    Definition Classes
    SparkContext
  97. val startTime: Long

    Definition Classes
    SparkContext
  98. val statusTracker: SparkStatusTracker

    Definition Classes
    SparkContext
  99. def stop(): Unit

    Definition Classes
    SparkContext
  100. def submitJob[T, U, R](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit, resultFunc: ⇒ R): SimpleFutureAction[R]

    Definition Classes
    SparkContext
    Annotations
    @Experimental()
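
    Example (a minimal sketch; counts partition sizes asynchronously):

      import scala.concurrent.Await
      import scala.concurrent.duration.Duration

      val rdd = sc.parallelize(1 to 100, 4)
      val sizes = new Array[Int](rdd.partitions.length)
      val fut = sc.submitJob(
        rdd,
        (it: Iterator[Int]) => it.size,                  // per-partition work
        0 until rdd.partitions.length,                   // run on all partitions
        (index: Int, size: Int) => sizes(index) = size,  // called as results arrive
        sizes.sum)                                       // final result, built on completion
      Await.result(fut, Duration.Inf)                    // 100
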
  101. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  102. val tachyonFolderName: String

    Definition Classes
    SparkContext
  103. def textFile(path: String, minPartitions: Int): RDD[String]

    Definition Classes
    SparkContext
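
    Example (a minimal sketch; the path is hypothetical):

      val lines = sc.textFile("hdfs:///data/logs", 8)
      lines.filter(_.contains("ERROR")).count()
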
  104. def toString(): String

    Definition Classes
    SparkContext → AnyRef → Any
  105. def union[T](first: RDD[T], rest: RDD[T]*)(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
  106. def union[T](rdds: Seq[RDD[T]])(implicit arg0: ClassTag[T]): RDD[T]

    Definition Classes
    SparkContext
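
    Example (a minimal sketch; both overloads):

      val a = sc.parallelize(1 to 3)
      val b = sc.parallelize(4 to 6)
      sc.union(a, b).collect()      // Array(1, 2, 3, 4, 5, 6)
      sc.union(Seq(a, b)).count()   // 6
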
  107. def version: String

    Definition Classes
    SparkContext
  108. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  109. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  110. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  111. def wholeTextFiles(path: String, minPartitions: Int): RDD[(String, String)]

    Definition Classes
    SparkContext
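
    Example (a minimal sketch; the directory is hypothetical):

      // Unlike textFile, each element is a whole (path, contents) pair.
      val files = sc.wholeTextFiles("hdfs:///data/small-files", minPartitions = 4)
      files.map { case (path, contents) => (path, contents.length) }.collect()
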

Deprecated Value Members

  1. def clearFiles(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding files no longer creates local copies that need to be deleted

  2. def clearJars(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding jars no longer creates local copies that need to be deleted

  3. def defaultMinSplits: Int

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) use defaultMinPartitions

  4. def initLocalProperties(): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) Properties no longer need to be explicitly initialized.

  5. def setJobDescription(value: String): Unit

    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 0.8.1) use setJobGroup
