Class

ammonite.spark.Spark

SparkContext

Related Doc: package Spark

Permalink

class SparkContext extends org.apache.spark.SparkContext

Linear Supertypes
org.apache.spark.SparkContext, ExecutorAllocationClient, Logging, AnyRef, Any
Ordering
  1. Alphabetic
  2. By inheritance
Inherited
  1. SparkContext
  2. SparkContext
  3. ExecutorAllocationClient
  4. Logging
  5. AnyRef
  6. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new SparkContext(sparkConf: SparkConf)

    Permalink

Value Members

  1. final def !=(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int

    Permalink
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  4. def accumulable[R, T](initialValue: R, name: String)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Permalink
    Definition Classes
    SparkContext
  5. def accumulable[R, T](initialValue: R)(implicit param: AccumulableParam[R, T]): Accumulable[R, T]

    Permalink
    Definition Classes
    SparkContext
  6. def accumulableCollection[R, T](initialValue: R)(implicit arg0: (R) ⇒ Growable[T] with TraversableOnce[T] with Serializable, arg1: ClassTag[R]): Accumulable[R, T]

    Permalink
    Definition Classes
    SparkContext
  7. def accumulator[T](initialValue: T, name: String)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Permalink
    Definition Classes
    SparkContext
  8. def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]): Accumulator[T]

    Permalink
    Definition Classes
    SparkContext
  9. def addFile(path: String, recursive: Boolean): Unit

    Permalink
    Definition Classes
    SparkContext
  10. def addFile(path: String): Unit

    Permalink
    Definition Classes
    SparkContext
  11. def addJar(path: String): Unit

    Permalink
    Definition Classes
    SparkContext
  12. def addSparkListener(listener: SparkListener): Unit

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  13. val appName: String

    Permalink
    Definition Classes
    SparkContext
  14. val applicationId: String

    Permalink
    Definition Classes
    SparkContext
  15. final def asInstanceOf[T0]: T0

    Permalink
    Definition Classes
    Any
  16. def binaryFiles(path: String, minPartitions: Int): RDD[(String, PortableDataStream)]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @Experimental()
  17. def binaryRecords(path: String, recordLength: Int, conf: Configuration): RDD[Array[Byte]]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @Experimental()
  18. def broadcast[T](value: T)(implicit arg0: ClassTag[T]): Broadcast[T]

    Permalink
    Definition Classes
    SparkContext
  19. def cancelAllJobs(): Unit

    Permalink
    Definition Classes
    SparkContext
  20. def cancelJobGroup(groupId: String): Unit

    Permalink
    Definition Classes
    SparkContext
  21. def checkpointFile[T](path: String)(implicit arg0: ClassTag[T]): RDD[T]

    Permalink
    Attributes
    protected[org.apache.spark]
    Definition Classes
    SparkContext
  22. def clearCallSite(): Unit

    Permalink
    Definition Classes
    SparkContext
  23. def clearJobGroup(): Unit

    Permalink
    Definition Classes
    SparkContext
  24. def clone(): AnyRef

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  25. def defaultMinPartitions: Int

    Permalink
    Definition Classes
    SparkContext
  26. def defaultParallelism: Int

    Permalink
    Definition Classes
    SparkContext
  27. def emptyRDD[T](implicit arg0: ClassTag[T]): EmptyRDD[T]

    Permalink
    Definition Classes
    SparkContext
  28. final def eq(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  29. def equals(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  30. val files: Seq[String]

    Permalink
    Definition Classes
    SparkContext
  31. def finalize(): Unit

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  32. def getAllPools: Seq[Schedulable]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  33. def getCheckpointDir: Option[String]

    Permalink
    Definition Classes
    SparkContext
  34. final def getClass(): Class[_]

    Permalink
    Definition Classes
    AnyRef → Any
  35. def getConf: SparkConf

    Permalink
    Definition Classes
    SparkContext
  36. def getExecutorMemoryStatus: Map[String, (Long, Long)]

    Permalink
    Definition Classes
    SparkContext
  37. def getExecutorStorageStatus: Array[StorageStatus]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  38. def getLocalProperty(key: String): String

    Permalink
    Definition Classes
    SparkContext
  39. def getPersistentRDDs: Map[Int, RDD[_]]

    Permalink
    Definition Classes
    SparkContext
  40. def getPoolForName(pool: String): Option[Schedulable]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  41. def getRDDStorageInfo: Array[RDDInfo]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  42. def getSchedulingMode: SchedulingMode

    Permalink
    Definition Classes
    SparkContext
  43. val hadoopConfiguration: Configuration

    Permalink
    Definition Classes
    SparkContext
  44. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  45. def hadoopFile[K, V, F <: InputFormat[K, V]](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  46. def hadoopFile[K, V](path: String, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  47. def hadoopRDD[K, V](conf: JobConf, inputFormatClass: Class[_ <: InputFormat[K, V]], keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  48. def hashCode(): Int

    Permalink
    Definition Classes
    AnyRef → Any
  49. final def isInstanceOf[T0]: Boolean

    Permalink
    Definition Classes
    Any
  50. val isLocal: Boolean

    Permalink
    Definition Classes
    SparkContext
  51. def isTraceEnabled(): Boolean

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  52. val jars: Seq[String]

    Permalink
    Definition Classes
    SparkContext
  53. def killExecutor(executorId: String): Boolean

    Permalink
    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  54. def killExecutors(executorIds: Seq[String]): Boolean

    Permalink
    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  55. def log: Logger

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  56. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  57. def logDebug(msg: ⇒ String): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  58. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  59. def logError(msg: ⇒ String): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  60. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  61. def logInfo(msg: ⇒ String): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  62. def logName: String

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  63. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  64. def logTrace(msg: ⇒ String): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  65. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  66. def logWarning(msg: ⇒ String): Unit

    Permalink
    Attributes
    protected
    Definition Classes
    Logging
  67. def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]

    Permalink
    Definition Classes
    SparkContext
  68. def makeRDD[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Permalink
    Definition Classes
    SparkContext
  69. val master: String

    Permalink
    Definition Classes
    SparkContext
  70. val metricsSystem: MetricsSystem

    Permalink
    Definition Classes
    SparkContext
  71. final def ne(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  72. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  73. def newAPIHadoopFile[K, V, F <: InputFormat[K, V]](path: String)(implicit km: ClassTag[K], vm: ClassTag[V], fm: ClassTag[F]): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  74. def newAPIHadoopRDD[K, V, F <: InputFormat[K, V]](conf: Configuration, fClass: Class[F], kClass: Class[K], vClass: Class[V]): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  75. final def notify(): Unit

    Permalink
    Definition Classes
    AnyRef
  76. final def notifyAll(): Unit

    Permalink
    Definition Classes
    AnyRef
  77. def objectFile[T](path: String, minPartitions: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Permalink
    Definition Classes
    SparkContext
  78. def parallelize[T](seq: Seq[T], numSlices: Int)(implicit arg0: ClassTag[T]): RDD[T]

    Permalink
    Definition Classes
    SparkContext
  79. def requestExecutors(numAdditionalExecutors: Int): Boolean

    Permalink
    Definition Classes
    SparkContext → ExecutorAllocationClient
    Annotations
    @DeveloperApi()
  80. def runApproximateJob[T, U, R](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, evaluator: ApproximateEvaluator[U, R], timeout: Long): PartialResult[R]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @DeveloperApi()
  81. def runJob[T, U](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Permalink
    Definition Classes
    SparkContext
  82. def runJob[T, U](rdd: RDD[T], processPartition: (TaskContext, Iterator[T]) ⇒ U, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Permalink
    Definition Classes
    SparkContext
  83. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Permalink
    Definition Classes
    SparkContext
  84. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U)(implicit arg0: ClassTag[U]): Array[U]

    Permalink
    Definition Classes
    SparkContext
  85. def runJob[T, U](rdd: RDD[T], func: (Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Permalink
    Definition Classes
    SparkContext
  86. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean)(implicit arg0: ClassTag[U]): Array[U]

    Permalink
    Definition Classes
    SparkContext
  87. def runJob[T, U](rdd: RDD[T], func: (TaskContext, Iterator[T]) ⇒ U, partitions: Seq[Int], allowLocal: Boolean, resultHandler: (Int, U) ⇒ Unit)(implicit arg0: ClassTag[U]): Unit

    Permalink
    Definition Classes
    SparkContext
  88. def sequenceFile[K, V](path: String, minPartitions: Int)(implicit km: ClassTag[K], vm: ClassTag[V], kcf: () ⇒ WritableConverter[K], vcf: () ⇒ WritableConverter[V]): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  89. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V]): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  90. def sequenceFile[K, V](path: String, keyClass: Class[K], valueClass: Class[V], minPartitions: Int): RDD[(K, V)]

    Permalink
    Definition Classes
    SparkContext
  91. def setCallSite(shortCallSite: String): Unit

    Permalink
    Definition Classes
    SparkContext
  92. def setCheckpointDir(directory: String): Unit

    Permalink
    Definition Classes
    SparkContext
  93. def setJobDescription(value: String): Unit

    Permalink
    Definition Classes
    SparkContext
  94. def setJobGroup(groupId: String, description: String, interruptOnCancel: Boolean): Unit

    Permalink
    Definition Classes
    SparkContext
  95. def setLocalProperty(key: String, value: String): Unit

    Permalink
    Definition Classes
    SparkContext
  96. val sparkUser: String

    Permalink
    Definition Classes
    SparkContext
  97. val startTime: Long

    Permalink
    Definition Classes
    SparkContext
  98. val statusTracker: SparkStatusTracker

    Permalink
    Definition Classes
    SparkContext
  99. def stop(): Unit

    Permalink
    Definition Classes
    SparkContext
  100. def submitJob[T, U, R](rdd: RDD[T], processPartition: (Iterator[T]) ⇒ U, partitions: Seq[Int], resultHandler: (Int, U) ⇒ Unit, resultFunc: ⇒ R): SimpleFutureAction[R]

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @Experimental()
  101. final def synchronized[T0](arg0: ⇒ T0): T0

    Permalink
    Definition Classes
    AnyRef
  102. val tachyonFolderName: String

    Permalink
    Definition Classes
    SparkContext
  103. def textFile(path: String, minPartitions: Int): RDD[String]

    Permalink
    Definition Classes
    SparkContext
  104. def toString(): String

    Permalink
    Definition Classes
    SparkContext → AnyRef → Any
  105. def union[T](first: RDD[T], rest: RDD[T]*)(implicit arg0: ClassTag[T]): RDD[T]

    Permalink
    Definition Classes
    SparkContext
  106. def union[T](rdds: Seq[RDD[T]])(implicit arg0: ClassTag[T]): RDD[T]

    Permalink
    Definition Classes
    SparkContext
  107. def version: String

    Permalink
    Definition Classes
    SparkContext
  108. final def wait(): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  109. final def wait(arg0: Long, arg1: Int): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  110. final def wait(arg0: Long): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  111. def wholeTextFiles(path: String, minPartitions: Int): RDD[(String, String)]

    Permalink
    Definition Classes
    SparkContext

Deprecated Value Members

  1. def clearFiles(): Unit

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding files no longer creates local copies that need to be deleted

  2. def clearJars(): Unit

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) adding jars no longer creates local copies that need to be deleted

  3. def defaultMinSplits: Int

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) use defaultMinPartitions

  4. def initLocalProperties(): Unit

    Permalink
    Definition Classes
    SparkContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.0.0) Properties no longer need to be explicitly initialized.

Inherited from org.apache.spark.SparkContext

Inherited from ExecutorAllocationClient

Inherited from Logging

Inherited from AnyRef

Inherited from Any

Ungrouped