object
MetricsContext
Value Members
-
final
def
!=(arg0: Any): Boolean
-
final
def
##(): Int
-
final
def
==(arg0: Any): Boolean
-
final
def
asInstanceOf[T0]: T0
-
def
clone(): AnyRef
-
final
def
eq(arg0: AnyRef): Boolean
-
def
equals(arg0: Any): Boolean
-
def
finalize(): Unit
-
final
def
getClass(): Class[_]
-
def
hashCode(): Int
-
final
def
isInstanceOf[T0]: Boolean
-
final
def
ne(arg0: AnyRef): Boolean
-
final
def
notify(): Unit
-
final
def
notifyAll(): Unit
-
implicit
def
rddToInstrumentedOrderedRDD[K, V](rdd: RDD[(K, V)])(implicit arg0: Ordering[K], arg1: ClassTag[K], arg2: ClassTag[V]): InstrumentedOrderedRDDFunctions[K, V]
-
implicit
def
rddToInstrumentedPairRDD[K, V](rdd: RDD[(K, V)])(implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null): InstrumentedPairRDDFunctions[K, V]
-
implicit
def
rddToInstrumentedRDD[T](rdd: RDD[T])(implicit arg0: ClassTag[T]): InstrumentedRDDFunctions[T]
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
-
def
toString(): String
-
final
def
wait(): Unit
-
final
def
wait(arg0: Long, arg1: Int): Unit
-
final
def
wait(arg0: Long): Unit
Inherited from AnyRef
Inherited from Any
Contains implicit conversions which enable instrumentation of Spark operations. This class should be used instead of org.apache.spark.SparkContext when instrumentation is required. Usage is as follows:
Then, when any operations are performed on
instrumentedRDD
the RDD operation will be instrumented, along with any functions that operate on its data. All subsequent RDD operations will be instrumented until the unInstrument
method is called on an RDD. When using this class, it is not a good idea to import
SparkContext._
, as the implicit conversions in there may conflict with those in here -- instead it is better to import only the specific parts of SparkContext
that are needed.