Class org.dianahep.histogrammar.sparksql.DataFrameHistogrammarMethods


implicit class DataFrameHistogrammarMethods extends AnyRef
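
This implicit class decorates an org.apache.spark.sql.DataFrame with Histogrammar aggregator constructors specialized to Row and with a histogrammar method that applies such an aggregator to the DataFrame's rows. The setup below is a minimal sketch, not part of this API: the application name, input path, and the DataFrame name df are assumptions for illustration.

  import org.apache.spark.sql.{DataFrame, SparkSession}
  import org.dianahep.histogrammar._           // package-level aggregator constructors
  import org.dianahep.histogrammar.sparksql._  // brings DataFrameHistogrammarMethods into implicit scope

  val spark = SparkSession.builder.appName("histogrammar-sketch").getOrCreate()
  val df: DataFrame = spark.read.parquet("events.parquet")  // assumed input

  // With the import in place, the value members listed below become available
  // on any DataFrame, for example df.histogrammar(...), df.Bin(...), df.Average(...).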

Linear Supertypes: AnyRef, Any

Instance Constructors

  1. new DataFrameHistogrammarMethods(df: DataFrame)


Value Members

  1. final def !=(arg0: Any): Boolean

    Definition Classes: AnyRef → Any
  2. final def ##(): Int

    Definition Classes: AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Definition Classes: AnyRef → Any
  4. def Average(quantity: UserFcn[Row, Double]): Averaging[Row]

  5. def Bag[RANGE](quantity: UserFcn[Row, RANGE], range: String = "")(implicit arg0: ClassTag[RANGE]): Bagging[Row, RANGE]

  6. def Bin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, U <: Container[U] with Aggregation { type Datum >: org.apache.spark.sql.Row }, O <: Container[O] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](num: Int, low: Double, high: Double, quantity: UserFcn[Row, Double], value: ⇒ V = Count(), underflow: U = Count(), overflow: O = Count(), nanflow: N = Count()): Binning[Row, V, U, O, N]

  7. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation, C7 <: Container[C7] with Aggregation, C8 <: Container[C8] with Aggregation, C9 <: Container[C9] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6, i7: C7, i8: C8, i9: C9)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6], e07: Compatible[C0, C7], e08: Compatible[C0, C8], e09: Compatible[C0, C9]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  8. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation, C7 <: Container[C7] with Aggregation, C8 <: Container[C8] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6, i7: C7, i8: C8)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6], e07: Compatible[C0, C7], e08: Compatible[C0, C8]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  9. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation, C7 <: Container[C7] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6, i7: C7)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6], e07: Compatible[C0, C7]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  10. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  11. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  12. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  13. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  14. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation](i0: C0, i1: C1, i2: C2)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  15. def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation](i0: C0, i1: C1)(implicit e01: Compatible[C0, C1]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  16. def Branch[C0 <: Container[C0] with Aggregation](i0: C0): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  17. def Categorize[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }](quantity: UserFcn[Row, String], value: ⇒ V = Count()): Categorizing[Row, V]

  18. def CentrallyBin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](bins: Iterable[Double], quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count()): CentrallyBinning[Row, V, N]

  19. def Count(transform: UserFcn[Double, Double] = ...): CONTAINER forSome {type CONTAINER <: Container[CONTAINER] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  20. def Deviate(quantity: UserFcn[Row, Double]): Deviating[Row]

  21. def Fraction[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }](quantity: UserFcn[Row, Double], value: ⇒ V = Count()): Fractioning[Row, V]

  22. def Index[V <: Container[V] with Aggregation](values: V*): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  23. def IrregularlyBin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](bins: Iterable[Double], quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count()): IrregularlyBinning[Row, V, N]

  24. def Label[V <: Container[V] with Aggregation](pairs: (String, V)*): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  25. def Maximize(quantity: UserFcn[Row, Double]): Maximizing[Row]

  26. def Minimize(quantity: UserFcn[Row, Double]): Minimizing[Row]

  27. def Select[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }](quantity: UserFcn[Row, Double], cut: V = Count()): Selecting[Row, V]

  28. def SparselyBin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](binWidth: Double, quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count(), origin: Double = 0.0): SparselyBinning[Row, V, N]

  29. def Stack[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](bins: Iterable[Double], quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count()): Stacking[Row, V, N]

  30. def Sum(quantity: UserFcn[Row, Double]): Summing[Row]

  31. def UntypedLabel[F <: Container[F] with Aggregation](first: (String, F), rest: (String, Container[_] with Aggregation)*): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}

  32. final def asInstanceOf[T0]: T0

    Definition Classes: Any
  33. def clone(): AnyRef

    Attributes: protected[java.lang]
    Definition Classes: AnyRef
    Annotations: @throws( ... )
  34. final def eq(arg0: AnyRef): Boolean

    Definition Classes: AnyRef
  35. def equals(arg0: Any): Boolean

    Definition Classes: AnyRef → Any
  36. def finalize(): Unit

    Attributes: protected[java.lang]
    Definition Classes: AnyRef
    Annotations: @throws( classOf[java.lang.Throwable] )
  37. final def getClass(): Class[_]

    Definition Classes: AnyRef → Any
  38. def hashCode(): Int

    Definition Classes: AnyRef → Any
  39. def histogrammar[CONTAINER <: Container[CONTAINER] with Aggregation { type Datum = org.apache.spark.sql.Row }](container: CONTAINER)(implicit arg0: ClassTag[CONTAINER]): CONTAINER

    (See the usage sketches at the end of this page.)
  40. final def isInstanceOf[T0]: Boolean

    Definition Classes: Any
  41. final def ne(arg0: AnyRef): Boolean

    Definition Classes: AnyRef
  42. final def notify(): Unit

    Definition Classes: AnyRef
  43. final def notifyAll(): Unit

    Definition Classes: AnyRef
  44. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes: AnyRef
  45. def toString(): String

    Definition Classes: AnyRef → Any
  46. final def wait(): Unit

    Definition Classes: AnyRef
    Annotations: @throws( ... )
  47. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes: AnyRef
    Annotations: @throws( ... )
  48. final def wait(arg0: Long): Unit

    Definition Classes: AnyRef
    Annotations: @throws( ... )
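
Usage Examples (sketches)

The sketches below are illustrations, not taken from this page: they assume a DataFrame df with numeric columns "pt" and "eta", and they assume that histogrammar returns the container after aggregating it over the DataFrame's rows. Quantities are written as plain Row functions, relying on Histogrammar's implicit conversion from Scala functions to UserFcn.

  import org.apache.spark.sql.Row
  import org.dianahep.histogrammar._
  import org.dianahep.histogrammar.sparksql._

  // One-dimensional histogram of the assumed "pt" column.
  val ptHist = df.histogrammar(
    Bin(100, 0.0, 200.0, {row: Row => row.getAs[Double]("pt")})
  )

  // Profile-style aggregation: bin in "eta" and average "pt" within each bin
  // by overriding Bin's default Count() value with an Average.
  val ptVsEta = df.histogrammar(
    Bin(50, -2.5, 2.5, {row: Row => row.getAs[Double]("eta")},
        value = Average({row: Row => row.getAs[Double]("pt")}))
  )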

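A second sketch covers the scalar members (Average, Deviate, Minimize, Maximize, Sum), each of which summarizes one Row quantity. Whether the constructor-mirror members defined on this class (df.Average, df.Bin, and so on) fill immediately or only build an aggregator is not stated on this page, so the sketch again goes through the explicitly listed histogrammar method; the column names are assumptions.

  import org.apache.spark.sql.Row
  import org.dianahep.histogrammar._
  import org.dianahep.histogrammar.sparksql._

  // Mean of the assumed "pt" column.
  val meanPt = df.histogrammar(Average({row: Row => row.getAs[Double]("pt")}))

  // Mean and variance of the assumed "eta" column.
  val spreadEta = df.histogrammar(Deviate({row: Row => row.getAs[Double]("eta")}))

  // Each histogrammar call is a separate pass over df. The Label, UntypedLabel,
  // Index, and Branch members exist to bundle several aggregators into one
  // container so that a single pass can fill them all.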