def Bin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, U <: Container[U] with Aggregation { type Datum >: org.apache.spark.sql.Row }, O <: Container[O] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](num: Int, low: Double, high: Double, quantity: UserFcn[Row, Double], value: ⇒ V = Count(), underflow: U = Count(), overflow: O = Count(), nanflow: N = Count()): Binning[Row, V, U, O, N]
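A minimal sketch of how Bin might be constructed over a DataFrame column. Here df and the column name "pt" are hypothetical, and it is assumed that the sparksql implicits converting a Spark Column into the required UserFcn[Row, Double] are in scope:

    import org.dianahep.histogrammar.sparksql._

    // Hypothetical: "df" is an existing DataFrame with a numeric column "pt".
    // 100 regular bins from 0.0 to 500.0; each bin holds a Count() by default,
    // and out-of-range or NaN values are routed to underflow/overflow/nanflow.
    val ptBinning = Bin(100, 0.0, 500.0, df("pt"))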
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation, C7 <: Container[C7] with Aggregation, C8 <: Container[C8] with Aggregation, C9 <: Container[C9] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6, i7: C7, i8: C8, i9: C9)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6], e07: Compatible[C0, C7], e08: Compatible[C0, C8], e09: Compatible[C0, C9]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation, C7 <: Container[C7] with Aggregation, C8 <: Container[C8] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6, i7: C7, i8: C8)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6], e07: Compatible[C0, C7], e08: Compatible[C0, C8]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation, C7 <: Container[C7] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6, i7: C7)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6], e07: Compatible[C0, C7]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation, C6 <: Container[C6] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5, i6: C6)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5], e06: Compatible[C0, C6]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation, C5 <: Container[C5] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4, i5: C5)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4], e05: Compatible[C0, C5]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation, C4 <: Container[C4] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3, i4: C4)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3], e04: Compatible[C0, C4]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation, C3 <: Container[C3] with Aggregation](i0: C0, i1: C1, i2: C2, i3: C3)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2], e03: Compatible[C0, C3]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation, C2 <: Container[C2] with Aggregation](i0: C0, i1: C1, i2: C2)(implicit e01: Compatible[C0, C1], e02: Compatible[C0, C2]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation, C1 <: Container[C1] with Aggregation](i0: C0, i1: C1)(implicit e01: Compatible[C0, C1]): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Branch[C0 <: Container[C0] with Aggregation](i0: C0): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
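Branch bundles up to ten independent aggregators so they can be filled together in a single pass over the data. A sketch, again with a hypothetical df and invented column names:

    import org.dianahep.histogrammar.sparksql._

    // Two unrelated histograms filled side by side (column names are made up).
    val bundle = Branch(
      Bin(100, 0.0, 500.0, df("pt")),
      Bin(50, -2.5, 2.5, df("eta")))
    // The bundle can then be passed to df.histogrammar(...) as one aggregation.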
def Categorize[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }](quantity: UserFcn[Row, String], value: ⇒ V = Count()): Categorizing[Row, V]
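Categorize fills one sub-aggregator per distinct string value, so it acts like a group-by count when the value is left at its Count() default. A sketch with a hypothetical string column "flavor":

    import org.dianahep.histogrammar.sparksql._

    // One Count() per distinct value of the (hypothetical) string column "flavor".
    val byFlavor = Categorize(df("flavor"))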
def CentrallyBin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](bins: Iterable[Double], quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count()): CentrallyBinning[Row, V, N]
def Count(transform: UserFcn[Double, Double] = ...): CONTAINER forSome {type CONTAINER <: Container[CONTAINER] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def Fraction[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }](quantity: UserFcn[Row, Double], value: ⇒ V = Count()): Fractioning[Row, V]
def Index[V <: Container[V] with Aggregation](values: V*): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def IrregularlyBin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](bins: Iterable[Double], quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count()): IrregularlyBinning[Row, V, N]
def Label[V <: Container[V] with Aggregation](pairs: (String, V)*): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
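Label collects several aggregators of the same type under string keys (Index does the same by integer position). A sketch with hypothetical columns:

    import org.dianahep.histogrammar.sparksql._

    // Two same-typed histograms, addressable by name after filling.
    val labelled = Label(
      "pt" -> Bin(100, 0.0, 500.0, df("pt")),
      "eta" -> Bin(100, -2.5, 2.5, df("eta")))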
def Select[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }](quantity: UserFcn[Row, Double], cut: V = Count()): Selecting[Row, V]
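Select weights each row by its quantity before passing it to the sub-aggregator, so a boolean cut can be expressed by casting the comparison to a double (0.0 or 1.0). A sketch; the cut and column names are invented:

    import org.dianahep.histogrammar.sparksql._

    // Histogram of "eta" for rows passing pt > 20 (boolean cast to a 0/1 weight).
    val selected = Select((df("pt") > 20).cast("double"),
      Bin(100, -2.5, 2.5, df("eta")))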
def SparselyBin[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](binWidth: Double, quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count(), origin: Double = 0.0): SparselyBinning[Row, V, N]
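SparselyBin creates fixed-width bins only where data actually appear, which is convenient when the range of the quantity is not known in advance. A sketch:

    import org.dianahep.histogrammar.sparksql._

    // Bins of width 0.1 aligned to origin 0.0, created on demand for the
    // (hypothetical) column "eta"; NaN values are routed to nanflow.
    val sparse = SparselyBin(0.1, df("eta"))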
def Stack[V <: Container[V] with Aggregation { type Datum >: org.apache.spark.sql.Row }, N <: Container[N] with Aggregation { type Datum >: org.apache.spark.sql.Row }](bins: Iterable[Double], quantity: UserFcn[Row, Double], value: ⇒ V = Count(), nanflow: N = Count()): Stacking[Row, V, N]
def UntypedLabel[F <: Container[F] with Aggregation](first: (String, F), rest: (String, Container[_] with Aggregation)*): C forSome {type C <: Container[C] with Aggregation { type Datum = org.apache.spark.sql.Row }}
def histogrammar[CONTAINER <: Container[CONTAINER] with Aggregation { type Datum = org.apache.spark.sql.Row }](container: CONTAINER)(implicit arg0: ClassTag[CONTAINER]): CONTAINER
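The histogrammar method is the entry point that actually fills a container from a DataFrame by translating the aggregation into a Spark SQL query. An end-to-end sketch under stated assumptions: the SparkSession setup and toy data are invented, and the implicit that adds histogrammar to DataFrame is assumed to come with the sparksql import:

    import org.apache.spark.sql.SparkSession
    import org.dianahep.histogrammar.sparksql._

    val spark = SparkSession.builder.appName("histogrammar-sketch").getOrCreate()
    import spark.implicits._

    // Toy data: a single numeric column "x" (purely illustrative).
    val df = Seq(0.3, 1.7, 2.2, 4.9, 3.3).toDF("x")

    // Build the aggregator and fill it in one pass over df.
    val hist = df.histogrammar(Bin(10, 0.0, 5.0, df("x")))
    println(hist.entries)  // total weight accumulated after filling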