class StreamProgress extends Map[SparkDataStream, connector.read.streaming.Offset]
A helper class that behaves like an immutable Map[SparkDataStream, Offset], recording the offset up to which each source of a streaming query has been processed.
Inheritance
- StreamProgress
- Map
- MapOps
- Map
- Equals
- MapFactoryDefaults
- MapOps
- PartialFunction
- Function1
- Iterable
- Iterable
- IterableFactoryDefaults
- IterableOps
- IterableOnceOps
- IterableOnce
- AnyRef
- Any
Instance Constructors
- new StreamProgress(baseMap: Map[SparkDataStream, connector.read.streaming.Offset] = new immutable.HashMap[SparkDataStream, OffsetV2])
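StreamProgress is internal to Spark's streaming engine (the stream execution machinery builds it from the offset log), so user code rarely constructs one directly, but a minimal sketch illustrates the Map-like behaviour. The DummySource and DummyOffset stubs below are hypothetical stand-ins invented for this example; real queries use concrete sources such as a file or Kafka stream.

    import org.apache.spark.sql.connector.read.streaming.{Offset, SparkDataStream}
    import org.apache.spark.sql.execution.streaming.StreamProgress

    // Hypothetical stubs, for illustration only.
    class DummyOffset(value: Long) extends Offset {
      override def json(): String = value.toString
    }

    class DummySource extends SparkDataStream {
      override def initialOffset(): Offset = new DummyOffset(0L)
      override def deserializeOffset(json: String): Offset = new DummyOffset(json.toLong)
      override def commit(end: Offset): Unit = ()
      override def stop(): Unit = ()
    }

    val source = new DummySource
    // The default constructor starts from an empty immutable HashMap.
    val empty = new StreamProgress()
    // Alternatively, seed it with an existing base map.
    val progress = new StreamProgress(
      Map[SparkDataStream, Offset](source -> new DummyOffset(10L)))

Later snippets on this page reuse source, progress, and the two stub classes.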
Type Members
- trait GenKeySet extends AnyRef
- Attributes
- protected
- Definition Classes
- MapOps
- class ImmutableKeySet extends AbstractSet[K] with scala.collection.immutable.MapOps.GenKeySet with DefaultSerializable
- Attributes
- protected
- Definition Classes
- MapOps
- class KeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable
- Attributes
- protected
- Definition Classes
- MapOps
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- def +[V1 >: connector.read.streaming.Offset](kv: (SparkDataStream, V1)): Map[SparkDataStream, V1]
- Definition Classes
- MapOps → MapOps
- def ++(updates: IterableOnce[(SparkDataStream, connector.read.streaming.Offset)]): StreamProgress
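This overload, declared on StreamProgress itself, is the one that keeps the result typed as StreamProgress; the inherited ++ variants below widen the result to a plain Map or Iterable. A short sketch, reusing the hypothetical stubs from the constructor example above:

    // Merge a batch of new offsets; the result is still a StreamProgress.
    val advanced: StreamProgress =
      progress ++ Seq(source -> new DummyOffset(11L))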
- def ++[V2 >: connector.read.streaming.Offset](xs: IterableOnce[(SparkDataStream, V2)]): Map[SparkDataStream, V2]
- Definition Classes
- MapOps
- final def ++[B >: (SparkDataStream, connector.read.streaming.Offset)](suffix: IterableOnce[B]): Iterable[B]
- Definition Classes
- IterableOps
- Annotations
- @inline()
- final def -(key: SparkDataStream): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- MapOps
- Annotations
- @inline()
- final def --(keys: IterableOnce[SparkDataStream]): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- MapOps → MapOps
- Annotations
- @inline()
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- def addString(sb: StringBuilder, start: String, sep: String, end: String): StringBuilder
- Definition Classes
- MapOps → IterableOnceOps
- final def addString(b: StringBuilder): StringBuilder
- Definition Classes
- IterableOnceOps
- Annotations
- @inline()
- final def addString(b: StringBuilder, sep: String): StringBuilder
- Definition Classes
- IterableOnceOps
- Annotations
- @inline()
- def andThen[C](k: PartialFunction[connector.read.streaming.Offset, C]): PartialFunction[SparkDataStream, C]
- Definition Classes
- PartialFunction
- def andThen[C](k: (connector.read.streaming.Offset) => C): PartialFunction[SparkDataStream, C]
- Definition Classes
- PartialFunction → Function1
- def apply(key: SparkDataStream): connector.read.streaming.Offset
- Definition Classes
- MapOps → Function1
- Annotations
- @throws[NoSuchElementException]
- def applyOrElse[K1 <: SparkDataStream, V1 >: connector.read.streaming.Offset](x: K1, default: (K1) => V1): V1
- Definition Classes
- MapOps → PartialFunction
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- val baseMap: Map[SparkDataStream, connector.read.streaming.Offset]
- def canEqual(that: Any): Boolean
- Definition Classes
- Map → Equals
- def className: String
- Attributes
- protected[this]
- Definition Classes
- Iterable
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- final def coll: StreamProgress.this.type
- Attributes
- protected
- Definition Classes
- Iterable → IterableOps
- def collect[K2, V2](pf: PartialFunction[(SparkDataStream, connector.read.streaming.Offset), (K2, V2)]): Map[K2, V2]
- Definition Classes
- MapOps
- def collect[B](pf: PartialFunction[(SparkDataStream, connector.read.streaming.Offset), B]): Iterable[B]
- Definition Classes
- IterableOps → IterableOnceOps
- def collectFirst[B](pf: PartialFunction[(SparkDataStream, connector.read.streaming.Offset), B]): Option[B]
- Definition Classes
- IterableOnceOps
- def compose[R](k: PartialFunction[R, SparkDataStream]): PartialFunction[R, connector.read.streaming.Offset]
- Definition Classes
- PartialFunction
- def compose[A](g: (A) => SparkDataStream): (A) => connector.read.streaming.Offset
- Definition Classes
- Function1
- Annotations
- @unspecialized()
- def concat[V2 >: connector.read.streaming.Offset](suffix: IterableOnce[(SparkDataStream, V2)]): Map[SparkDataStream, V2]
- Definition Classes
- MapOps
- def concat[B >: (SparkDataStream, connector.read.streaming.Offset)](suffix: IterableOnce[B]): Iterable[B]
- Definition Classes
- IterableOps
- def contains(key: SparkDataStream): Boolean
- Definition Classes
- MapOps
- def copyToArray[B >: (SparkDataStream, connector.read.streaming.Offset)](xs: Array[B], start: Int, len: Int): Int
- Definition Classes
- IterableOnceOps
- def copyToArray[B >: (SparkDataStream, connector.read.streaming.Offset)](xs: Array[B], start: Int): Int
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecatedOverriding()
- def copyToArray[B >: (SparkDataStream, connector.read.streaming.Offset)](xs: Array[B]): Int
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecatedOverriding()
- def corresponds[B](that: IterableOnce[B])(p: ((SparkDataStream, connector.read.streaming.Offset), B) => Boolean): Boolean
- Definition Classes
- IterableOnceOps
- def count(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Int
- Definition Classes
- IterableOnceOps
- def default(key: SparkDataStream): connector.read.streaming.Offset
- Definition Classes
- MapOps
- Annotations
- @throws[NoSuchElementException]
- def drop(n: Int): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def dropRight(n: Int): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps
- def dropWhile(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def elementWise: ElementWiseExtractor[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- PartialFunction
- def empty: Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- MapFactoryDefaults → IterableOps
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(o: Any): Boolean
- Definition Classes
- Map → Equals → AnyRef → Any
- def exists(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Boolean
- Definition Classes
- IterableOnceOps
- def filter(pred: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def filterNot(pred: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- def find(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Option[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- def flatMap[K2, V2](f: ((SparkDataStream, connector.read.streaming.Offset)) => IterableOnce[(K2, V2)]): Map[K2, V2]
- Definition Classes
- MapOps
- def flatMap[B](f: ((SparkDataStream, connector.read.streaming.Offset)) => IterableOnce[B]): Iterable[B]
- Definition Classes
- IterableOps → IterableOnceOps
- def flatten[B](implicit asIterable: ((SparkDataStream, connector.read.streaming.Offset)) => IterableOnce[B]): Iterable[B]
- Definition Classes
- IterableOps → IterableOnceOps
- def fold[A1 >: (SparkDataStream, connector.read.streaming.Offset)](z: A1)(op: (A1, A1) => A1): A1
- Definition Classes
- IterableOnceOps
- def foldLeft[B](z: B)(op: (B, (SparkDataStream, connector.read.streaming.Offset)) => B): B
- Definition Classes
- IterableOnceOps
- def foldRight[B](z: B)(op: ((SparkDataStream, connector.read.streaming.Offset), B) => B): B
- Definition Classes
- IterableOnceOps
- def forall(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Boolean
- Definition Classes
- IterableOnceOps
- def foreach[U](f: ((SparkDataStream, connector.read.streaming.Offset)) => U): Unit
- Definition Classes
- IterableOnceOps
- def foreachEntry[U](f: (SparkDataStream, connector.read.streaming.Offset) => U): Unit
- Definition Classes
- MapOps
- def fromSpecific(coll: IterableOnce[(SparkDataStream, connector.read.streaming.Offset)]): Map[SparkDataStream, connector.read.streaming.Offset]
- Attributes
- protected
- Definition Classes
- MapFactoryDefaults → IterableOps
- def get(key: SparkDataStream): Option[connector.read.streaming.Offset]
- Definition Classes
- StreamProgress → MapOps
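get is the safe lookup, overridden here to consult the underlying baseMap; apply (above) throws NoSuchElementException for a missing key. A sketch reusing the stubs from the constructor example:

    progress.get(source) match {
      case Some(off) => println(s"source is at offset ${off.json()}")
      case None      => println("source has not reported an offset yet")
    }
    // progress(someUnknownSource) would throw NoSuchElementException.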
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getOrElse[V1 >: connector.read.streaming.Offset](key: SparkDataStream, default: => V1): V1
- Definition Classes
- MapOps
- def groupBy[K](f: ((SparkDataStream, connector.read.streaming.Offset)) => K): Map[K, Map[SparkDataStream, connector.read.streaming.Offset]]
- Definition Classes
- IterableOps
- def groupMap[K, B](key: ((SparkDataStream, connector.read.streaming.Offset)) => K)(f: ((SparkDataStream, connector.read.streaming.Offset)) => B): Map[K, Iterable[B]]
- Definition Classes
- IterableOps
- def groupMapReduce[K, B](key: ((SparkDataStream, connector.read.streaming.Offset)) => K)(f: ((SparkDataStream, connector.read.streaming.Offset)) => B)(reduce: (B, B) => B): Map[K, B]
- Definition Classes
- IterableOps
- def grouped(size: Int): Iterator[Map[SparkDataStream, connector.read.streaming.Offset]]
- Definition Classes
- IterableOps
- def hashCode(): Int
- Definition Classes
- Map → AnyRef → Any
- def head: (SparkDataStream, connector.read.streaming.Offset)
- Definition Classes
- IterableOps
- def headOption: Option[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOps
- def init: Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps
- def inits: Iterator[Map[SparkDataStream, connector.read.streaming.Offset]]
- Definition Classes
- IterableOps
- def isDefinedAt(key: SparkDataStream): Boolean
- Definition Classes
- MapOps → PartialFunction
- def isEmpty: Boolean
- Definition Classes
- IterableOnceOps
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def isTraversableAgain: Boolean
- Definition Classes
- IterableOps → IterableOnceOps
- def iterableFactory: IterableFactory[Iterable]
- Definition Classes
- Iterable → Iterable → IterableOps
- def iterator: Iterator[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- StreamProgress → IterableOnce
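iterator is overridden to walk the underlying baseMap, so any standard traversal over the (source, offset) pairs works. For example, reusing the stubs from the constructor sketch:

    for ((src, off) <- progress) {
      println(s"$src -> ${off.json()}")
    }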
- def keySet: Set[SparkDataStream]
- Definition Classes
- MapOps → MapOps
- def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[SparkDataStream, S]): S
- Definition Classes
- MapOps
- def keys: Iterable[SparkDataStream]
- Definition Classes
- MapOps
- def keysIterator: Iterator[SparkDataStream]
- Definition Classes
- MapOps
- def knownSize: Int
- Definition Classes
- IterableOnce
- def last: (SparkDataStream, connector.read.streaming.Offset)
- Definition Classes
- IterableOps
- def lastOption: Option[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOps
- def lazyZip[B](that: Iterable[B]): LazyZip2[(SparkDataStream, connector.read.streaming.Offset), B, StreamProgress.this.type]
- Definition Classes
- Iterable
- def lift: (SparkDataStream) => Option[connector.read.streaming.Offset]
- Definition Classes
- PartialFunction
- def map[K2, V2](f: ((SparkDataStream, connector.read.streaming.Offset)) => (K2, V2)): Map[K2, V2]
- Definition Classes
- MapOps
- def map[B](f: ((SparkDataStream, connector.read.streaming.Offset)) => B): Iterable[B]
- Definition Classes
- IterableOps → IterableOnceOps
- def mapFactory: MapFactory[Map]
- Definition Classes
- Map → Map → MapOps
- final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]): Map[K2, V2]
- Attributes
- protected
- Definition Classes
- MapOps
- Annotations
- @inline()
- def max[B >: (SparkDataStream, connector.read.streaming.Offset)](implicit ord: Ordering[B]): (SparkDataStream, connector.read.streaming.Offset)
- Definition Classes
- IterableOnceOps
- def maxBy[B](f: ((SparkDataStream, connector.read.streaming.Offset)) => B)(implicit cmp: Ordering[B]): (SparkDataStream, connector.read.streaming.Offset)
- Definition Classes
- IterableOnceOps
- def maxByOption[B](f: ((SparkDataStream, connector.read.streaming.Offset)) => B)(implicit cmp: Ordering[B]): Option[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- def maxOption[B >: (SparkDataStream, connector.read.streaming.Offset)](implicit ord: Ordering[B]): Option[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- def min[B >: (SparkDataStream, connector.read.streaming.Offset)](implicit ord: Ordering[B]): (SparkDataStream, connector.read.streaming.Offset)
- Definition Classes
- IterableOnceOps
- def minBy[B](f: ((SparkDataStream, connector.read.streaming.Offset)) => B)(implicit cmp: Ordering[B]): (SparkDataStream, connector.read.streaming.Offset)
- Definition Classes
- IterableOnceOps
- def minByOption[B](f: ((SparkDataStream, connector.read.streaming.Offset)) => B)(implicit cmp: Ordering[B]): Option[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- def minOption[B >: (SparkDataStream, connector.read.streaming.Offset)](implicit ord: Ordering[B]): Option[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- final def mkString: String
- Definition Classes
- IterableOnceOps
- Annotations
- @inline()
- final def mkString(sep: String): String
- Definition Classes
- IterableOnceOps
- Annotations
- @inline()
- final def mkString(start: String, sep: String, end: String): String
- Definition Classes
- IterableOnceOps
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def newSpecificBuilder: Builder[(SparkDataStream, connector.read.streaming.Offset), Map[SparkDataStream, connector.read.streaming.Offset]]
- Attributes
- protected
- Definition Classes
- MapFactoryDefaults → IterableOps
- def nonEmpty: Boolean
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecatedOverriding()
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def orElse[A1 <: SparkDataStream, B1 >: connector.read.streaming.Offset](that: PartialFunction[A1, B1]): PartialFunction[A1, B1]
- Definition Classes
- PartialFunction
- def partition(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): (Map[SparkDataStream, connector.read.streaming.Offset], Map[SparkDataStream, connector.read.streaming.Offset])
- Definition Classes
- IterableOps
- def partitionMap[A1, A2](f: ((SparkDataStream, connector.read.streaming.Offset)) => Either[A1, A2]): (Iterable[A1], Iterable[A2])
- Definition Classes
- IterableOps
- def product[B >: (SparkDataStream, connector.read.streaming.Offset)](implicit num: Numeric[B]): B
- Definition Classes
- IterableOnceOps
- def reduce[B >: (SparkDataStream, connector.read.streaming.Offset)](op: (B, B) => B): B
- Definition Classes
- IterableOnceOps
- def reduceLeft[B >: (SparkDataStream, connector.read.streaming.Offset)](op: (B, (SparkDataStream, connector.read.streaming.Offset)) => B): B
- Definition Classes
- IterableOnceOps
- def reduceLeftOption[B >: (SparkDataStream, connector.read.streaming.Offset)](op: (B, (SparkDataStream, connector.read.streaming.Offset)) => B): Option[B]
- Definition Classes
- IterableOnceOps
- def reduceOption[B >: (SparkDataStream, connector.read.streaming.Offset)](op: (B, B) => B): Option[B]
- Definition Classes
- IterableOnceOps
- def reduceRight[B >: (SparkDataStream, connector.read.streaming.Offset)](op: ((SparkDataStream, connector.read.streaming.Offset), B) => B): B
- Definition Classes
- IterableOnceOps
- def reduceRightOption[B >: (SparkDataStream, connector.read.streaming.Offset)](op: ((SparkDataStream, connector.read.streaming.Offset), B) => B): Option[B]
- Definition Classes
- IterableOnceOps
- def removed(key: SparkDataStream): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- StreamProgress → MapOps
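removed (and the inherited - operator built on it) drops one source's entry. Note from the signature that the result is typed as a plain Map rather than a StreamProgress. Reusing the stubs from the constructor sketch:

    val without: Map[SparkDataStream, Offset] = progress - source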
- def removedAll(keys: IterableOnce[SparkDataStream]): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- MapOps
- def reversed: Iterable[(SparkDataStream, connector.read.streaming.Offset)]
- Attributes
- protected
- Definition Classes
- IterableOnceOps
- def runWith[U](action: (connector.read.streaming.Offset) => U): (SparkDataStream) => Boolean
- Definition Classes
- PartialFunction
- def scan[B >: (SparkDataStream, connector.read.streaming.Offset)](z: B)(op: (B, B) => B): Iterable[B]
- Definition Classes
- IterableOps
- def scanLeft[B](z: B)(op: (B, (SparkDataStream, connector.read.streaming.Offset)) => B): Iterable[B]
- Definition Classes
- IterableOps → IterableOnceOps
- def scanRight[B](z: B)(op: ((SparkDataStream, connector.read.streaming.Offset), B) => B): Iterable[B]
- Definition Classes
- IterableOps
- def size: Int
- Definition Classes
- IterableOnceOps
- def sizeCompare(that: Iterable[_]): Int
- Definition Classes
- IterableOps
- def sizeCompare(otherSize: Int): Int
- Definition Classes
- IterableOps
- final def sizeIs: SizeCompareOps
- Definition Classes
- IterableOps
- Annotations
- @inline()
- def slice(from: Int, until: Int): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def sliding(size: Int, step: Int): Iterator[Map[SparkDataStream, connector.read.streaming.Offset]]
- Definition Classes
- IterableOps
- def sliding(size: Int): Iterator[Map[SparkDataStream, connector.read.streaming.Offset]]
- Definition Classes
- IterableOps
- def span(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): (Map[SparkDataStream, connector.read.streaming.Offset], Map[SparkDataStream, connector.read.streaming.Offset])
- Definition Classes
- IterableOps → IterableOnceOps
- def splitAt(n: Int): (Map[SparkDataStream, connector.read.streaming.Offset], Map[SparkDataStream, connector.read.streaming.Offset])
- Definition Classes
- IterableOps → IterableOnceOps
- def stepper[S <: Stepper[_]](implicit shape: StepperShape[(SparkDataStream, connector.read.streaming.Offset), S]): S
- Definition Classes
- IterableOnce
- def stringPrefix: String
- Attributes
- protected[this]
- Definition Classes
- Map → Iterable
- def sum[B >: (SparkDataStream, connector.read.streaming.Offset)](implicit num: Numeric[B]): B
- Definition Classes
- IterableOnceOps
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def tail: Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps
- def tails: Iterator[Map[SparkDataStream, connector.read.streaming.Offset]]
- Definition Classes
- IterableOps
- def take(n: Int): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def takeRight(n: Int): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps
- def takeWhile(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def tapEach[U](f: ((SparkDataStream, connector.read.streaming.Offset)) => U): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps → IterableOnceOps
- def to[C1](factory: Factory[(SparkDataStream, connector.read.streaming.Offset), C1]): C1
- Definition Classes
- IterableOnceOps
- def toArray[B >: (SparkDataStream, connector.read.streaming.Offset)](implicit arg0: ClassTag[B]): Array[B]
- Definition Classes
- IterableOnceOps
- final def toBuffer[B >: (SparkDataStream, connector.read.streaming.Offset)]: Buffer[B]
- Definition Classes
- IterableOnceOps
- Annotations
- @inline()
- def toIndexedSeq: IndexedSeq[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- def toList: List[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- final def toMap[K2, V2](implicit ev: <:<[(SparkDataStream, connector.read.streaming.Offset), (K2, V2)]): Map[K2, V2]
- Definition Classes
- Map → IterableOnceOps
- def toOffsetSeq(source: Seq[SparkDataStream], metadata: OffsetSeqMetadata): OffsetSeq
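toOffsetSeq is the StreamProgress-specific conversion used when persisting progress: it produces an OffsetSeq (the structure written to a streaming query's offset log) with one optional offset slot per source, in the order given. A hedged sketch, assuming OffsetSeqMetadata's no-argument defaults (zero watermark and timestamp, empty conf) and reusing the stubs from the constructor example:

    import org.apache.spark.sql.execution.streaming.{OffsetSeq, OffsetSeqMetadata}

    val offsetSeq: OffsetSeq =
      progress.toOffsetSeq(Seq(source), OffsetSeqMetadata())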
- def toSeq: Seq[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- def toSet[B >: (SparkDataStream, connector.read.streaming.Offset)]: Set[B]
- Definition Classes
- IterableOnceOps
- def toString(): String
- Definition Classes
- StreamProgress → Map → Function1 → Iterable → AnyRef → Any
- def toVector: Vector[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- def transform[W](f: (SparkDataStream, connector.read.streaming.Offset) => W): Map[SparkDataStream, W]
- Definition Classes
- MapOps
- def transpose[B](implicit asIterable: ((SparkDataStream, connector.read.streaming.Offset)) => Iterable[B]): Iterable[Iterable[B]]
- Definition Classes
- IterableOps
- def unapply(a: SparkDataStream): Option[connector.read.streaming.Offset]
- Definition Classes
- PartialFunction
- def unzip[A1, A2](implicit asPair: ((SparkDataStream, connector.read.streaming.Offset)) => (A1, A2)): (Iterable[A1], Iterable[A2])
- Definition Classes
- IterableOps
- def unzip3[A1, A2, A3](implicit asTriple: ((SparkDataStream, connector.read.streaming.Offset)) => (A1, A2, A3)): (Iterable[A1], Iterable[A2], Iterable[A3])
- Definition Classes
- IterableOps
- def updated[B1 >: connector.read.streaming.Offset](key: SparkDataStream, value: B1): Map[SparkDataStream, B1]
- Definition Classes
- StreamProgress → MapOps
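updated replaces a single source's offset; per the signature the result widens back to Map, so wrap it in a new StreamProgress if the specific type is needed again. Reusing the stubs from the constructor sketch:

    val bumped: Map[SparkDataStream, Offset] =
      progress.updated(source, new DummyOffset(12L))
    val asProgress = new StreamProgress(bumped)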
- def updatedWith[V1 >: connector.read.streaming.Offset](key: SparkDataStream)(remappingFunction: (Option[connector.read.streaming.Offset]) => Option[V1]): Map[SparkDataStream, V1]
- Definition Classes
- MapOps
- def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[connector.read.streaming.Offset, S]): S
- Definition Classes
- MapOps
- def values: Iterable[connector.read.streaming.Offset]
- Definition Classes
- MapOps
- def valuesIterator: Iterator[connector.read.streaming.Offset]
- Definition Classes
- MapOps
- def view: MapView[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- MapOps → IterableOps
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- def withDefault[V1 >: connector.read.streaming.Offset](d: (SparkDataStream) => V1): Map[SparkDataStream, V1]
- Definition Classes
- Map
- def withDefaultValue[V1 >: connector.read.streaming.Offset](d: V1): Map[SparkDataStream, V1]
- Definition Classes
- Map
- def withFilter(p: ((SparkDataStream, connector.read.streaming.Offset)) => Boolean): WithFilter[SparkDataStream, connector.read.streaming.Offset, [x]Iterable[x], [x, y]Map[x, y]]
- Definition Classes
- MapFactoryDefaults → IterableOps
- def zip[B](that: IterableOnce[B]): Iterable[((SparkDataStream, connector.read.streaming.Offset), B)]
- Definition Classes
- IterableOps
- def zipAll[A1 >: (SparkDataStream, connector.read.streaming.Offset), B](that: Iterable[B], thisElem: A1, thatElem: B): Iterable[(A1, B)]
- Definition Classes
- IterableOps
- def zipWithIndex: Iterable[((SparkDataStream, connector.read.streaming.Offset), Int)]
- Definition Classes
- IterableOps → IterableOnceOps
Deprecated Value Members
- def +[V1 >: connector.read.streaming.Offset](elem1: (SparkDataStream, V1), elem2: (SparkDataStream, V1), elems: (SparkDataStream, V1)*): Map[SparkDataStream, V1]
- Definition Classes
- MapOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use ++ with an explicit collection argument instead of + with varargs
- def ++:[V1 >: connector.read.streaming.Offset](that: IterableOnce[(SparkDataStream, V1)]): Map[SparkDataStream, V1]
- Definition Classes
- MapOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use ++ instead of ++: for collections of type Iterable
- def ++:[B >: (SparkDataStream, connector.read.streaming.Offset)](that: IterableOnce[B]): Iterable[B]
- Definition Classes
- IterableOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use ++ instead of ++: for collections of type Iterable
- def -(key1: SparkDataStream, key2: SparkDataStream, keys: SparkDataStream*): Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- MapOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use -- with an explicit collection
- final def /:[B](z: B)(op: (B, (SparkDataStream, connector.read.streaming.Offset)) => B): B
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecated @inline()
- Deprecated
(Since version 2.13.0) Use foldLeft instead of /:
- final def :\[B](z: B)(op: ((SparkDataStream, connector.read.streaming.Offset), B) => B): B
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecated @inline()
- Deprecated
(Since version 2.13.0) Use foldRight instead of :\
- def aggregate[B](z: => B)(seqop: (B, (SparkDataStream, connector.read.streaming.Offset)) => B, combop: (B, B) => B): B
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) aggregate is not relevant for sequential collections. Use foldLeft(z)(seqop) instead.
- def companion: IterableFactory[[_]Iterable[_]]
- Definition Classes
- IterableOps
- Annotations
- @deprecated @deprecatedOverriding() @inline()
- Deprecated
(Since version 2.13.0) Use iterableFactory instead
- final def copyToBuffer[B >: (SparkDataStream, connector.read.streaming.Offset)](dest: Buffer[B]): Unit
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecated @inline()
- Deprecated
(Since version 2.13.0) Use dest ++= coll instead
- def filterKeys(p: (SparkDataStream) => Boolean): MapView[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- MapOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).
- def hasDefiniteSize: Boolean
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)
- def mapValues[W](f: (connector.read.streaming.Offset) => W): MapView[SparkDataStream, W]
- Definition Classes
- MapOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).
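Both mapValues and filterKeys (above) return lazy views in Scala 2.13 and are deprecated as direct calls; the strict replacements go through .view and materialize with .toMap, exactly as the deprecation messages suggest. Reusing the stubs from the constructor sketch:

    // Strict equivalents of the deprecated calls:
    val jsons: Map[SparkDataStream, String] =
      progress.view.mapValues(_.json()).toMap
    val kept: Map[SparkDataStream, Offset] =
      progress.view.filterKeys(_ != source).toMap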
- final def repr: Map[SparkDataStream, connector.read.streaming.Offset]
- Definition Classes
- IterableOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use coll instead of repr in a collection implementation, use the collection value itself from the outside
- def seq: StreamProgress.this.type
- Definition Classes
- Iterable
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Iterable.seq always returns the iterable itself
- final def toIterable: StreamProgress.this.type
- Definition Classes
- Iterable → IterableOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.7) toIterable is internal and will be made protected; its name is similar to toList or toSeq, but it doesn't copy non-immutable collections
- final def toIterator: Iterator[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecated @inline()
- Deprecated
(Since version 2.13.0) Use .iterator instead of .toIterator
- final def toStream: Stream[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOnceOps
- Annotations
- @deprecated @inline()
- Deprecated
(Since version 2.13.0) Use .to(LazyList) instead of .toStream
- final def toTraversable: Traversable[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) toTraversable is internal and will be made protected; its name is similar to toList or toSeq, but it doesn't copy non-immutable collections
- def view(from: Int, until: Int): View[(SparkDataStream, connector.read.streaming.Offset)]
- Definition Classes
- IterableOps
- Annotations
- @deprecated
- Deprecated
(Since version 2.13.0) Use .view.slice(from, until) instead of .view(from, until)