case class MemoryStream[A](id: Int, sqlContext: SQLContext, numPartitions: Option[Int] = None)(implicit evidence$4: Encoder[A]) extends MemoryStreamBase[A] with MicroBatchStream with Logging with Product with Serializable

A Source that produces the values stored in memory as they are added by the user. This Source is intended for use in unit tests, as it can only replay data when the object is still available.

If numPartitions is provided, the rows will be redistributed to the given number of partitions.
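
Example: a minimal sketch of driving a streaming query from a MemoryStream in a unit test. It assumes a local SparkSession named spark; the id value and the query name "doubled" are illustrative.

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.execution.streaming.MemoryStream

    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("MemoryStreamExample")
      .getOrCreate()
    import spark.implicits._  // supplies the implicit Encoder[Int]

    // id is an arbitrary identifier; passing numPartitions = Some(4) would
    // redistribute each batch across four partitions.
    val input = new MemoryStream[Int](id = 0, sqlContext = spark.sqlContext)
    input.addData(1, 2, 3)

    // toDS() (or toDF()) exposes the stream as a streaming Dataset.
    val query = input.toDS()
      .map(_ * 2)
      .writeStream
      .format("memory")      // collect results into an in-memory table
      .queryName("doubled")
      .outputMode("append")
      .start()

    query.processAllAvailable()    // block until all added data is processed
    spark.table("doubled").show()  // 2, 4, 6
    query.stop()
    spark.stop()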

Linear Supertypes
Serializable, Serializable, Product, Equals, Logging, MicroBatchStream, MemoryStreamBase[A], SparkDataStream, AnyRef, Any

Instance Constructors

  1. new MemoryStream(id: Int, sqlContext: SQLContext, numPartitions: Option[Int] = None)(implicit arg0: Encoder[A])

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def addData(data: TraversableOnce[A]): Offset
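    Adds the given values as a new batch; see the offset sketch after this member list.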
    Definition Classes
    MemoryStream → MemoryStreamBase
  5. def addData(data: A*): connector.read.streaming.Offset
    Definition Classes
    MemoryStreamBase
  6. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  7. val attributes: Seq[AttributeReference]
    Attributes
    protected
    Definition Classes
    MemoryStreamBase
  8. val batches: ListBuffer[Array[UnsafeRow]]

    All batches from lastOffsetCommitted + 1 to currentOffset, inclusive. Stored in a ListBuffer to facilitate removing committed batches.

    Attributes
    protected
  9. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  10. def commit(end: connector.read.streaming.Offset): Unit
    Definition Classes
    MemoryStream → MemoryStreamBase → SparkDataStream
  11. def createReaderFactory(): PartitionReaderFactory
    Definition Classes
    MemoryStream → MicroBatchStream
  12. var currentOffset: LongOffset
    Attributes
    protected
  13. def deserializeOffset(json: String): connector.read.streaming.Offset
    Definition Classes
    MemoryStream → MemoryStreamBase → SparkDataStream
  14. val encoder: ExpressionEncoder[A]
    Definition Classes
    MemoryStreamBase
  15. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  16. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  17. def fullSchema(): StructType
    Definition Classes
    MemoryStreamBase
  18. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  19. val id: Int
  20. def initialOffset(): connector.read.streaming.Offset
    Definition Classes
    MemoryStream → MemoryStreamBase → SparkDataStream
  21. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  22. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  23. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  24. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  25. var lastOffsetCommitted: LongOffset

    Last offset that was discarded, or -1 if no commits have occurred. Note that the value -1 is relied on by the implementation's offset arithmetic and isn't just an arbitrary constant.

    Attributes
    protected
  26. def latestOffset(): connector.read.streaming.Offset
    Definition Classes
    MemoryStream → MicroBatchStream
  27. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  28. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  29. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  30. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  31. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  32. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  33. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  34. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  35. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  36. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  37. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  38. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  39. val logicalPlan: LogicalPlan
    Attributes
    protected
    Definition Classes
    MemoryStreamBase
  40. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  41. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  42. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  43. val numPartitions: Option[Int]
  44. val output: Seq[Attribute]
    Attributes
    protected
  45. def planInputPartitions(start: connector.read.streaming.Offset, end: connector.read.streaming.Offset): Array[InputPartition]
    Definition Classes
    MemoryStream → MicroBatchStream
  46. def reset(): Unit
  47. val sqlContext: SQLContext
  48. var startOffset: LongOffset
    Attributes
    protected
  49. def stop(): Unit
    Definition Classes
    MemoryStream → SparkDataStream
  50. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  51. def toDF(): DataFrame
    Definition Classes
    MemoryStreamBase
  52. def toDS(): Dataset[A]
    Definition Classes
    MemoryStreamBase
  53. lazy val toRow: Serializer[A]
    Attributes
    protected
    Definition Classes
    MemoryStreamBase
  54. def toString(): String
    Definition Classes
    MemoryStream → AnyRef → Any
  55. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  56. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  57. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
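
Example: a hedged sketch of the offset bookkeeping around addData, latestOffset, commit, and reset. In normal use the streaming engine drives commit and latestOffset; the SparkSession spark is assumed as in the earlier example.

    import org.apache.spark.sql.execution.streaming.MemoryStream
    import spark.implicits._  // supplies the implicit Encoder[String]

    val stream = new MemoryStream[String](id = 1, sqlContext = spark.sqlContext)

    // Both addData overloads return the offset of the batch just added.
    val first  = stream.addData("a", "b")       // varargs overload
    val second = stream.addData(Seq("c", "d"))  // TraversableOnce overload

    // latestOffset() reports the newest offset available for planning.
    assert(stream.latestOffset() == second)

    // commit discards batches up to and including the given offset, so
    // batches afterwards holds lastOffsetCommitted + 1 to currentOffset.
    stream.commit(first)

    // reset() returns the stream to its initial, empty state.
    stream.reset()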
