Packages

case class MemoryStream[A](id: Int, sqlContext: SQLContext, numPartitions: Option[Int] = None)(implicit evidence$4: Encoder[A]) extends MemoryStreamBase[A] with MicroBatchStream with SupportsTriggerAvailableNow with Logging with Product with Serializable

A Source that produces values stored in memory as they are added by the user. This Source is intended for use in unit tests as it can only replay data when the object is still available.

If numPartitions is provided, the rows will be redistributed to the given number of partitions.

Linear Supertypes
Serializable, Serializable, Product, Equals, Logging, SupportsTriggerAvailableNow, SupportsAdmissionControl, MicroBatchStream, MemoryStreamBase[A], SparkDataStream, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. MemoryStream
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. Logging
  7. SupportsTriggerAvailableNow
  8. SupportsAdmissionControl
  9. MicroBatchStream
  10. MemoryStreamBase
  11. SparkDataStream
  12. AnyRef
  13. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new MemoryStream(id: Int, sqlContext: SQLContext, numPartitions: Option[Int] = None)(implicit arg0: Encoder[A])

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def addData(data: TraversableOnce[A]): Offset
    Definition Classes
    MemoryStream → MemoryStreamBase
  5. def addData(data: A*): connector.read.streaming.Offset
    Definition Classes
    MemoryStreamBase
  6. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  7. val attributes: Seq[AttributeReference]
    Attributes
    protected
    Definition Classes
    MemoryStreamBase
  8. val batches: ListBuffer[Array[UnsafeRow]]

    All batches from lastCommittedOffset + 1 to currentOffset, inclusive.

    All batches from lastCommittedOffset + 1 to currentOffset, inclusive. Stored in a ListBuffer to facilitate removing committed batches.

    Attributes
    protected
  9. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  10. def commit(end: connector.read.streaming.Offset): Unit
    Definition Classes
    MemoryStream → MemoryStreamBase → SparkDataStream
  11. def createReaderFactory(): PartitionReaderFactory
    Definition Classes
    MemoryStream → MicroBatchStream
  12. var currentOffset: LongOffset
    Attributes
    protected
  13. def deserializeOffset(json: String): connector.read.streaming.Offset
    Definition Classes
    MemoryStream → MemoryStreamBase → SparkDataStream
  14. val encoder: ExpressionEncoder[A]
    Definition Classes
    MemoryStreamBase
  15. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  16. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  17. def fullSchema(): StructType
    Definition Classes
    MemoryStreamBase
  18. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  19. def getDefaultReadLimit(): ReadLimit
    Definition Classes
    SupportsAdmissionControl
  20. val id: Int
  21. def initialOffset(): connector.read.streaming.Offset
    Definition Classes
    MemoryStream → MemoryStreamBase → SparkDataStream
  22. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  23. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  24. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  25. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  26. var lastOffsetCommitted: LongOffset

    Last offset that was discarded, or -1 if no commits have occurred.

    Last offset that was discarded, or -1 if no commits have occurred. Note that the value -1 is used in calculations below and isn't just an arbitrary constant.

    Attributes
    protected
  27. def latestOffset(startOffset: connector.read.streaming.Offset, limit: ReadLimit): connector.read.streaming.Offset
    Definition Classes
    MemoryStream → SupportsAdmissionControl
  28. def latestOffset(): connector.read.streaming.Offset
    Definition Classes
    MemoryStream → MicroBatchStream
  29. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  30. def logDebug(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  31. def logDebug(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  32. def logError(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  33. def logError(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  34. def logInfo(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  35. def logInfo(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  36. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  37. def logTrace(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  38. def logTrace(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  39. def logWarning(msg: ⇒ String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  40. def logWarning(msg: ⇒ String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  41. val logicalPlan: LogicalPlan
    Attributes
    protected
    Definition Classes
    MemoryStreamBase
  42. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  43. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  44. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  45. val numPartitions: Option[Int]
  46. val output: Seq[Attribute]
    Attributes
    protected
  47. def planInputPartitions(start: connector.read.streaming.Offset, end: connector.read.streaming.Offset): Array[InputPartition]
    Definition Classes
    MemoryStream → MicroBatchStream
  48. def prepareForTriggerAvailableNow(): Unit
    Definition Classes
    MemoryStream → SupportsTriggerAvailableNow
  49. def reportLatestOffset(): connector.read.streaming.Offset
    Definition Classes
    SupportsAdmissionControl
  50. def reset(): Unit
  51. val sqlContext: SQLContext
  52. var startOffset: LongOffset
    Attributes
    protected
  53. def stop(): Unit
    Definition Classes
    MemoryStream → SparkDataStream
  54. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  55. def toDF(): DataFrame
    Definition Classes
    MemoryStreamBase
  56. def toDS(): Dataset[A]
    Definition Classes
    MemoryStreamBase
  57. lazy val toRow: Serializer[A]
    Attributes
    protected
    Definition Classes
    MemoryStreamBase
  58. def toString(): String
    Definition Classes
    MemoryStream → AnyRef → Any
  59. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  60. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  61. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from Logging

Inherited from SupportsTriggerAvailableNow

Inherited from SupportsAdmissionControl

Inherited from MicroBatchStream

Inherited from MemoryStreamBase[A]

Inherited from SparkDataStream

Inherited from AnyRef

Inherited from Any

Ungrouped