object EmbeddedKafkaStreams extends EmbeddedKafkaStreams
Linear Supertypes
- EmbeddedKafkaStreams
- EmbeddedKafka
- EmbeddedKafkaOps
- KafkaOps
- ZooKeeperOps
- ProducerOps
- ConsumerOps
- AdminOps
- EmbeddedKafkaSupport
- EmbeddedKafkaStreamsSupport
- AnyRef
- Any
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- val adminClientCloseTimeout: FiniteDuration
- Attributes
- protected
- Definition Classes
- AdminOps
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- val autoCreateTopics: Boolean
- Attributes
- protected
- Definition Classes
- KafkaOps
- val brokerId: Short
- Attributes
- protected
- Definition Classes
- KafkaOps
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @native()
- def consumeFirstKeyedMessageFrom[K, V](topic: String, autoCommit: Boolean, timeout: Duration)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): (K, V)
- Definition Classes
- ConsumerOps
- Annotations
- @throws(classOf[java.util.concurrent.TimeoutException]) @throws(classOf[io.github.embeddedkafka.KafkaUnavailableException])
- def consumeFirstMessageFrom[V](topic: String, autoCommit: Boolean, timeout: Duration)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): V
- Definition Classes
- ConsumerOps
- Annotations
- @throws(classOf[java.util.concurrent.TimeoutException]) @throws(classOf[io.github.embeddedkafka.KafkaUnavailableException])
- def consumeFirstStringMessageFrom(topic: String, autoCommit: Boolean, timeout: Duration)(implicit config: EmbeddedKafkaConfig): String
- Definition Classes
- ConsumerOps
- def consumeNumberKeyedMessagesFrom[K, V](topic: String, number: Int, autoCommit: Boolean, timeout: Duration)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): List[(K, V)]
- Definition Classes
- ConsumerOps
- def consumeNumberKeyedMessagesFromTopics[K, V](topics: Set[String], number: Int, autoCommit: Boolean, timeout: Duration, resetTimeoutOnEachMessage: Boolean)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): Map[String, List[(K, V)]]
- Definition Classes
- ConsumerOps
- def consumeNumberMessagesFrom[V](topic: String, number: Int, autoCommit: Boolean, timeout: Duration)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): List[V]
- Definition Classes
- ConsumerOps
- def consumeNumberMessagesFromTopics[V](topics: Set[String], number: Int, autoCommit: Boolean, timeout: Duration, resetTimeoutOnEachMessage: Boolean)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): Map[String, List[V]]
- Definition Classes
- ConsumerOps
- def consumeNumberStringMessagesFrom(topic: String, number: Int, autoCommit: Boolean, timeout: Duration)(implicit config: EmbeddedKafkaConfig): List[String]
- Definition Classes
- ConsumerOps
- val consumerPollingTimeout: FiniteDuration
- Attributes
- protected
- Definition Classes
- ConsumerOps
- def createCustomTopic(topic: String, topicConfig: Map[String, String], partitions: Int, replicationFactor: Int)(implicit config: EmbeddedKafkaConfig): Try[Unit]
- Definition Classes
- AdminOps
- def deleteTopics(topics: List[String])(implicit config: EmbeddedKafkaConfig): Try[Unit]
- Definition Classes
- AdminOps
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable])
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- val logCleanerDedupeBufferSize: Int
- Attributes
- protected
- Definition Classes
- KafkaOps
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- val producerPublishTimeout: FiniteDuration
- Attributes
- protected
- Definition Classes
- ProducerOps
- def publishStringMessageToKafka(topic: String, message: String)(implicit config: EmbeddedKafkaConfig): Unit
- Definition Classes
- ProducerOps
- def publishToKafka[K, T](topic: String, messages: Seq[(K, T)])(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): Unit
- Definition Classes
- ProducerOps
- Annotations
- @throws(classOf[io.github.embeddedkafka.KafkaUnavailableException])
- def publishToKafka[K, T](topic: String, key: K, message: T)(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): Unit
- Definition Classes
- ProducerOps
- Annotations
- @throws(classOf[io.github.embeddedkafka.KafkaUnavailableException])
- def publishToKafka[T](producerRecord: ProducerRecord[String, T])(implicit config: EmbeddedKafkaConfig, serializer: Serializer[T]): Unit
- Definition Classes
- ProducerOps
- Annotations
- @throws(classOf[io.github.embeddedkafka.KafkaUnavailableException])
- def publishToKafka[T](topic: String, message: T)(implicit config: EmbeddedKafkaConfig, serializer: Serializer[T]): Unit
- Definition Classes
- ProducerOps
- Annotations
- @throws(classOf[io.github.embeddedkafka.KafkaUnavailableException])
- def runStreams[T](topicsToCreate: Seq[String], topology: Topology, extraConfig: Map[String, AnyRef] = Map.empty)(block: => T)(implicit config: EmbeddedKafkaConfig): T
Execute Kafka Streams and pass a block of code that can operate while the streams are active. The code block can be used for publishing and consuming messages in Kafka (see the usage sketch after this entry).
- topicsToCreate
the topics that should be created in Kafka before launching the streams.
- topology
the streams topology that will be used to instantiate the streams with a default configuration (state directories are unique and placed in temporary folders)
- extraConfig
additional Kafka Streams configuration (overwrites existing keys in the default config)
- block
the code block that will be executed while the streams are active. Once the block has been executed, the streams will be closed.
- Definition Classes
- EmbeddedKafkaStreamsSupport
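A minimal usage sketch of runStreams follows. It relies on details not shown on this page: that the object lives in the io.github.embeddedkafka.streams package, that a kafka-streams artifact is on the classpath, and that the publish/consume helpers are called with their default autoCommit and timeout arguments; the topic names and ports are illustrative.

```scala
import io.github.embeddedkafka.EmbeddedKafkaConfig
import io.github.embeddedkafka.streams.EmbeddedKafkaStreams._
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, Produced, ValueMapper}

object RunStreamsExample extends App {
  // The implicit config describes the embedded broker and is also picked up
  // by the publish/consume helpers used inside the block.
  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

  // A trivial topology: copy "input-topic" to "output-topic", upper-cased.
  val builder = new StreamsBuilder
  builder
    .stream("input-topic", Consumed.`with`(Serdes.String(), Serdes.String()))
    .mapValues(new ValueMapper[String, String] {
      def apply(value: String): String = value.toUpperCase
    })
    .to("output-topic", Produced.`with`(Serdes.String(), Serdes.String()))

  // runStreams creates the topics, starts the topology, runs the block,
  // and closes the streams once the block has finished.
  runStreams(Seq("input-topic", "output-topic"), builder.build()) {
    publishStringMessageToKafka("input-topic", "hello")
    val read = consumeFirstStringMessageFrom("output-topic")
    assert(read == "HELLO") // expected once the topology has processed the record
  }
}
```

Because all of the publish and consume helpers take the same implicit EmbeddedKafkaConfig, a single config value drives both the embedded broker and the interactions inside the block.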
- def runStreamsOnFoundPort[T](config: EmbeddedKafkaConfig)(topicsToCreate: Seq[String], topology: Topology, extraConfig: Map[String, AnyRef] = Map.empty)(block: (EmbeddedKafkaConfig) => T): T
Execute Kafka Streams and pass a block of code that can operate while the streams are active. The code block can be used for publishing and consuming messages in Kafka. The actual ports of the servers will be detected and inserted into a copied version of the EmbeddedKafkaConfig that gets passed to the block. This is useful if you set any port to 0, which makes the server listen on an arbitrary available port (see the usage sketch after this entry).
- config
the user-defined EmbeddedKafkaConfig
- topicsToCreate
the topics that should be created in Kafka before launching the streams.
- topology
the streams topology that will be used to instantiate the streams with a default configuration (state directories are unique and placed in temporary folders)
- extraConfig
additional Kafka Streams configuration (overwrites existing keys in the default config)
- block
the code block that will be executed while the streams are active, given an EmbeddedKafkaConfig with the actual ports the servers are running on. Once the block has been executed, the streams will be closed.
- Definition Classes
- EmbeddedKafkaStreamsSupport
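A sketch of runStreamsOnFoundPort with both ports set to 0, so the servers bind to arbitrary free ports and the block receives a copy of the config holding the ports actually in use. The same package, classpath, and naming assumptions as the previous sketch apply.

```scala
import io.github.embeddedkafka.EmbeddedKafkaConfig
import io.github.embeddedkafka.streams.EmbeddedKafkaStreams._
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, Produced}

object RunStreamsOnFoundPortExample extends App {
  // Port 0 asks for arbitrary free ports instead of fixed ones.
  val userConfig = EmbeddedKafkaConfig(kafkaPort = 0, zooKeeperPort = 0)

  // A pass-through topology from "input-topic" to "output-topic".
  val builder = new StreamsBuilder
  builder
    .stream("input-topic", Consumed.`with`(Serdes.String(), Serdes.String()))
    .to("output-topic", Produced.`with`(Serdes.String(), Serdes.String()))

  runStreamsOnFoundPort(userConfig)(Seq("input-topic", "output-topic"), builder.build()) {
    actualConfig =>
      // The block receives a copy of the config with the ports actually bound.
      implicit val config: EmbeddedKafkaConfig = actualConfig
      publishStringMessageToKafka("input-topic", s"running on Kafka port ${actualConfig.kafkaPort}")
      println(consumeFirstStringMessageFrom("output-topic"))
  }
}
```

Marking the received config as implicit inside the block points the publish and consume helpers at whatever ports were actually bound.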
- val streamsConfig: EmbeddedStreamsConfigImpl
- Attributes
- protected[embeddedkafka]
- Definition Classes
- EmbeddedKafkaStreams → EmbeddedKafkaStreamsSupport
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- val topicCreationTimeout: FiniteDuration
- Attributes
- protected
- Definition Classes
- AdminOps
- val topicDeletionTimeout: FiniteDuration
- Attributes
- protected
- Definition Classes
- AdminOps
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- def withAdminClient[T](body: (AdminClient) => T)(implicit config: EmbeddedKafkaConfig): Try[T]
- Attributes
- protected
- Definition Classes
- AdminOps
- def withConsumer[K, V, T](body: (KafkaConsumer[K, V]) => T)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): T
- Definition Classes
- ConsumerOps
- def withProducer[K, V, T](body: (KafkaProducer[K, V]) => T)(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], valueSerializer: Serializer[V]): T
- Definition Classes
- ProducerOps
- def withRunningKafka[T](body: => T)(implicit config: EmbeddedKafkaConfig): T
- Definition Classes
- EmbeddedKafkaSupport
- def withRunningKafkaOnFoundPort[T](config: EmbeddedKafkaConfig)(body: (EmbeddedKafkaConfig) => T): T
- Definition Classes
- EmbeddedKafkaSupport
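For comparison, a sketch of withRunningKafka, which starts just the embedded broker (no streams) for the duration of the block; withRunningKafkaOnFoundPort follows the same port-0 pattern as runStreamsOnFoundPort above. Assumptions are as in the earlier sketches.

```scala
import io.github.embeddedkafka.EmbeddedKafkaConfig
import io.github.embeddedkafka.streams.EmbeddedKafkaStreams._

object WithRunningKafkaExample extends App {
  implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

  // Starts the embedded broker, runs the block, then shuts it down.
  withRunningKafka {
    publishStringMessageToKafka("plain-topic", "ping")
    println(consumeFirstStringMessageFrom("plain-topic")) // prints "ping"
  }
}
```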
- val zkConnectionTimeoutMs: Int
- Definition Classes
- AdminOps
- val zkSessionTimeoutMs: Int
- Definition Classes
- AdminOps