trait EmbeddedKafkaStreams extends EmbeddedKafka with TestStreamsConfig
Helper trait for testing Kafka Streams. It creates an embedded Kafka instance for each test case. Use runStreams to execute your streams.
- Self Type: EmbeddedKafkaStreams with Suite
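A minimal usage sketch, assuming the net.manub.embeddedkafka.streams package of scalatest-embedded-kafka-streams and ScalaTest's WordSpec and Matchers; the class name, ports and topic names are illustrative, and the pass-through topology exists only for the example:

  import net.manub.embeddedkafka.EmbeddedKafkaConfig
  import net.manub.embeddedkafka.streams.EmbeddedKafkaStreams
  import org.apache.kafka.common.serialization.Serdes
  import org.apache.kafka.streams.StreamsBuilder
  import org.apache.kafka.streams.kstream.{Consumed, Produced}
  import org.scalatest.{Matchers, WordSpec}

  // The self type requires a ScalaTest Suite, so the trait is mixed into a spec class.
  class PassThroughStreamsSpec extends WordSpec with Matchers with EmbeddedKafkaStreams {

    // Illustrative ports; an embedded Kafka / ZooKeeper pair is started per test case.
    implicit val kafkaConfig: EmbeddedKafkaConfig =
      EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

    "the pass-through topology" should {
      "forward messages from the input topic to the output topic" in {
        // Build a trivial topology that copies every record from inputTopic to outputTopic.
        val builder = new StreamsBuilder
        builder
          .stream[String, String]("inputTopic", Consumed.`with`(Serdes.String(), Serdes.String()))
          .to("outputTopic", Produced.`with`(Serdes.String(), Serdes.String()))

        // runStreams creates the topics, starts the streams and closes them after the block.
        runStreams(Seq("inputTopic", "outputTopic"), builder.build()) {
          publishStringMessageToKafka("inputTopic", "hello")
          consumeFirstStringMessageFrom("outputTopic") should be("hello")
        }
      }
    }
  }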
Inheritance
- EmbeddedKafkaStreams
- TestStreamsConfig
- EmbeddedKafka
- EmbeddedKafkaSupport
- AnyRef
- Any
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def clone(): AnyRef
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @native() @throws( ... )
- def consumeFirstKeyedMessageFrom[K, V](topic: String, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): (K, V)
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... ) @throws( ... )
- def consumeFirstMessageFrom[V](topic: String, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): V
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... ) @throws( ... )
- def consumeFirstStringMessageFrom(topic: String, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig): String
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberKeyedMessagesFrom[K, V](topic: String, number: Int, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): List[(K, V)]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberKeyedMessagesFromTopics[K, V](topics: Set[String], number: Int, autoCommit: Boolean, timeout: Duration, resetTimeoutOnEachMessage: Boolean)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): Map[String, List[(K, V)]]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberMessagesFrom[V](topic: String, number: Int, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): List[V]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberMessagesFromTopics[V](topics: Set[String], number: Int, autoCommit: Boolean, timeout: Duration, resetTimeoutOnEachMessage: Boolean)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): Map[String, List[V]]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberStringMessagesFrom(topic: String, number: Int, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig): List[String]
  - Definition Classes: EmbeddedKafkaSupport
- def createCustomTopic(topic: String, topicConfig: Map[String, String], partitions: Int, replicationFactor: Int)(implicit config: EmbeddedKafkaConfig): Unit
  - Definition Classes: EmbeddedKafkaSupport
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def finalize(): Unit
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def kafkaConsumer[K, T](implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], deserializer: Deserializer[T]): KafkaConsumer[K, T]
  - Definition Classes: EmbeddedKafkaSupport
- def kafkaProducer[K, T](topic: String, key: K, message: T)(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): KafkaProducer[K, T]
  - Definition Classes: EmbeddedKafkaSupport
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- def publishStringMessageToKafka(topic: String, message: String)(implicit config: EmbeddedKafkaConfig): Unit
  - Definition Classes: EmbeddedKafkaSupport
- def publishToKafka[K, T](topic: String, messages: Seq[(K, T)])(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): Unit
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... )
- def publishToKafka[K, T](topic: String, key: K, message: T)(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): Unit
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... )
- def publishToKafka[T](topic: String, message: T)(implicit config: EmbeddedKafkaConfig, serializer: Serializer[T]): Unit
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... )
- def runStreams(topicsToCreate: Seq[String], topology: Topology, extraConfig: Map[String, AnyRef] = Map.empty)(block: ⇒ Any)(implicit config: EmbeddedKafkaConfig): Any
  Execute Kafka streams and pass a block of code that can operate while the streams are active. The code block can be used for publishing and consuming messages in Kafka, and it gets a pre-initialized Kafka consumer that can be used implicitly by utility methods such as consumeLazily(String). E.g.:
    runStreams(Seq("inputTopic", "outputTopic"), topology) {
      // here you can publish and consume messages and make assertions
      publishToKafka("inputTopic", "one-string")
      publishToKafka("inputTopic", "another-string")
      consumeFirstStringMessageFrom("inputTopic") should be ("one-string")
    }
  - topicsToCreate: the topics that should be created in Kafka before launching the streams.
  - topology: the streams topology that will be used to instantiate the streams with a default configuration (all state directories are different and in temp folders).
  - extraConfig: additional KafkaStreams configuration (overwrites existing keys in the default config).
  - block: the code block that will be executed while the streams are active. Once the block has been executed, the streams will be closed.
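A hedged sketch of the extraConfig parameter, assumed to run inside a ScalaTest body with Matchers and an implicit EmbeddedKafkaConfig in scope; the topic names and the commit-interval value are illustrative:

  import org.apache.kafka.common.serialization.Serdes
  import org.apache.kafka.streams.{StreamsBuilder, StreamsConfig}
  import org.apache.kafka.streams.kstream.{Consumed, Produced}

  // Pass-through topology used only to have something to run.
  val builder = new StreamsBuilder
  builder
    .stream[String, String]("inputTopic", Consumed.`with`(Serdes.String(), Serdes.String()))
    .to("outputTopic", Produced.`with`(Serdes.String(), Serdes.String()))

  runStreams(
    topicsToCreate = Seq("inputTopic", "outputTopic"),
    topology = builder.build(),
    // overwrites the corresponding key in the default test StreamsConfig
    extraConfig = Map(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG -> "100")
  ) {
    publishStringMessageToKafka("inputTopic", "one-string")
    consumeFirstStringMessageFrom("outputTopic") should be("one-string")
  }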
- def startKafka(config: EmbeddedKafkaConfig, kafkaLogDir: Directory): KafkaServer
  - Definition Classes: EmbeddedKafkaSupport
- def startZooKeeper(zooKeeperPort: Int, zkLogsDir: Directory): ServerCnxnFactory
  - Definition Classes: EmbeddedKafkaSupport
- def streamConfig(streamName: String, extraConfig: Map[String, AnyRef] = Map.empty)(implicit kafkaConfig: EmbeddedKafkaConfig): StreamsConfig
  Create a test stream config for a given stream.
  - streamName: the name of the stream; it will be used as the application ID.
  - extraConfig: any additional configuration; keys already defined in the default config will be overwritten with these values.
  - kafkaConfig: the Kafka test configuration.
  - returns: the Streams configuration.
  - Definition Classes: TestStreamsConfig
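A hedged sketch of using streamConfig directly when driving a KafkaStreams instance by hand instead of through runStreams. The stream name, topics and cache override are illustrative; it assumes an implicit EmbeddedKafkaConfig in scope and a Kafka Streams version whose KafkaStreams constructor still accepts a StreamsConfig:

  import org.apache.kafka.common.serialization.Serdes
  import org.apache.kafka.streams.{KafkaStreams, StreamsBuilder, StreamsConfig}
  import org.apache.kafka.streams.kstream.{Consumed, Produced}

  val builder = new StreamsBuilder
  builder
    .stream[String, String]("inputTopic", Consumed.`with`(Serdes.String(), Serdes.String()))
    .to("outputTopic", Produced.`with`(Serdes.String(), Serdes.String()))

  // application.id will be "my-test-stream"; the extra entry overwrites the default value.
  val config: StreamsConfig = streamConfig(
    "my-test-stream",
    Map(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG -> "0")
  )

  withRunningKafka {
    val streams = new KafkaStreams(builder.build(), config)
    streams.start()
    // ...publish, consume and assert against the embedded broker...
    streams.close()
  }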
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @native() @throws( ... )
- def withRunningKafka[T](body: ⇒ T)(implicit config: EmbeddedKafkaConfig): T
  - Definition Classes: EmbeddedKafkaSupport
- def withRunningKafkaOnFoundPort[T](config: EmbeddedKafkaConfig)(body: (EmbeddedKafkaConfig) ⇒ T): T
  - Definition Classes: EmbeddedKafkaSupport
- val zkConnectionTimeoutMs: Int
  - Definition Classes: EmbeddedKafkaSupport
- val zkSecurityEnabled: Boolean
  - Definition Classes: EmbeddedKafkaSupport
- val zkSessionTimeoutMs: Int
  - Definition Classes: EmbeddedKafkaSupport