trait EmbeddedKafkaStreamsAllInOne extends EmbeddedKafkaStreams with Consumers
Convenience trait for testing Kafka Streams with ScalaTest.
It exposes EmbeddedKafkaStreams.runStreams as well as the Consumers API for easily creating and querying consumers in tests.
e.g.

  val (in, out) = ("inputTopic", "outputTopic")
  runStreams(Seq(in, out), streamTopology) {
    withConsumer[String, String, Unit] { consumer =>
      // here you can publish and consume messages and make assertions
      publishToKafka(in, "one-string")
      publishToKafka(in, "another-string")
      consumeLazily(out).take(2).toList should be(
        Seq("one-string" -> "true", "another-string" -> "true")
      )
    }
  }
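The example above assumes a ScalaTest suite that mixes in the trait. A fuller, hedged sketch of such a suite follows; the package prefixes (net.manub.embeddedkafka), the ScalaTest 3.0-style imports, the default EmbeddedKafkaConfig(), the pass-through topology and the topic names are illustrative assumptions, not something this page defines:

  import net.manub.embeddedkafka.EmbeddedKafkaConfig                  // assumed package prefix
  import net.manub.embeddedkafka.streams.EmbeddedKafkaStreamsAllInOne // assumed package prefix
  import org.apache.kafka.common.serialization.Serdes
  import org.apache.kafka.streams.{StreamsBuilder, StreamsConfig}
  import org.scalatest.{Matchers, WordSpec}

  class PassThroughTopologySpec
      extends WordSpec
      with Matchers
      with EmbeddedKafkaStreamsAllInOne {

    implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

    "the topology" should {
      "copy records from the input topic to the output topic" in {
        // Illustrative topology: pipe "in" straight to "out" using the default serdes.
        val builder = new StreamsBuilder
        builder.stream[String, String]("in").to("out")

        runStreams(
          topicsToCreate = Seq("in", "out"),
          topology = builder.build(),
          extraConfig = Map(
            StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG   -> Serdes.String().getClass.getName,
            StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG -> Serdes.String().getClass.getName
          )
        ) {
          publishStringMessageToKafka("in", "hello")
          consumeFirstStringMessageFrom("out", autoCommit = false) should be("hello")
        }
      }
    }
  }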
- Self Type: EmbeddedKafkaStreamsAllInOne with Suite
- See also: Consumers
Inheritance
- EmbeddedKafkaStreamsAllInOne
- Consumers
- EmbeddedKafkaStreams
- TestStreamsConfig
- EmbeddedKafka
- EmbeddedKafkaSupport
- AnyRef
- Any
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def clone(): AnyRef
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @native() @throws( ... )
- def consumeFirstKeyedMessageFrom[K, V](topic: String, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): (K, V)
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... ) @throws( ... )
- def consumeFirstMessageFrom[V](topic: String, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): V
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... ) @throws( ... )
- def consumeFirstStringMessageFrom(topic: String, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig): String
  - Definition Classes: EmbeddedKafkaSupport
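  A hedged sketch of the consumeFirst* helpers above, assumed to run inside a suite mixing in this trait, with the embedded broker already running (e.g. inside withRunningKafka or runStreams) and an implicit EmbeddedKafkaConfig in scope; topic names are illustrative:

    import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}

    implicit val stringDeserializer: Deserializer[String] = new StringDeserializer

    publishStringMessageToKafka("inputTopic", "hello")

    // First value on the topic, using the String convenience helper.
    val first: String = consumeFirstStringMessageFrom("inputTopic", autoCommit = false)

    // Generic variant: deserializes with the implicit value deserializer.
    val v: String = consumeFirstMessageFrom[String]("inputTopic", autoCommit = false)

    // Keyed variant: the first record as a (key, value) pair.
    val (key, value) = consumeFirstKeyedMessageFrom[String, String]("inputTopic", autoCommit = false)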
- def consumeNumberKeyedMessagesFrom[K, V](topic: String, number: Int, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): List[(K, V)]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberKeyedMessagesFromTopics[K, V](topics: Set[String], number: Int, autoCommit: Boolean, timeout: Duration, resetTimeoutOnEachMessage: Boolean)(implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], valueDeserializer: Deserializer[V]): Map[String, List[(K, V)]]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberMessagesFrom[V](topic: String, number: Int, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): List[V]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberMessagesFromTopics[V](topics: Set[String], number: Int, autoCommit: Boolean, timeout: Duration, resetTimeoutOnEachMessage: Boolean)(implicit config: EmbeddedKafkaConfig, valueDeserializer: Deserializer[V]): Map[String, List[V]]
  - Definition Classes: EmbeddedKafkaSupport
- def consumeNumberStringMessagesFrom(topic: String, number: Int, autoCommit: Boolean)(implicit config: EmbeddedKafkaConfig): List[String]
  - Definition Classes: EmbeddedKafkaSupport
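  A brief sketch of the batched consumeNumber* helpers above, under the same assumptions as the previous example (running broker, implicit EmbeddedKafkaConfig, deserializer implicits); the timeout is assumed to be a scala.concurrent.duration.Duration, and topic names are illustrative:

    import scala.concurrent.duration._
    import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}

    implicit val stringDeserializer: Deserializer[String] = new StringDeserializer

    // Read three String values from the topic.
    val values: List[String] =
      consumeNumberStringMessagesFrom("outputTopic", number = 3, autoCommit = false)

    // Keyed variant across several topics; results are grouped by topic name.
    val byTopic: Map[String, List[(String, String)]] =
      consumeNumberKeyedMessagesFromTopics[String, String](
        topics = Set("outputTopic", "auditTopic"),
        number = 2,
        autoCommit = false,
        timeout = 10.seconds,
        resetTimeoutOnEachMessage = true
      )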
- def createCustomTopic(topic: String, topicConfig: Map[String, String], partitions: Int, replicationFactor: Int)(implicit config: EmbeddedKafkaConfig): Unit
  - Definition Classes: EmbeddedKafkaSupport
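  A sketch of pre-creating a topic with custom settings before using it; the parameter values and the cleanup.policy entry are illustrative, and an implicit EmbeddedKafkaConfig is assumed in scope:

    withRunningKafka {
      // Create a compacted, 3-partition topic before any producer auto-creates it.
      createCustomTopic(
        topic = "events",
        topicConfig = Map("cleanup.policy" -> "compact"),
        partitions = 3,
        replicationFactor = 1
      )

      publishStringMessageToKafka("events", "first-event")
    }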
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def finalize(): Unit
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def kafkaConsumer[K, T](implicit config: EmbeddedKafkaConfig, keyDeserializer: Deserializer[K], deserializer: Deserializer[T]): KafkaConsumer[K, T]
  - Definition Classes: EmbeddedKafkaSupport
- def kafkaProducer[K, T](topic: String, key: K, message: T)(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): KafkaProducer[K, T]
  - Definition Classes: EmbeddedKafkaSupport
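  A hedged sketch of the raw client factories above, assuming a running broker and an implicit EmbeddedKafkaConfig, and assuming the factories only configure the clients (sending, polling and closing stay with the caller); poll(java.time.Duration) additionally assumes Kafka clients 2.0+:

    import java.time.Duration
    import java.util.Collections
    import org.apache.kafka.clients.producer.ProducerRecord
    import org.apache.kafka.common.serialization._

    implicit val serializer: Serializer[String]     = new StringSerializer
    implicit val deserializer: Deserializer[String] = new StringDeserializer

    val producer = kafkaProducer[String, String]("inputTopic", "key", "value")
    try producer.send(new ProducerRecord("inputTopic", "key", "value")).get()
    finally producer.close()

    val consumer = kafkaConsumer[String, String]
    try {
      consumer.subscribe(Collections.singletonList("inputTopic"))
      val records = consumer.poll(Duration.ofSeconds(5)) // ConsumerRecords[String, String]
      records.count()
    } finally consumer.close()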
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def newConsumer[K, V]()(implicit arg0: Deserializer[K], arg1: Deserializer[V], config: EmbeddedKafkaConfig): KafkaConsumer[K, V]
  - Definition Classes: Consumers
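  A short sketch of newConsumer, under the same assumptions as above; the returned client is caller-managed here, so close it when done:

    import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}

    implicit val stringDeserializer: Deserializer[String] = new StringDeserializer

    val consumer = newConsumer[String, String]()
    try {
      consumer.subscribe(java.util.Collections.singletonList("outputTopic"))
      consumer.poll(java.time.Duration.ofSeconds(5)) // assumes Kafka clients 2.0+
    } finally consumer.close()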
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- def publishStringMessageToKafka(topic: String, message: String)(implicit config: EmbeddedKafkaConfig): Unit
  - Definition Classes: EmbeddedKafkaSupport
- def publishToKafka[K, T](topic: String, messages: Seq[(K, T)])(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): Unit
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... )
- def publishToKafka[K, T](topic: String, key: K, message: T)(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): Unit
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... )
- def publishToKafka[T](topic: String, message: T)(implicit config: EmbeddedKafkaConfig, serializer: Serializer[T]): Unit
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( ... )
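  A sketch of the publish helpers above, assuming a running broker, an implicit EmbeddedKafkaConfig and String serializers in scope; topic names and payloads are illustrative:

    import org.apache.kafka.common.serialization.{Serializer, StringSerializer}

    implicit val stringSerializer: Serializer[String] = new StringSerializer

    // Value-only overload (no key supplied).
    publishToKafka("inputTopic", "just-a-value")

    // Single keyed record.
    publishToKafka("inputTopic", "user-1", "clicked")

    // Batch of keyed records in one call.
    publishToKafka("inputTopic", Seq("user-1" -> "clicked", "user-2" -> "scrolled"))

    // String convenience helper: only the implicit config is required.
    publishStringMessageToKafka("inputTopic", "plain-string")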
- def runStreams(topicsToCreate: Seq[String], topology: Topology, extraConfig: Map[String, AnyRef] = Map.empty)(block: ⇒ Any)(implicit config: EmbeddedKafkaConfig): Any
  Execute Kafka streams and pass a block of code that can operate while the streams are active. The code block can be used for publishing and consuming messages in Kafka; combine it with the Consumers helpers (e.g. withConsumer) or with runStreamsWithStringConsumer when a pre-initialised consumer is needed for utility methods such as consumeLazily(String).
  e.g.

    val in = "inputTopic"
    runStreams(Seq(in, "outputTopic"), topology) {
      // here you can publish and consume messages and make assertions
      publishToKafka(in, "one-string")
      publishToKafka(in, "another-string")
      consumeFirstStringMessageFrom(in) should be("one-string")
    }

  - topicsToCreate: the topics that should be created in Kafka before launching the streams.
  - topology: the streams topology that will be used to instantiate the streams with a default configuration (all state directories are different and in temp folders).
  - extraConfig: additional KafkaStreams configuration (overwrites existing keys in the default config).
  - block: the code block that will be executed while the streams are active. Once the block has been executed, the streams are closed.
  - Definition Classes: EmbeddedKafkaStreams
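  A further hedged sketch focused on extraConfig, which overlays entries onto the generated streams configuration; the property choices are illustrative and topology stands for a Topology built earlier in the test:

    import org.apache.kafka.streams.StreamsConfig

    runStreams(
      topicsToCreate = Seq("inputTopic", "outputTopic"),
      topology = topology,
      extraConfig = Map(
        StreamsConfig.NUM_STREAM_THREADS_CONFIG -> "2",
        StreamsConfig.COMMIT_INTERVAL_MS_CONFIG -> "100"
      )
    ) {
      publishStringMessageToKafka("inputTopic", "a-message")
      consumeFirstStringMessageFrom("outputTopic", autoCommit = false) // assert on it in a real test
    }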
- def runStreamsWithStringConsumer(topicsToCreate: Seq[String], topology: Topology)(block: (KafkaConsumer[String, String]) ⇒ Any)(implicit config: EmbeddedKafkaConfig): Any
  Run Kafka Streams while offering a String-based consumer for easy testing of stream output.
  - topicsToCreate: the topics that should be created. Usually these should be the topics that the Streams-under-test use for inputs and outputs. They need to be created before running the streams and this is automatically taken care of.
  - topology: the streams topology that will be instantiated
  - block: the block of testing code that will be executed by passing the simple String-based consumer.
  - returns: the result of the testing code
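  A hedged sketch, assuming a pass-through topology from inputTopic to outputTopic built earlier in the test, an implicit EmbeddedKafkaConfig and String serializers in scope, and Kafka clients 2.0+ for poll(java.time.Duration):

    import org.apache.kafka.common.serialization.{Serializer, StringSerializer}

    implicit val stringSerializer: Serializer[String] = new StringSerializer

    runStreamsWithStringConsumer(Seq("inputTopic", "outputTopic"), topology) { consumer =>
      publishToKafka("inputTopic", "hello", "world")

      // The consumer handed to the block is already configured for the embedded broker.
      consumer.subscribe(java.util.Collections.singletonList("outputTopic"))
      val records = consumer.poll(java.time.Duration.ofSeconds(5))
      records.count() should be > 0
    }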
- def startKafka(config: EmbeddedKafkaConfig, kafkaLogDir: Directory): KafkaServer
  - Definition Classes: EmbeddedKafkaSupport
- def startZooKeeper(zooKeeperPort: Int, zkLogsDir: Directory): ServerCnxnFactory
  - Definition Classes: EmbeddedKafkaSupport
- def streamConfig(streamName: String, extraConfig: Map[String, AnyRef] = Map.empty)(implicit kafkaConfig: EmbeddedKafkaConfig): StreamsConfig
  Create a test stream config for a given stream.
  - streamName: the name of the stream. It will be used as the Application ID.
  - extraConfig: any additional configuration. Keys that are already defined in the default config are overwritten with these values.
  - kafkaConfig: the Kafka test configuration
  - returns: the Streams configuration
  - Definition Classes: TestStreamsConfig
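  A sketch of driving a topology manually with a config built by streamConfig, assuming an implicit EmbeddedKafkaConfig, a topology built earlier in the test, and a Kafka Streams version whose KafkaStreams constructor still accepts a StreamsConfig:

    import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

    // application.id becomes "my-test-stream"; state directories default to temp folders.
    val conf: StreamsConfig = streamConfig(
      streamName = "my-test-stream",
      extraConfig = Map(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG -> "0")
    )

    val streams = new KafkaStreams(topology, conf)
    streams.start()
    try {
      // publish to and consume from the embedded broker here
    } finally streams.close()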
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @native() @throws( ... )
- def withConsumer[K, V, T](block: (KafkaConsumer[K, V]) ⇒ T)(implicit arg0: Deserializer[K], arg1: Deserializer[V], config: EmbeddedKafkaConfig): T
  - Definition Classes: Consumers
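  A sketch of withConsumer, assuming a running broker, an implicit EmbeddedKafkaConfig and Kafka clients 2.0+; the trait supplies the consumer for the duration of the block and the block's result is returned:

    import scala.collection.JavaConverters._
    import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}

    implicit val stringDeserializer: Deserializer[String] = new StringDeserializer

    val firstValue: Option[String] = withConsumer[String, String, Option[String]] { consumer =>
      consumer.subscribe(java.util.Collections.singletonList("outputTopic"))
      val records = consumer.poll(java.time.Duration.ofSeconds(5))
      records.iterator().asScala.toList.headOption.map(_.value())
    }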
- def withRunningKafka[T](body: ⇒ T)(implicit config: EmbeddedKafkaConfig): T
  - Definition Classes: EmbeddedKafkaSupport
- def withRunningKafkaOnFoundPort[T](config: EmbeddedKafkaConfig)(body: (EmbeddedKafkaConfig) ⇒ T): T
  - Definition Classes: EmbeddedKafkaSupport
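  A sketch of the two broker lifecycles, assuming the EmbeddedKafkaConfig case class takes kafkaPort and zooKeeperPort parameters and follows the library's convention that port 0 requests a random free port:

    // Fixed ports: the broker and ZooKeeper only live for the duration of the body.
    implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

    withRunningKafka {
      publishStringMessageToKafka("inputTopic", "hello")
    }

    // Random free ports: the actual config is handed back to the body.
    withRunningKafkaOnFoundPort(EmbeddedKafkaConfig(kafkaPort = 0, zooKeeperPort = 0)) { actualConfig =>
      publishStringMessageToKafka("inputTopic", "hello")(actualConfig)
    }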
- def withStringConsumer[T](block: (KafkaConsumer[String, String]) ⇒ T)(implicit config: EmbeddedKafkaConfig): T
  - Definition Classes: Consumers
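  A short sketch of withStringConsumer, under the same assumptions as the previous examples; no Deserializer implicits are needed for String keys and values:

    import scala.collection.JavaConverters._

    withStringConsumer { consumer =>
      consumer.subscribe(java.util.Collections.singletonList("outputTopic"))
      val records = consumer.poll(java.time.Duration.ofSeconds(5))
      records.asScala.foreach(r => println(s"${r.key} -> ${r.value}"))
    }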
- val zkConnectionTimeoutMs: Int
  - Definition Classes: EmbeddedKafkaSupport
- val zkSecurityEnabled: Boolean
  - Definition Classes: EmbeddedKafkaSupport
- val zkSessionTimeoutMs: Int
  - Definition Classes: EmbeddedKafkaSupport