object EmbeddedKafka extends EmbeddedKafkaSupport
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def clone(): AnyRef
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- def consumeFirstMessageFrom[T](topic: String, autoCommit: Boolean = false)(implicit config: EmbeddedKafkaConfig, deserializer: Deserializer[T]): T
  Consumes the first message available in a given topic, deserializing it with the implicit Deserializer for the type T.
  If autoCommit is false, only the message that is returned is committed. If autoCommit is true, all messages that were polled will be committed.
  - topic
    the topic to consume a message from
  - autoCommit
    if false, only the offset for the consumed message will be committed. If true, the offset for the last polled message will be committed instead. Defaults to false.
  - config
    an implicit EmbeddedKafkaConfig
  - deserializer
    an implicit org.apache.kafka.common.serialization.Deserializer for the type T
  - returns
    the first message consumed from the given topic, with a type T
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( classOf[TimeoutException] ) @throws( classOf[KafkaUnavailableException] )
  - Exceptions thrown
    KafkaUnavailableException if unable to connect to Kafka
    TimeoutException if unable to consume a message within 5 seconds
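A minimal usage sketch, assuming the net.manub.embeddedkafka package, a broker already started (e.g. via start()), an explicitly provided String deserializer, and an illustrative topic name:

```scala
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()
implicit val stringDeserializer: Deserializer[String] = new StringDeserializer

// Reads the first record from "greetings", committing only that record's offset
val first: String = EmbeddedKafka.consumeFirstMessageFrom[String]("greetings")

// With autoCommit = true, the offsets of all polled records are committed
val next: String = EmbeddedKafka.consumeFirstMessageFrom[String]("greetings", autoCommit = true)
```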
- def consumeFirstStringMessageFrom(topic: String, autoCommit: Boolean = false)(implicit config: EmbeddedKafkaConfig): String
  - Definition Classes: EmbeddedKafkaSupport
- def createCustomTopic(topic: String, topicConfig: Map[String, String] = Map.empty, partitions: Int = 1, replicationFactor: Int = 1)(implicit config: EmbeddedKafkaConfig): Unit
  Creates a topic with a custom configuration.
  - topic
    the topic name
  - topicConfig
    per-topic configuration Map
  - partitions
    number of partitions Int
  - replicationFactor
    replication factor Int
  - config
    an implicit EmbeddedKafkaConfig
  - Definition Classes: EmbeddedKafkaSupport
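For example, a sketch that creates a compacted, three-partition topic before publishing to it (the package import, topic name, and configuration values are illustrative assumptions):

```scala
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

// Create the topic up front so it gets the custom settings
// instead of the broker's auto-creation defaults
EmbeddedKafka.createCustomTopic(
  topic             = "users",
  topicConfig       = Map("cleanup.policy" -> "compact"),
  partitions        = 3,
  replicationFactor = 1
)
```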
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def finalize(): Unit
  - Attributes: protected[java.lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def isRunning: Boolean
  Returns whether the in-memory Kafka broker and ZooKeeper instance are running.
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
- def publishStringMessageToKafka(topic: String, message: String)(implicit config: EmbeddedKafkaConfig): Unit
  Synchronously publishes a message of type String to the running Kafka broker.
  - topic
    the topic to publish the message to (it will be auto-created)
  - message
    the String message to publish
  - config
    an implicit EmbeddedKafkaConfig
  - Definition Classes: EmbeddedKafkaSupport
  - Exceptions thrown
    KafkaUnavailableException if unable to connect to Kafka
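A small round-trip sketch combining this method with consumeFirstStringMessageFrom, assuming the net.manub.embeddedkafka package, a running broker, and an illustrative topic name:

```scala
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

// The topic is auto-created on first publish
EmbeddedKafka.publishStringMessageToKafka("greetings", "hello world")

// Read the message back as a String
val msg: String = EmbeddedKafka.consumeFirstStringMessageFrom("greetings")
assert(msg == "hello world")
```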
- def publishToKafka[K, T](topic: String, key: K, message: T)(implicit config: EmbeddedKafkaConfig, keySerializer: Serializer[K], serializer: Serializer[T]): Unit
  Synchronously publishes a keyed message to the running Kafka broker.
  - topic
    the topic to publish the message to (it will be auto-created)
  - key
    the key of type K to publish
  - message
    the message of type T to publish
  - config
    an implicit EmbeddedKafkaConfig
  - keySerializer
    an implicit Serializer for the type K
  - serializer
    an implicit Serializer for the type T
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( classOf[KafkaUnavailableException] )
  - Exceptions thrown
    KafkaUnavailableException if unable to connect to Kafka
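A sketch publishing a keyed record with both serializers provided explicitly (the package import, topic, and payload are illustrative assumptions):

```scala
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.serialization.{Serializer, StringSerializer}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()
implicit val stringSerializer: Serializer[String] = new StringSerializer

// The same implicit Serializer[String] is picked up for both the key and the value
EmbeddedKafka.publishToKafka[String, String]("users", key = "user-42", message = """{"name":"Jane"}""")
```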
- def publishToKafka[T](topic: String, message: T)(implicit config: EmbeddedKafkaConfig, serializer: Serializer[T]): Unit
  Synchronously publishes a message to the running Kafka broker.
  - topic
    the topic to publish the message to (it will be auto-created)
  - message
    the message of type T to publish
  - config
    an implicit EmbeddedKafkaConfig
  - serializer
    an implicit Serializer for the type T
  - Definition Classes: EmbeddedKafkaSupport
  - Annotations: @throws( classOf[KafkaUnavailableException] )
  - Exceptions thrown
    KafkaUnavailableException if unable to connect to Kafka
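For a non-String payload, the caller supplies a matching implicit Serializer. A sketch with a hypothetical Greeting case class and a hand-rolled serializer (all of these names are illustrative, not part of the library):

```scala
import java.nio.charset.StandardCharsets
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.serialization.Serializer

final case class Greeting(text: String)

// Hypothetical serializer for the example type; all three methods are overridden
// so the anonymous class also compiles against older Kafka client versions
implicit val greetingSerializer: Serializer[Greeting] = new Serializer[Greeting] {
  override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, data: Greeting): Array[Byte] =
    data.text.getBytes(StandardCharsets.UTF_8)
  override def close(): Unit = ()
}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

EmbeddedKafka.publishToKafka("greetings", Greeting("hello"))
```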
- def start()(implicit config: EmbeddedKafkaConfig): Unit
  Starts a ZooKeeper instance and a Kafka broker in memory, using temporary directories for storing logs. The log directories will be cleaned after calling the stop() method or on JVM exit, whichever happens earlier.
  - config
    an implicit EmbeddedKafkaConfig
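A start/stop sketch for test suites that manage the broker lifecycle themselves (the port values and the EmbeddedKafkaConfig parameter names are assumptions; adjust them to your version of the library):

```scala
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

// Bind ZooKeeper and Kafka to explicit ports instead of the library defaults
implicit val config: EmbeddedKafkaConfig =
  EmbeddedKafkaConfig(kafkaPort = 7001, zooKeeperPort = 7000)

EmbeddedKafka.start()
try {
  assert(EmbeddedKafka.isRunning)
  EmbeddedKafka.publishStringMessageToKafka("lifecycle", "up and running")
} finally {
  EmbeddedKafka.stop() // also deletes the temporary log directories
}
```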
- def startKafka(kafkaLogDir: Directory)(implicit config: EmbeddedKafkaConfig): Unit
  Starts a Kafka broker in memory, storing logs in a specific location.
  - kafkaLogDir
    the path for the Kafka logs
  - config
    an implicit EmbeddedKafkaConfig
- def startKafka(config: EmbeddedKafkaConfig, kafkaLogDir: Directory): KafkaServer
  - Definition Classes: EmbeddedKafkaSupport
- def startZooKeeper(zkLogsDir: Directory)(implicit config: EmbeddedKafkaConfig): Unit
  Starts a ZooKeeper instance in memory, storing logs in a specific location.
  - zkLogsDir
    the path for the ZooKeeper logs
  - config
    an implicit EmbeddedKafkaConfig
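When the log locations matter, ZooKeeper and Kafka can be started separately with explicit directories. A sketch assuming Directory here is scala.reflect.io.Directory and using temporary paths chosen only for illustration:

```scala
import java.nio.file.Files
import scala.reflect.io.Directory
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

// Explicit log directories instead of the temporary ones used by start()
val zkLogs    = new Directory(Files.createTempDirectory("zookeeper-logs").toFile)
val kafkaLogs = new Directory(Files.createTempDirectory("kafka-logs").toFile)

EmbeddedKafka.startZooKeeper(zkLogs)
EmbeddedKafka.startKafka(kafkaLogs)
// ... run the test ...
EmbeddedKafka.stop()
```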
- def startZooKeeper(zooKeeperPort: Int, zkLogsDir: Directory): ServerCnxnFactory
  - Definition Classes: EmbeddedKafkaSupport
- def stop(): Unit
  Stops the in-memory ZooKeeper instance and Kafka broker, and deletes the log directories.
- def stopKafka(): Unit
  Stops the in-memory Kafka instance, preserving the logs directory.
- def stopZooKeeper(): Unit
  Stops the in-memory ZooKeeper instance, preserving the logs directory.
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- def withRunningKafka(body: ⇒ Any)(implicit config: EmbeddedKafkaConfig): Any
  Starts a ZooKeeper instance and a Kafka broker, then executes the body passed as a parameter.
  - body
    the function to execute
  - config
    an implicit EmbeddedKafkaConfig
  - Definition Classes: EmbeddedKafkaSupport
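A sketch of the loan-pattern usage, where the broker only lives for the duration of the body (the package import and topic name are illustrative assumptions):

```scala
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

// ZooKeeper and Kafka are started before the body runs and stopped afterwards
EmbeddedKafka.withRunningKafka {
  EmbeddedKafka.publishStringMessageToKafka("greetings", "hello")
  val msg = EmbeddedKafka.consumeFirstStringMessageFrom("greetings")
  assert(msg == "hello")
}
```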
- val zkConnectionTimeoutMs: Int
  - Definition Classes: EmbeddedKafkaSupport
- val zkSecurityEnabled: Boolean
  - Definition Classes: EmbeddedKafkaSupport
- val zkSessionTimeoutMs: Int
  - Definition Classes: EmbeddedKafkaSupport
- object aKafkaProducer
  - Definition Classes: EmbeddedKafkaSupport