Use of org.apache.kafka.streams.KafkaClientSupplier in project kafka by apache: class StreamsProducerTest, method shouldThrowTimeoutExceptionOnEosInitTxTimeout.
@Test
public void shouldThrowTimeoutExceptionOnEosInitTxTimeout() {
    // use `nonEosMockProducer` instead of `eosMockProducer` to avoid double Tx-Init
    nonEosMockProducer.initTransactionException = new TimeoutException("KABOOM!");
    final KafkaClientSupplier clientSupplier = new MockClientSupplier() {
        @Override
        public Producer<byte[], byte[]> getProducer(final Map<String, Object> config) {
            return nonEosMockProducer;
        }
    };
    final StreamsProducer streamsProducer = new StreamsProducer(
        eosAlphaConfig,
        "threadId",
        clientSupplier,
        new TaskId(0, 0),
        null,
        logContext,
        mockTime
    );
    final TimeoutException thrown = assertThrows(TimeoutException.class, streamsProducer::initTransaction);
    assertThat(thrown.getMessage(), is("KABOOM!"));
}
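The nonEosMockProducer fixture referenced above is defined in the test class setup, which is not part of this snippet. A minimal sketch of such a fixture, assuming the standard MockProducer client; the constructor arguments here are illustrative, not the actual StreamsProducerTest setup:

// Sketch only (assumed fixture, not the actual StreamsProducerTest setup):
// uses org.apache.kafka.clients.producer.MockProducer and
// org.apache.kafka.common.serialization.ByteArraySerializer.
// The mock's public initTransactionException field is what the test assigns
// above to force initTransactions() to fail.
final MockProducer<byte[], byte[]> nonEosMockProducer =
    new MockProducer<>(true, new ByteArraySerializer(), new ByteArraySerializer());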
Use of org.apache.kafka.streams.KafkaClientSupplier in project kafka by apache: class StreamsUpgradeTest, method buildStreams.
public static KafkaStreams buildStreams(final Properties streamsProperties) {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Void, Void> dataStream = builder.stream("data");
    dataStream.process(SmokeTestUtil.printProcessorSupplier("data"));
    dataStream.to("echo");

    final Properties config = new Properties();
    config.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "StreamsUpgradeTest");
    config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000L);
    config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());
    config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.ByteArraySerde.class.getName());

    final KafkaClientSupplier kafkaClientSupplier;
    if (streamsProperties.containsKey("test.future.metadata")) {
        kafkaClientSupplier = new FutureKafkaClientSupplier();
    } else {
        kafkaClientSupplier = new DefaultKafkaClientSupplier();
    }
    config.putAll(streamsProperties);

    return new KafkaStreams(builder.build(), config, kafkaClientSupplier);
}
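FutureKafkaClientSupplier, DefaultKafkaClientSupplier, and the MockClientSupplier used in the test examples all implement the same KafkaClientSupplier interface, which is how Kafka Streams lets callers control every client it creates internally. A minimal sketch of a custom supplier, assuming a recent Kafka Streams version where the interface exposes getAdmin/getProducer/getConsumer/getRestoreConsumer/getGlobalConsumer; the LoggingClientSupplier name and the log line are purely illustrative:

import java.util.Map;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.streams.KafkaClientSupplier;

// Sketch of a custom KafkaClientSupplier: every client Kafka Streams creates
// internally is routed through these factory methods, which is what lets the
// tests above swap in a mock producer.
public class LoggingClientSupplier implements KafkaClientSupplier {
    @Override
    public Admin getAdmin(final Map<String, Object> config) {
        return Admin.create(config);
    }

    @Override
    public Producer<byte[], byte[]> getProducer(final Map<String, Object> config) {
        System.out.println("Creating producer with config: " + config);
        return new KafkaProducer<>(config, new ByteArraySerializer(), new ByteArraySerializer());
    }

    @Override
    public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
        return new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    }

    @Override
    public Consumer<byte[], byte[]> getRestoreConsumer(final Map<String, Object> config) {
        return new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    }

    @Override
    public Consumer<byte[], byte[]> getGlobalConsumer(final Map<String, Object> config) {
        return new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer());
    }
}

Such a supplier would be passed as the third argument of the KafkaStreams constructor, exactly as buildStreams does with kafkaClientSupplier above.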
Use of org.apache.kafka.streams.KafkaClientSupplier in project kafka by apache: class StreamsProducerTest, method shouldThrowStreamsExceptionOnEosInitError.
@Test
public void shouldThrowStreamsExceptionOnEosInitError() {
    // use `nonEosMockProducer` instead of `eosMockProducer` to avoid double Tx-Init
    nonEosMockProducer.initTransactionException = new KafkaException("KABOOM!");
    final KafkaClientSupplier clientSupplier = new MockClientSupplier() {
        @Override
        public Producer<byte[], byte[]> getProducer(final Map<String, Object> config) {
            return nonEosMockProducer;
        }
    };
    final StreamsProducer streamsProducer = new StreamsProducer(
        eosAlphaConfig,
        "threadId",
        clientSupplier,
        new TaskId(0, 0),
        null,
        logContext,
        mockTime
    );
    final StreamsException thrown = assertThrows(StreamsException.class, streamsProducer::initTransaction);
    assertThat(thrown.getCause(), is(nonEosMockProducer.initTransactionException));
    assertThat(thrown.getMessage(), is("Error encountered trying to initialize transactions [test]"));
}
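The eosAlphaConfig fixture shared by these StreamsProducerTest examples is likewise defined outside the snippets shown here. A rough sketch of an exactly-once ("eos-alpha") StreamsConfig, with assumed, illustrative values rather than the actual fixture:

// Sketch only: an exactly-once ("eos-alpha") configuration similar in spirit
// to the eosAlphaConfig fixture; the application id and bootstrap servers are
// illustrative values, not taken from the actual test class.
final Map<String, Object> props = new HashMap<>();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "test");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);
final StreamsConfig eosAlphaConfig = new StreamsConfig(props);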
Use of org.apache.kafka.streams.KafkaClientSupplier in project kafka by apache: class StreamsProducerTest, method shouldFailOnEosInitFatal.
@Test
public void shouldFailOnEosInitFatal() {
    // use `nonEosMockProducer` instead of `eosMockProducer` to avoid double Tx-Init
    nonEosMockProducer.initTransactionException = new RuntimeException("KABOOM!");
    final KafkaClientSupplier clientSupplier = new MockClientSupplier() {
        @Override
        public Producer<byte[], byte[]> getProducer(final Map<String, Object> config) {
            return nonEosMockProducer;
        }
    };
    final StreamsProducer streamsProducer = new StreamsProducer(
        eosAlphaConfig,
        "threadId",
        clientSupplier,
        new TaskId(0, 0),
        null,
        logContext,
        mockTime
    );
    final RuntimeException thrown = assertThrows(RuntimeException.class, streamsProducer::initTransaction);
    assertThat(thrown.getMessage(), is("KABOOM!"));
}