Use of org.apache.kafka.clients.producer.MockProducer in project kafka by apache.
The class SinkNodeTest, method shouldHandleNullKeysWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch:
@Test
@SuppressWarnings("unchecked")
public void shouldHandleNullKeysWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch() {
    // Given
    final Serializer anySerializer = Serdes.Bytes().serializer();
    final StateSerdes anyStateSerde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);
    final MockProcessorContext context = new MockProcessorContext(anyStateSerde,
        new RecordCollectorImpl(new MockProducer<byte[], byte[]>(true, anySerializer, anySerializer), null));
    context.setTime(1);
    final SinkNode sink = new SinkNode<>("anyNodeName", "any-output-topic", anySerializer, anySerializer, null);
    sink.init(context);
    final String invalidValueToTriggerSerializerMismatch = "";
    // When/Then
    try {
        sink.process(null, invalidValueToTriggerSerializerMismatch);
        fail("Should have thrown StreamsException");
    } catch (final StreamsException e) {
        assertThat(e.getCause(), instanceOf(ClassCastException.class));
        assertThat(e.getMessage(), containsString("unknown because key is null"));
    }
}
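For comparison, the MockProducer handed to RecordCollectorImpl above can be exercised on its own. A minimal, self-contained sketch (class name, topic, and values are illustrative) of an auto-completing MockProducer, where every send() is acknowledged immediately:

import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

public class MockProducerBasics {
    public static void main(final String[] args) throws ExecutionException, InterruptedException {
        // autoComplete = true: each send() completes successfully right away.
        final MockProducer<String, String> producer =
            new MockProducer<>(true, new StringSerializer(), new StringSerializer());

        final RecordMetadata metadata =
            producer.send(new ProducerRecord<>("any-output-topic", "key", "value")).get();

        // With the default empty mock cluster, records land on partition 0 with offsets starting at 0.
        System.out.println("partition = " + metadata.partition() + ", offset = " + metadata.offset());
        // history() keeps every ProducerRecord passed to send(), useful for assertions.
        System.out.println("records sent = " + producer.history().size());
        producer.close();
    }
}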
Use of org.apache.kafka.clients.producer.MockProducer in project kafka by apache.
The class RecordCollectorTest, method shouldThrowStreamsExceptionAfterMaxAttempts:
@SuppressWarnings("unchecked")
@Test(expected = StreamsException.class)
public void shouldThrowStreamsExceptionAfterMaxAttempts() throws Exception {
    final RecordCollector collector = new RecordCollectorImpl(
        new MockProducer(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {
            @Override
            public synchronized Future<RecordMetadata> send(final ProducerRecord record, final Callback callback) {
                // Every attempt times out, so the collector exhausts its retries and wraps the failure.
                throw new TimeoutException();
            }
        }, "test");
    collector.send("topic1", "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
}
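The anonymous subclass above is the usual way to make every send() fail. A standalone sketch of the same failure-injection pattern (class name, topic, and payloads are illustrative):

import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.serialization.ByteArraySerializer;

public class AlwaysTimingOutProducer {
    public static void main(final String[] args) {
        final MockProducer<byte[], byte[]> producer =
            new MockProducer<byte[], byte[]>(true, new ByteArraySerializer(), new ByteArraySerializer()) {
                @Override
                public synchronized Future<RecordMetadata> send(final ProducerRecord<byte[], byte[]> record,
                                                                final Callback callback) {
                    // Simulate a broker that never acknowledges within the timeout.
                    throw new TimeoutException("simulated timeout");
                }
            };
        try {
            producer.send(new ProducerRecord<>("topic1", new byte[0], new byte[0]));
        } catch (final TimeoutException e) {
            System.out.println("send failed as expected: " + e.getMessage());
        }
    }
}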
Use of org.apache.kafka.clients.producer.MockProducer in project kafka by apache.
The class RecordCollectorTest, method shouldThrowStreamsExceptionOnFlushIfASendFailed:
@SuppressWarnings("unchecked")
@Test(expected = StreamsException.class)
public void shouldThrowStreamsExceptionOnFlushIfASendFailed() throws Exception {
    final RecordCollector collector = new RecordCollectorImpl(
        new MockProducer(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {
            @Override
            public synchronized Future<RecordMetadata> send(final ProducerRecord record, final Callback callback) {
                // Complete the send exceptionally; the collector surfaces the error on the next flush().
                callback.onCompletion(null, new Exception());
                return null;
            }
        }, "test");
    collector.send("topic1", "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.flush();
}
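Overriding send() to complete the callback exceptionally is one option. When constructed with autoComplete = false, MockProducer also offers completeNext() and errorNext() to finish in-flight sends by hand; a sketch (class name, topic, key, and value are illustrative):

import java.util.concurrent.atomic.AtomicReference;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class ErrorNextSketch {
    public static void main(final String[] args) {
        // autoComplete = false: sends stay in flight until completeNext()/errorNext() is called.
        final MockProducer<String, String> producer =
            new MockProducer<>(false, new StringSerializer(), new StringSerializer());

        final AtomicReference<Exception> sendError = new AtomicReference<>();
        producer.send(new ProducerRecord<>("topic1", "3", "0"),
            (metadata, exception) -> sendError.set(exception));

        // Fail the oldest in-flight send; its callback fires with the given exception.
        producer.errorNext(new RuntimeException("simulated send failure"));
        System.out.println("callback saw: " + sendError.get());
    }
}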
Use of org.apache.kafka.clients.producer.MockProducer in project kafka by apache.
The class RecordCollectorTest, method shouldRetryWhenTimeoutExceptionOccursOnSend:
@SuppressWarnings("unchecked")
@Test
public void shouldRetryWhenTimeoutExceptionOccursOnSend() throws Exception {
    final AtomicInteger attempt = new AtomicInteger(0);
    final RecordCollectorImpl collector = new RecordCollectorImpl(
        new MockProducer(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {
            @Override
            public synchronized Future<RecordMetadata> send(final ProducerRecord record, final Callback callback) {
                // Fail only the first attempt; the retry is delegated to the real MockProducer.
                if (attempt.getAndIncrement() == 0) {
                    throw new TimeoutException();
                }
                return super.send(record, callback);
            }
        }, "test");
    collector.send("topic1", "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    final Long offset = collector.offsets().get(new TopicPartition("topic1", 0));
    assertEquals(Long.valueOf(0L), offset);
}
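The final assertion relies on MockProducer assigning offsets per partition starting at 0, just like a real broker. A sketch (class name, topic, and values are illustrative) observing that bookkeeping directly:

import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class OffsetBookkeepingSketch {
    public static void main(final String[] args) throws ExecutionException, InterruptedException {
        final MockProducer<String, String> producer =
            new MockProducer<>(true, new StringSerializer(), new StringSerializer());

        // Offsets are assigned sequentially per partition, starting at 0.
        final long first = producer.send(new ProducerRecord<>("topic1", "3", "0")).get().offset();
        final long second = producer.send(new ProducerRecord<>("topic1", "3", "1")).get().offset();

        System.out.println("offsets: " + first + ", " + second); // 0, 1
        System.out.println("history size: " + producer.history().size()); // 2
    }
}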