Usage of org.apache.kafka.streams.errors.AlwaysContinueProductionExceptionHandler in the project apache-kafka-on-k8s (banzaicloud), from the RecordCollectorTest method shouldNotThrowStreamsExceptionOnFlushIfASendFailedWithContinueExceptionHandler:
/**
 * Verifies that {@code flush()} does not rethrow a send failure when the
 * {@link AlwaysContinueProductionExceptionHandler} is configured: the handler
 * instructs Streams to log-and-continue, so the failed record is dropped
 * without surfacing a {@code StreamsException}.
 */
@Test
public void shouldNotThrowStreamsExceptionOnFlushIfASendFailedWithContinueExceptionHandler() {
    // Producer stub whose send() always reports an asynchronous failure via the callback.
    // MockProducer and ProducerRecord are parameterized so no unchecked suppression is needed.
    final RecordCollector collector = new RecordCollectorImpl(
        new MockProducer<byte[], byte[]>(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {
            @Override
            public synchronized Future<RecordMetadata> send(final ProducerRecord<byte[], byte[]> record, final Callback callback) {
                callback.onCompletion(null, new Exception());
                return null;
            }
        },
        "test",
        logContext,
        new AlwaysContinueProductionExceptionHandler());

    collector.send("topic1", "3", "0", null, stringSerializer, stringSerializer, streamPartitioner);

    // Must not throw: the continue handler swallows the production error.
    collector.flush();
}
Usage of org.apache.kafka.streams.errors.AlwaysContinueProductionExceptionHandler in the project apache-kafka-on-k8s (banzaicloud), from the RecordCollectorTest method shouldThrowIfTopicIsUnknownWithContinueExceptionHandler:
/**
 * Verifies that sending to a topic with no available partition metadata throws
 * a {@link StreamsException} even when the continue-on-error handler is
 * configured: an unknown topic is not a production error the handler can skip.
 */
@Test(expected = StreamsException.class)
public void shouldThrowIfTopicIsUnknownWithContinueExceptionHandler() {
    // Producer stub that reports no partitions for any topic. Using the
    // type-safe Collections.emptyList() instead of the raw EMPTY_LIST field
    // (and a parameterized MockProducer) removes the need for @SuppressWarnings.
    final RecordCollector collector = new RecordCollectorImpl(
        new MockProducer<byte[], byte[]>(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {
            @Override
            public List<PartitionInfo> partitionsFor(final String topic) {
                return Collections.emptyList();
            }
        },
        "test",
        logContext,
        new AlwaysContinueProductionExceptionHandler());

    // Expected to throw: no partitions are known for "topic1".
    collector.send("topic1", "3", "0", null, stringSerializer, stringSerializer, streamPartitioner);
}
Usage of org.apache.kafka.streams.errors.AlwaysContinueProductionExceptionHandler in the project kafka (apache), from the RecordCollectorTest method shouldThrowStreamsExceptionOnSubsequentCloseIfFatalEvenWithContinueExceptionHandler:
/**
 * Verifies that a fatal error ({@link AuthenticationException}) recorded during
 * an earlier send is rethrown as a {@link StreamsException} on a subsequent
 * {@code closeClean()}, even though the continue-on-error handler is
 * configured: fatal errors bypass the handler entirely.
 */
@Test
public void shouldThrowStreamsExceptionOnSubsequentCloseIfFatalEvenWithContinueExceptionHandler() {
    final KafkaException fatalError = new AuthenticationException("KABOOM!");
    final RecordCollector collector = new RecordCollectorImpl(
        logContext,
        taskId,
        getExceptionalStreamsProducerOnSend(fatalError),
        new AlwaysContinueProductionExceptionHandler(),
        streamsMetrics);

    // The first send records the fatal error internally.
    collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);

    // Closing afterwards must surface that fatal error.
    final StreamsException streamsException = assertThrows(StreamsException.class, collector::closeClean);
    assertEquals(fatalError, streamsException.getCause());

    final String expectedMessage =
        "Error encountered sending record to topic topic for task 0_0 due to:"
            + "\norg.apache.kafka.common.errors.AuthenticationException: KABOOM!"
            + "\nWritten offsets would not be recorded and no more records would be sent since this is a fatal error.";
    assertThat(streamsException.getMessage(), equalTo(expectedMessage));
}
Usage of org.apache.kafka.streams.errors.AlwaysContinueProductionExceptionHandler in the project kafka (apache), from the RecordCollectorTest method shouldThrowStreamsExceptionOnSubsequentSendIfFatalEvenWithContinueExceptionHandler:
/**
 * Verifies that a fatal error ({@link AuthenticationException}) recorded during
 * an earlier send is rethrown as a {@link StreamsException} on the next
 * {@code send()}, even though the continue-on-error handler is configured:
 * fatal errors bypass the handler entirely.
 */
@Test
public void shouldThrowStreamsExceptionOnSubsequentSendIfFatalEvenWithContinueExceptionHandler() {
    final KafkaException fatalError = new AuthenticationException("KABOOM!");
    final RecordCollector collector = new RecordCollectorImpl(
        logContext,
        taskId,
        getExceptionalStreamsProducerOnSend(fatalError),
        new AlwaysContinueProductionExceptionHandler(),
        streamsMetrics);

    // The first send records the fatal error internally.
    collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);

    // Any subsequent send must surface that fatal error.
    final StreamsException streamsException = assertThrows(
        StreamsException.class,
        () -> collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner));
    assertEquals(fatalError, streamsException.getCause());

    final String expectedMessage =
        "Error encountered sending record to topic topic for task 0_0 due to:"
            + "\norg.apache.kafka.common.errors.AuthenticationException: KABOOM!"
            + "\nWritten offsets would not be recorded and no more records would be sent since this is a fatal error.";
    assertThat(streamsException.getMessage(), equalTo(expectedMessage));
}
Usage of org.apache.kafka.streams.errors.AlwaysContinueProductionExceptionHandler in the project apache-kafka-on-k8s (banzaicloud), from the RecordCollectorTest method shouldNotThrowStreamsExceptionOnCloseIfASendFailedWithContinueExceptionHandler:
/**
 * Verifies that {@code close()} does not rethrow a send failure when the
 * {@link AlwaysContinueProductionExceptionHandler} is configured: the handler
 * instructs Streams to log-and-continue, so the failed record is dropped
 * without surfacing a {@code StreamsException}.
 */
@Test
public void shouldNotThrowStreamsExceptionOnCloseIfASendFailedWithContinueExceptionHandler() {
    // Producer stub whose send() always reports an asynchronous failure via the callback.
    // MockProducer and ProducerRecord are parameterized so no unchecked suppression is needed.
    final RecordCollector collector = new RecordCollectorImpl(
        new MockProducer<byte[], byte[]>(cluster, true, new DefaultPartitioner(), byteArraySerializer, byteArraySerializer) {
            @Override
            public synchronized Future<RecordMetadata> send(final ProducerRecord<byte[], byte[]> record, final Callback callback) {
                callback.onCompletion(null, new Exception());
                return null;
            }
        },
        "test",
        logContext,
        new AlwaysContinueProductionExceptionHandler());

    collector.send("topic1", "3", "0", null, stringSerializer, stringSerializer, streamPartitioner);

    // Must not throw: the continue handler swallows the production error.
    collector.close();
}
Aggregations