Use of org.apache.kafka.test.MockSourceNode in project kafka by apache.
Class StreamTaskTest, method shouldWrapKafkaExceptionsWithStreamsExceptionAndAddContext.
@SuppressWarnings("unchecked")
@Test
public void shouldWrapKafkaExceptionsWithStreamsExceptionAndAddContext() throws Exception {
    final MockSourceNode processorNode = new MockSourceNode(topic1, intDeserializer, intDeserializer) {
        @Override
        public void process(final Object key, final Object value) {
            throw new KafkaException("KABOOM!");
        }
    };
    final List<ProcessorNode> processorNodes = Collections.<ProcessorNode>singletonList(processorNode);
    final Map<String, SourceNode> sourceNodes = Collections.<String, SourceNode>singletonMap(topic1[0], processorNode);
    final ProcessorTopology topology = new ProcessorTopology(processorNodes,
                                                             sourceNodes,
                                                             Collections.<String, SinkNode>emptyMap(),
                                                             Collections.<StateStore>emptyList(),
                                                             Collections.<String, String>emptyMap(),
                                                             Collections.<StateStore>emptyList());
    task.close();
    task = new StreamTask(taskId00, applicationId, partitions, topology, consumer, changelogReader,
                          config, streamsMetrics, stateDirectory, testCache, time, recordCollector);
    final int offset = 20;
    task.addRecords(partition1, Collections.singletonList(
        new ConsumerRecord<>(partition1.topic(), partition1.partition(), offset, 0L,
                             TimestampType.CREATE_TIME, 0L, 0, 0, recordKey, recordValue)));
    try {
        task.process();
        fail("Should've thrown StreamsException");
    } catch (final StreamsException e) {
        final String message = e.getMessage();
        assertTrue("message=" + message + " should contain topic", message.contains("topic=" + topic1[0]));
        assertTrue("message=" + message + " should contain partition", message.contains("partition=" + partition1.partition()));
        assertTrue("message=" + message + " should contain offset", message.contains("offset=" + offset));
        assertTrue("message=" + message + " should contain processor", message.contains("processor=" + processorNode.name()));
    }
}
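The test relies on StreamTask catching the KafkaException thrown in process() and rethrowing it as a StreamsException whose message carries the record's source context. A minimal sketch of that wrapping pattern (illustrative only, not the actual StreamTask code; the method and parameter names are invented for this example):

import org.apache.kafka.common.KafkaException;
import org.apache.kafka.streams.errors.StreamsException;

public class ProcessContextWrapping {
    // Runs a processing step and, on failure, attaches the source-record
    // coordinates that the assertions above look for in the exception message.
    static void processWithContext(final Runnable step,
                                   final String processorName,
                                   final String topic,
                                   final int partition,
                                   final long offset) {
        try {
            step.run();
        } catch (final KafkaException e) {
            throw new StreamsException(String.format(
                "Exception caught in process. processor=%s, topic=%s, partition=%d, offset=%d",
                processorName, topic, partition, offset), e);
        }
    }
}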
Use of org.apache.kafka.test.MockSourceNode in project kafka by apache.
Class StreamTaskTest, method createTaskThatThrowsExceptionOnClose.
@SuppressWarnings("unchecked")
private StreamTask createTaskThatThrowsExceptionOnClose() {
    final MockSourceNode processorNode = new MockSourceNode(topic1, intDeserializer, intDeserializer) {
        @Override
        public void close() {
            throw new RuntimeException("KABOOM!");
        }
    };
    final List<ProcessorNode> processorNodes = Arrays.asList(processorNode, processor, source1, source2);
    final Map<String, SourceNode> sourceNodes = Collections.<String, SourceNode>singletonMap(topic1[0], processorNode);
    final ProcessorTopology topology = new ProcessorTopology(processorNodes,
                                                             sourceNodes,
                                                             Collections.<String, SinkNode>emptyMap(),
                                                             Collections.<StateStore>emptyList(),
                                                             Collections.<String, String>emptyMap(),
                                                             Collections.<StateStore>emptyList());
    return new StreamTask(taskId00, applicationId, partitions, topology, consumer, changelogReader,
                          config, streamsMetrics, stateDirectory, testCache, time, recordCollector);
}
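A hypothetical caller of this helper (not part of the source) would assert that the RuntimeException thrown by the node's close() surfaces when the task itself is closed. Note that the close(...) signature below is an assumption; it has changed across Kafka versions:

@Test
public void shouldThrowOnCloseIfProcessorNodeFails() {
    task = createTaskThatThrowsExceptionOnClose();
    try {
        task.close(true, false); // clean close; the two-argument signature is assumed here
        fail("Should have thrown RuntimeException");
    } catch (final RuntimeException expected) {
        // The exception may be wrapped, so only check that the original message survived.
        assertTrue(expected.getMessage().contains("KABOOM!"));
    }
}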
Use of org.apache.kafka.test.MockSourceNode in project apache-kafka-on-k8s by banzaicloud.
Class SourceNodeTest, method shouldProvideTopicHeadersAndDataToValueDeserializer.
@Test
public void shouldProvideTopicHeadersAndDataToValueDeserializer() {
    final SourceNode<String, String> sourceNode =
        new MockSourceNode<>(new String[] {""}, new TheExtendedDeserializer(), new TheExtendedDeserializer());
    final RecordHeaders headers = new RecordHeaders();
    final String deserializedValue = sourceNode.deserializeValue("topic", headers, "data".getBytes(StandardCharsets.UTF_8));
    assertThat(deserializedValue, is("topic" + headers + "data"));
}
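The assertion pins down what TheExtendedDeserializer must do: echo back the topic, the headers' toString(), and the raw payload, proving all three reached the deserializer. A reconstruction consistent with that behavior (the real class is nested in SourceNodeTest; this is a sketch, not the original source):

import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.serialization.ExtendedDeserializer;

public class TheExtendedDeserializer implements ExtendedDeserializer<String> {
    @Override
    public String deserialize(final String topic, final Headers headers, final byte[] data) {
        // Concatenate everything so the test can verify each argument was passed through.
        return topic + headers + new String(data, StandardCharsets.UTF_8);
    }

    @Override
    public String deserialize(final String topic, final byte[] data) {
        return deserialize(topic, null, data);
    }

    @Override
    public void configure(final Map<String, ?> configs, final boolean isKey) { }

    @Override
    public void close() { }
}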
Use of org.apache.kafka.test.MockSourceNode in project kafka by apache.
Class StreamTaskTest, method shouldRecordE2ELatencyOnSourceNodeAndTerminalNodes.
@Test
public void shouldRecordE2ELatencyOnSourceNodeAndTerminalNodes() {
    time = new MockTime(0L, 0L, 0L);
    metrics = new Metrics(new MetricConfig().recordLevel(Sensor.RecordingLevel.INFO), time);

    // Create a processor that only forwards even keys to test the metrics at the source and terminal nodes
    final MockSourceNode<Integer, Integer> evenKeyForwardingSourceNode =
        new MockSourceNode<Integer, Integer>(intDeserializer, intDeserializer) {
            InternalProcessorContext<Integer, Integer> context;

            @Override
            public void init(final InternalProcessorContext<Integer, Integer> context) {
                this.context = context;
                super.init(context);
            }

            @Override
            public void process(final Record<Integer, Integer> record) {
                if (record.key() % 2 == 0) {
                    context.forward(record);
                }
            }
        };

    task = createStatelessTaskWithForwardingTopology(evenKeyForwardingSourceNode);
    task.initializeIfNeeded();
    task.completeRestoration(noOpResetter -> { });

    final String sourceNodeName = evenKeyForwardingSourceNode.name();
    final String terminalNodeName = processorStreamTime.name();

    final Metric sourceAvg = getProcessorMetric("record-e2e-latency", "%s-avg", task.id().toString(), sourceNodeName, StreamsConfig.METRICS_LATEST);
    final Metric sourceMin = getProcessorMetric("record-e2e-latency", "%s-min", task.id().toString(), sourceNodeName, StreamsConfig.METRICS_LATEST);
    final Metric sourceMax = getProcessorMetric("record-e2e-latency", "%s-max", task.id().toString(), sourceNodeName, StreamsConfig.METRICS_LATEST);
    final Metric terminalAvg = getProcessorMetric("record-e2e-latency", "%s-avg", task.id().toString(), terminalNodeName, StreamsConfig.METRICS_LATEST);
    final Metric terminalMin = getProcessorMetric("record-e2e-latency", "%s-min", task.id().toString(), terminalNodeName, StreamsConfig.METRICS_LATEST);
    final Metric terminalMax = getProcessorMetric("record-e2e-latency", "%s-max", task.id().toString(), terminalNodeName, StreamsConfig.METRICS_LATEST);

    // e2e latency = 10
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(0, 0L)));
    task.process(10L);
    assertThat(sourceAvg.metricValue(), equalTo(10.0));
    assertThat(sourceMin.metricValue(), equalTo(10.0));
    assertThat(sourceMax.metricValue(), equalTo(10.0));

    // key 0: reaches terminal node
    assertThat(terminalAvg.metricValue(), equalTo(10.0));
    assertThat(terminalMin.metricValue(), equalTo(10.0));
    assertThat(terminalMax.metricValue(), equalTo(10.0));

    // e2e latency = 15
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(1, 0L)));
    task.process(15L);
    assertThat(sourceAvg.metricValue(), equalTo(12.5));
    assertThat(sourceMin.metricValue(), equalTo(10.0));
    assertThat(sourceMax.metricValue(), equalTo(15.0));

    // key 1: stops at source, doesn't affect terminal node metrics
    assertThat(terminalAvg.metricValue(), equalTo(10.0));
    assertThat(terminalMin.metricValue(), equalTo(10.0));
    assertThat(terminalMax.metricValue(), equalTo(10.0));

    // e2e latency = 23
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(2, 0L)));
    task.process(23L);
    assertThat(sourceAvg.metricValue(), equalTo(16.0));
    assertThat(sourceMin.metricValue(), equalTo(10.0));
    assertThat(sourceMax.metricValue(), equalTo(23.0));

    // key 2: reaches terminal node
    assertThat(terminalAvg.metricValue(), equalTo(16.5));
    assertThat(terminalMin.metricValue(), equalTo(10.0));
    assertThat(terminalMax.metricValue(), equalTo(23.0));

    // e2e latency = 5
    task.addRecords(partition1, singletonList(getConsumerRecordWithOffsetAsTimestamp(3, 0L)));
    task.process(5L);
    assertThat(sourceAvg.metricValue(), equalTo(13.25));
    assertThat(sourceMin.metricValue(), equalTo(5.0));
    assertThat(sourceMax.metricValue(), equalTo(23.0));

    // key 3: stops at source, doesn't affect terminal node metrics
    assertThat(terminalAvg.metricValue(), equalTo(16.5));
    assertThat(terminalMin.metricValue(), equalTo(10.0));
    assertThat(terminalMax.metricValue(), equalTo(23.0));
}
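Since every record in this test carries timestamp 0L, the e2e latency of a record is simply the wall-clock time passed to task.process(t). The expected metric values are then plain arithmetic over the latencies each node has seen, which a standalone check confirms (illustrative only, not Streams code):

import java.util.stream.DoubleStream;

public class E2eLatencyArithmetic {
    public static void main(final String[] args) {
        final double[] sourceLatencies = {10, 15, 23, 5}; // every key reaches the source node
        final double[] terminalLatencies = {10, 23};      // only even keys (0 and 2) are forwarded
        System.out.println(DoubleStream.of(sourceLatencies).average().getAsDouble());   // 13.25
        System.out.println(DoubleStream.of(sourceLatencies).min().getAsDouble());       // 5.0
        System.out.println(DoubleStream.of(sourceLatencies).max().getAsDouble());       // 23.0
        System.out.println(DoubleStream.of(terminalLatencies).average().getAsDouble()); // 16.5
    }
}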
Use of org.apache.kafka.test.MockSourceNode in project kafka by apache.
Class SourceNodeTest, method shouldProvideTopicHeadersAndDataToKeyDeserializer.
@Test
public void shouldProvideTopicHeadersAndDataToKeyDeserializer() {
    final SourceNode<String, String> sourceNode = new MockSourceNode<>(new TheDeserializer(), new TheDeserializer());
    final RecordHeaders headers = new RecordHeaders();
    final String deserializedKey = sourceNode.deserializeKey("topic", headers, "data".getBytes(StandardCharsets.UTF_8));
    assertThat(deserializedKey, is("topic" + headers + "data"));
}
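By this version of kafka, the headers-aware deserialize method had been folded into the plain Deserializer interface as a default method, so the test no longer needs ExtendedDeserializer. A reconstruction of TheDeserializer consistent with the assertion (the original is nested in SourceNodeTest; this sketch is not the original source):

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.serialization.Deserializer;

public class TheDeserializer implements Deserializer<String> {
    @Override
    public String deserialize(final String topic, final Headers headers, final byte[] data) {
        // Concatenate topic, headers and payload so the test can verify pass-through.
        return topic + headers + new String(data, StandardCharsets.UTF_8);
    }

    @Override
    public String deserialize(final String topic, final byte[] data) {
        return deserialize(topic, null, data);
    }
}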