Use of org.apache.kafka.streams.state.StateSerdes in project kafka by apache: class ProcessorNodeTest, method testMetrics.
@Test
public void testMetrics() {
    final StateSerdes anyStateSerde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);
    final MockProcessorContext context = new MockProcessorContext(anyStateSerde, new RecordCollectorImpl(null, null));
    final ProcessorNode node = new ProcessorNode("name", new NoOpProcessor(), Collections.emptySet());
    node.init(context);

    Metrics metrics = context.baseMetrics();
    String name = "task." + context.taskId() + "." + node.name();
    String[] entities = { "all", name };
    String[] latencyOperations = { "process", "punctuate", "create", "destroy" };
    String throughputOperation = "forward";
    String groupName = "stream-processor-node-metrics";
    Map<String, String> tags = Collections.singletonMap("processor-node-id", node.name());

    for (String operation : latencyOperations) {
        assertNotNull(metrics.getSensor(operation));
        assertNotNull(metrics.getSensor(name + "-" + operation));
    }
    assertNotNull(metrics.getSensor(throughputOperation));

    for (String entity : entities) {
        for (String operation : latencyOperations) {
            assertNotNull(metrics.metrics().get(metrics.metricName(
                entity + "-" + operation + "-latency-avg", groupName,
                "The average latency in milliseconds of " + entity + " " + operation + " operation.", tags)));
            assertNotNull(metrics.metrics().get(metrics.metricName(
                entity + "-" + operation + "-latency-max", groupName,
                "The max latency in milliseconds of " + entity + " " + operation + " operation.", tags)));
            assertNotNull(metrics.metrics().get(metrics.metricName(
                entity + "-" + operation + "-rate", groupName,
                "The average number of occurrence of " + entity + " " + operation + " operation per second.", tags)));
        }
        assertNotNull(metrics.metrics().get(metrics.metricName(
            entity + "-" + throughputOperation + "-rate", groupName,
            "The average number of occurrence of " + entity + " " + throughputOperation + " operation per second.", tags)));
    }
}
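In this test the StateSerdes built by withBuiltinTypes is only a placeholder to satisfy the MockProcessorContext constructor. As a rough illustration of what that factory actually gives you, the hedged sketch below builds the same Bytes/Bytes StateSerdes and round-trips a key through it; the rawKey/keyFrom calls reflect the StateSerdes API of the Kafka version these tests come from and may differ in other releases.

import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.state.StateSerdes;

public class StateSerdesSketch {
    public static void main(final String[] args) {
        // Same placeholder serde as in the test: built-in Bytes serde for both key and value.
        final StateSerdes<Bytes, Bytes> serde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);

        // Round-trip a key: serialize to raw bytes, then deserialize back.
        final Bytes key = Bytes.wrap("some key".getBytes());
        final byte[] raw = serde.rawKey(key);
        final Bytes restored = serde.keyFrom(raw);

        System.out.println(key.equals(restored)); // expected: true
    }
}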
Use of org.apache.kafka.streams.state.StateSerdes in project kafka by apache: class SinkNodeTest, method shouldThrowStreamsExceptionOnKeyValueTypeSerializerMismatch.
@Test
@SuppressWarnings("unchecked")
public void shouldThrowStreamsExceptionOnKeyValueTypeSerializerMismatch() {
    // Given
    final Serializer anySerializer = Serdes.Bytes().serializer();
    final StateSerdes anyStateSerde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);
    final MockProcessorContext context = new MockProcessorContext(anyStateSerde,
        new RecordCollectorImpl(new MockProducer<byte[], byte[]>(true, anySerializer, anySerializer), null));
    context.setTime(0);
    final SinkNode sink = new SinkNode<>("anyNodeName", "any-output-topic", anySerializer, anySerializer, null);
    sink.init(context);
    final String keyOfDifferentTypeThanSerializer = "key with different type";
    final String valueOfDifferentTypeThanSerializer = "value with different type";

    // When/Then
    try {
        sink.process(keyOfDifferentTypeThanSerializer, valueOfDifferentTypeThanSerializer);
        fail("Should have thrown StreamsException");
    } catch (final StreamsException e) {
        assertThat(e.getCause(), instanceOf(ClassCastException.class));
    }
}
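The ClassCastException asserted above comes from handing a String to a serializer that was declared for Bytes; the raw Serializer reference hides the mismatch at compile time, and the cast inside serialize fails at runtime. A minimal sketch of that failure mode, independent of SinkNode:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;

public class SerializerMismatchSketch {
    @SuppressWarnings({"unchecked", "rawtypes"})
    public static void main(final String[] args) {
        // A Bytes serializer referenced through a raw type, as in the test.
        final Serializer anySerializer = Serdes.Bytes().serializer();
        try {
            // Passing a String where Bytes is expected fails inside serialize() with a ClassCastException.
            anySerializer.serialize("any-output-topic", "key with different type");
        } catch (final ClassCastException e) {
            System.out.println("mismatch detected: " + e.getMessage());
        }
    }
}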
Use of org.apache.kafka.streams.state.StateSerdes in project kafka by apache: class SinkNodeTest, method shouldThrowStreamsExceptionOnInputRecordWithInvalidTimestamp.
@Test
@SuppressWarnings("unchecked")
public void shouldThrowStreamsExceptionOnInputRecordWithInvalidTimestamp() {
    // Given
    final Serializer anySerializer = Serdes.Bytes().serializer();
    final StateSerdes anyStateSerde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);
    final MockProcessorContext context = new MockProcessorContext(anyStateSerde,
        new RecordCollectorImpl(new MockProducer<byte[], byte[]>(true, anySerializer, anySerializer), null));
    final SinkNode sink = new SinkNode<>("anyNodeName", "any-output-topic", anySerializer, anySerializer, null);
    sink.init(context);
    final Bytes anyKey = new Bytes("any key".getBytes());
    final Bytes anyValue = new Bytes("any value".getBytes());

    // When/Then
    // ensures a negative timestamp is set for the record we send next
    context.setTime(-1);
    try {
        sink.process(anyKey, anyValue);
        fail("Should have thrown StreamsException");
    } catch (final StreamsException ignored) {
    }
}
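The negative timestamp comes from context.setTime(-1): when the sink forwards the record it takes the timestamp from the processor context, and a negative value is rejected with a StreamsException. A hedged sketch of such a guard follows; it is not the actual Kafka Streams source, only the shape of the check this test relies on.

import org.apache.kafka.streams.errors.StreamsException;

public class TimestampGuardSketch {
    // Hypothetical helper mirroring the validation the test exercises; the real check lives inside Kafka Streams.
    static void validateTimestamp(final long timestamp) {
        if (timestamp < 0) {
            throw new StreamsException("Invalid (negative) timestamp of " + timestamp + " for output record");
        }
    }

    public static void main(final String[] args) {
        validateTimestamp(0);   // fine
        validateTimestamp(-1);  // throws StreamsException, as the test expects
    }
}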
Use of org.apache.kafka.streams.state.StateSerdes in project kafka by apache: class SinkNodeTest, method shouldHandleNullValuesWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch.
@Test
@SuppressWarnings("unchecked")
public void shouldHandleNullValuesWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch() {
    // Given
    final Serializer anySerializer = Serdes.Bytes().serializer();
    final StateSerdes anyStateSerde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);
    final MockProcessorContext context = new MockProcessorContext(anyStateSerde,
        new RecordCollectorImpl(new MockProducer<byte[], byte[]>(true, anySerializer, anySerializer), null));
    context.setTime(1);
    final SinkNode sink = new SinkNode<>("anyNodeName", "any-output-topic", anySerializer, anySerializer, null);
    sink.init(context);
    final String invalidKeyToTriggerSerializerMismatch = "";

    // When/Then
    try {
        sink.process(invalidKeyToTriggerSerializerMismatch, null);
        fail("Should have thrown StreamsException");
    } catch (final StreamsException e) {
        assertThat(e.getCause(), instanceOf(ClassCastException.class));
        assertThat(e.getMessage(), containsString("unknown because value is null"));
    }
}
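The assertion on "unknown because value is null" shows that the sink cannot report the value's runtime class when the value is null, so a placeholder is substituted in the error message. A hedged sketch of that message-building logic, using a hypothetical helper rather than the actual SinkNode code:

public class MismatchMessageSketch {
    // Hypothetical helper: describe a key or value for an error message, tolerating null.
    static String describe(final Object keyOrValue, final String what) {
        return keyOrValue == null
            ? "unknown because " + what + " is null"
            : keyOrValue.getClass().getName();
    }

    public static void main(final String[] args) {
        // Mirrors the test: a non-null key of the wrong type and a null value.
        System.out.println("key type: " + describe("", "key"));
        System.out.println("value type: " + describe(null, "value")); // prints "unknown because value is null"
    }
}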
Use of org.apache.kafka.streams.state.StateSerdes in project kafka by apache: class SinkNodeTest, method shouldHandleNullKeysWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch.
@Test
@SuppressWarnings("unchecked")
public void shouldHandleNullKeysWhenThrowingStreamsExceptionOnKeyValueTypeSerializerMismatch() {
    // Given
    final Serializer anySerializer = Serdes.Bytes().serializer();
    final StateSerdes anyStateSerde = StateSerdes.withBuiltinTypes("anyName", Bytes.class, Bytes.class);
    final MockProcessorContext context = new MockProcessorContext(anyStateSerde,
        new RecordCollectorImpl(new MockProducer<byte[], byte[]>(true, anySerializer, anySerializer), null));
    context.setTime(1);
    final SinkNode sink = new SinkNode<>("anyNodeName", "any-output-topic", anySerializer, anySerializer, null);
    sink.init(context);
    final String invalidValueToTriggerSerializerMismatch = "";

    // When/Then
    try {
        sink.process(null, invalidValueToTriggerSerializerMismatch);
        fail("Should have thrown StreamsException");
    } catch (final StreamsException e) {
        assertThat(e.getCause(), instanceOf(ClassCastException.class));
        assertThat(e.getMessage(), containsString("unknown because key is null"));
    }
}
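All of the SinkNode tests above wire RecordCollectorImpl to a MockProducer constructed with autoComplete set to true, so every send is acknowledged immediately and no broker is needed. The sketch below shows that producer in isolation; the topic name and payloads are arbitrary.

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

public class MockProducerSketch {
    public static void main(final String[] args) {
        // autoComplete = true: sends complete synchronously, which keeps the tests deterministic.
        final MockProducer<byte[], byte[]> producer =
            new MockProducer<>(true, new ByteArraySerializer(), new ByteArraySerializer());

        producer.send(new ProducerRecord<>("any-output-topic", "k".getBytes(), "v".getBytes()));

        // The mock remembers every record it was asked to send; history() returns them for inspection.
        System.out.println(producer.history().size()); // expected: 1
    }
}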