Use of brave.kafka.streams.KafkaStreamsTracingTest.TEST_KEY in project brave by openzipkin: class ITKafkaStreamsTracing, method should_create_spans_from_stream_with_tracing_foreach.
@Test
public void should_create_spans_from_stream_with_tracing_foreach() {
  String inputTopic = testName.getMethodName() + "-input";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
      .process(kafkaStreamsTracing.foreach("foreach-1", (key, value) -> {
        try {
          Thread.sleep(100L);
        } catch (InterruptedException e) {
          e.printStackTrace();
        }
      }));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);
  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));
  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);

  streams.close();
  streams.cleanUp();
}
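For context, here is a minimal sketch of how a topology like the one above might be wired up with Brave outside the test harness. This is not the test's buildKafkaStreams helper; the service name, application id, and bootstrap servers are illustrative placeholders.

// Sketch only: KafkaStreamsTracing.create(..) and kafkaStreamsTracing.kafkaStreams(..)
// come from brave-instrumentation-kafka-streams; configuration values are placeholders.
Tracing tracing = Tracing.newBuilder().localServiceName("streams-app").build();
KafkaStreamsTracing kafkaStreamsTracing = KafkaStreamsTracing.create(tracing);

Properties config = new Properties();
config.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-app");       // illustrative id
config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // illustrative broker

// returns a KafkaStreams whose consumers and producers are traced
KafkaStreams streams = kafkaStreamsTracing.kafkaStreams(topology, config);
streams.start();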
Use of brave.kafka.streams.KafkaStreamsTracingTest.TEST_KEY in project brave by openzipkin: class ITKafkaStreamsTracing, method should_create_spans_from_stream_with_tracing_mark_as_filtered_predicate_true.
@Test
public void should_create_spans_from_stream_with_tracing_mark_as_filtered_predicate_true() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
      .transformValues(kafkaStreamsTracing.markAsFiltered("filter-1", (key, value) -> true))
      .filterNot((k, v) -> Objects.isNull(v))
      .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);
  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));
  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "false");

  // the filter predicate returns true, so the record is not dropped
  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
Use of brave.kafka.streams.KafkaStreamsTracingTest.TEST_KEY in project brave by openzipkin: class ITKafkaStreamsTracing, method should_create_spans_from_stream_with_tracing_mark_as_filtered_predicate_false.
@Test
public void should_create_spans_from_stream_with_tracing_mark_as_filtered_predicate_false() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
      .transformValues(kafkaStreamsTracing.markAsFiltered("filter-2", (key, value) -> false))
      .filterNot((k, v) -> Objects.isNull(v))
      .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);
  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));
  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "true");
  // the filter predicate returns false, so the record is dropped and no producer span is reported

  streams.close();
  streams.cleanUp();
}
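The two tests above rely on the same pattern: the markAsFiltered transformer tags the processor span with KAFKA_STREAMS_FILTERED_TAG and, when the predicate says the record should be dropped, replaces the value with null so that the plain filterNot((k, v) -> Objects.isNull(v)) step downstream removes it. Below is a rough, untraced analog of that null-then-filterNot idea; the topic names and the empty-value predicate are hypothetical, and the usual Kafka Streams imports are assumed.

// Untraced analog of the pattern: a ValueTransformerWithKey nulls out values that
// should be dropped, and filterNot(Objects::isNull) removes them downstream.
// Topic names "words-in"/"words-out" are hypothetical.
ValueTransformerWithKeySupplier<String, String, String> dropEmpty = () ->
    new ValueTransformerWithKey<String, String, String>() {
      @Override public void init(ProcessorContext context) { }

      @Override public String transform(String key, String value) {
        // keep only non-empty values; markAsFiltered would also tag the current span here
        return (value != null && !value.isEmpty()) ? value : null;
      }

      @Override public void close() { }
    };

StreamsBuilder builder = new StreamsBuilder();
builder.stream("words-in", Consumed.with(Serdes.String(), Serdes.String()))
    .transformValues(dropEmpty)
    .filterNot((k, v) -> Objects.isNull(v))
    .to("words-out", Produced.with(Serdes.String(), Serdes.String()));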
Use of brave.kafka.streams.KafkaStreamsTracingTest.TEST_KEY in project brave by openzipkin: class ITKafkaStreamsTracing, method should_create_spans_and_propagate_extra_from_stream_with_multi_processor.
@Test
public void should_create_spans_and_propagate_extra_from_stream_with_multi_processor() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
      .transformValues(kafkaStreamsTracing.peek("transform1", (o, o2) -> {
        TraceContext context = currentTraceContext.get();
        assertThat(BAGGAGE_FIELD.getValue(context)).isEqualTo("user1");
        BAGGAGE_FIELD.updateValue(context, "user2");
      }))
      .transformValues(kafkaStreamsTracing.peek("transform2", (s, s2) -> {
        TraceContext context = currentTraceContext.get();
        assertThat(BAGGAGE_FIELD.getValue(context)).isEqualTo("user2");
      }))
      .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);
  ProducerRecord<String, String> record = new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE);
  record.headers().add(BAGGAGE_FIELD_KEY, "user1".getBytes());
  send(record);
  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanTransform1 = testSpanHandler.takeLocalSpan();
  assertChildOf(spanTransform1, spanInput);

  MutableSpan spanTransform2 = testSpanHandler.takeLocalSpan();
  assertChildOf(spanTransform2, spanTransform1);

  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanTransform2);

  streams.close();
  streams.cleanUp();
}
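The baggage assertions above only pass if the Tracing instance behind kafkaStreamsTracing is configured to propagate BAGGAGE_FIELD over a message header (BAGGAGE_FIELD and BAGGAGE_FIELD_KEY are constants from the shared test setup, not shown here). A minimal sketch of that kind of configuration, using an illustrative field name in place of the test's constant:

// Sketch of baggage configuration; the field name "user-id" is illustrative and
// stands in for the test's BAGGAGE_FIELD. With SingleBaggageField.remote(..) the
// field travels as a propagation header, so getValue/updateValue work across the
// consumer, the two peek processors, and the producer.
BaggageField baggageField = BaggageField.create("user-id");
Tracing tracing = Tracing.newBuilder()
    .propagationFactory(BaggagePropagation.newFactoryBuilder(B3Propagation.FACTORY)
        .add(BaggagePropagationConfig.SingleBaggageField.remote(baggageField))
        .build())
    .build();
KafkaStreamsTracing kafkaStreamsTracing = KafkaStreamsTracing.create(tracing);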
Use of brave.kafka.streams.KafkaStreamsTracingTest.TEST_KEY in project brave by openzipkin: class ITKafkaStreamsTracing, method should_create_spans_from_stream_with_tracing_mark_as_not_filtered_predicate_false.
@Test
public void should_create_spans_from_stream_with_tracing_mark_as_not_filtered_predicate_false() {
  String inputTopic = testName.getMethodName() + "-input";
  String outputTopic = testName.getMethodName() + "-output";

  StreamsBuilder builder = new StreamsBuilder();
  builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
      .transformValues(kafkaStreamsTracing.markAsNotFiltered("filterNot-2", (key, value) -> false))
      .filterNot((k, v) -> Objects.isNull(v))
      .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
  Topology topology = builder.build();

  KafkaStreams streams = buildKafkaStreams(topology);
  send(new ProducerRecord<>(inputTopic, TEST_KEY, TEST_VALUE));
  waitForStreamToRun(streams);

  MutableSpan spanInput = testSpanHandler.takeRemoteSpan(CONSUMER);
  assertThat(spanInput.tags()).containsEntry("kafka.topic", inputTopic);

  MutableSpan spanProcessor = testSpanHandler.takeLocalSpan();
  assertChildOf(spanProcessor, spanInput);
  assertThat(spanProcessor.tags()).containsEntry(KAFKA_STREAMS_FILTERED_TAG, "false");

  // the filterNot predicate returns false, so the record is kept and not dropped
  MutableSpan spanOutput = testSpanHandler.takeRemoteSpan(PRODUCER);
  assertThat(spanOutput.tags()).containsEntry("kafka.topic", outputTopic);
  assertChildOf(spanOutput, spanProcessor);

  streams.close();
  streams.cleanUp();
}
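All of the tests above are integration tests that run a real KafkaStreams instance (send, waitForStreamToRun, and buildKafkaStreams come from the shared test support). As a side note, the same topology shapes can also be exercised in-process, without a broker, using Kafka's TopologyTestDriver; a rough sketch with placeholder topic names and properties:

// Sketch only: drive a built Topology in-process. Topic names must match the
// topology's input/output topics; the properties shown are placeholders.
Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "topology-test");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // not used by the driver

try (TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
  TestInputTopic<String, String> input =
      driver.createInputTopic("input", new StringSerializer(), new StringSerializer());
  TestOutputTopic<String, String> output =
      driver.createOutputTopic("output", new StringDeserializer(), new StringDeserializer());

  input.pipeInput("key", "value");
  // records that survive the filterNot step show up here
  System.out.println(output.readKeyValuesToList());
}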