Example use of io.smallrye.reactive.messaging.kafka.companion.ConsumerTask in the project smallrye-reactive-messaging by smallrye, taken from the class KafkaSinkTest, method testInvalidPayloadType.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testInvalidPayloadType() {
    // Start consuming before producing; we expect exactly 4 integer records,
    // because 2 of the 6 emitted payloads are not serializable as integers.
    ConsumerTask<String, Integer> records = companion.consumeIntegers()
            .fromTopics(topic, 4, Duration.ofSeconds(10));

    MapBasedConfig sinkConfig = getBaseConfig()
            .with("topic", topic)
            .with("value.serializer", IntegerSerializer.class.getName())
            .with("partition", 0)
            .with("max-inflight-messages", 1L)
            .with("channel-name", "my-channel")
            .with("retries", 0L); // disable retry.

    KafkaConnectorOutgoingConfiguration outgoingConfiguration = new KafkaConnectorOutgoingConfiguration(sinkConfig);
    CountKafkaCdiEvents cdiEvents = new CountKafkaCdiEvents();
    sink = new KafkaSink(outgoingConfiguration, cdiEvents, UnsatisfiedInstance.instance());

    // Block until the sink reports a healthy readiness check.
    await().until(() -> {
        HealthReport.HealthReportBuilder health = HealthReport.builder();
        sink.isReady(health);
        return health.build().isOk();
    });

    List<Object> ackedPayloads = new CopyOnWriteArrayList<>();
    List<Object> nackedPayloads = new CopyOnWriteArrayList<>();
    Subscriber subscriber = sink.getSink().build();

    // Emit 0..5, replacing 3 and 5 with String payloads that the configured
    // IntegerSerializer cannot handle, so those two messages must be nacked.
    Multi.createFrom().range(0, 6)
            .map(i -> (i == 3 || i == 5) ? (Object) Integer.toString(i) : (Object) i)
            .map(payload -> Message.of(payload, () -> {
                ackedPayloads.add(payload);
                return CompletableFuture.completedFuture(null);
            }, failure -> {
                nackedPayloads.add(payload);
                return CompletableFuture.completedFuture(null);
            }))
            .subscribe(subscriber);

    assertThat(records.awaitCompletion(Duration.ofMinutes(1)).count()).isEqualTo(4);
    await().until(() -> nackedPayloads.size() >= 2);
    assertThat(ackedPayloads).containsExactly(0, 1, 2, 4);
    assertThat(nackedPayloads).contains("3", "5");
    assertThat(cdiEvents.firedConsumerEvents.sum()).isEqualTo(0);
    assertThat(cdiEvents.firedProducerEvents.sum()).isEqualTo(1);
}
Example use of io.smallrye.reactive.messaging.kafka.companion.ConsumerTask in the project smallrye-reactive-messaging by smallrye, taken from the class BatchTracingPropagationTest, method testFromAppToKafka.
@SuppressWarnings("ConstantConditions")
@Test
public void testFromAppToKafka() {
    // Tracing contexts extracted from the headers of each consumed record.
    List<Context> extractedContexts = new CopyOnWriteArrayList<>();

    // Consume up to 10 records (1-minute budget) and pull the propagated
    // OpenTelemetry context out of every record's Kafka headers.
    ConsumerTask<String, Integer> records = companion.consumeIntegers().fromTopics(topic,
            m -> m.plug(until(10L, Duration.ofMinutes(1), null))
                    .onItem().invoke(record -> extractedContexts.add(
                            GlobalOpenTelemetry.getPropagators().getTextMapPropagator()
                                    .extract(Context.current(), record.headers(), new HeaderExtractAdapter()))));

    runApplication(getKafkaSinkConfigForMyAppGeneratingData(), MyAppGeneratingData.class);
    await().until(() -> records.getRecords().size() >= 10);

    // The application emits 0..9 in order; every record must carry a value.
    List<Integer> payloads = new ArrayList<>();
    assertThat(records.getRecords()).allSatisfy(record -> {
        assertThat(record.value()).isNotNull();
        payloads.add(record.value());
    });
    assertThat(payloads).containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

    assertThat(extractedContexts).hasSize(10);
    assertThat(extractedContexts).doesNotContainNull().doesNotHaveDuplicates();

    List<String> spanIds = extractedContexts.stream()
            .map(ctx -> Span.fromContextOrNull(ctx).getSpanContext().getSpanId())
            .collect(Collectors.toList());
    assertThat(spanIds).doesNotContainNull().doesNotHaveDuplicates().hasSize(10);

    List<String> traceIds = extractedContexts.stream()
            .map(ctx -> Span.fromContextOrNull(ctx).getSpanContext().getTraceId())
            .collect(Collectors.toList());
    assertThat(traceIds).doesNotContainNull().doesNotHaveDuplicates().hasSize(10);

    // Every finished span must be a producer span matching one of the
    // propagated span/trace ids, and must not be its own parent.
    for (SpanData span : testExporter.getFinishedSpanItems()) {
        assertThat(span.getSpanId()).isIn(spanIds);
        assertThat(span.getSpanId()).isNotEqualTo(span.getParentSpanId());
        assertThat(span.getTraceId()).isIn(traceIds);
        assertThat(span.getKind()).isEqualByComparingTo(SpanKind.PRODUCER);
    }
}
Example use of io.smallrye.reactive.messaging.kafka.companion.ConsumerTask in the project smallrye-reactive-messaging by smallrye, taken from the class TracingPropagationTest, method testFromKafkaToAppToKafka.
@Test
public void testFromKafkaToAppToKafka() {
// End-to-end trace propagation: records produced with a tracing span on the
// parent topic flow through the application and out to the result topic; the
// trace ids observed on the result topic must match the ones we injected.
List<Context> receivedContexts = new CopyOnWriteArrayList<>();
String resultTopic = topic + "-result";
String parentTopic = topic + "-parent";
// Consume up to 10 records from the result topic (1-minute budget) and
// extract the propagated tracing context from each record's headers.
ConsumerTask<String, Integer> consumed = companion.consumeIntegers().fromTopics(resultTopic, m -> m.plug(until(10L, Duration.ofMinutes(1), null)).onItem().invoke(record -> {
receivedContexts.add(GlobalOpenTelemetry.getPropagators().getTextMapPropagator().extract(Context.current(), record.headers(), new HeaderExtractAdapter()));
}));
MyAppProcessingData bean = runApplication(getKafkaSinkConfigForMyAppProcessingData(resultTopic, parentTopic), MyAppProcessingData.class);
// Produce 10 integers on the parent topic, each carrying a freshly created
// tracing span in its headers; the created span contexts are recorded.
List<SpanContext> producedSpanContexts = new CopyOnWriteArrayList<>();
companion.produceIntegers().usingGenerator(i -> new ProducerRecord<>(parentTopic, null, null, "a-key", i, createTracingSpan(producedSpanContexts, parentTopic)), 10);
await().until(() -> consumed.getRecords().size() >= 10);
// The application increments each value, so 0..9 in becomes 1..10 out.
List<Integer> values = new ArrayList<>();
assertThat(consumed.getRecords()).allSatisfy(record -> {
assertThat(record.value()).isNotNull();
values.add(record.value());
});
assertThat(values).containsExactly(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
List<String> producedTraceIds = producedSpanContexts.stream().map(SpanContext::getTraceId).collect(Collectors.toList());
assertThat(producedTraceIds).hasSize(10);
assertThat(receivedContexts).hasSize(10);
assertThat(receivedContexts).doesNotContainNull().doesNotHaveDuplicates();
// Span/trace ids extracted from the result-topic record headers.
List<String> receivedSpanIds = receivedContexts.stream().map(context -> Span.fromContextOrNull(context).getSpanContext().getSpanId()).collect(Collectors.toList());
assertThat(receivedSpanIds).doesNotContainNull().doesNotHaveDuplicates().hasSize(10);
List<String> receivedTraceIds = receivedContexts.stream().map(context -> Span.fromContextOrNull(context).getSpanContext().getTraceId()).collect(Collectors.toList());
assertThat(receivedTraceIds).doesNotContainNull().doesNotHaveDuplicates().hasSize(10);
// The trace ids must survive the round trip through the application.
assertThat(receivedTraceIds).containsExactlyInAnyOrderElementsOf(producedTraceIds);
assertThat(bean.tracing()).hasSizeGreaterThanOrEqualTo(10);
assertThat(bean.tracing()).doesNotContainNull().doesNotHaveDuplicates();
// Collect the span ids seen inside the application; each in-app context
// must also carry a previous (upstream) context.
List<String> spanIds = new ArrayList<>();
for (TracingMetadata tracing : bean.tracing()) {
Span span = Span.fromContext(tracing.getCurrentContext());
spanIds.add(span.getSpanContext().getSpanId());
assertThat(Span.fromContextOrNull(tracing.getPreviousContext())).isNotNull();
}
await().atMost(Duration.ofMinutes(2)).until(() -> testExporter.getFinishedSpanItems().size() >= 10);
// Split finished spans into consumer (incoming) and producer (outgoing)
// spans and record each side's parent span ids for the lineage checks below.
List<String> outgoingParentIds = new ArrayList<>();
List<String> incomingParentIds = new ArrayList<>();
for (SpanData data : testExporter.getFinishedSpanItems()) {
if (data.getKind().equals(SpanKind.CONSUMER)) {
incomingParentIds.add(data.getParentSpanId());
assertThat(data.getAttributes().get(SemanticAttributes.MESSAGING_KAFKA_CONSUMER_GROUP)).isNotNull();
assertThat(data.getAttributes().get(AttributeKey.stringKey("messaging.consumer_id"))).isNotNull();
// Need to skip the spans created during @Incoming processing
continue;
}
assertThat(data.getSpanId()).isIn(receivedSpanIds);
assertThat(data.getSpanId()).isNotEqualTo(data.getParentSpanId());
assertThat(data.getTraceId()).isIn(producedTraceIds);
assertThat(data.getKind()).isEqualByComparingTo(SpanKind.PRODUCER);
outgoingParentIds.add(data.getParentSpanId());
}
// Assert span created on Kafka record is the parent of consumer span we create
assertThat(producedSpanContexts.stream().map(SpanContext::getSpanId)).containsExactlyElementsOf(incomingParentIds);
// Assert consumer span is the parent of the producer span we received in Kafka
assertThat(spanIds.stream()).containsExactlyElementsOf(outgoingParentIds);
}
Example use of io.smallrye.reactive.messaging.kafka.companion.ConsumerTask in the project smallrye-reactive-messaging by smallrye, taken from the class TracingPropagationTest, method testFromAppToKafka.
@SuppressWarnings("ConstantConditions")
@Test
public void testFromAppToKafka() {
    // Tracing contexts extracted from the headers of each consumed record.
    List<Context> capturedContexts = new CopyOnWriteArrayList<>();

    // Consume up to 10 records (1-minute budget), extracting the propagated
    // OpenTelemetry context from every record's Kafka headers.
    ConsumerTask<String, Integer> consumerTask = companion.consumeIntegers().fromTopics(topic,
            m -> m.plug(until(10L, Duration.ofMinutes(1), null))
                    .onItem().invoke(record -> capturedContexts.add(
                            GlobalOpenTelemetry.getPropagators().getTextMapPropagator()
                                    .extract(Context.current(), record.headers(), new HeaderExtractAdapter()))));

    runApplication(getKafkaSinkConfigForMyAppGeneratingData(), MyAppGeneratingData.class);
    await().until(() -> consumerTask.getRecords().size() >= 10);

    // The application emits 0..9 in order; every record must carry a value.
    List<Integer> receivedValues = new ArrayList<>();
    assertThat(consumerTask.getRecords()).allSatisfy(record -> {
        assertThat(record.value()).isNotNull();
        receivedValues.add(record.value());
    });
    assertThat(receivedValues).containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

    assertThat(capturedContexts).hasSize(10);
    assertThat(capturedContexts).doesNotContainNull().doesNotHaveDuplicates();

    List<String> capturedSpanIds = capturedContexts.stream()
            .map(ctx -> Span.fromContextOrNull(ctx).getSpanContext().getSpanId())
            .collect(Collectors.toList());
    assertThat(capturedSpanIds).doesNotContainNull().doesNotHaveDuplicates().hasSize(10);

    List<String> capturedTraceIds = capturedContexts.stream()
            .map(ctx -> Span.fromContextOrNull(ctx).getSpanContext().getTraceId())
            .collect(Collectors.toList());
    assertThat(capturedTraceIds).doesNotContainNull().doesNotHaveDuplicates().hasSize(10);

    // Every finished span must be a producer span matching one of the
    // propagated span/trace ids, and must not be its own parent.
    for (SpanData finishedSpan : testExporter.getFinishedSpanItems()) {
        assertThat(finishedSpan.getSpanId()).isIn(capturedSpanIds);
        assertThat(finishedSpan.getSpanId()).isNotEqualTo(finishedSpan.getParentSpanId());
        assertThat(finishedSpan.getTraceId()).isIn(capturedTraceIds);
        assertThat(finishedSpan.getKind()).isEqualByComparingTo(SpanKind.PRODUCER);
    }
}
Example use of io.smallrye.reactive.messaging.kafka.companion.ConsumerTask in the project smallrye-reactive-messaging by smallrye, taken from the class KafkaSinkWithLegacyMetadataTest, method testInvalidPayloadType.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testInvalidPayloadType() {
// Verifies that payloads the configured IntegerSerializer cannot handle are
// nacked while the valid integer payloads are acked and reach the topic.
// Start consuming before producing: expect exactly 4 integer records.
ConsumerTask<String, Integer> consumed = companion.consumeIntegers().fromTopics(topic, 4, Duration.ofSeconds(10));
MapBasedConfig config = getBaseConfig().with("topic", topic).with("value.serializer", IntegerSerializer.class.getName()).with("partition", 0).with("max-inflight-messages", 1L).with("channel-name", "my-channel").with("retries", // disable retry.
0L);
KafkaConnectorOutgoingConfiguration oc = new KafkaConnectorOutgoingConfiguration(config);
CountKafkaCdiEvents testCdiEvents = new CountKafkaCdiEvents();
sink = new KafkaSink(oc, testCdiEvents, UnsatisfiedInstance.instance());
// Block until the sink reports a healthy readiness check.
await().until(() -> {
HealthReport.HealthReportBuilder builder = HealthReport.builder();
sink.isReady(builder);
return builder.build().isOk();
});
List<Object> acked = new CopyOnWriteArrayList<>();
List<Object> nacked = new CopyOnWriteArrayList<>();
Subscriber subscriber = sink.getSink().build();
// Emit 0..5, replacing 3 and 5 with String payloads that cannot be
// serialized as integers; ack/nack callbacks record each outcome.
Multi.createFrom().range(0, 6).map(i -> {
if (i == 3 || i == 5) {
return Integer.toString(i);
}
return i;
}).map(i -> Message.of(i, () -> {
acked.add(i);
return CompletableFuture.completedFuture(null);
}, t -> {
nacked.add(i);
return CompletableFuture.completedFuture(null);
})).subscribe(subscriber);
// 3 and 5 are ignored.
assertThat(consumed.awaitCompletion(Duration.ofMinutes(1)).count()).isEqualTo(4);
await().until(() -> nacked.size() >= 2);
assertThat(acked).containsExactly(0, 1, 2, 4);
assertThat(nacked).contains("3", "5");
// No consumer is created by this sink; exactly one producer event is fired.
assertThat(testCdiEvents.firedConsumerEvents.sum()).isEqualTo(0);
assertThat(testCdiEvents.firedProducerEvents.sum()).isEqualTo(1);
}
Aggregated usage examples.