Example usage of io.smallrye.reactive.messaging.health.HealthReport in the smallrye-reactive-messaging project by smallrye:
the method testThrottledStrategyWithTooManyUnackedMessages of the class DeprecatedCommitStrategiesTest.
@Test
void testThrottledStrategyWithTooManyUnackedMessages() throws Exception {
    // Configure the throttled commit strategy with a short max age so that
    // unprocessed records quickly mark the channel as unhealthy.
    MapBasedConfig config = commonConfiguration()
            .with("client.id", UUID.randomUUID().toString())
            .with("commit-strategy", "throttled")
            .with("auto.offset.reset", "earliest")
            .with("health-enabled", true)
            .with("throttled.unprocessed-record-max-age.ms", 1000)
            .with("auto.commit.interval.ms", 100);
    String groupId = UUID.randomUUID().toString();
    source = new KafkaSource<>(vertx, groupId, new KafkaConnectorIncomingConfiguration(config),
            getConsumerRebalanceListeners(), CountKafkaCdiEvents.noCdiEvents,
            getDeserializationFailureHandlers(), -1);
    injectMockConsumer(source, consumer);

    List<Message<?>> received = new ArrayList<>();
    source.getStream().subscribe().with(received::add);

    TopicPartition partition0 = new TopicPartition(TOPIC, 0);
    TopicPartition partition1 = new TopicPartition(TOPIC, 1);
    Map<TopicPartition, Long> startOffsets = new HashMap<>();
    startOffsets.put(partition0, 0L);
    startOffsets.put(partition1, 5L);
    consumer.updateBeginningOffsets(startOffsets);

    consumer.schedulePollTask(() -> {
        consumer.rebalance(startOffsets.keySet());
        source.getCommitHandler().partitionsAssigned(startOffsets.keySet());
        for (int i = 0; i < 500; i++) {
            consumer.addRecord(new ConsumerRecord<>(TOPIC, 0, i, "k", "v0-" + i));
            consumer.addRecord(new ConsumerRecord<>(TOPIC, 1, i, "r", "v1-" + i));
        }
    });

    // 500 records per partition, minus the 5 records of p1 below its beginning offset.
    int expected = 500 * 2 - 5;
    await().until(() -> received.size() == expected);
    assertThat(received).hasSize(expected);

    // Ack every record from partition 0 but only the first 5 records from partition 1,
    // leaving the remainder of p1 unacknowledged so the throttled strategy trips.
    int ackedFromP1 = 0;
    for (Message<?> message : received) {
        IncomingKafkaRecordMetadata<?, ?> metadata = message
                .getMetadata(IncomingKafkaRecordMetadata.class)
                .orElseThrow(() -> new Exception("metadata expected"));
        if (metadata.getPartition() == 0) {
            message.ack().toCompletableFuture().join();
        } else if (ackedFromP1 < 5) {
            message.ack().toCompletableFuture().join();
            ackedFromP1 = ackedFromP1 + 1;
        }
        LegacyMetadataTestUtils.tempCompareLegacyAndApiMetadata(metadata, message);
    }

    // The liveness check must eventually fail because p1 still has records
    // that stayed unprocessed longer than the configured max age.
    AtomicReference<HealthReport> lastReport = new AtomicReference<>();
    await().until(() -> {
        HealthReport.HealthReportBuilder builder = HealthReport.builder();
        source.isAlive(builder);
        HealthReport current = builder.build();
        lastReport.set(current);
        return !current.isOk();
    });
    String failureMessage = lastReport.get().getChannels().get(0).getMessage();
    assertThat(failureMessage).contains("my-topic-1", "9");
}
Example usage of io.smallrye.reactive.messaging.health.HealthReport in the smallrye-reactive-messaging project by smallrye:
the method testThrottledStrategyWithTooManyUnackedMessages of the class CommitStrategiesTest.
@Test
void testThrottledStrategyWithTooManyUnackedMessages() throws Exception {
    // Throttled commit strategy with a 1s unprocessed-record max age: records
    // left unacked should flip the health report to "not ok".
    MapBasedConfig config = commonConfiguration()
            .with("client.id", UUID.randomUUID().toString())
            .with("commit-strategy", "throttled")
            .with("auto.offset.reset", "earliest")
            .with("health-enabled", true)
            .with("throttled.unprocessed-record-max-age.ms", 1000)
            .with("auto.commit.interval.ms", 100);
    String groupId = UUID.randomUUID().toString();
    source = new KafkaSource<>(vertx, groupId, new KafkaConnectorIncomingConfiguration(config),
            getConsumerRebalanceListeners(), CountKafkaCdiEvents.noCdiEvents,
            getDeserializationFailureHandlers(), -1);
    injectMockConsumer(source, consumer);

    List<Message<?>> messages = new ArrayList<>();
    source.getStream().subscribe().with(messages::add);

    Map<TopicPartition, Long> beginningOffsets = new HashMap<>();
    beginningOffsets.put(new TopicPartition(TOPIC, 0), 0L);
    beginningOffsets.put(new TopicPartition(TOPIC, 1), 5L);
    consumer.updateBeginningOffsets(beginningOffsets);

    consumer.schedulePollTask(() -> {
        consumer.rebalance(beginningOffsets.keySet());
        source.getCommitHandler().partitionsAssigned(beginningOffsets.keySet());
        for (int i = 0; i < 500; i++) {
            consumer.addRecord(new ConsumerRecord<>(TOPIC, 0, i, "k", "v0-" + i));
            consumer.addRecord(new ConsumerRecord<>(TOPIC, 1, i, "r", "v1-" + i));
        }
    });

    // Expect 500 records from each partition minus the first 5 of p1,
    // which sit below that partition's beginning offset.
    int expected = 500 * 2 - 5;
    await().until(() -> messages.size() == expected);
    assertThat(messages).hasSize(expected);

    // Acknowledge everything from partition 0 and only the first 5 records of
    // partition 1; the rest of p1 stays unacked on purpose.
    int p1Acked = 0;
    for (Message<?> message : messages) {
        IncomingKafkaRecordMetadata<?, ?> metadata = message
                .getMetadata(IncomingKafkaRecordMetadata.class)
                .orElseThrow(() -> new Exception("metadata expected"));
        if (metadata.getPartition() == 0) {
            message.ack().toCompletableFuture().join();
        } else if (p1Acked < 5) {
            message.ack().toCompletableFuture().join();
            p1Acked = p1Acked + 1;
        }
        LegacyMetadataTestUtils.tempCompareLegacyAndApiMetadata(metadata, message);
    }

    // Poll the liveness check until the aged unacked records make it fail.
    AtomicReference<HealthReport> reportRef = new AtomicReference<>();
    await().until(() -> {
        HealthReport.HealthReportBuilder builder = HealthReport.builder();
        source.isAlive(builder);
        HealthReport built = builder.build();
        reportRef.set(built);
        return !built.isOk();
    });
    String channelMessage = reportRef.get().getChannels().get(0).getMessage();
    assertThat(channelMessage).contains("my-topic-1", "9");
}
Example usage of io.smallrye.reactive.messaging.health.HealthReport in the smallrye-reactive-messaging project by smallrye:
the method testABeanConsumingTheKafkaMessagesWithRawMessage of the class KafkaSourceTest.
@SuppressWarnings("unchecked")
@Test
public void testABeanConsumingTheKafkaMessagesWithRawMessage() {
    String groupId = UUID.randomUUID().toString();
    ConsumptionBeanUsingRawMessage bean = runApplication(
            myKafkaSourceConfig(topic, 0, null, groupId), ConsumptionBeanUsingRawMessage.class);
    List<Integer> results = bean.getResults();
    assertThat(results).isEmpty();

    // Push 10 integers and wait until the bean has consumed all of them.
    companion.produceIntegers().usingGenerator(i -> new ProducerRecord<>(topic, i), 10);
    await().atMost(2, TimeUnit.MINUTES).until(() -> results.size() >= 10);
    assertThat(results).containsExactly(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

    List<Message<Integer>> kafkaMessages = bean.getKafkaMessages();
    for (Message<Integer> message : kafkaMessages) {
        // TODO Use a plain import once the deprecated copy of this class in this package is removed.
        io.smallrye.reactive.messaging.kafka.api.IncomingKafkaRecordMetadata<String, Integer> metadata = message
                .getMetadata(io.smallrye.reactive.messaging.kafka.api.IncomingKafkaRecordMetadata.class)
                .orElseThrow(() -> new AssertionError("Metadata expected"));
        assertThat(metadata.getTopic()).isEqualTo(topic);
        assertThat(metadata.getTimestamp()).isAfter(Instant.EPOCH);
        assertThat(metadata.getPartition()).isGreaterThan(-1);
        assertThat(metadata.getOffset()).isGreaterThan(-1);
        Assert.assertSame(metadata, KafkaMetadataUtil.readIncomingKafkaMetadata(message).get());
        LegacyMetadataTestUtils.tempCompareLegacyAndApiMetadata(metadata, message);
    }

    // Both health checks must report a single healthy "data" channel.
    HealthReport liveness = getHealth().getLiveness();
    HealthReport readiness = getHealth().getReadiness();
    assertThat(liveness.isOk()).isTrue();
    assertThat(readiness.isOk()).isTrue();
    assertThat(liveness.getChannels()).hasSize(1);
    assertThat(readiness.getChannels()).hasSize(1);
    assertThat(liveness.getChannels().get(0).getChannel()).isEqualTo("data");
    assertThat(readiness.getChannels().get(0).getChannel()).isEqualTo("data");
}
Example usage of io.smallrye.reactive.messaging.health.HealthReport in the smallrye-reactive-messaging project by smallrye:
the method testABeanConsumingTheKafkaMessagesWithRawMessage of the class KafkaSourceWithLegacyMetadataTest.
@SuppressWarnings("unchecked")
@Test
public void testABeanConsumingTheKafkaMessagesWithRawMessage() {
    String groupId = UUID.randomUUID().toString();
    ConsumptionBeanUsingRawMessage bean = runApplication(
            myKafkaSourceConfig(0, null, groupId), ConsumptionBeanUsingRawMessage.class);
    List<Integer> results = bean.getResults();
    assertThat(results).isEmpty();

    // Produce 10 integers on the legacy topic and wait for full consumption.
    companion.produceIntegers().usingGenerator(i -> new ProducerRecord<>("legacy-data", i), 10);
    await().atMost(2, TimeUnit.MINUTES).until(() -> results.size() >= 10);
    assertThat(results).containsExactly(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

    for (Message<Integer> message : bean.getKafkaMessages()) {
        IncomingKafkaRecordMetadata<String, Integer> metadata = message
                .getMetadata(IncomingKafkaRecordMetadata.class)
                .orElseThrow(() -> new AssertionError("Metadata expected"));
        assertThat(metadata.getTopic()).isEqualTo("legacy-data");
        assertThat(metadata.getTimestamp()).isAfter(Instant.EPOCH);
        assertThat(metadata.getPartition()).isGreaterThan(-1);
        assertThat(metadata.getOffset()).isGreaterThan(-1);
        LegacyMetadataTestUtils.tempCompareLegacyAndApiMetadata(
                KafkaMetadataUtil.readIncomingKafkaMetadata(message).get(), message);
    }

    // Both health checks must report a single healthy "data" channel.
    HealthReport liveness = getHealth().getLiveness();
    HealthReport readiness = getHealth().getReadiness();
    assertThat(liveness.isOk()).isTrue();
    assertThat(readiness.isOk()).isTrue();
    assertThat(liveness.getChannels()).hasSize(1);
    assertThat(readiness.getChannels()).hasSize(1);
    assertThat(liveness.getChannels().get(0).getChannel()).isEqualTo("data");
    assertThat(readiness.getChannels().get(0).getChannel()).isEqualTo("data");
}
Example usage of io.smallrye.reactive.messaging.health.HealthReport in the smallrye-reactive-messaging project by smallrye:
the method testABeanConsumingTheKafkaMessages of the class KafkaSourceWithLegacyMetadataTest.
@Test
public void testABeanConsumingTheKafkaMessages() {
    String topicName = UUID.randomUUID().toString();
    String groupId = UUID.randomUUID().toString();
    ConsumptionBean bean = run(myKafkaSourceConfig(topicName, groupId));
    List<Integer> results = bean.getResults();
    assertThat(results).isEmpty();

    // Produce 10 integers and wait for the bean to consume all of them.
    companion.produceIntegers().usingGenerator(i -> new ProducerRecord<>(topicName, i), 10);
    await().atMost(2, TimeUnit.MINUTES).until(() -> results.size() >= 10);
    assertThat(results).containsExactly(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

    for (KafkaRecord<String, Integer> record : bean.getKafkaMessages()) {
        assertThat(record.getTopic()).isEqualTo(topicName);
        assertThat(record.getTimestamp()).isAfter(Instant.EPOCH);
        assertThat(record.getPartition()).isGreaterThan(-1);
    }

    // Both health checks must report a single healthy "data" channel.
    HealthReport liveness = getHealth().getLiveness();
    HealthReport readiness = getHealth().getReadiness();
    assertThat(liveness.isOk()).isTrue();
    assertThat(readiness.isOk()).isTrue();
    assertThat(liveness.getChannels()).hasSize(1);
    assertThat(readiness.getChannels()).hasSize(1);
    assertThat(liveness.getChannels().get(0).getChannel()).isEqualTo("data");
    assertThat(readiness.getChannels().get(0).getChannel()).isEqualTo("data");

    // The client service exposes the consumer for known channels only,
    // and rejects a null channel name outright.
    KafkaClientService clientService = get(KafkaClientService.class);
    assertThat(clientService.getConsumer("data")).isNotNull();
    assertThat(clientService.getConsumer("missing")).isNull();
    assertThatThrownBy(() -> clientService.getConsumer(null)).isInstanceOf(NullPointerException.class);
}
Aggregations