Use of io.smallrye.reactive.messaging.kafka.KafkaRecord in project automatiko-engine by automatiko-io:
the class StructuredCEVerificationTest, method testProcessViaIntermediateEventExecution.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testProcessViaIntermediateEventExecution() throws InterruptedException {
    // JSON payload used as the Kafka record value on both incoming channels.
    String payload = "{\"timestamp\":1, \"value\" : 25.0, \"location\":\"kitchen\"}";

    // In-memory channels standing in for the real Kafka topics.
    InMemorySource<KafkaRecord<String, String>> alarmsIn = connector.source("alarm");
    InMemorySource<KafkaRecord<String, String>> extraAlarmsIn = connector.source("extraalarms");
    InMemorySink<KafkaRecord<String, String>> processedOut = connector.sink("processed");

    String instanceId = "room";

    // Start one process instance by publishing on the "alarm" channel and
    // wait for the engine to finish processing it.
    execCounter.reset(1);
    alarmsIn.send(KafkaRecord.of(instanceId, payload));
    execCounter.waitTillCompletion(5);

    // Exactly one active instance; it is retrievable by id and carries the alarm variable.
    given().accept(ContentType.JSON).when().get("/alarms").then().statusCode(200)
            .body("$.size()", is(1));
    Map data = given().accept(ContentType.JSON).when().get("/alarms/" + instanceId).then()
            .statusCode(200).body("id", is(instanceId)).extract().as(Map.class);
    assertNotNull(data.get("alarm"));

    // No user tasks while the instance is waiting on the intermediate event.
    List<Map<String, String>> tasks = given().accept(ContentType.JSON).when()
            .get("/alarms/" + instanceId + "/tasks").then().statusCode(200).extract().as(List.class);
    assertEquals(0, tasks.size());

    // Publishing on "extraalarms" moves the instance past the intermediate event;
    // afterwards no active instances remain.
    execCounter.reset(1);
    extraAlarmsIn.send(KafkaRecord.of(instanceId, payload));
    execCounter.waitTillCompletion(5);
    given().accept(ContentType.JSON).when().get("/alarms").then().statusCode(200)
            .body("$.size()", is(0));

    // Nothing is expected on the outgoing "processed" channel.
    List<? extends Message<KafkaRecord<String, String>>> published = processedOut.received();
    assertEquals(0, published.size());
    processedOut.clear();
}
Use of io.smallrye.reactive.messaging.kafka.KafkaRecord in project automatiko-engine by automatiko-io:
the class StructuredCEVerificationTest, method testProcessViaIntermediateEventExecutionCorrelation.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testProcessViaIntermediateEventExecutionCorrelation() throws InterruptedException {
    // JSON payload used as the Kafka record value on both incoming channels.
    String payload = "{\"timestamp\":1, \"value\" : 25.0, \"location\":\"kitchen\"}";

    // In-memory channels standing in for the real Kafka topics.
    InMemorySource<KafkaRecord<String, String>> alarmsIn = connector.source("alarm");
    InMemorySource<KafkaRecord<String, String>> extraAlarmsIn = connector.source("extraalarms");
    InMemorySink<KafkaRecord<String, String>> processedOut = connector.sink("processed");

    String firstId = "room";
    String secondId = "room2";

    // Start two instances, keyed by different record keys so they correlate
    // to distinct process instances; pause between sends to keep ordering stable.
    execCounter.reset(2);
    alarmsIn.send(KafkaRecord.of(firstId, payload));
    Thread.sleep(2000);
    alarmsIn.send(KafkaRecord.of(secondId, payload));
    execCounter.waitTillCompletion(5);

    // Both instances are active; the first one is retrievable with its alarm variable set.
    given().accept(ContentType.JSON).when().get("/alarms").then().statusCode(200)
            .body("$.size()", is(2));
    Map data = given().accept(ContentType.JSON).when().get("/alarms/" + firstId).then()
            .statusCode(200).body("id", is(firstId)).extract().as(Map.class);
    assertNotNull(data.get("alarm"));

    // No user tasks while the first instance waits on the intermediate event.
    List<Map<String, String>> tasks = given().accept(ContentType.JSON).when()
            .get("/alarms/" + firstId + "/tasks").then().statusCode(200).extract().as(List.class);
    assertEquals(0, tasks.size());

    // The "extraalarms" message is correlated by key, so only the first instance completes.
    execCounter.reset(1);
    extraAlarmsIn.send(KafkaRecord.of(firstId, payload));
    execCounter.waitTillCompletion(5);
    given().accept(ContentType.JSON).when().get("/alarms").then().statusCode(200)
            .body("$.size()", is(1));

    // Nothing is expected on the outgoing "processed" channel.
    List<? extends Message<KafkaRecord<String, String>>> published = processedOut.received();
    assertEquals(0, published.size());
    processedOut.clear();

    // First instance is gone; the second is still active and can be aborted explicitly.
    given().accept(ContentType.JSON).when().get("/alarms/" + firstId).then().statusCode(404);
    given().accept(ContentType.JSON).when().get("/alarms/" + secondId).then().statusCode(200);
    given().accept(ContentType.JSON).when().delete("/alarms/" + secondId).then().statusCode(200);
    given().accept(ContentType.JSON).when().get("/alarms").then().statusCode(200)
            .body("$.size()", is(0));
}
Use of io.smallrye.reactive.messaging.kafka.KafkaRecord in project automatiko-engine by automatiko-io:
the class VerificationTest, method testProcessViaIntermediateEventExecution.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testProcessViaIntermediateEventExecution() throws InterruptedException {
    // Record value sent on both incoming channels.
    String payload = "{\"timestamp\":1, \"value\" : 25.0, \"location\":\"kitchen\"}";

    // In-memory replacements for the Kafka-backed channels.
    InMemorySource<KafkaRecord<String, String>> alarmChannel = connector.source("alarm");
    InMemorySource<KafkaRecord<String, String>> extraAlarmChannel = connector.source("extraalarms");
    InMemorySink<KafkaRecord<String, String>> processedChannel = connector.sink("processed");

    String key = "room";

    // Trigger one process instance and wait until the engine has handled it.
    execCounter.reset(1);
    alarmChannel.send(KafkaRecord.of(key, payload));
    execCounter.waitTillCompletion(5);

    // One active instance exists and exposes the alarm variable.
    given().accept(ContentType.JSON).when().get("/alarms").then().statusCode(200)
            .body("$.size()", is(1));
    Map data = given().accept(ContentType.JSON).when().get("/alarms/" + key).then()
            .statusCode(200).body("id", is(key)).extract().as(Map.class);
    assertNotNull(data.get("alarm"));

    // The instance has no user tasks at this point.
    List<Map<String, String>> tasks = given().accept(ContentType.JSON).when()
            .get("/alarms/" + key + "/tasks").then().statusCode(200).extract().as(List.class);
    assertEquals(0, tasks.size());

    // Second message drives the instance through the intermediate event to completion.
    execCounter.reset(1);
    extraAlarmChannel.send(KafkaRecord.of(key, payload));
    execCounter.waitTillCompletion(5);
    given().accept(ContentType.JSON).when().get("/alarms").then().statusCode(200)
            .body("$.size()", is(0));

    // The outgoing "processed" channel must stay empty.
    List<? extends Message<KafkaRecord<String, String>>> published = processedChannel.received();
    assertEquals(0, published.size());
    processedChannel.clear();
}
Use of io.smallrye.reactive.messaging.kafka.KafkaRecord in project smallrye-reactive-messaging by smallrye:
the class KafkaCommitHandlerTest, method testSourceWithThrottledLatestProcessedCommitEnabledWithoutAck.
@Test
public void testSourceWithThrottledLatestProcessedCommitEnabledWithoutAck() {
    // Throttled commit strategy with a very short unprocessed-record max age (100 ms),
    // so records that are never acknowledged should flip the liveness check to failing.
    MapBasedConfig config = newCommonConfigForSource()
            .with("client.id", UUID.randomUUID().toString())
            .with("group.id", "test-source-with-throttled-latest-processed-commit-without-acking")
            .with("value.deserializer", IntegerDeserializer.class.getName())
            .with("commit-strategy", "throttled")
            .with("max.poll.records", 16)
            .with("throttled.unprocessed-record-max-age.ms", 100);
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(config);
    source = new KafkaSource<>(vertx, "test-source-with-throttled-latest-processed-commit-without-acking",
            ic, UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents,
            UnsatisfiedInstance.instance(), -1);

    // Collect everything the source emits; messages are deliberately never acked.
    List<Message<?>> received = Collections.synchronizedList(new ArrayList<>());
    source.getStream().subscribe().with(received::add);

    // First batch of 10 integers arrives in order.
    companion.produceIntegers().usingGenerator(i -> new ProducerRecord<>(topic, i), 10);
    await().atMost(2, TimeUnit.MINUTES).until(() -> received.size() >= 10);
    assertThat(received.stream()
            .map(m -> ((KafkaRecord<String, Integer>) m).getPayload())
            .collect(Collectors.toList()))
            .containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

    // Right after consuming, the source still reports itself alive.
    await().atMost(2, TimeUnit.MINUTES).untilAsserted(() -> {
        HealthReport.HealthReportBuilder builder = HealthReport.builder();
        source.isAlive(builder);
        assertTrue(builder.build().isOk());
    });

    // After more unacknowledged records age past the 100 ms limit, liveness must fail.
    companion.produceIntegers().usingGenerator(i -> new ProducerRecord<>(topic, i), 30);
    await().atMost(2, TimeUnit.MINUTES).until(() -> received.size() >= 30);
    await().atMost(2, TimeUnit.MINUTES).untilAsserted(() -> {
        HealthReport.HealthReportBuilder builder = HealthReport.builder();
        source.isAlive(builder);
        assertFalse(builder.build().isOk());
    });
}
Use of io.smallrye.reactive.messaging.kafka.KafkaRecord in project smallrye-reactive-messaging by smallrye:
the class KafkaCommitHandlerTest, method testSourceWithAutoCommitDisabled.
@Test
public void testSourceWithAutoCommitDisabled() throws ExecutionException, InterruptedException, TimeoutException {
    // "latest" commit strategy: offsets are committed only when a message is acknowledged.
    MapBasedConfig config = newCommonConfigForSource()
            .with("group.id", "test-source-with-auto-commit-disabled")
            .with("value.deserializer", IntegerDeserializer.class.getName())
            .with("commit-strategy", "latest");
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(config);
    source = new KafkaSource<>(vertx, "test-source-with-auto-commit-disabled", ic,
            UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents,
            UnsatisfiedInstance.instance(), -1);

    // Collect everything the source emits.
    List<Message<?>> received = Collections.synchronizedList(new ArrayList<>());
    source.getStream().subscribe().with(received::add);

    // Produce 10 integers and verify they arrive in order.
    companion.produceIntegers().usingGenerator(i -> new ProducerRecord<>(topic, i), 10);
    await().atMost(2, TimeUnit.MINUTES).until(() -> received.size() >= 10);
    assertThat(received.stream()
            .map(m -> ((KafkaRecord<String, Integer>) m).getPayload())
            .collect(Collectors.toList()))
            .containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);

    // Ack only the last message; with the "latest" strategy that should commit offset 10
    // for partition 0 of the consumer group.
    Message<?> lastMessage = received.get(received.size() - 1);
    lastMessage.ack().toCompletableFuture().get(2, TimeUnit.MINUTES);
    await().ignoreExceptions().untilAsserted(() -> {
        TopicPartition partition = new TopicPartition(topic, 0);
        OffsetAndMetadata committed = companion.consumerGroups()
                .offsets("test-source-with-auto-commit-disabled", partition);
        assertNotNull(committed);
        assertEquals(10L, committed.offset());
    });
}
Aggregations