Example usage of io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig in the smallrye-reactive-messaging project (by smallrye): class KafkaSourceWithLegacyMetadataTest, method myKafkaSourceConfig.
// @SuppressWarnings({ "rawtypes" })
// @Test
// public void testRecoveryAfterMissedHeartbeat() throws InterruptedException {
// MapBasedConfig config = newCommonConfigForSource()
// .with("bootstrap.servers", KafkaBrokerExtension.usage.getBootstrapServers())
// .with("value.deserializer", IntegerDeserializer.class.getName())
// .with(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 6000)
// .with(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 100)
// .with("retry", true)
// .with("retry-attempts", 100)
// .with("retry-max-wait", 30);
//
// usage.setBootstrapServers(KafkaBrokerExtension.usage.getBootstrapServers());
//
// KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(config);
// source = new KafkaSource<>(vertx, UUID.randomUUID().toString(), ic,
// UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents,
// UnsatisfiedInstance.instance(), -1);
// List<KafkaRecord> messages1 = new ArrayList<>();
// source.getStream().subscribe().with(messages1::add);
//
// AtomicInteger counter = new AtomicInteger();
// usage.produceIntegers(10, null,
// () -> new ProducerRecord<>(topic, counter.getAndIncrement())).start();
//
// await().atMost(2, TimeUnit.MINUTES).until(() -> messages1.size() >= 10);
//
// KafkaBrokerExtension.getProxy().setConnectionCut(true);
// Thread.sleep(6000 + 500); // session timeout + a bit more just in case.
// KafkaBrokerExtension.getProxy().setConnectionCut(false);
//
// usage.produceIntegers(10, null,
// () -> new ProducerRecord<>(topic, counter.getAndIncrement())).start();
//
// await().atMost(2, TimeUnit.MINUTES).until(() -> messages1.size() >= 20);
// assertThat(messages1.size()).isGreaterThanOrEqualTo(20);
// }
/**
 * Builds the incoming configuration for the {@code data} channel.
 *
 * @param partitions number of partitions to request; when positive, the topic name
 *        is suffixed with the partition count and the {@code partitions} attribute is set
 * @param withConsumerRebalanceListener name of the rebalance listener bean to attach,
 *        or {@code null} for none
 * @param group consumer group id, or {@code null} to leave it unset
 * @return the assembled channel configuration
 */
private KafkaMapBasedConfig myKafkaSourceConfig(int partitions, String withConsumerRebalanceListener, String group) {
    KafkaMapBasedConfig cfg = kafkaConfig("mp.messaging.incoming.data");
    cfg.put("value.deserializer", IntegerDeserializer.class.getName());
    cfg.put("enable.auto.commit", "false");
    cfg.put("auto.offset.reset", "earliest");
    // Default topic; overridden below when a partition count is requested.
    cfg.put("topic", "legacy-data");
    if (group != null) {
        cfg.put("group.id", group);
    }
    if (partitions > 0) {
        cfg.put("partitions", String.valueOf(partitions));
        cfg.put("topic", "legacy-data-" + partitions);
    }
    if (withConsumerRebalanceListener != null) {
        cfg.put("consumer-rebalance-listener.name", withConsumerRebalanceListener);
    }
    return cfg;
}
Example usage of io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig in the smallrye-reactive-messaging project (by smallrye): class KafkaSinkWithCloudEventsTest, method getConfigToSendBinaryCloudEvents.
/**
 * Builds the outgoing {@code kafka} channel configuration used to emit binary cloud events.
 *
 * @return the assembled channel configuration
 */
private KafkaMapBasedConfig getConfigToSendBinaryCloudEvents() {
    KafkaMapBasedConfig outgoing = kafkaConfig("mp.messaging.outgoing.kafka");
    outgoing.put("topic", topic);
    outgoing.put("value.serializer", StringSerializer.class.getName());
    return outgoing;
}
Example usage of io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig in the smallrye-reactive-messaging project (by smallrye): class KafkaSourceBatchWithCloudEventsTest, method testReceivingStructuredCloudEventsWithUnsupportedDeserializer.
@Test
public void testReceivingStructuredCloudEventsWithUnsupportedDeserializer() {
    // Configure the channel with a deserializer that cannot carry structured cloud events.
    KafkaMapBasedConfig cfg = newCommonConfig();
    cfg.put("channel-name", topic);
    cfg.put("topic", topic);
    // Unsupported on purpose
    cfg.put("value.deserializer", BufferDeserializer.class.getName());
    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(),
            new KafkaConnectorIncomingConfiguration(cfg),
            UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance(), -1);
    List<Message<?>> received = new ArrayList<>();
    source.getBatchStream().subscribe().with(received::add);
    // Produce a single structured cloud event (JSON payload + cloud-events content-type header).
    JsonObject payload = new JsonObject()
            .put("specversion", CloudEventMetadata.CE_VERSION_1_0)
            .put("type", "type")
            .put("id", "id")
            .put("source", "test://test")
            .put("data", new JsonObject().put("name", "neo"));
    companion.produce(String.class, JsonObject.class).fromRecords(new ProducerRecord<>(topic, null, null, "key",
            payload,
            Collections.singletonList(new RecordHeader("content-type", "application/cloudevents+json; charset=utf-8".getBytes()))));
    // Nothing must be received because the deserializer is not supported; the poll delay
    // gives the source a chance to (incorrectly) deliver before we check.
    await().pollDelay(Duration.ofSeconds(1)).atMost(2, TimeUnit.MINUTES).until(received::isEmpty);
}
Example usage of io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig in the smallrye-reactive-messaging project (by smallrye): class KafkaSourceBatchWithCloudEventsTest, method testReceivingBinaryCloudEvents.
@SuppressWarnings("unchecked")
@Test
public void testReceivingBinaryCloudEvents() {
    // Plain String deserializer: binary cloud events carry their attributes in record headers.
    KafkaMapBasedConfig cfg = newCommonConfig();
    cfg.put("channel-name", topic);
    cfg.put("topic", topic);
    cfg.put("value.deserializer", StringDeserializer.class.getName());
    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(),
            new KafkaConnectorIncomingConfiguration(cfg),
            UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance(), -1);
    List<Message<?>> received = new ArrayList<>();
    source.getBatchStream().subscribe().with(batch -> received.addAll(getRecordsFromBatchMessage(batch)));
    // Binary cloud-event attributes are encoded as `ce_`-prefixed record headers.
    List<Header> ceHeaders = new ArrayList<>();
    ceHeaders.add(new RecordHeader("ce_specversion", CloudEventMetadata.CE_VERSION_1_0.getBytes()));
    ceHeaders.add(new RecordHeader("ce_type", "type".getBytes()));
    ceHeaders.add(new RecordHeader("ce_source", "test://test".getBytes()));
    ceHeaders.add(new RecordHeader("ce_id", "id".getBytes()));
    ceHeaders.add(new RecordHeader("ce_time", "2020-07-23T07:59:04Z".getBytes()));
    ceHeaders.add(new RecordHeader("content-type", "text/plain".getBytes()));
    ceHeaders.add(new RecordHeader("ce_subject", "foo".getBytes()));
    ceHeaders.add(new RecordHeader("ce_dataschema", "http://schema.io".getBytes()));
    ceHeaders.add(new RecordHeader("ce_ext", "bar".getBytes()));
    // Non-`ce_` headers must NOT become extensions.
    ceHeaders.add(new RecordHeader("some-header", "baz".getBytes()));
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, "key", "Hello World", ceHeaders));
    await().atMost(2, TimeUnit.MINUTES).until(() -> received.size() >= 1);
    Message<?> first = received.get(0);
    IncomingKafkaCloudEventMetadata<String, String> ceMetadata = first.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(ceMetadata).isNotNull();
    // Mandatory attributes.
    assertThat(ceMetadata.getSpecVersion()).isEqualTo(CloudEventMetadata.CE_VERSION_1_0);
    assertThat(ceMetadata.getType()).isEqualTo("type");
    assertThat(ceMetadata.getId()).isEqualTo("id");
    assertThat(ceMetadata.getSource()).isEqualTo(URI.create("test://test"));
    // Optional attributes.
    assertThat(ceMetadata.getSubject()).hasValue("foo");
    assertThat(ceMetadata.getDataSchema()).hasValue(URI.create("http://schema.io"));
    assertThat(ceMetadata.getTimeStamp()).isNotEmpty();
    assertThat(ceMetadata.getData()).isEqualTo("Hello World");
    // Rule 3.2.1 - the content-type must be mapped to the datacontenttype attribute
    assertThat(ceMetadata.getDataContentType()).hasValue("text/plain");
    // Rule 3.2.3
    assertThat(ceMetadata.getExtension("ext")).hasValue("bar");
    assertThat(ceMetadata.getExtension("some-header")).isEmpty();
    // Extensions
    assertThat(ceMetadata.getKey()).isEqualTo("key");
    // Rule 3.1 - partitionkey attribute
    assertThat(ceMetadata.<String> getExtension("partitionkey")).hasValue("key");
    assertThat(ceMetadata.getTopic()).isEqualTo(topic);
    assertThat(first.getPayload()).isInstanceOf(String.class).isEqualTo("Hello World");
}
Example usage of io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig in the smallrye-reactive-messaging project (by smallrye): class KafkaSourceBatchWithCloudEventsTest, method testReceivingStructuredCloudEventsWithSupportDisabled.
@SuppressWarnings("unchecked")
@Test
public void testReceivingStructuredCloudEventsWithSupportDisabled() {
    // Cloud-event support is explicitly disabled: the record must arrive as a plain JsonObject.
    KafkaMapBasedConfig cfg = newCommonConfig();
    cfg.put("channel-name", topic);
    cfg.put("topic", topic);
    cfg.put("value.deserializer", JsonObjectDeserializer.class.getName());
    cfg.put("cloud-events", false);
    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(),
            new KafkaConnectorIncomingConfiguration(cfg),
            UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance(), -1);
    List<Message<?>> received = new ArrayList<>();
    source.getBatchStream().subscribe().with(batch -> received.addAll(getRecordsFromBatchMessage(batch)));
    // A fully-populated structured cloud event, serialized as a JSON string.
    String structuredEvent = new JsonObject()
            .put("specversion", CloudEventMetadata.CE_VERSION_1_0)
            .put("type", "type")
            .put("id", "id")
            .put("source", "test://test")
            .put("subject", "foo")
            .put("datacontenttype", "application/json")
            .put("dataschema", "http://schema.io")
            .put("time", "2020-07-23T09:12:34Z")
            .put("data", new JsonObject().put("name", "neo"))
            .encode();
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, null, structuredEvent,
            Collections.singletonList(new RecordHeader("content-type", "application/cloudevents+json; charset=utf-8".getBytes()))));
    await().atMost(2, TimeUnit.MINUTES).until(() -> received.size() >= 1);
    Message<?> first = received.get(0);
    // With cloud-events disabled, no cloud-event metadata must be attached.
    IncomingKafkaCloudEventMetadata<String, JsonObject> ceMetadata = first.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(ceMetadata).isNull();
    assertThat(first.getPayload()).isInstanceOf(JsonObject.class);
    assertThat(((JsonObject) first.getPayload()).getJsonObject("data").getString("name")).isEqualTo("neo");
}
Aggregations