
Example 1 with IncomingKafkaCloudEventMetadata

Use of io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata in the smallrye-reactive-messaging project by smallrye.

From the class KafkaSourceBatchWithCloudEventsTest, method testReceivingBinaryCloudEvents: it verifies that a record carrying binary-mode Cloud Event headers is surfaced with IncomingKafkaCloudEventMetadata populated from those headers.

@SuppressWarnings("unchecked")
@Test
public void testReceivingBinaryCloudEvents() {
    KafkaMapBasedConfig config = newCommonConfig();
    config.put("topic", topic);
    config.put("value.deserializer", StringDeserializer.class.getName());
    config.put("channel-name", topic);
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(config);
    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(), ic, UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance(), -1);
    List<Message<?>> messages = new ArrayList<>();
    source.getBatchStream().subscribe().with(m -> messages.addAll(getRecordsFromBatchMessage(m)));
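    // Binary-mode Cloud Events carry their attributes as ce_-prefixed Kafka headers
    // (CloudEvents Kafka protocol binding), alongside a regular content-type header.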
    List<Header> headers = new ArrayList<>();
    headers.add(new RecordHeader("ce_specversion", CloudEventMetadata.CE_VERSION_1_0.getBytes()));
    headers.add(new RecordHeader("ce_type", "type".getBytes()));
    headers.add(new RecordHeader("ce_source", "test://test".getBytes()));
    headers.add(new RecordHeader("ce_id", "id".getBytes()));
    headers.add(new RecordHeader("ce_time", "2020-07-23T07:59:04Z".getBytes()));
    headers.add(new RecordHeader("content-type", "text/plain".getBytes()));
    headers.add(new RecordHeader("ce_subject", "foo".getBytes()));
    headers.add(new RecordHeader("ce_dataschema", "http://schema.io".getBytes()));
    headers.add(new RecordHeader("ce_ext", "bar".getBytes()));
    headers.add(new RecordHeader("some-header", "baz".getBytes()));
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, "key", "Hello World", headers));
    await().atMost(2, TimeUnit.MINUTES).until(() -> messages.size() >= 1);
    Message<?> message = messages.get(0);
    IncomingKafkaCloudEventMetadata<String, String> metadata = message.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(metadata).isNotNull();
    assertThat(metadata.getSpecVersion()).isEqualTo(CloudEventMetadata.CE_VERSION_1_0);
    assertThat(metadata.getType()).isEqualTo("type");
    assertThat(metadata.getId()).isEqualTo("id");
    assertThat(metadata.getSource()).isEqualTo(URI.create("test://test"));
    assertThat(metadata.getSubject()).hasValue("foo");
    assertThat(metadata.getDataSchema()).hasValue(URI.create("http://schema.io"));
    assertThat(metadata.getTimeStamp()).isNotEmpty();
    assertThat(metadata.getData()).isEqualTo("Hello World");
    // Rule 3.2.1 - the content-type must be mapped to the datacontenttype attribute
    assertThat(metadata.getDataContentType()).hasValue("text/plain");
    // Rule 3.2.3
    assertThat(metadata.getExtension("ext")).hasValue("bar");
    assertThat(metadata.getExtension("some-header")).isEmpty();
    // Extensions
    assertThat(metadata.getKey()).isEqualTo("key");
    // Rule 3.1 - partitionkey attribute
    assertThat(metadata.<String>getExtension("partitionkey")).hasValue("key");
    assertThat(metadata.getTopic()).isEqualTo(topic);
    assertThat(message.getPayload()).isInstanceOf(String.class).isEqualTo("Hello World");
}
Also used: Message(org.eclipse.microprofile.reactive.messaging.Message) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ArrayList(java.util.ArrayList) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) Header(org.apache.kafka.common.header.Header) IncomingKafkaCloudEventMetadata(io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata) KafkaConnectorIncomingConfiguration(io.smallrye.reactive.messaging.kafka.KafkaConnectorIncomingConfiguration) KafkaMapBasedConfig(io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig) Test(org.junit.jupiter.api.Test)
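For context, the same metadata can be consumed outside the test harness through a channel-driven bean. Below is a minimal sketch, assuming a hypothetical channel name "cloud-events-in" and a String payload, neither of which appears in the example above.

import java.util.concurrent.CompletionStage;

import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.eclipse.microprofile.reactive.messaging.Message;

import io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata;

// Registered as a CDI bean (scope annotation omitted; the namespace depends on the target runtime).
public class CloudEventConsumer {

    // "cloud-events-in" is a placeholder channel name for this sketch.
    @Incoming("cloud-events-in")
    public CompletionStage<Void> consume(Message<String> message) {
        // The metadata is present only when the record is recognized as a Cloud Event
        // and cloud-events support is enabled on the channel (the default).
        message.getMetadata(IncomingKafkaCloudEventMetadata.class)
                .ifPresent(ce -> System.out.printf("id=%s, type=%s, source=%s, topic=%s%n",
                        ce.getId(), ce.getType(), ce.getSource(), ce.getTopic()));
        return message.ack();
    }
}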

Example 2 with IncomingKafkaCloudEventMetadata

Use of io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata in the smallrye-reactive-messaging project by smallrye.

From the class KafkaSourceBatchWithCloudEventsTest, method testReceivingStructuredCloudEventsWithSupportDisabled: it verifies that, with cloud-events support disabled on the channel, a structured Cloud Event is delivered as a plain JsonObject payload and no IncomingKafkaCloudEventMetadata is attached.

@SuppressWarnings("unchecked")
@Test
public void testReceivingStructuredCloudEventsWithSupportDisabled() {
    KafkaMapBasedConfig config = newCommonConfig();
    config.put("topic", topic);
    config.put("value.deserializer", JsonObjectDeserializer.class.getName());
    config.put("channel-name", topic);
    config.put("cloud-events", false);
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(config);
    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(), ic, UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance(), -1);
    List<Message<?>> messages = new ArrayList<>();
    source.getBatchStream().subscribe().with(m -> messages.addAll(getRecordsFromBatchMessage(m)));
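    // Structured mode: the whole Cloud Event is serialized as a JSON envelope in the record value,
    // identified by the application/cloudevents+json content-type header.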
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, null,
            new JsonObject()
                    .put("specversion", CloudEventMetadata.CE_VERSION_1_0)
                    .put("type", "type")
                    .put("id", "id")
                    .put("source", "test://test")
                    .put("subject", "foo")
                    .put("datacontenttype", "application/json")
                    .put("dataschema", "http://schema.io")
                    .put("time", "2020-07-23T09:12:34Z")
                    .put("data", new JsonObject().put("name", "neo"))
                    .encode(),
            Collections.singletonList(
                    new RecordHeader("content-type", "application/cloudevents+json; charset=utf-8".getBytes()))));
    await().atMost(2, TimeUnit.MINUTES).until(() -> messages.size() >= 1);
    Message<?> message = messages.get(0);
    IncomingKafkaCloudEventMetadata<String, JsonObject> metadata = message.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(metadata).isNull();
    assertThat(message.getPayload()).isInstanceOf(JsonObject.class);
    assertThat(((JsonObject) message.getPayload()).getJsonObject("data").getString("name")).isEqualTo("neo");
}
Also used: Message(org.eclipse.microprofile.reactive.messaging.Message) ArrayList(java.util.ArrayList) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) JsonObject(io.vertx.core.json.JsonObject) JsonObjectDeserializer(io.vertx.kafka.client.serialization.JsonObjectDeserializer) IncomingKafkaCloudEventMetadata(io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata) KafkaConnectorIncomingConfiguration(io.smallrye.reactive.messaging.kafka.KafkaConnectorIncomingConfiguration) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) KafkaMapBasedConfig(io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig) Test(org.junit.jupiter.api.Test)
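With cloud-events support disabled, the application receives the structured envelope verbatim and must unwrap it itself. A minimal sketch of such a consumer follows, assuming a hypothetical channel name "raw-events" that is not part of the example above.

import java.util.concurrent.CompletionStage;

import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.eclipse.microprofile.reactive.messaging.Message;

import io.vertx.core.json.JsonObject;

public class RawEnvelopeConsumer {

    // "raw-events" is a placeholder channel name for this sketch.
    @Incoming("raw-events")
    public CompletionStage<Void> consume(Message<JsonObject> message) {
        // No IncomingKafkaCloudEventMetadata is attached in this mode; the payload is the raw envelope.
        JsonObject envelope = message.getPayload();
        String type = envelope.getString("type");
        JsonObject data = envelope.getJsonObject("data");
        System.out.println("received " + type + " with data " + data.encode());
        return message.ack();
    }
}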

Example 3 with IncomingKafkaCloudEventMetadata

Use of io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata in the smallrye-reactive-messaging project by smallrye.

From the class KafkaSourceBatchWithCloudEventsTest, method testReceivingBinaryCloudEventsWithSupportDisabled: it verifies that, with cloud-events support disabled, a record carrying binary-mode ce_ headers is delivered as a plain String payload and no IncomingKafkaCloudEventMetadata is attached.

@SuppressWarnings("unchecked")
@Test
public void testReceivingBinaryCloudEventsWithSupportDisabled() {
    KafkaMapBasedConfig config = newCommonConfig();
    config.put("topic", topic);
    config.put("value.deserializer", StringDeserializer.class.getName());
    config.put("channel-name", topic);
    config.put("cloud-events", false);
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(config);
    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(), ic, UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance(), -1);
    List<Message<?>> messages = new ArrayList<>();
    source.getBatchStream().subscribe().with(m -> messages.addAll(getRecordsFromBatchMessage(m)));
    List<Header> headers = new ArrayList<>();
    headers.add(new RecordHeader("ce_specversion", CloudEventMetadata.CE_VERSION_1_0.getBytes()));
    headers.add(new RecordHeader("ce_type", "type".getBytes()));
    headers.add(new RecordHeader("ce_source", "test://test".getBytes()));
    headers.add(new RecordHeader("ce_id", "id".getBytes()));
    headers.add(new RecordHeader("ce_time", "2020-07-23T07:59:04Z".getBytes()));
    headers.add(new RecordHeader("content-type", "text/plain".getBytes()));
    headers.add(new RecordHeader("ce_subject", "foo".getBytes()));
    headers.add(new RecordHeader("ce_dataschema", "http://schema.io".getBytes()));
    headers.add(new RecordHeader("ce_ext", "bar".getBytes()));
    headers.add(new RecordHeader("some-header", "baz".getBytes()));
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, "key", "Hello World", headers));
    await().atMost(2, TimeUnit.MINUTES).until(() -> messages.size() >= 1);
    Message<?> message = messages.get(0);
    IncomingKafkaCloudEventMetadata<String, String> metadata = message.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(metadata).isNull();
    assertThat(message.getPayload()).isInstanceOf(String.class).isEqualTo("Hello World");
}
Also used: Message(org.eclipse.microprofile.reactive.messaging.Message) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ArrayList(java.util.ArrayList) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) Header(org.apache.kafka.common.header.Header) IncomingKafkaCloudEventMetadata(io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata) KafkaConnectorIncomingConfiguration(io.smallrye.reactive.messaging.kafka.KafkaConnectorIncomingConfiguration) KafkaMapBasedConfig(io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig) Test(org.junit.jupiter.api.Test)

Example 4 with IncomingKafkaCloudEventMetadata

Use of io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata in the smallrye-reactive-messaging project by smallrye.

From the class KafkaSourceBatchWithCloudEventsTest, method testWithBeanReceivingBinaryAndStructuredCloudEvents: it verifies that a consuming bean receives binary and structured Cloud Events interleaved on the same channel, each carrying IncomingKafkaCloudEventMetadata.

@SuppressWarnings("unchecked")
@Test
public void testWithBeanReceivingBinaryAndStructuredCloudEvents() {
    ConsumptionBean bean = run(getConfig(topic));
    List<KafkaRecord<String, String>> list = bean.getKafkaRecords();
    assertThat(list).isEmpty();
    // Send a binary cloud event
    List<Header> headers = new ArrayList<>();
    headers.add(new RecordHeader("ce_specversion", CloudEventMetadata.CE_VERSION_1_0.getBytes()));
    headers.add(new RecordHeader("ce_type", "type".getBytes()));
    headers.add(new RecordHeader("ce_source", "test://test".getBytes()));
    headers.add(new RecordHeader("ce_id", "id".getBytes()));
    headers.add(new RecordHeader("content-type", "text/plain".getBytes()));
    headers.add(new RecordHeader("ce_subject", "foo".getBytes()));
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, "binary", "Hello Binary 1", headers));
    await().atMost(10, TimeUnit.SECONDS).until(() -> list.size() >= 1);
    KafkaRecord<String, String> record = list.get(0);
    assertThat(record.getTopic()).isEqualTo(topic);
    IncomingKafkaCloudEventMetadata<String, String> metadata = record.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(metadata).isNotNull();
    assertThat(metadata.getTopic()).isEqualTo(topic);
    assertThat(metadata.getKey()).isEqualTo("binary");
    assertThat(metadata.getId()).isEqualTo("id");
    assertThat(metadata.getSubject()).hasValue("foo");
    assertThat(metadata.getData()).isEqualTo("Hello Binary 1");
    assertThat(record.getPayload()).isEqualTo("Hello Binary 1");
    // send a structured event
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, "structured", new JsonObject().put("specversion", CloudEventMetadata.CE_VERSION_1_0).put("type", "type").put("id", "id").put("source", "test://test").put("subject", "bar").put("datacontenttype", "application/json").put("dataschema", "http://schema.io").put("time", "2020-07-23T09:12:34Z").put("data", "Hello Structured 1").encode(), Collections.singletonList(new RecordHeader("content-type", "application/cloudevents+json; charset=utf-8".getBytes()))));
    await().atMost(10, TimeUnit.SECONDS).until(() -> list.size() >= 2);
    record = list.get(1);
    assertThat(record.getTopic()).isEqualTo(topic);
    metadata = record.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(metadata).isNotNull();
    assertThat(metadata.getTopic()).isEqualTo(topic);
    assertThat(metadata.getKey()).isEqualTo("structured");
    assertThat(metadata.getId()).isEqualTo("id");
    assertThat(metadata.getSubject()).hasValue("bar");
    assertThat(metadata.getData()).isEqualTo("Hello Structured 1");
    assertThat(record.getPayload()).contains("Hello Structured 1");
    // Send a last binary cloud event
    List<Header> headers2 = new ArrayList<>();
    headers2.add(new RecordHeader("ce_specversion", CloudEventMetadata.CE_VERSION_1_0.getBytes()));
    headers2.add(new RecordHeader("ce_type", "type".getBytes()));
    headers2.add(new RecordHeader("ce_source", "test://test".getBytes()));
    headers2.add(new RecordHeader("ce_id", "id".getBytes()));
    headers2.add(new RecordHeader("content-type", "text/plain".getBytes()));
    headers2.add(new RecordHeader("ce_subject", "foo".getBytes()));
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, "binary", "Hello Binary 2", headers2));
    await().atMost(10, TimeUnit.SECONDS).until(() -> list.size() >= 3);
    record = list.get(2);
    assertThat(record.getTopic()).isEqualTo(topic);
    metadata = record.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(metadata).isNotNull();
    assertThat(metadata.getTopic()).isEqualTo(topic);
    assertThat(metadata.getKey()).isEqualTo("binary");
    assertThat(metadata.getId()).isEqualTo("id");
    assertThat(metadata.getSubject()).hasValue("foo");
    assertThat(metadata.getData()).isEqualTo("Hello Binary 2");
    assertThat(record.getPayload()).isEqualTo("Hello Binary 2");
}
Also used: ArrayList(java.util.ArrayList) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) KafkaRecord(io.smallrye.reactive.messaging.kafka.KafkaRecord) JsonObject(io.vertx.core.json.JsonObject) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) Header(org.apache.kafka.common.header.Header) IncomingKafkaCloudEventMetadata(io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata) Test(org.junit.jupiter.api.Test)
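The ConsumptionBean used above is a test helper (its source is not shown here) that simply collects the records it receives. A comparable collector bean could look roughly like the following sketch; the class name and the channel name "data" are assumptions.

import java.util.List;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.CopyOnWriteArrayList;

import org.eclipse.microprofile.reactive.messaging.Incoming;

import io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata;
import io.smallrye.reactive.messaging.kafka.KafkaRecord;

public class CloudEventRecordCollector {

    private final List<KafkaRecord<String, String>> records = new CopyOnWriteArrayList<>();

    // "data" is a placeholder channel name for this sketch.
    @Incoming("data")
    public CompletionStage<Void> consume(KafkaRecord<String, String> record) {
        // Binary and structured Cloud Events both carry IncomingKafkaCloudEventMetadata
        // when cloud-events support is enabled (the default).
        record.getMetadata(IncomingKafkaCloudEventMetadata.class)
                .ifPresent(ce -> System.out.println(
                        "cloud event " + ce.getId() + ", subject " + ce.getSubject().orElse("n/a")));
        records.add(record);
        return record.ack();
    }

    public List<KafkaRecord<String, String>> getKafkaRecords() {
        return records;
    }
}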

Example 5 with IncomingKafkaCloudEventMetadata

Use of io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata in the smallrye-reactive-messaging project by smallrye.

From the class KafkaSourceWithCloudEventsTest, method testReceivingStructuredCloudEventsWithStringDeserializer: it verifies that a structured Cloud Event read with a plain StringDeserializer is unwrapped by the connector, exposing the attributes through IncomingKafkaCloudEventMetadata and the data as a JsonObject payload.

@SuppressWarnings("unchecked")
@Test
public void testReceivingStructuredCloudEventsWithStringDeserializer() {
    KafkaMapBasedConfig config = newCommonConfig();
    config.put("topic", topic);
    config.put("value.deserializer", StringDeserializer.class.getName());
    config.put("channel-name", topic);
    KafkaConnectorIncomingConfiguration ic = new KafkaConnectorIncomingConfiguration(config);
    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(), ic, UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents, UnsatisfiedInstance.instance(), -1);
    List<Message<?>> messages = new ArrayList<>();
    source.getStream().subscribe().with(messages::add);
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, null,
            new JsonObject()
                    .put("specversion", CloudEventMetadata.CE_VERSION_1_0)
                    .put("type", "type")
                    .put("id", "id")
                    .put("source", "test://test")
                    .put("subject", "foo")
                    .put("datacontenttype", "application/json")
                    .put("dataschema", "http://schema.io")
                    .put("time", "2020-07-23T09:12:34Z")
                    .put("data", new JsonObject().put("name", "neo"))
                    .encode(),
            Collections.singletonList(
                    new RecordHeader("content-type", "application/cloudevents+json; charset=utf-8".getBytes()))));
    await().atMost(2, TimeUnit.MINUTES).until(() -> messages.size() >= 1);
    Message<?> message = messages.get(0);
    IncomingKafkaCloudEventMetadata<String, JsonObject> metadata = message.getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(metadata).isNotNull();
    assertThat(metadata.getSpecVersion()).isEqualTo(CloudEventMetadata.CE_VERSION_1_0);
    assertThat(metadata.getType()).isEqualTo("type");
    assertThat(metadata.getId()).isEqualTo("id");
    assertThat(metadata.getSource()).isEqualTo(URI.create("test://test"));
    assertThat(metadata.getSubject()).hasValue("foo");
    assertThat(metadata.getDataContentType()).hasValue("application/json");
    assertThat(metadata.getDataSchema()).hasValue(URI.create("http://schema.io"));
    assertThat(metadata.getTimeStamp()).isNotEmpty();
    assertThat(metadata.getData().getString("name")).isEqualTo("neo");
    // Extensions
    assertThat(metadata.getKey()).isNull();
    // Rule 3.1 - partitionkey attribute
    assertThat(metadata.<String>getExtension("partitionkey")).isEmpty();
    assertThat(metadata.getTopic()).isEqualTo(topic);
    assertThat(message.getPayload()).isInstanceOf(JsonObject.class);
    assertThat(((JsonObject) message.getPayload()).getString("name")).isEqualTo("neo");
}
Also used: Message(org.eclipse.microprofile.reactive.messaging.Message) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ArrayList(java.util.ArrayList) CopyOnWriteArrayList(java.util.concurrent.CopyOnWriteArrayList) JsonObject(io.vertx.core.json.JsonObject) IncomingKafkaCloudEventMetadata(io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata) KafkaConnectorIncomingConfiguration(io.smallrye.reactive.messaging.kafka.KafkaConnectorIncomingConfiguration) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) KafkaMapBasedConfig(io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig) Test(org.junit.jupiter.api.Test)
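When cloud-events support is enabled (the default), the connector unwraps the structured envelope: the "data" member becomes the payload and the attributes are exposed through the metadata. A short sketch of the application-side access pattern, assuming a hypothetical channel name "structured-events":

import java.util.concurrent.CompletionStage;

import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.eclipse.microprofile.reactive.messaging.Message;

import io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata;
import io.vertx.core.json.JsonObject;

public class StructuredCloudEventConsumer {

    // "structured-events" is a placeholder channel name for this sketch.
    @Incoming("structured-events")
    public CompletionStage<Void> consume(Message<JsonObject> message) {
        message.getMetadata(IncomingKafkaCloudEventMetadata.class).ifPresent(ce -> {
            // getData() returns the unwrapped "data" member; here it is the JSON object sent above.
            JsonObject data = (JsonObject) ce.getData();
            System.out.println("name = " + data.getString("name"));
        });
        return message.ack();
    }
}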

Aggregations

IncomingKafkaCloudEventMetadata (io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata) - 17 usages
RecordHeader (org.apache.kafka.common.header.internals.RecordHeader) - 17 usages
ArrayList (java.util.ArrayList) - 16 usages
CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList) - 16 usages
Test (org.junit.jupiter.api.Test) - 16 usages
Message (org.eclipse.microprofile.reactive.messaging.Message) - 15 usages
KafkaConnectorIncomingConfiguration (io.smallrye.reactive.messaging.kafka.KafkaConnectorIncomingConfiguration) - 14 usages
KafkaMapBasedConfig (io.smallrye.reactive.messaging.kafka.base.KafkaMapBasedConfig) - 14 usages
JsonObject (io.vertx.core.json.JsonObject) - 12 usages
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer) - 9 usages
Header (org.apache.kafka.common.header.Header) - 8 usages
JsonObjectDeserializer (io.vertx.kafka.client.serialization.JsonObjectDeserializer) - 4 usages
CloudEventMetadata (io.smallrye.reactive.messaging.ce.CloudEventMetadata) - 2 usages
KafkaRecord (io.smallrye.reactive.messaging.kafka.KafkaRecord) - 2 usages
DefaultCloudEventMetadataBuilder (io.smallrye.reactive.messaging.ce.DefaultCloudEventMetadataBuilder) - 1 usage
OutgoingCloudEventMetadata (io.smallrye.reactive.messaging.ce.OutgoingCloudEventMetadata) - 1 usage
BaseCloudEventMetadata (io.smallrye.reactive.messaging.ce.impl.BaseCloudEventMetadata) - 1 usage
DefaultIncomingCloudEventMetadata (io.smallrye.reactive.messaging.ce.impl.DefaultIncomingCloudEventMetadata) - 1 usage
CE_KAFKA_KEY (io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata.CE_KAFKA_KEY) - 1 usage
CE_KAFKA_TOPIC (io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata.CE_KAFKA_TOPIC) - 1 usage