Usage of io.smallrye.reactive.messaging.ce.CloudEventMetadata in the smallrye-reactive-messaging project by SmallRye.
The snippet below is the method testReceivingStructuredCloudEventsWithoutMatchingContentTypeIsNotReadACloudEvent from the class KafkaSourceWithCloudEventsTest.
@SuppressWarnings("unchecked")
@Test
public void testReceivingStructuredCloudEventsWithoutMatchingContentTypeIsNotReadACloudEvent() {
    // A record carrying valid Cloud Events JSON must NOT be decoded as a structured
    // Cloud Event when its content-type header does not match the Cloud Events media
    // type: the message should surface as a plain String with no CE metadata.
    KafkaMapBasedConfig cfg = newCommonConfig();
    cfg.put("topic", topic);
    cfg.put("value.deserializer", StringDeserializer.class.getName());
    cfg.put("channel-name", topic);

    source = new KafkaSource<>(vertx, UUID.randomUUID().toString(),
            new KafkaConnectorIncomingConfiguration(cfg),
            UnsatisfiedInstance.instance(), CountKafkaCdiEvents.noCdiEvents,
            UnsatisfiedInstance.instance(), -1);

    List<Message<?>> received = new ArrayList<>();
    source.getStream().subscribe().with(received::add);

    // The body is structurally a Cloud Event, but the header is plain application/json.
    String body = new JsonObject()
            .put("specversion", CloudEventMetadata.CE_VERSION_1_0)
            .put("type", "type")
            .put("id", "id")
            .put("source", "test://test")
            .put("data", new JsonObject().put("name", "neo"))
            .encode();
    companion.produceStrings().fromRecords(new ProducerRecord<>(topic, null, null, "key", body,
            Collections.singletonList(
                    new RecordHeader("content-type", "application/json; charset=utf-8".getBytes()))));

    await().atMost(2, TimeUnit.MINUTES).until(() -> received.size() >= 1);

    Message<?> message = received.get(0);
    // Neither the Kafka-specific nor the generic Cloud Event metadata may be attached.
    IncomingKafkaCloudEventMetadata<String, JsonObject> kafkaCeMetadata = message
            .getMetadata(IncomingKafkaCloudEventMetadata.class).orElse(null);
    assertThat(kafkaCeMetadata).isNull();
    CloudEventMetadata<JsonObject> ceMetadata = message.getMetadata(CloudEventMetadata.class).orElse(null);
    assertThat(ceMetadata).isNull();

    // The payload stays a raw String whose JSON content is passed through untouched.
    assertThat(message.getPayload()).isInstanceOf(String.class);
    JsonObject json = new JsonObject(message.getPayload().toString());
    assertThat(json.getString("id")).isEqualTo("id");
}
Another usage of io.smallrye.reactive.messaging.ce.CloudEventMetadata in the smallrye-reactive-messaging project by SmallRye.
The snippet below is the method createFromBinaryCloudEvent from the class KafkaCloudEventHelper.
/**
 * Builds the incoming Cloud Event metadata for a Kafka record using the
 * <em>binary</em> content mode, i.e. the Cloud Event attributes are carried
 * as Kafka headers and the record value is the event data.
 *
 * @param record the consumed Kafka record, must carry the mandatory Cloud Event headers
 * @return the incoming Kafka Cloud Event metadata wrapping the extracted attributes
 * @throws IllegalArgumentException if the source header is missing
 */
public static <T, K> IncomingKafkaCloudEventMetadata<K, T> createFromBinaryCloudEvent(ConsumerRecord<?, T> record) {
    DefaultCloudEventMetadataBuilder<T> builder = new DefaultCloudEventMetadataBuilder<>();

    // Copy every Kafka header into a mutable map. Each well-known attribute is
    // removed as it is consumed, so that only candidate extension attributes remain.
    Map<String, String> attributes = new HashMap<>();
    // Rules 3.2.3 - header values are decoded as UTF-8.
    record.headers().forEach(h -> attributes.put(h.key(), new String(h.value(), StandardCharsets.UTF_8)));

    // Mandatory attributes.
    builder.withSpecVersion(attributes.remove(KAFKA_HEADER_FOR_SPEC_VERSION));
    builder.withId(attributes.remove(KAFKA_HEADER_FOR_ID));
    String source = attributes.remove(KAFKA_HEADER_FOR_SOURCE);
    if (source == null) {
        throw new IllegalArgumentException("The Kafka record must contain the " + KAFKA_HEADER_FOR_SOURCE + " header");
    }
    builder.withSource(URI.create(source));
    builder.withType(attributes.remove(KAFKA_HEADER_FOR_TYPE));

    // Optional attributes.
    // Rules 3.2.1 - datacontenttype is taken from the record's content-type header.
    String contentType = attributes.remove(KAFKA_HEADER_CONTENT_TYPE);
    if (contentType != null) {
        builder.withDataContentType(contentType);
    }
    String dataSchema = attributes.remove(KAFKA_HEADER_FOR_SCHEMA);
    if (dataSchema != null) {
        builder.withDataSchema(URI.create(dataSchema));
    }
    String subject = attributes.remove(KAFKA_HEADER_FOR_SUBJECT);
    if (subject != null) {
        builder.withSubject(subject);
    }
    String timestamp = attributes.remove(KAFKA_HEADER_FOR_TIME);
    if (timestamp != null) {
        builder.withTimestamp(ZonedDateTime.parse(timestamp, RFC3339_DATE_FORMAT));
    }

    // Extensions: the record key and topic, plus every remaining header carrying
    // the Cloud Event header prefix.
    if (record.key() != null) {
        builder.withExtension(CE_KAFKA_KEY, record.key());
    }
    builder.withExtension(CE_KAFKA_TOPIC, record.topic());
    for (Map.Entry<String, String> entry : attributes.entrySet()) {
        if (entry.getKey().startsWith(CE_HEADER_PREFIX)) {
            // Implementation choice: extension attribute values are kept as String.
            builder.withExtension(entry.getKey().substring(CE_HEADER_PREFIX.length()), entry.getValue());
        }
    }

    // Data: the record value is the event payload.
    builder.withData(record.value());

    BaseCloudEventMetadata<T> metadata = builder.build();
    return new DefaultIncomingKafkaCloudEventMetadata<>(new DefaultIncomingCloudEventMetadata<>(metadata));
}
Aggregations