Use of io.smallrye.reactive.messaging.kafka.Record in project smallrye-reactive-messaging by smallrye.
Class KafkaCloudEventHelper, method createBinaryRecord.
@SuppressWarnings("rawtypes")
public static ProducerRecord<?, ?> createBinaryRecord(Message<?> message, String topic,
        OutgoingKafkaRecordMetadata<?> outgoingMetadata, IncomingKafkaRecordMetadata<?, ?> incomingMetadata,
        OutgoingCloudEventMetadata<?> ceMetadata, RuntimeKafkaSinkConfiguration configuration) {
    if (ceMetadata == null) {
        ceMetadata = OutgoingCloudEventMetadata.builder().build();
    }
    Integer partition = getPartition(outgoingMetadata, configuration);
    Object key = getKey(message, outgoingMetadata, ceMetadata, configuration);
    Long timestamp = getTimestamp(outgoingMetadata);
    List<Header> headers = getHeaders(outgoingMetadata, incomingMetadata, configuration);
    Optional<String> subject = getSubject(ceMetadata, configuration);
    Optional<String> contentType = getDataContentType(ceMetadata, configuration);
    Optional<URI> schema = getDataSchema(ceMetadata, configuration);

    // Add the Cloud Event headers, prefixed with ce_ (rule 3.2.3.1)
    // Mandatory attributes
    headers.add(new RecordHeader(KAFKA_HEADER_FOR_SPEC_VERSION,
            ceMetadata.getSpecVersion().getBytes(StandardCharsets.UTF_8)));
    headers.add(new RecordHeader(KAFKA_HEADER_FOR_ID,
            ceMetadata.getId().getBytes(StandardCharsets.UTF_8)));
    String type = getType(ceMetadata, configuration);
    headers.add(new RecordHeader(KAFKA_HEADER_FOR_TYPE, type.getBytes(StandardCharsets.UTF_8)));
    String source = getSource(ceMetadata, configuration);
    headers.add(new RecordHeader(KAFKA_HEADER_FOR_SOURCE, source.getBytes(StandardCharsets.UTF_8)));

    // Optional attributes
    subject.ifPresent(s -> headers.add(new RecordHeader(KAFKA_HEADER_FOR_SUBJECT,
            s.getBytes(StandardCharsets.UTF_8))));
    contentType.ifPresent(s -> {
        headers.add(new RecordHeader(KAFKA_HEADER_FOR_CONTENT_TYPE, s.getBytes(StandardCharsets.UTF_8)));
        // Rule 3.2.1 - in binary mode, the content-type header must be mapped to the
        // datacontenttype attribute.
        headers.add(new RecordHeader("content-type", s.getBytes(StandardCharsets.UTF_8)));
    });
    schema.ifPresent(s -> headers.add(new RecordHeader(KAFKA_HEADER_FOR_SCHEMA,
            s.toString().getBytes(StandardCharsets.UTF_8))));

    // Time attribute: prefer the Cloud Event timestamp, then the record timestamp,
    // then "now" if the connector is configured to insert one
    Optional<ZonedDateTime> ts = ceMetadata.getTimeStamp();
    if (ts.isPresent()) {
        ZonedDateTime time = ts.get();
        headers.add(new RecordHeader(KAFKA_HEADER_FOR_TIME,
                RFC3339_DATE_FORMAT.format(time).getBytes(StandardCharsets.UTF_8)));
    } else if (timestamp != null) {
        Instant instant = Instant.ofEpochMilli(timestamp);
        headers.add(new RecordHeader(KAFKA_HEADER_FOR_TIME,
                RFC3339_DATE_FORMAT.format(instant).getBytes(StandardCharsets.UTF_8)));
    } else if (configuration.getCloudEventsInsertTimestamp()) {
        ZonedDateTime now = ZonedDateTime.now();
        headers.add(new RecordHeader(KAFKA_HEADER_FOR_TIME,
                RFC3339_DATE_FORMAT.format(now).getBytes(StandardCharsets.UTF_8)));
    }

    // Extensions
    ceMetadata.getExtensions().forEach((k, v) -> {
        if (v != null) {
            headers.add(new RecordHeader(CE_HEADER_PREFIX + k, v.toString().getBytes(StandardCharsets.UTF_8)));
        }
    });

    // Unwrap Record payloads: the Record value becomes the Kafka record value
    Object payload = message.getPayload();
    if (payload instanceof Record) {
        payload = ((Record) payload).value();
    }
    return new ProducerRecord<>(topic, partition, timestamp, key, payload, headers);
}
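For context, a minimal producing-side sketch. The channel name and event attributes below are assumptions for illustration, not taken from the project: the message carries a Record payload, so createBinaryRecord unwraps the Record value into the ProducerRecord while the Cloud Event attributes travel as ce_-prefixed Kafka headers.

import java.net.URI;

import org.eclipse.microprofile.reactive.messaging.Message;
import org.eclipse.microprofile.reactive.messaging.Outgoing;

import io.smallrye.reactive.messaging.ce.OutgoingCloudEventMetadata;
import io.smallrye.reactive.messaging.kafka.Record;

public class BinaryCloudEventProducer {

    @Outgoing("orders") // hypothetical channel, using the default binary Cloud Events mode
    public Message<Record<String, String>> produce() {
        // These attributes end up as the ce_id, ce_type, and ce_source headers
        OutgoingCloudEventMetadata<String> ce = OutgoingCloudEventMetadata.<String>builder()
                .withId("event-1")
                .withType("com.example.order.created")
                .withSource(URI.create("//example/orders"))
                .build();
        // Record.of(key, value): the key becomes the Kafka record key,
        // the value becomes the Kafka record payload
        return Message.of(Record.of("order-17", "{\"quantity\":2}")).addMetadata(ce);
    }
}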
Use of io.smallrye.reactive.messaging.kafka.Record in project smallrye-reactive-messaging by smallrye.
Class KafkaCloudEventHelper, method createFromBinaryCloudEvent.
public static <T, K> IncomingKafkaCloudEventMetadata<K, T> createFromBinaryCloudEvent(ConsumerRecord<?, T> record) {
    DefaultCloudEventMetadataBuilder<T> builder = new DefaultCloudEventMetadataBuilder<>();

    // Build a map containing all the headers.
    // Each entry is removed as it is consumed, so that only extension attributes remain at the end.
    Map<String, String> headers = new HashMap<>();
    record.headers().forEach(kh -> {
        String key = kh.key();
        // Rule 3.2.3 - Force UTF-8
        String value = new String(kh.value(), StandardCharsets.UTF_8);
        headers.put(key, value);
    });

    // Required attributes
    builder.withSpecVersion(headers.remove(KAFKA_HEADER_FOR_SPEC_VERSION));
    builder.withId(headers.remove(KAFKA_HEADER_FOR_ID));
    String source = headers.remove(KAFKA_HEADER_FOR_SOURCE);
    if (source == null) {
        throw new IllegalArgumentException(
                "The Kafka record must contain the " + KAFKA_HEADER_FOR_SOURCE + " header");
    }
    builder.withSource(URI.create(source));
    builder.withType(headers.remove(KAFKA_HEADER_FOR_TYPE));

    // Optional attributes
    // Rule 3.2.1 - Set datacontenttype from the record's content-type header
    String ct = headers.remove(KAFKA_HEADER_CONTENT_TYPE);
    if (ct != null) {
        builder.withDataContentType(ct);
    }
    String schema = headers.remove(KAFKA_HEADER_FOR_SCHEMA);
    if (schema != null) {
        builder.withDataSchema(URI.create(schema));
    }
    String subject = headers.remove(KAFKA_HEADER_FOR_SUBJECT);
    if (subject != null) {
        builder.withSubject(subject);
    }
    String time = headers.remove(KAFKA_HEADER_FOR_TIME);
    if (time != null) {
        ZonedDateTime parse = ZonedDateTime.parse(time, RFC3339_DATE_FORMAT);
        builder.withTimestamp(parse);
    }

    // Extensions: expose the Kafka key and topic, plus any remaining ce_-prefixed headers
    if (record.key() != null) {
        builder.withExtension(CE_KAFKA_KEY, record.key());
    }
    builder.withExtension(CE_KAFKA_TOPIC, record.topic());
    headers.entrySet().stream()
            .filter(entry -> entry.getKey().startsWith(CE_HEADER_PREFIX))
            .forEach(entry -> {
                String key = entry.getKey().substring(CE_HEADER_PREFIX.length());
                // Implementation choice: extension attributes are stored as String.
                builder.withExtension(key, entry.getValue());
            });

    // Data
    builder.withData(record.value());

    BaseCloudEventMetadata<T> cloudEventMetadata = builder.build();
    return new DefaultIncomingKafkaCloudEventMetadata<>(
            new DefaultIncomingCloudEventMetadata<>(cloudEventMetadata));
}
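On the consuming side, the metadata reconstructed by this method can be retrieved from the incoming message. A minimal sketch; the channel name and the printed attributes are illustrative assumptions:

import java.util.concurrent.CompletionStage;

import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.eclipse.microprofile.reactive.messaging.Message;

import io.smallrye.reactive.messaging.kafka.IncomingKafkaCloudEventMetadata;

public class BinaryCloudEventConsumer {

    @Incoming("orders") // hypothetical channel
    public CompletionStage<Void> consume(Message<String> message) {
        // Metadata produced by createFromBinaryCloudEvent for binary-mode records
        IncomingKafkaCloudEventMetadata<String, String> ce = message
                .getMetadata(IncomingKafkaCloudEventMetadata.class)
                .orElseThrow(() -> new IllegalArgumentException("Expected a Cloud Event"));
        // Required attributes, extracted from the ce_ headers
        System.out.println(ce.getId() + " / " + ce.getType() + " / " + ce.getSource());
        // The Kafka key and topic are also exposed as extensions (CE_KAFKA_KEY / CE_KAFKA_TOPIC above)
        return message.ack();
    }
}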
Use of io.smallrye.reactive.messaging.kafka.Record in project smallrye-reactive-messaging by smallrye.
Class KafkaSink, method getProducerRecord.
@SuppressWarnings("rawtypes")
private ProducerRecord<?, ?> getProducerRecord(Message<?> message, OutgoingKafkaRecordMetadata<?> om,
        IncomingKafkaRecordMetadata<?, ?> im, String actualTopic) {
    int actualPartition = om == null || om.getPartition() <= -1 ? this.partition : om.getPartition();
    Object actualKey = getKey(message, om);
    // -1 means "no explicit timestamp": the producer/broker assigns one
    long actualTimestamp = (om == null || om.getTimestamp() == null) ? -1L : om.getTimestamp().toEpochMilli();

    Headers kafkaHeaders = new RecordHeaders();
    // Propagate the configured subset of incoming headers, if any
    if (!StringUtils.isNullOrEmpty(this.runtimeConfiguration.getPropagateHeaders())
            && im != null && im.getHeaders() != null) {
        Set<String> configuredHeaders = Arrays.stream(this.runtimeConfiguration.getPropagateHeaders().split(","))
                .map(String::trim)
                .collect(Collectors.toSet());
        for (Header header : im.getHeaders()) {
            if (configuredHeaders.contains(header.key())) {
                kafkaHeaders.add(header);
            }
        }
    }
    // Add outgoing metadata headers, overriding incoming headers if needed
    if (om != null && om.getHeaders() != null) {
        om.getHeaders().forEach(kafkaHeaders::add);
    }
    createOutgoingTrace(message, actualTopic, actualPartition, kafkaHeaders);

    // Unwrap Record payloads: the Record value becomes the Kafka record value
    Object payload = message.getPayload();
    if (payload instanceof Record) {
        payload = ((Record) payload).value();
    }
    return new ProducerRecord<>(actualTopic,
            actualPartition == -1 ? null : actualPartition,
            actualTimestamp == -1L ? null : actualTimestamp,
            actualKey, payload, kafkaHeaders);
}
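A sketch of the producing side this method serves; the channel and header names are assumptions. OutgoingKafkaRecordMetadata supplies the partition and extra headers, while the Record payload contributes the key and value; the propagate-headers connector attribute (read here through getPropagateHeaders()) lists the comma-separated incoming header keys to copy onto the outgoing record.

import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.kafka.common.header.internals.RecordHeader;
import org.eclipse.microprofile.reactive.messaging.Message;

import io.smallrye.reactive.messaging.kafka.Record;
import io.smallrye.reactive.messaging.kafka.api.OutgoingKafkaRecordMetadata;

public class OutgoingMetadataExample {

    public Message<Record<String, String>> withMetadata() {
        OutgoingKafkaRecordMetadata<String> om = OutgoingKafkaRecordMetadata.<String>builder()
                .withPartition(0)
                .withHeaders(Collections.singletonList(
                        // hypothetical header; overrides any propagated incoming header with the same key
                        new RecordHeader("trace-id", "abc123".getBytes(StandardCharsets.UTF_8))))
                .build();
        // getProducerRecord takes the partition and headers from the metadata,
        // and the key/value from the Record payload
        return Message.of(Record.of("key-1", "value-1")).addMetadata(om);
    }
}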
Use of io.smallrye.reactive.messaging.kafka.Record in project smallrye-reactive-messaging by smallrye.
Class KafkaCloudEventHelper, method createStructuredRecord.
@SuppressWarnings("rawtypes")
public static ProducerRecord<?, ?> createStructuredRecord(Message<?> message, String topic,
        OutgoingKafkaRecordMetadata<?> outgoingMetadata, IncomingKafkaRecordMetadata<?, ?> incomingMetadata,
        OutgoingCloudEventMetadata<?> ceMetadata, RuntimeKafkaSinkConfiguration configuration) {
    if (ceMetadata == null) {
        ceMetadata = OutgoingCloudEventMetadata.builder().build();
    }
    Integer partition = getPartition(outgoingMetadata, configuration);
    Object key = getKey(message, outgoingMetadata, ceMetadata, configuration);
    Long timestamp = getTimestamp(outgoingMetadata);
    List<Header> headers = getHeaders(outgoingMetadata, incomingMetadata, configuration);
    String source = getSource(ceMetadata, configuration);
    String type = getType(ceMetadata, configuration);
    Optional<String> subject = getSubject(ceMetadata, configuration);
    Optional<String> dataContentType = getDataContentType(ceMetadata, configuration);
    Optional<URI> schema = getDataSchema(ceMetadata, configuration);

    // If the headers do not already contain a "content-type" header, add the structured one
    Optional<Header> contentType = headers.stream()
            .filter(h -> h.key().equalsIgnoreCase(KAFKA_HEADER_CONTENT_TYPE))
            .findFirst();
    if (!contentType.isPresent()) {
        headers.add(new RecordHeader(KAFKA_HEADER_CONTENT_TYPE, STRUCTURED_CONTENT_TYPE.getBytes()));
    }

    // Build the JSON object representing the Cloud Event
    JsonObject json = new JsonObject();
    json.put(CE_ATTRIBUTE_SPEC_VERSION, ceMetadata.getSpecVersion())
            .put(CE_ATTRIBUTE_TYPE, type)
            .put(CE_ATTRIBUTE_SOURCE, source)
            .put(CE_ATTRIBUTE_ID, ceMetadata.getId());
    ZonedDateTime time = ceMetadata.getTimeStamp().orElse(null);
    if (time != null) {
        json.put(CE_ATTRIBUTE_TIME, time.toInstant());
    } else if (configuration.getCloudEventsInsertTimestamp()) {
        json.put(CE_ATTRIBUTE_TIME, Instant.now());
    }
    schema.ifPresent(s -> json.put(CE_ATTRIBUTE_DATA_SCHEMA, s));
    dataContentType.ifPresent(s -> json.put(CE_ATTRIBUTE_DATA_CONTENT_TYPE, s));
    subject.ifPresent(s -> json.put(CE_ATTRIBUTE_SUBJECT, s));

    // Extensions
    ceMetadata.getExtensions().forEach(json::put);

    // Encode the payload into the "data" attribute, unwrapping Record payloads first
    Object payload = message.getPayload();
    if (payload instanceof Record) {
        payload = ((Record) payload).value();
    }
    if (payload instanceof String) {
        json.put("data", payload);
    } else {
        json.put("data", JsonObject.mapFrom(payload));
    }
    return new ProducerRecord<>(topic, partition, timestamp, key, json.encode(), headers);
}
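Structured mode must be enabled on the channel; the JSON envelope built above then becomes the record value. A minimal sketch, assuming the cloud-events-mode connector attribute is set to structured; the channel name and attributes are illustrative:

import java.net.URI;

import org.eclipse.microprofile.reactive.messaging.Message;
import org.eclipse.microprofile.reactive.messaging.Outgoing;

import io.smallrye.reactive.messaging.ce.OutgoingCloudEventMetadata;
import io.smallrye.reactive.messaging.kafka.Record;

public class StructuredCloudEventProducer {

    // Assumed configuration:
    // mp.messaging.outgoing.orders.cloud-events-mode=structured
    @Outgoing("orders") // hypothetical channel
    public Message<Record<String, String>> produce() {
        OutgoingCloudEventMetadata<String> ce = OutgoingCloudEventMetadata.<String>builder()
                .withId("event-1")
                .withType("com.example.order.created")
                .withSource(URI.create("//example/orders"))
                .build();
        // The String payload is placed verbatim in the "data" attribute of the JSON
        // envelope; non-String payloads go through JsonObject.mapFrom instead
        return Message.of(Record.of("order-17", "{\"quantity\":2}")).addMetadata(ce);
    }
}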