Use of org.bf2.cos.fleetshard.operator.camel.model.Kamelet in the cos-fleetshard project by bf2fc6cc711aee1a0c2a.
The createSteps method of the CamelOperandSupport class.
/**
 * Assembles the ordered list of {@link KameletEndpoint} step definitions for a connector:
 * an optional decoder step (driven by the "consumes" format), the user-configured processor
 * steps, and an optional encoder step (driven by the "produces" format). As a side effect it
 * sets the Kafka value serializer/deserializer property on the given {@code kafkaEndpoint}.
 *
 * @param connector              the managed connector, used to detect a schema registry
 * @param connectorConfiguration configuration holding the data-shape and processor specs
 * @param shardMetadata          shard metadata providing format defaults, content classes
 *                               and the processor-name to kamelet-template mapping
 * @param kafkaEndpoint          the Kafka endpoint whose serde properties are mutated in place
 * @return the ordered step definitions (decoder, processors, encoder)
 * @throws IllegalArgumentException if a data-shape format or processor name is not supported
 */
public static List<KameletEndpoint> createSteps(
    ManagedConnector connector,
    ConnectorConfiguration<ObjectNode, ObjectNode> connectorConfiguration,
    CamelShardMetadata shardMetadata,
    KameletEndpoint kafkaEndpoint) {

    // An explicit data-shape format in the configuration wins over the shard metadata default.
    final String consumes = dataShapeFormat(connectorConfiguration, "/consumes/format", shardMetadata.getConsumes());
    final String produces = dataShapeFormat(connectorConfiguration, "/produces/format", shardMetadata.getProduces());

    final ArrayNode steps = connectorConfiguration.getProcessorsSpec();
    final List<KameletEndpoint> stepDefinitions = new ArrayList<>(steps.size() + 2);

    addDecoderStep(stepDefinitions, consumes, produces, shardMetadata);
    addProcessorSteps(stepDefinitions, steps, shardMetadata);
    addEncoderStep(stepDefinitions, produces, shardMetadata);

    // If it is a sink, then it consumes from kafka.
    if (isSink(shardMetadata)) {
        kafkaEndpoint.getProperties().put("valueDeserializer", deserializerFor(connector, consumes));
    }
    // If it is a source, then it produces to kafka.
    if (isSource(shardMetadata)) {
        kafkaEndpoint.getProperties().put("valueSerializer", serializerFor(connector, produces));
    }

    return stepDefinitions;
}

/**
 * Reads the data-shape format at the given JSON pointer, falling back to the supplied
 * shard-metadata default when the spec or the node is absent.
 */
private static String dataShapeFormat(
    ConnectorConfiguration<ObjectNode, ObjectNode> connectorConfiguration,
    String pointer,
    String defaultFormat) {

    return Optional.ofNullable(connectorConfiguration.getDataShapeSpec())
        .map(spec -> spec.at(pointer))
        .filter(node -> !node.isMissingNode())
        .map(JsonNode::asText)
        .orElse(defaultFormat);
}

/** Prepends the decoder kamelet matching the "consumes" format, when one is required. */
private static void addDecoderStep(
    List<KameletEndpoint> stepDefinitions,
    String consumes,
    String produces,
    CamelShardMetadata shardMetadata) {

    if (consumes == null) {
        return;
    }
    switch (consumes) {
        case "application/json":
            stepDefinitions.add(kamelet("cos-decoder-json-action", properties -> {
                if (shardMetadata.getConsumesClass() != null) {
                    properties.put("contentClass", shardMetadata.getConsumesClass());
                }
            }));
            break;
        case "avro/binary":
            stepDefinitions.add(kamelet("cos-decoder-avro-action", properties -> {
                if (shardMetadata.getConsumesClass() != null) {
                    properties.put("contentClass", shardMetadata.getConsumesClass());
                }
            }));
            break;
        case "application/x-java-object":
            stepDefinitions.add(kamelet("cos-decoder-pojo-action", properties -> {
                if (shardMetadata.getConsumesClass() != null) {
                    // NOTE(review): the POJO decoder is given the *produces* mime type, guarded
                    // by the consumes class — kept as in the original; confirm it is intentional.
                    properties.put("mimeType", produces);
                }
            }));
            break;
        case "text/plain":
        case "application/octet-stream":
            // Raw formats need no decoding step.
            break;
        default:
            throw new IllegalArgumentException("Unsupported value format " + consumes);
    }
}

/** Appends one step per configured processor, resolving its kamelet via the shard metadata. */
private static void addProcessorSteps(
    List<KameletEndpoint> stepDefinitions,
    ArrayNode steps,
    CamelShardMetadata shardMetadata) {

    for (JsonNode step : steps) {
        var element = step.fields().next();
        String templateId = shardMetadata.getKamelets().getProcessors().get(element.getKey());
        if (templateId == null) {
            throw new IllegalArgumentException("Unknown processor: " + element.getKey());
        }
        stepDefinitions.add(configureStep(templateId, (ObjectNode) element.getValue()));
    }
}

/** Appends the encoder kamelet matching the "produces" format, when one is required. */
private static void addEncoderStep(
    List<KameletEndpoint> stepDefinitions,
    String produces,
    CamelShardMetadata shardMetadata) {

    if (produces == null) {
        return;
    }
    switch (produces) {
        case "application/json":
            stepDefinitions.add(kamelet("cos-encoder-json-action", properties -> {
                if (shardMetadata.getProducesClass() != null) {
                    properties.put("contentClass", shardMetadata.getProducesClass());
                }
            }));
            break;
        case "avro/binary":
            stepDefinitions.add(kamelet("cos-encoder-avro-action", properties -> {
                if (shardMetadata.getProducesClass() != null) {
                    properties.put("contentClass", shardMetadata.getProducesClass());
                }
            }));
            break;
        case "text/plain":
            stepDefinitions.add(kamelet("cos-encoder-string-action"));
            break;
        case "application/octet-stream":
            stepDefinitions.add(kamelet("cos-encoder-bytearray-action"));
            break;
        default:
            throw new IllegalArgumentException("Unsupported value format " + produces);
    }
}

/**
 * Picks the Kafka value deserializer for a sink, which reads the "consumes" format from
 * Kafka. FIX: the original checked {@code produces} for the avro branch while the json
 * branch checked {@code consumes}; both now consistently use the consumed format.
 */
private static String deserializerFor(ManagedConnector connector, String consumes) {
    if (hasSchemaRegistry(connector)) {
        if ("application/json".equals(consumes)) {
            return "org.bf2.cos.connector.camel.serdes.json.JsonDeserializer";
        }
        if ("avro/binary".equals(consumes)) {
            return "org.bf2.cos.connector.camel.serdes.avro.AvroDeserializer";
        }
    }
    return "org.bf2.cos.connector.camel.serdes.bytes.ByteArrayDeserializer";
}

/** Picks the Kafka value serializer for a source, which writes the "produces" format to Kafka. */
private static String serializerFor(ManagedConnector connector, String produces) {
    if (hasSchemaRegistry(connector)) {
        if ("application/json".equals(produces)) {
            return "org.bf2.cos.connector.camel.serdes.json.JsonSerializer";
        }
        if ("avro/binary".equals(produces)) {
            return "org.bf2.cos.connector.camel.serdes.avro.AvroSerializer";
        }
    }
    return "org.bf2.cos.connector.camel.serdes.bytes.ByteArraySerializer";
}
Aggregations