Use of org.bf2.cos.fleetshard.operator.camel.model.ProcessorKamelet in the project cos-fleetshard (by bf2fc6cc711aee1a0c2a).
From the class CamelOperandSupport, method createSteps:
/**
 * Builds the ordered chain of intermediate kamelets for a connector: an optional
 * decoder step (selected by the "consumes" data shape), the user-defined processor
 * steps, and an optional encoder step (selected by the "produces" data shape).
 * As a side effect, kamelet-specific configuration entries — including the Kafka
 * value (de)serializer to use — are added to the supplied {@code props} map.
 *
 * @param connector              the managed connector being reified
 * @param connectorConfiguration deployment configuration (data shapes, processors)
 * @param shardMetadata          shard-level defaults (formats, kamelet catalog)
 * @param props                  mutable map receiving kamelet properties (side effect)
 * @return the processor kamelets in execution order
 * @throws IllegalArgumentException if a data format is unsupported or a processor is unknown
 */
public static List<ProcessorKamelet> createSteps(ManagedConnector connector, ConnectorConfiguration<ObjectNode> connectorConfiguration, CamelShardMetadata shardMetadata, Map<String, String> props) {
    // A data shape declared on the deployment wins over the shard metadata default.
    String consumes = dataShapeFormat(connectorConfiguration, "/consumes/format", shardMetadata.getConsumes());
    String produces = dataShapeFormat(connectorConfiguration, "/produces/format", shardMetadata.getProduces());

    final ArrayNode steps = connectorConfiguration.getProcessorsSpec();
    // decoder + processors + encoder at most
    final List<ProcessorKamelet> stepDefinitions = new ArrayList<>(steps.size() + 2);

    int i = 0;

    // 1. optional decoder step, derived from the "consumes" format
    if (consumes != null) {
        switch (consumes) {
            case "application/json": {
                String stepName = stepName(i, "cos-decoder-json-action");
                stepDefinitions.add(new ProcessorKamelet("cos-decoder-json-action", stepName));
                if (shardMetadata.getConsumesClass() != null) {
                    props.put(kameletProperty("cos-decoder-json-action", stepName, "contentClass"), shardMetadata.getConsumesClass());
                }
                i++;
            }
                break;
            case "avro/binary": {
                String stepName = stepName(i, "cos-decoder-avro-action");
                stepDefinitions.add(new ProcessorKamelet("cos-decoder-avro-action", stepName));
                if (shardMetadata.getConsumesClass() != null) {
                    props.put(kameletProperty("cos-decoder-avro-action", stepName, "contentClass"), shardMetadata.getConsumesClass());
                }
                i++;
            }
                break;
            case "application/x-java-object": {
                String stepName = stepName(i, "cos-decoder-pojo-action");
                stepDefinitions.add(new ProcessorKamelet("cos-decoder-pojo-action", stepName));
                // the POJO decoder is told the target mime type to convert to
                if (produces != null) {
                    props.put(kameletProperty("cos-decoder-pojo-action", stepName, "mimeType"), produces);
                }
                i++;
            }
                break;
            case "text/plain":
            case "application/octet-stream":
                // passed through as-is, no decoding step required
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + consumes);
        }
    }

    // 2. user-defined processors, resolved against the shard's processor catalog
    for (JsonNode step : steps) {
        var element = step.fields().next();

        String templateId = shardMetadata.getKamelets().getProcessors().get(element.getKey());
        if (templateId == null) {
            throw new IllegalArgumentException("Unknown processor: " + element.getKey());
        }

        stepDefinitions.add(new ProcessorKamelet(templateId, stepName(i, templateId)));
        // reuse the already-resolved templateId instead of performing the catalog lookup again
        configureStep(props, (ObjectNode) element.getValue(), i, templateId);

        i++;
    }

    // 3. optional encoder step, derived from the "produces" format
    if (produces != null) {
        switch (produces) {
            case "application/json": {
                String stepName = stepName(i, "cos-encoder-json-action");
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-json-action", stepName));
                if (shardMetadata.getProducesClass() != null) {
                    props.put(kameletProperty("cos-encoder-json-action", stepName, "contentClass"), shardMetadata.getProducesClass());
                }
            }
                break;
            case "avro/binary": {
                String stepName = stepName(i, "cos-encoder-avro-action");
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-avro-action", stepName));
                if (shardMetadata.getProducesClass() != null) {
                    props.put(kameletProperty("cos-encoder-avro-action", stepName, "contentClass"), shardMetadata.getProducesClass());
                }
            }
                break;
            case "text/plain": {
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-string-action", stepName(i, "cos-encoder-string-action")));
            }
                break;
            case "application/octet-stream": {
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-bytearray-action", stepName(i, "cos-encoder-bytearray-action")));
            }
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + produces);
        }
    }

    // If it is a sink, then it consumes from kafka: the value deserializer follows
    // the "consumes" format (byte-array fallback, schema-registry aware otherwise).
    if (isSink(shardMetadata)) {
        String kafkaDeserializerKey = String.format("camel.kamelet.%s.valueDeserializer", shardMetadata.getKamelets().getKafka().getName());
        props.put(kafkaDeserializerKey, "org.bf2.cos.connector.camel.serdes.bytes.ByteArrayDeserializer");
        if ("application/json".equals(consumes) && hasSchemaRegistry(connector)) {
            props.put(kafkaDeserializerKey, "org.bf2.cos.connector.camel.serdes.json.JsonDeserializer");
        }
        // BUGFIX: this previously tested "produces"; the format read from Kafka by a sink
        // is "consumes" (mirroring the JSON case above).
        if ("avro/binary".equals(consumes) && hasSchemaRegistry(connector)) {
            props.put(kafkaDeserializerKey, "org.bf2.cos.connector.camel.serdes.avro.AvroDeserializer");
        }
    }

    // If it is a source, then it produces to kafka: the value serializer follows
    // the "produces" format (byte-array fallback, schema-registry aware otherwise).
    if (isSource(shardMetadata)) {
        String kafkaSerializerKey = String.format("camel.kamelet.%s.valueSerializer", shardMetadata.getKamelets().getKafka().getName());
        props.put(kafkaSerializerKey, "org.bf2.cos.connector.camel.serdes.bytes.ByteArraySerializer");
        if ("application/json".equals(produces) && hasSchemaRegistry(connector)) {
            props.put(kafkaSerializerKey, "org.bf2.cos.connector.camel.serdes.json.JsonSerializer");
        }
        if ("avro/binary".equals(produces) && hasSchemaRegistry(connector)) {
            props.put(kafkaSerializerKey, "org.bf2.cos.connector.camel.serdes.avro.AvroSerializer");
        }
    }

    return stepDefinitions;
}

/**
 * Reads a data-shape format from the connector configuration at the given JSON
 * pointer, falling back to the supplied shard-metadata default when the
 * deployment does not declare one.
 */
private static String dataShapeFormat(ConnectorConfiguration<ObjectNode> connectorConfiguration, String pointer, String fallback) {
    return Optional.ofNullable(connectorConfiguration.getDataShapeSpec())
        .map(spec -> spec.at(pointer))
        .filter(node -> !node.isMissingNode())
        .map(JsonNode::asText)
        .orElse(fallback);
}
Use of org.bf2.cos.fleetshard.operator.camel.model.ProcessorKamelet in the project cos-fleetshard (by bf2fc6cc711aee1a0c2a).
From the class CamelOperandController, method doReify:
/**
 * Reifies a managed connector into its deployable resources: a Secret holding the
 * application properties and a KameletBinding wiring source, processor steps and
 * sink, decorated with the camel-k trait annotations.
 */
@Override
protected List<HasMetadata> doReify(ManagedConnector connector, CamelShardMetadata shardMetadata, ConnectorConfiguration<ObjectNode> connectorConfiguration, ServiceAccountSpec serviceAccountSpec) {
    // Secret payload: connector/service-account configuration plus the kamelet
    // properties contributed by createSteps (TreeMap keeps the keys sorted).
    final Map<String, String> secretData = createSecretsData(connector, shardMetadata, connectorConfiguration, serviceAccountSpec, configuration, new TreeMap<>());
    final List<ProcessorKamelet> processorSteps = createSteps(connector, connectorConfiguration, shardMetadata, secretData);

    // Decide which kamelet plays the source role and which the sink role.
    final String sourceKamelet;
    final String sinkKamelet;

    switch (shardMetadata.getConnectorType()) {
        case CONNECTOR_TYPE_SOURCE:
            sourceKamelet = shardMetadata.getKamelets().getAdapter().getName();
            sinkKamelet = shardMetadata.getKamelets().getKafka().getName();
            break;
        case CONNECTOR_TYPE_SINK:
            sourceKamelet = shardMetadata.getKamelets().getKafka().getName();
            sinkKamelet = shardMetadata.getKamelets().getAdapter().getName();
            break;
        default:
            throw new IllegalArgumentException("Unknown connector type: " + shardMetadata.getConnectorType());
    }

    final Secret secret = new SecretBuilder()
        .withMetadata(new ObjectMetaBuilder()
            .withName(connector.getMetadata().getName() + Resources.CONNECTOR_SECRET_SUFFIX)
            .build())
        .addToData(APPLICATION_PROPERTIES, asBytesBase64(secretData))
        .build();

    final String secretName = secret.getMetadata().getName();
    final String deploymentId = connector.getSpec().getDeploymentId();

    // Environment handed to the integration so the operator can track the secret.
    final var integrationEnv = Map.of(
        "CONNECTOR_SECRET_NAME", secretName,
        "CONNECTOR_SECRET_CHECKSUM", Secrets.computeChecksum(secret),
        "CONNECTOR_ID", connector.getSpec().getConnectorId(),
        "CONNECTOR_DEPLOYMENT_ID", deploymentId);

    final var stepEndpoints = processorSteps.stream()
        .map(s -> KameletEndpoint.kamelet(s.getTemplateId(), Map.of("id", s.getId())))
        .collect(Collectors.toList());

    final KameletBinding binding = new KameletBindingBuilder()
        .withMetadata(new ObjectMetaBuilder()
            .withName(connector.getMetadata().getName())
            .build())
        .withSpec(new KameletBindingSpecBuilder()
            .withIntegration(createIntegrationSpec(secretName, configuration, integrationEnv))
            .withSource(KameletEndpoint.kamelet(sourceKamelet, Map.of("id", deploymentId + "-source")))
            .withSink(KameletEndpoint.kamelet(sinkKamelet, Map.of("id", deploymentId + "-sink")))
            .withErrorHandler(createErrorHandler(connectorConfiguration.getErrorHandlerSpec()))
            .withSteps(stepEndpoints)
            .build())
        .build();

    // Shard-provided annotations first; defaults below only fill gaps (putIfAbsent).
    Map<String, String> bindingAnnotations = KubernetesResourceUtil.getOrCreateAnnotations(binding);
    if (shardMetadata.getAnnotations() != null) {
        bindingAnnotations.putAll(shardMetadata.getAnnotations());
    }

    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_CONTAINER_IMAGE, shardMetadata.getConnectorImage());
    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_KAMELETS_ENABLED, "false");
    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_JVM_ENABLED, "false");
    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_LOGGING_JSON, "false");
    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_OWNER_TARGET_LABELS, LABELS_TO_TRANSFER);

    // health check annotations
    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_ENABLED, "true");
    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_PROBE_ENABLED, "true");
    bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_PROBE_ENABLED, "true");

    Health healthConfig = configuration.health();
    if (healthConfig != null) {
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_SUCCESS_THRESHOLD, healthConfig.readinessSuccessThreshold());
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_FAILURE_THRESHOLD, healthConfig.readinessFailureThreshold());
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_PERIOD, healthConfig.readinessPeriodSeconds());
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_TIMEOUT, healthConfig.readinessTimeoutSeconds());
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_SUCCESS_THRESHOLD, healthConfig.livenessSuccessThreshold());
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_FAILURE_THRESHOLD, healthConfig.livenessFailureThreshold());
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_PERIOD, healthConfig.livenessPeriodSeconds());
        bindingAnnotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_TIMEOUT, healthConfig.livenessTimeoutSeconds());
    }

    return List.of(secret, binding);
}
Aggregations