Use of org.bf2.cos.fleetshard.operator.connector.ConnectorConfiguration in project cos-fleetshard by bf2fc6cc711aee1a0c2a.
Class DebeziumOperandControllerTest, method reify().
@Test
void reify() {
    KubernetesClient kubernetesClient = Mockito.mock(KubernetesClient.class);
    DebeziumOperandController controller = new DebeziumOperandController(kubernetesClient, CONFIGURATION);

    final String kcsB64 = Base64.getEncoder().encodeToString("kcs".getBytes(StandardCharsets.UTF_8));
    final String pwdB64 = Base64.getEncoder().encodeToString("orderpw".getBytes(StandardCharsets.UTF_8));

    var spec = Serialization.jsonMapper().createObjectNode()
        .put("database.hostname", "orderdb")
        .put("database.port", "5432")
        .put("database.user", "orderuser")
        .put("database.dbname", "orderdb")
        .put("database.server.name", "dbserver1")
        .put("schema.include.list", "purchaseorder")
        .put("table.include.list", "purchaseorder.outboxevent")
        .put("tombstones.on.delete", "false")
        .put("key.converter", "org.apache.kafka.connect.storage.StringConverter")
        .put("value.converter", "org.apache.kafka.connect.storage.StringConverter")
        .put("transforms", "saga")
        .put("transforms.saga.type", "io.debezium.transforms.outbox.EventRouter")
        .put("transforms.saga.route.topic.replacement", "${routedByValue}.request")
        .put("poll.interval.ms", "100")
        .put("consumer.interceptor.classes", "io.opentracing.contrib.kafka.TracingConsumerInterceptor")
        .put("producer.interceptor.classes", "io.opentracing.contrib.kafka.TracingProducerInterceptor");
    spec.with("data_shape").put("key", "JSON").put("value", "JSON");
    spec.with("database.password").put("kind", "base64").put("value", pwdB64);

    var resources = controller.doReify(
        new ManagedConnectorBuilder()
            .withMetadata(new ObjectMetaBuilder()
                .withName(DEFAULT_MANAGED_CONNECTOR_ID)
                .build())
            .withSpec(new ManagedConnectorSpecBuilder()
                .withConnectorId(DEFAULT_MANAGED_CONNECTOR_ID)
                .withDeploymentId(DEFAULT_DEPLOYMENT_ID)
                .withDeployment(new DeploymentSpecBuilder()
                    .withConnectorTypeId(DEFAULT_CONNECTOR_TYPE_ID)
                    .withSecret("secret")
                    .withKafka(new KafkaSpecBuilder().withUrl(DEFAULT_KAFKA_SERVER).build())
                    .withConnectorResourceVersion(DEFAULT_CONNECTOR_REVISION)
                    .withDeploymentResourceVersion(DEFAULT_DEPLOYMENT_REVISION)
                    .withDesiredState(DESIRED_STATE_READY)
                    .build())
                .build())
            .build(),
        new org.bf2.cos.fleetshard.operator.debezium.DebeziumShardMetadataBuilder()
            .withContainerImage(DEFAULT_CONNECTOR_IMAGE)
            .withConnectorClass(PG_CLASS)
            .build(),
        new ConnectorConfiguration<>(spec, ObjectNode.class),
        new ServiceAccountSpecBuilder()
            .withClientId(DEFAULT_KAFKA_CLIENT_ID)
            .withClientSecret(kcsB64)
            .build());

    assertThat(resources)
        .anyMatch(DebeziumOperandSupport::isKafkaConnect)
        .anyMatch(DebeziumOperandSupport::isKafkaConnector)
        .anyMatch(DebeziumOperandSupport::isSecret);
    assertThat(resources)
        .filteredOn(DebeziumOperandSupport::isKafkaConnect)
        .hasSize(1)
        .first()
        .isInstanceOfSatisfying(KafkaConnect.class,
            kc -> assertThat(kc.getSpec().getImage()).isEqualTo(DEFAULT_CONNECTOR_IMAGE));
    assertThat(resources)
        .filteredOn(DebeziumOperandSupport::isKafkaConnector)
        .hasSize(1)
        .first()
        .isInstanceOfSatisfying(KafkaConnector.class,
            kc -> assertThat(kc.getSpec().getConfig()).containsEntry(
                "database.password",
                "${file:/opt/kafka/external-configuration/" + DebeziumConstants.EXTERNAL_CONFIG_DIRECTORY
                    + "/" + EXTERNAL_CONFIG_FILE + ":database.password}"));
}
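The final assertion checks that the password is externalized as a ${file:...} placeholder instead of being inlined into the connector config. For context, a minimal sketch of how such a placeholder is resolved at runtime by Kafka's standard FileConfigProvider; the path below is illustrative only, since the real one is assembled from DebeziumConstants.EXTERNAL_CONFIG_DIRECTORY and EXTERNAL_CONFIG_FILE, whose values are not shown on this page.

import java.io.IOException;
import java.util.Map;
import java.util.Set;
import org.apache.kafka.common.config.ConfigData;
import org.apache.kafka.common.config.provider.FileConfigProvider;

static String resolveDatabasePassword() throws IOException {
    try (FileConfigProvider provider = new FileConfigProvider()) {
        provider.configure(Map.of());
        // Read one key from a mounted properties file; this is the mechanism
        // targeted by the "${file:<path>:database.password}" placeholder.
        ConfigData data = provider.get(
            "/opt/kafka/external-configuration/connector-configuration/application.properties", // illustrative path
            Set.of("database.password"));
        return data.data().get("database.password");
    }
}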
Use of org.bf2.cos.fleetshard.operator.connector.ConnectorConfiguration in project cos-fleetshard by bf2fc6cc711aee1a0c2a.
Class CamelOperandSupport, method createSteps (ProcessorKamelet variant).
public static List<ProcessorKamelet> createSteps(
        ManagedConnector connector,
        ConnectorConfiguration<ObjectNode> connectorConfiguration,
        CamelShardMetadata shardMetadata,
        Map<String, String> props) {

    String consumes = Optional.ofNullable(connectorConfiguration.getDataShapeSpec())
        .map(spec -> spec.at("/consumes/format"))
        .filter(node -> !node.isMissingNode())
        .map(JsonNode::asText)
        .orElse(shardMetadata.getConsumes());
    String produces = Optional.ofNullable(connectorConfiguration.getDataShapeSpec())
        .map(spec -> spec.at("/produces/format"))
        .filter(node -> !node.isMissingNode())
        .map(JsonNode::asText)
        .orElse(shardMetadata.getProduces());

    final ArrayNode steps = connectorConfiguration.getProcessorsSpec();
    final List<ProcessorKamelet> stepDefinitions = new ArrayList<>(steps.size() + 2);

    int i = 0;

    if (consumes != null) {
        switch (consumes) {
            case "application/json": {
                String stepName = stepName(i, "cos-decoder-json-action");
                stepDefinitions.add(new ProcessorKamelet("cos-decoder-json-action", stepName));
                if (shardMetadata.getConsumesClass() != null) {
                    props.put(
                        kameletProperty("cos-decoder-json-action", stepName, "contentClass"),
                        shardMetadata.getConsumesClass());
                }
                i++;
                break;
            }
            case "avro/binary": {
                String stepName = stepName(i, "cos-decoder-avro-action");
                stepDefinitions.add(new ProcessorKamelet("cos-decoder-avro-action", stepName));
                if (shardMetadata.getConsumesClass() != null) {
                    props.put(
                        kameletProperty("cos-decoder-avro-action", stepName, "contentClass"),
                        shardMetadata.getConsumesClass());
                }
                i++;
                break;
            }
            case "application/x-java-object": {
                String stepName = stepName(i, "cos-decoder-pojo-action");
                stepDefinitions.add(new ProcessorKamelet("cos-decoder-pojo-action", stepName));
                if (produces != null) {
                    props.put(
                        kameletProperty("cos-decoder-pojo-action", stepName, "mimeType"),
                        produces);
                }
                i++;
                break;
            }
            case "text/plain":
            case "application/octet-stream":
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + consumes);
        }
    }

    for (JsonNode step : steps) {
        var element = step.fields().next();
        String templateId = shardMetadata.getKamelets().getProcessors().get(element.getKey());
        if (templateId == null) {
            throw new IllegalArgumentException("Unknown processor: " + element.getKey());
        }
        stepDefinitions.add(new ProcessorKamelet(templateId, stepName(i, templateId)));
        configureStep(props, (ObjectNode) element.getValue(), i, templateId);
        i++;
    }

    if (produces != null) {
        switch (produces) {
            case "application/json": {
                String stepName = stepName(i, "cos-encoder-json-action");
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-json-action", stepName));
                if (shardMetadata.getProducesClass() != null) {
                    props.put(
                        kameletProperty("cos-encoder-json-action", stepName, "contentClass"),
                        shardMetadata.getProducesClass());
                }
                break;
            }
            case "avro/binary": {
                String stepName = stepName(i, "cos-encoder-avro-action");
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-avro-action", stepName));
                if (shardMetadata.getProducesClass() != null) {
                    props.put(
                        kameletProperty("cos-encoder-avro-action", stepName, "contentClass"),
                        shardMetadata.getProducesClass());
                }
                break;
            }
            case "text/plain":
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-string-action", stepName(i, "cos-encoder-string-action")));
                break;
            case "application/octet-stream":
                stepDefinitions.add(new ProcessorKamelet("cos-encoder-bytearray-action", stepName(i, "cos-encoder-bytearray-action")));
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + produces);
        }
    }

    // If it is a sink, then it consumes from kafka
    if (isSink(shardMetadata)) {
        props.put(
            String.format("camel.kamelet.%s.valueDeserializer", shardMetadata.getKamelets().getKafka().getName()),
            "org.bf2.cos.connector.camel.serdes.bytes.ByteArrayDeserializer");
        if ("application/json".equals(consumes) && hasSchemaRegistry(connector)) {
            props.put(
                String.format("camel.kamelet.%s.valueDeserializer", shardMetadata.getKamelets().getKafka().getName()),
                "org.bf2.cos.connector.camel.serdes.json.JsonDeserializer");
        }
if ("avro/binary".equals(produces) && hasSchemaRegistry(connector)) {
props.put(String.format("camel.kamelet.%s.valueDeserializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.avro.AvroDeserializer");
}
}
// If it is a source, then it produces to kafka
if (isSource(shardMetadata)) {
props.put(String.format("camel.kamelet.%s.valueSerializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.bytes.ByteArraySerializer");
if ("application/json".equals(produces) && hasSchemaRegistry(connector)) {
props.put(String.format("camel.kamelet.%s.valueSerializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.json.JsonSerializer");
}
if ("avro/binary".equals(produces) && hasSchemaRegistry(connector)) {
props.put(String.format("camel.kamelet.%s.valueSerializer", shardMetadata.getKamelets().getKafka().getName()), "org.bf2.cos.connector.camel.serdes.avro.AvroSerializer");
}
}
return stepDefinitions;
}
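The consumes and produces formats are read from the connector's optional data-shape document before falling back to the shard metadata. A minimal sketch, using the same Jackson helpers seen in the test above, of a data-shape node that the JSON pointers "/consumes/format" and "/produces/format" would match; the field layout is inferred from those pointers, not taken from the project's schema.

// Hedged sketch: a data-shape document selecting JSON decoding on input
// and Avro encoding on output. Layout inferred from the JSON pointers above.
ObjectNode dataShape = Serialization.jsonMapper().createObjectNode();
dataShape.with("consumes").put("format", "application/json");
dataShape.with("produces").put("format", "avro/binary");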
Use of org.bf2.cos.fleetshard.operator.connector.ConnectorConfiguration in project cos-fleetshard by bf2fc6cc711aee1a0c2a.
Class AbstractOperandController, method reify().
@Override
public List<HasMetadata> reify(ManagedConnector connector, Secret secret) {
    LOGGER.debug("Reifying connector: {} and secret.metadata: {}", connector, secret.getMetadata());

    final ServiceAccount serviceAccountSettings = extract(secret, SECRET_ENTRY_SERVICE_ACCOUNT, ServiceAccount.class);
    LOGGER.debug("Extracted serviceAccount {}",
        serviceAccountSettings == null ? "is null" : "with clientId: " + serviceAccountSettings.getClientId());

    ServiceAccountSpec sas = serviceAccountSettings == null
        ? new ServiceAccountSpecBuilder().build()
        : new ServiceAccountSpecBuilder()
            .withClientId(serviceAccountSettings.getClientId())
            .withClientSecret(serviceAccountSettings.getClientSecret())
            .build();

    ConnectorConfiguration<S, D> connectorConfig;
    try {
        connectorConfig = new ConnectorConfiguration<>(
            extract(secret, SECRET_ENTRY_CONNECTOR, ObjectNode.class), connectorSpecType, dataShapeType);
    } catch (IncompleteConnectorSpecException e) {
        throw new RuntimeException("Incomplete connectorSpec for connector \""
            + connector.getSpec().getConnectorId() + "@" + connector.getSpec().getDeploymentId()
            + "#" + connector.getSpec().getDeployment().getDeploymentResourceVersion()
            + "\": " + e.getLocalizedMessage(), e);
    }

    return doReify(connector, extract(secret, SECRET_ENTRY_META, metadataType), connectorConfig, sas);
}
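For orientation, a hedged sketch of a caller and of the Secret layout this method expects: three entries keyed by the SECRET_ENTRY_* constants used above, each holding a base64-encoded JSON document. The secret name, the controller and connector variables, the b64 helper, and the payload field names are placeholders for illustration, not the project's actual schema.

// b64(...) is a local helper for this sketch: base64-encode the JSON text.
static String b64(String json) {
    return Base64.getEncoder().encodeToString(json.getBytes(StandardCharsets.UTF_8));
}

Secret secret = new SecretBuilder()
    .withNewMetadata().withName("my-connector-secret").endMetadata()
    .addToData(SECRET_ENTRY_SERVICE_ACCOUNT, b64("{ \"clientId\": \"...\", \"clientSecret\": \"...\" }"))
    .addToData(SECRET_ENTRY_CONNECTOR, b64("{ \"connector\": {}, \"data_shape\": {} }"))
    .addToData(SECRET_ENTRY_META, b64("{ \"connector_image\": \"...\" }"))
    .build();

List<HasMetadata> resources = controller.reify(connector, secret);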
Use of org.bf2.cos.fleetshard.operator.connector.ConnectorConfiguration in project cos-fleetshard by bf2fc6cc711aee1a0c2a.
Class CamelOperandController, method doReify().
@SuppressFBWarnings("HARD_CODE_PASSWORD")
@Override
protected List<HasMetadata> doReify(
        ManagedConnector connector,
        CamelShardMetadata shardMetadata,
        ConnectorConfiguration<ObjectNode, ObjectNode> connectorConfiguration,
        ServiceAccountSpec serviceAccountSpec) {

    final Map<String, String> properties = createSecretsData(connector, connectorConfiguration, serviceAccountSpec, configuration);
    final ObjectNode errorHandler = createErrorHandler(shardMetadata, connector, connectorConfiguration.getErrorHandlerSpec());

    final List<KameletEndpoint> stepDefinitions;
    final KameletEndpoint source;
    final KameletEndpoint sink;

    switch (shardMetadata.getConnectorType()) {
        case CONNECTOR_TYPE_SOURCE:
            source = KameletEndpoint.kamelet(shardMetadata.getKamelets().getAdapter().getName());
            source.getProperties().put("id", connector.getSpec().getDeploymentId() + "-source");
            configureKameletProperties(source.getProperties(), connectorConfiguration.getConnectorSpec(), shardMetadata.getKamelets().getAdapter());

            sink = KameletEndpoint.kamelet(shardMetadata.getKamelets().getKafka().getName());
            sink.getProperties().put("id", connector.getSpec().getDeploymentId() + "-sink");
            sink.getProperties().put("bootstrapServers", connector.getSpec().getDeployment().getKafka().getUrl());
            sink.getProperties().put("user", SA_CLIENT_ID_PLACEHOLDER);
            sink.getProperties().put("password", SA_CLIENT_SECRET_PLACEHOLDER);
            configureKameletProperties(sink.getProperties(), connectorConfiguration.getConnectorSpec(), shardMetadata.getKamelets().getKafka());
            if (hasSchemaRegistry(connector)) {
                sink.getProperties().put("registryUrl", connector.getSpec().getDeployment().getSchemaRegistry().getUrl());
            }

            stepDefinitions = createSteps(connector, connectorConfiguration, shardMetadata, sink);
            break;
        case CONNECTOR_TYPE_SINK:
            source = KameletEndpoint.kamelet(shardMetadata.getKamelets().getKafka().getName());
            source.getProperties().put("id", connector.getSpec().getDeploymentId() + "-source");
            source.getProperties().put("consumerGroup", connector.getSpec().getDeploymentId());
            source.getProperties().put("bootstrapServers", connector.getSpec().getDeployment().getKafka().getUrl());
            source.getProperties().put("user", SA_CLIENT_ID_PLACEHOLDER);
            source.getProperties().put("password", SA_CLIENT_SECRET_PLACEHOLDER);
            configureKameletProperties(source.getProperties(), connectorConfiguration.getConnectorSpec(), shardMetadata.getKamelets().getKafka());
            if (hasSchemaRegistry(connector)) {
                source.getProperties().put("registryUrl", connector.getSpec().getDeployment().getSchemaRegistry().getUrl());
            }

            sink = KameletEndpoint.kamelet(shardMetadata.getKamelets().getAdapter().getName());
            sink.getProperties().put("id", connector.getSpec().getDeploymentId() + "-sink");
            configureKameletProperties(sink.getProperties(), connectorConfiguration.getConnectorSpec(), shardMetadata.getKamelets().getAdapter());

            stepDefinitions = createSteps(connector, connectorConfiguration, shardMetadata, source);
            break;
        default:
            throw new IllegalArgumentException("Unknown connector type: " + shardMetadata.getConnectorType());
    }

    final Secret secret = new Secret();
    secret.setMetadata(new ObjectMeta());
    secret.getMetadata().setName(connector.getMetadata().getName() + Resources.CONNECTOR_SECRET_SUFFIX);
    secret.setData(Map.of(APPLICATION_PROPERTIES, asBytesBase64(properties)));

    final ObjectNode integration = createIntegrationSpec(
        secret.getMetadata().getName(),
        configuration,
        Map.of(
            "CONNECTOR_SECRET_NAME", secret.getMetadata().getName(),
            "CONNECTOR_SECRET_CHECKSUM", Secrets.computeChecksum(secret),
            "CONNECTOR_ID", connector.getSpec().getConnectorId(),
            "CONNECTOR_DEPLOYMENT_ID", connector.getSpec().getDeploymentId()));

    final KameletBinding binding = new KameletBinding();
    binding.setMetadata(new ObjectMeta());
    binding.getMetadata().setName(connector.getMetadata().getName());
    binding.getMetadata().setAnnotations(new TreeMap<>());
    binding.setSpec(new KameletBindingSpec());
    binding.getSpec().setSource(source);
    binding.getSpec().setSink(sink);
    binding.getSpec().setErrorHandler(errorHandler);
    binding.getSpec().setSteps(stepDefinitions);
    binding.getSpec().setIntegration(integration);

    Map<String, String> annotations = binding.getMetadata().getAnnotations();
    if (shardMetadata.getAnnotations() != null) {
        annotations.putAll(shardMetadata.getAnnotations());
    }
    if (configuration.labelSelection().enabled()) {
        Operator assigned = connector.getStatus().getConnectorStatus().getAssignedOperator();
        if (assigned != null && assigned.getId() != null) {
            annotations.putIfAbsent(KAMEL_OPERATOR_ID, assigned.getId());
        }
    }
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_CONTAINER_IMAGE, shardMetadata.getConnectorImage());
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_KAMELETS_ENABLED, "false");
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_JVM_ENABLED, "false");
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_LOGGING_JSON, "false");
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_OWNER_TARGET_LABELS, LABELS_TO_TRANSFER);
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_PROMETHEUS_ENABLED, "true");
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_PROMETHEUS_POD_MONITOR, "false");

    // health check annotations
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_ENABLED, "true");
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_PROBE_ENABLED, "true");
    annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_PROBE_ENABLED, "true");

    CamelOperandConfiguration.Health health = configuration.health();
    if (health != null) {
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_SUCCESS_THRESHOLD, health.readinessSuccessThreshold());
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_FAILURE_THRESHOLD, health.readinessFailureThreshold());
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_PERIOD, health.readinessPeriodSeconds());
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_READINESS_TIMEOUT, health.readinessTimeoutSeconds());
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_SUCCESS_THRESHOLD, health.livenessSuccessThreshold());
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_FAILURE_THRESHOLD, health.livenessFailureThreshold());
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_PERIOD, health.livenessPeriodSeconds());
        annotations.putIfAbsent(TRAIT_CAMEL_APACHE_ORG_HEALTH_LIVENESS_TIMEOUT, health.livenessTimeoutSeconds());
    }

    if (configuration.connectors() != null) {
        if (configuration.connectors().traits() != null) {
            annotations.putAll(configuration.connectors().traits());
        }
        if (configuration.connectors().types() != null) {
            final String typeId = connector.getSpec().getDeployment().getConnectorTypeId();
            final CamelOperandConfiguration.ConnectorConfiguration typeConfig = configuration.connectors().types().get(typeId);
            if (typeConfig != null && typeConfig.traits() != null) {
                annotations.putAll(typeConfig.traits());
            }
        }
    }

    return List.of(secret, binding);
}
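Note how the binding annotations are layered: shard-metadata annotations are copied in first with putAll, the operator's defaults are then applied with putIfAbsent (so they never overwrite what the shard metadata set), and the configured traits are applied last with putAll (so they override both). A small standalone illustration of that precedence using plain java.util.Map semantics; the annotation key is illustrative only, inferred from the constant name.

Map<String, String> annotations = new TreeMap<>();
annotations.putAll(Map.of("trait.camel.apache.org/logging.json", "true"));  // from shard metadata
annotations.putIfAbsent("trait.camel.apache.org/logging.json", "false");    // operator default: skipped, key already present
annotations.putAll(Map.of("trait.camel.apache.org/logging.json", "false")); // configured trait: overrides
// annotations.get("trait.camel.apache.org/logging.json") -> "false"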
Use of org.bf2.cos.fleetshard.operator.connector.ConnectorConfiguration in project cos-fleetshard by bf2fc6cc711aee1a0c2a.
Class CamelOperandSupport, method createSteps (KameletEndpoint variant).
public static List<KameletEndpoint> createSteps(
        ManagedConnector connector,
        ConnectorConfiguration<ObjectNode, ObjectNode> connectorConfiguration,
        CamelShardMetadata shardMetadata,
        KameletEndpoint kafkaEndpoint) {

    String consumes = Optional.ofNullable(connectorConfiguration.getDataShapeSpec())
        .map(spec -> spec.at("/consumes/format"))
        .filter(node -> !node.isMissingNode())
        .map(JsonNode::asText)
        .orElse(shardMetadata.getConsumes());
    String produces = Optional.ofNullable(connectorConfiguration.getDataShapeSpec())
        .map(spec -> spec.at("/produces/format"))
        .filter(node -> !node.isMissingNode())
        .map(JsonNode::asText)
        .orElse(shardMetadata.getProduces());

    final ArrayNode steps = connectorConfiguration.getProcessorsSpec();
    final List<KameletEndpoint> stepDefinitions = new ArrayList<>(steps.size() + 2);

    if (consumes != null) {
        switch (consumes) {
            case "application/json":
                stepDefinitions.add(kamelet("cos-decoder-json-action", properties -> {
                    if (shardMetadata.getConsumesClass() != null) {
                        properties.put("contentClass", shardMetadata.getConsumesClass());
                    }
                }));
                break;
            case "avro/binary":
                stepDefinitions.add(kamelet("cos-decoder-avro-action", properties -> {
                    if (shardMetadata.getConsumesClass() != null) {
                        properties.put("contentClass", shardMetadata.getConsumesClass());
                    }
                }));
                break;
case "application/x-java-object":
stepDefinitions.add(kamelet("cos-decoder-pojo-action", properties -> {
if (shardMetadata.getConsumesClass() != null) {
properties.put("mimeType", produces);
}
}));
                break;
            case "text/plain":
            case "application/octet-stream":
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + consumes);
        }
    }

    for (JsonNode step : steps) {
        var element = step.fields().next();
        String templateId = shardMetadata.getKamelets().getProcessors().get(element.getKey());
        if (templateId == null) {
            throw new IllegalArgumentException("Unknown processor: " + element.getKey());
        }
        stepDefinitions.add(configureStep(templateId, (ObjectNode) element.getValue()));
    }

    if (produces != null) {
        switch (produces) {
            case "application/json":
                stepDefinitions.add(kamelet("cos-encoder-json-action", properties -> {
                    if (shardMetadata.getProducesClass() != null) {
                        properties.put("contentClass", shardMetadata.getProducesClass());
                    }
                }));
                break;
            case "avro/binary":
                stepDefinitions.add(kamelet("cos-encoder-avro-action", properties -> {
                    if (shardMetadata.getProducesClass() != null) {
                        properties.put("contentClass", shardMetadata.getProducesClass());
                    }
                }));
                break;
            case "text/plain":
                stepDefinitions.add(kamelet("cos-encoder-string-action"));
                break;
            case "application/octet-stream":
                stepDefinitions.add(kamelet("cos-encoder-bytearray-action"));
                break;
            default:
                throw new IllegalArgumentException("Unsupported value format " + produces);
        }
    }

    // If it is a sink, then it consumes from kafka
    if (isSink(shardMetadata)) {
        String valueDeserializer = "org.bf2.cos.connector.camel.serdes.bytes.ByteArrayDeserializer";
        if ("application/json".equals(consumes) && hasSchemaRegistry(connector)) {
            valueDeserializer = "org.bf2.cos.connector.camel.serdes.json.JsonDeserializer";
        } else if ("avro/binary".equals(consumes) && hasSchemaRegistry(connector)) {
valueDeserializer = "org.bf2.cos.connector.camel.serdes.avro.AvroDeserializer";
}
kafkaEndpoint.getProperties().put("valueDeserializer", valueDeserializer);
}
// If it is a source, then it produces to kafka
if (isSource(shardMetadata)) {
String valueSerializer = "org.bf2.cos.connector.camel.serdes.bytes.ByteArraySerializer";
if ("application/json".equals(produces) && hasSchemaRegistry(connector)) {
valueSerializer = "org.bf2.cos.connector.camel.serdes.json.JsonSerializer";
} else if ("avro/binary".equals(produces) && hasSchemaRegistry(connector)) {
valueSerializer = "org.bf2.cos.connector.camel.serdes.avro.AvroSerializer";
}
kafkaEndpoint.getProperties().put("valueSerializer", valueSerializer);
}
return stepDefinitions;
}
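The processor loop above expects each element of the processors spec to be a single-field object: the field name selects an entry in shardMetadata.getKamelets().getProcessors(), and the field value carries that step's configuration. A minimal sketch with a hypothetical processor name, built with the same Jackson helpers used elsewhere on this page:

// Hedged sketch: a processors spec with one step. "extract_field" and its
// "field" option are hypothetical; real names come from the shard metadata.
// Produces: [{"extract_field":{"field":"updated_at"}}]
ArrayNode processors = Serialization.jsonMapper().createArrayNode();
processors.addObject().putObject("extract_field").put("field", "updated_at");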