Usage example of io.kestra.runner.kafka.services.KafkaStreamsBuilder in the kestra project (kestra-io): class ExecutorFlowTrigger, method topology().
/**
 * Builds the Kafka Streams topology that turns {@code ExecutorFlowTrigger}
 * records into new {@link Execution}s for flows started by flow triggers.
 *
 * @return the configured {@link StreamsBuilder}
 */
public StreamsBuilder topology() {
    StreamsBuilder builder = new KafkaStreamsBuilder();

    // Persistent store used by the transformer to accumulate
    // multiple-condition trigger windows between records.
    builder.addStateStore(
        Stores.keyValueStoreBuilder(
            Stores.persistentKeyValueStore(TRIGGER_MULTIPLE_STATE_STORE_NAME),
            Serdes.String(),
            JsonSerde.of(MultipleConditionWindow.class)
        )
    );

    // Source stream: tombstones (null values) are dropped up front.
    KStream<String, io.kestra.runner.kafka.streams.ExecutorFlowTrigger> triggerStream = builder
        .stream(
            kafkaAdminService.getTopicName(io.kestra.runner.kafka.streams.ExecutorFlowTrigger.class),
            Consumed.with(Serdes.String(), JsonSerde.of(io.kestra.runner.kafka.streams.ExecutorFlowTrigger.class))
                .withName("KStream.ExecutorFlowTrigger")
        )
        .filter((key, value) -> value != null, Named.as("ExecutorFlowTrigger.filterNotNull"));

    // Evaluate the trigger and fan out every resulting execution, keyed by
    // execution id, onto the execution topic.
    // NOTE(review): "flapMap" in the node name looks like a typo for "flatMap";
    // kept verbatim because renaming a processor changes the topology description.
    triggerStream
        .transformValues(
            () -> new FlowTriggerWithExecutionTransformer(TRIGGER_MULTIPLE_STATE_STORE_NAME, kafkaFlowExecutor, flowService),
            Named.as("ExecutorFlowTrigger.transformToExecutionList"),
            TRIGGER_MULTIPLE_STATE_STORE_NAME
        )
        .flatMap(
            (key, value) -> value.stream()
                .map(execution -> new KeyValue<>(execution.getId(), execution))
                .collect(Collectors.toList()),
            Named.as("ExecutorFlowTrigger.flapMapToExecution")
        )
        .to(
            kafkaAdminService.getTopicName(Execution.class),
            Produced.with(Serdes.String(), JsonSerde.of(Execution.class)).withName("ExecutorFlowTrigger.toExecution")
        );

    // Write a tombstone back to the source topic for each handled record
    // (presumably so compaction purges it — confirm with topic config).
    triggerStream
        .mapValues(
            (readOnlyKey, value) -> (io.kestra.runner.kafka.streams.ExecutorFlowTrigger) null,
            Named.as("ExecutorFlowTrigger.executorFlowTriggerToNull")
        )
        .to(
            kafkaAdminService.getTopicName(io.kestra.runner.kafka.streams.ExecutorFlowTrigger.class),
            Produced.with(Serdes.String(), JsonSerde.of(io.kestra.runner.kafka.streams.ExecutorFlowTrigger.class))
                .withName("ExecutorFlowTrigger.toExecutorFlowTrigger")
        );

    return builder;
}
Usage example of io.kestra.runner.kafka.services.KafkaStreamsBuilder in the kestra project (kestra-io): class ExecutorStore, method topology().
/**
 * Builds the topology that materializes flow and template definitions as
 * global state stores, pushing each loaded snapshot into the in-memory
 * executors.
 *
 * @return the configured {@link StreamsBuilder}
 */
public StreamsBuilder topology() {
    StreamsBuilder builder = new KafkaStreamsBuilder();

    // Global store of all flows; the processor hands both the flow list and
    // the backing store to the kafka flow executor.
    builder.addGlobalStore(
        Stores.keyValueStoreBuilder(
            Stores.persistentKeyValueStore(FLOW_STATE_STORE_NAME),
            Serdes.String(),
            JsonSerde.of(Flow.class)
        ),
        kafkaAdminService.getTopicName(Flow.class),
        Consumed.with(Serdes.String(), JsonSerde.of(Flow.class)).withName("GlobalStore.Flow"),
        () -> new GlobalInMemoryStateProcessor<>(
            FLOW_STATE_STORE_NAME,
            flows -> kafkaFlowExecutor.setFlows(flows),
            store -> kafkaFlowExecutor.setStore(store)
        )
    );

    // Global store of all templates; only the template list is propagated.
    builder.addGlobalStore(
        Stores.keyValueStoreBuilder(
            Stores.persistentKeyValueStore(TEMPLATE_STATE_STORE_NAME),
            Serdes.String(),
            JsonSerde.of(Template.class)
        ),
        kafkaAdminService.getTopicName(Template.class),
        Consumed.with(Serdes.String(), JsonSerde.of(Template.class)).withName("GlobalStore.Template"),
        () -> new GlobalInMemoryStateProcessor<>(
            TEMPLATE_STATE_STORE_NAME,
            templates -> kafkaTemplateExecutor.setTemplates(templates)
        )
    );

    return builder;
}
Usage example of io.kestra.runner.kafka.services.KafkaStreamsBuilder in the kestra project (kestra-io): class ExecutorTriggerCleaner, method topology().
/**
 * Builds the topology that resets a trigger once the execution it fired has
 * terminated (listeners included).
 *
 * @return the configured {@link StreamsBuilder}
 */
public StreamsBuilder topology() {
    StreamsBuilder builder = new KafkaStreamsBuilder();

    KStream<String, Executor> executorKStream = kafkaStreamSourceService.executorKStream(builder);
    KStream<String, Executor> executionWithFlowKStream = kafkaStreamSourceService.executorWithFlow(executorKStream, false);
    GlobalKTable<String, Trigger> triggerGlobalKTable = kafkaStreamSourceService.triggerGlobalKTable(builder);

    // Keep only executions that were started by a trigger and whose flow is
    // fully terminated...
    KStream<String, Executor> terminatedTriggered = executionWithFlowKStream
        .filter(
            (key, value) -> value.getExecution().getTrigger() != null,
            Named.as("cleanTrigger-hasTrigger-filter")
        )
        .filter(
            (key, value) -> conditionService.isTerminatedWithListeners(value.getFlow(), value.getExecution()),
            Named.as("cleanTrigger-terminated-filter")
        );

    // ...then look up the matching trigger, clear its execution reference and
    // write the cleaned trigger back, re-keyed by its queue key.
    terminatedTriggered
        .join(
            triggerGlobalKTable,
            (key, executionWithFlow) -> Trigger.uid(executionWithFlow.getExecution()),
            (execution, trigger) -> trigger.resetExecution(),
            Named.as("cleanTrigger-join")
        )
        .selectKey((key, value) -> queueService.key(value))
        .to(
            kafkaAdminService.getTopicName(Trigger.class),
            Produced.with(Serdes.String(), JsonSerde.of(Trigger.class))
        );

    return builder;
}
Usage example of io.kestra.runner.kafka.services.KafkaStreamsBuilder in the kestra project (kestra-io): class ExecutorWorkerRunning, method topology().
/**
 * Builds the topology that reacts to worker instance changes: worker tasks
 * held by an evicted worker are re-sent, their "running" records are
 * tombstoned, and the updated worker instance is propagated.
 *
 * @return the configured {@link StreamsBuilder}
 */
public StreamsBuilder topology() {
    StreamsBuilder builder = new KafkaStreamsBuilder();

    // Global store mirroring the executor's view of worker instances.
    builder.addGlobalStore(
        Stores.keyValueStoreBuilder(
            Stores.persistentKeyValueStore(WORKERINSTANCE_STATE_STORE_NAME),
            Serdes.String(),
            JsonSerde.of(WorkerInstance.class)
        ),
        kafkaAdminService.getTopicName(KafkaStreamSourceService.TOPIC_EXECUTOR_WORKERINSTANCE),
        Consumed.with(Serdes.String(), JsonSerde.of(WorkerInstance.class)).withName("GlobalStore.ExecutorWorkerInstance"),
        () -> new GlobalStateProcessor<>(WORKERINSTANCE_STATE_STORE_NAME)
    );

    // Materialized only to serve as a state store; the table itself is unused.
    builder.globalTable(
        kafkaAdminService.getTopicName(WorkerTaskRunning.class),
        Consumed.with(Serdes.String(), JsonSerde.of(WorkerTaskRunning.class)).withName("GlobalKTable.WorkerTaskRunning"),
        Materialized.<String, WorkerTaskRunning, KeyValueStore<Bytes, byte[]>>as(WORKER_RUNNING_STATE_STORE_NAME)
            .withKeySerde(Serdes.String())
            .withValueSerde(JsonSerde.of(WorkerTaskRunning.class))
    );

    KStream<String, WorkerInstance> workerInstanceKStream = builder.stream(
        kafkaAdminService.getTopicName(WorkerInstance.class),
        Consumed.with(Serdes.String(), JsonSerde.of(WorkerInstance.class)).withName("KStream.WorkerInstance")
    );

    // Mirror every incoming worker instance onto the executor topic.
    workerInstanceKStream.to(
        kafkaAdminService.getTopicName(TOPIC_EXECUTOR_WORKERINSTANCE),
        Produced.with(Serdes.String(), JsonSerde.of(WorkerInstance.class)).withName("DetectNewWorker.toExecutorWorkerInstance")
    );

    // NOTE(review): "flapMap..." node names look like typos for "flatMap...";
    // kept verbatim since renaming processors alters the topology description.
    KStream<String, WorkerInstanceTransformer.Result> detectedStream = workerInstanceKStream
        .transformValues(WorkerInstanceTransformer::new, Named.as("DetectNewWorker.workerInstanceTransformer"))
        .flatMapValues((readOnlyKey, value) -> value, Named.as("DetectNewWorker.flapMapList"));

    // Re-send the worker tasks that belonged to an evicted worker.
    KStream<String, WorkerTask> resultWorkerTask = detectedStream.flatMapValues(
        (readOnlyKey, value) -> value.getWorkerTasksToSend(),
        Named.as("DetectNewWorkerTask.flapMapWorkerTaskToSend")
    );

    // Tombstone the corresponding "running" record since the task is re-sent.
    resultWorkerTask
        .map(
            (key, value) -> KeyValue.pair(value.getTaskRun().getId(), (WorkerTaskRunning) null),
            Named.as("DetectNewWorkerTask.workerTaskRunningToNull")
        )
        .to(
            kafkaAdminService.getTopicName(WorkerTaskRunning.class),
            Produced.with(Serdes.String(), JsonSerde.of(WorkerTaskRunning.class)).withName("DetectNewWorker.toWorkerTaskRunning")
        );

    KafkaStreamSourceService
        .logIfEnabled(log, resultWorkerTask, (key, value) -> executorService.log(log, false, value), "DetectNewWorkerTask")
        .to(
            kafkaAdminService.getTopicName(WorkerTask.class),
            Produced.with(Serdes.String(), JsonSerde.of(WorkerTask.class)).withName("DetectNewWorkerTask.toWorkerTask")
        );

    // Re-send the WorkerInstance update itself.
    KStream<String, WorkerInstance> updatedStream = KafkaStreamSourceService
        .logIfEnabled(log, detectedStream, (key, value) -> log.debug("Instance updated: {}", value), "DetectNewWorkerInstance")
        .map((key, value) -> value.getWorkerInstanceUpdated(), Named.as("DetectNewWorkerInstance.mapInstance"));

    // Clean up the executor worker-instance state store (non-null updates only).
    updatedStream
        .filter((key, value) -> value != null, Named.as("DetectNewWorkerInstance.filterNotNull"))
        .to(
            kafkaAdminService.getTopicName(TOPIC_EXECUTOR_WORKERINSTANCE),
            Produced.with(Serdes.String(), JsonSerde.of(WorkerInstance.class)).withName("DetectNewWorkerInstance.toExecutorWorkerInstance")
        );

    // Unfiltered: null updates are forwarded as tombstones to the source topic.
    updatedStream.to(
        kafkaAdminService.getTopicName(WorkerInstance.class),
        Produced.with(Serdes.String(), JsonSerde.of(WorkerInstance.class)).withName("DetectNewWorkerInstance.toWorkerInstance")
    );

    return builder;
}
Usage example of io.kestra.runner.kafka.services.KafkaStreamsBuilder in the kestra project (kestra-io): class ExecutorFlowLast, method topology().
/**
 * Builds the topology maintaining the "last flow revision" table: each flow
 * revision is compared with the previously-known one, triggers removed by the
 * new revision are tombstoned, and the newest revision is written to the
 * flow-last topic.
 *
 * @return the configured {@link StreamsBuilder}
 */
public StreamsBuilder topology() {
    StreamsBuilder builder = new KafkaStreamsBuilder();

    // Global table of the last-known revision per flow uid.
    GlobalKTable<String, Flow> flowGlobalKTable = builder.globalTable(
        kafkaAdminService.getTopicName(KafkaStreamSourceService.TOPIC_FLOWLAST),
        Consumed.with(Serdes.String(), JsonSerde.of(Flow.class)).withName("GlobalKTable.FlowLast"),
        Materialized.<String, Flow, KeyValueStore<Bytes, byte[]>>as("last")
            .withKeySerde(Serdes.String())
            .withValueSerde(JsonSerde.of(Flow.class))
    );

    // Incoming flow revisions.
    KStream<String, Flow> stream = builder.stream(
        kafkaAdminService.getTopicName(Flow.class),
        Consumed.with(Serdes.String(), JsonSerde.of(Flow.class, false)).withName("Stream.Flow")
    );

    // Optional trace logging of every incoming flow.
    stream = KafkaStreamSourceService.logIfEnabled(
        log,
        stream,
        (key, value) -> log.trace("Flow in '{}.{}' with revision {}", value.getNamespace(), value.getId(), value.getRevision()),
        "Main"
    );

    // Pair each flow with the previous revision; stale revisions are dropped.
    KStream<String, ExecutorFlowLast.FlowWithPrevious> streamWithPrevious = stream
        .filter((key, value) -> value != null, Named.as("Main.notNull"))
        .selectKey((key, value) -> value.uidWithoutRevision(), Named.as("Main.selectKey"))
        .leftJoin(
            flowGlobalKTable,
            (key, value) -> key,
            (readOnlyKey, current, previous) -> {
                if (previous == null) {
                    // First time this flow is seen: no previous revision.
                    return new ExecutorFlowLast.FlowWithPrevious(current, null);
                }
                if (current.getRevision() < previous.getRevision()) {
                    // Older than what we already know: discard.
                    return null;
                }
                return new ExecutorFlowLast.FlowWithPrevious(current, previous);
            },
            Named.as("Main.join")
        )
        .filter((key, value) -> value != null, Named.as("Main.joinNotNull"));

    // Tombstone triggers removed by this revision (or all of them on delete).
    streamWithPrevious
        .flatMap(
            (key, value) -> {
                List<AbstractTrigger> deletedTriggers;
                if (value.getFlow().isDeleted()) {
                    // Deleted flow: every trigger it had must be removed.
                    deletedTriggers = ListUtils.emptyOnNull(value.getFlow().getTriggers());
                } else if (value.getPrevious() != null) {
                    // Updated flow: only triggers dropped since the previous revision.
                    deletedTriggers = FlowService.findRemovedTrigger(value.getFlow(), value.getPrevious());
                } else {
                    deletedTriggers = new ArrayList<>();
                }

                return deletedTriggers.stream()
                    .map(t -> new KeyValue<>(queueService.key(Trigger.of(value.getFlow(), t)), (Trigger) null))
                    .collect(Collectors.toList());
            },
            Named.as("DeleteTrigger.flatMap")
        )
        .to(
            kafkaAdminService.getTopicName(Trigger.class),
            Produced.with(Serdes.String(), JsonSerde.of(Trigger.class)).withName("To.Trigger")
        );

    // Forward to the flow-last topic; deleted flows are kept so the last
    // version stays available.
    streamWithPrevious
        .map(
            (key, value) -> new KeyValue<>(value.getFlow().uidWithoutRevision(), value.getFlow()),
            Named.as("Main.Map")
        )
        .to(
            kafkaAdminService.getTopicName(KafkaStreamSourceService.TOPIC_FLOWLAST),
            Produced.with(Serdes.String(), JsonSerde.of(Flow.class)).withName("To.FlowLast")
        );

    return builder;
}
Aggregations