Use of io.kestra.core.models.triggers.AbstractTrigger in project kestra by kestra-io.
From the class PluginScanner, the method scanClassLoader:
@SuppressWarnings({"unchecked", "rawtypes"})
private RegisteredPlugin scanClassLoader(final ClassLoader classLoader, ExternalPlugin externalPlugin, Manifest manifest) {
    List<Class<? extends Task>> tasks = new ArrayList<>();
    List<Class<? extends AbstractTrigger>> triggers = new ArrayList<>();
    List<Class<? extends Condition>> conditions = new ArrayList<>();
    List<Class<? extends StorageInterface>> storages = new ArrayList<>();
    List<Class<?>> controllers = new ArrayList<>();

    // Discover every Micronaut bean introspection visible from this class loader.
    final SoftServiceLoader<BeanIntrospectionReference> definitions = SoftServiceLoader.load(BeanIntrospectionReference.class, classLoader);

    if (manifest == null) {
        manifest = getManifest(classLoader);
    }

    for (ServiceDefinition<BeanIntrospectionReference> definition : definitions) {
        if (definition.isPresent()) {
            final BeanIntrospectionReference ref = definition.load();
            Class beanType = ref.getBeanType();

            // Abstract classes cannot be instantiated as plugins; skip them.
            if (Modifier.isAbstract(beanType.getModifiers())) {
                continue;
            }

            // Register the bean under every plugin category it belongs to.
            if (Task.class.isAssignableFrom(beanType)) {
                tasks.add(beanType);
            }
            if (AbstractTrigger.class.isAssignableFrom(beanType)) {
                triggers.add(beanType);
            }
            if (Condition.class.isAssignableFrom(beanType)) {
                conditions.add(beanType);
            }
            if (StorageInterface.class.isAssignableFrom(beanType)) {
                storages.add(beanType);
            }
            if (beanType.isAnnotationPresent(Controller.class)) {
                controllers.add(beanType);
            }
        }
    }

    return RegisteredPlugin.builder()
        .externalPlugin(externalPlugin)
        .manifest(manifest)
        .classLoader(classLoader)
        .tasks(tasks)
        .triggers(triggers)
        .conditions(conditions)
        .controllers(controllers)
        .storages(storages)
        .build();
}
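For context, a minimal sketch of how a scan like this could be driven for an external plugin JAR, assuming a hypothetical helper placed next to scanClassLoader; the scanPluginJar name and the null ExternalPlugin/Manifest arguments are illustrative, not kestra's documented entry point:

// Hypothetical helper, for illustration only: isolate a plugin JAR behind its own
// class loader, then reuse scanClassLoader() above. The class loader is deliberately
// not closed, because the returned RegisteredPlugin keeps a reference to it.
private RegisteredPlugin scanPluginJar(java.nio.file.Path pluginJar) throws java.io.IOException {
    java.net.URLClassLoader pluginClassLoader = new java.net.URLClassLoader(
        new java.net.URL[]{pluginJar.toUri().toURL()},
        PluginScanner.class.getClassLoader() // parent-first delegation for core classes
    );
    // null ExternalPlugin and null Manifest: the manifest is then resolved from the class loader.
    return scanClassLoader(pluginClassLoader, null, null);
}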
Use of io.kestra.core.models.triggers.AbstractTrigger in project kestra by kestra-io.
From the class ExecutorFlowLast, the method topology:
public StreamsBuilder topology() {
    StreamsBuilder builder = new KafkaStreamsBuilder();

    // last global KTable
    GlobalKTable<String, Flow> flowGlobalKTable = builder.globalTable(
        kafkaAdminService.getTopicName(KafkaStreamSourceService.TOPIC_FLOWLAST),
        Consumed.with(Serdes.String(), JsonSerde.of(Flow.class)).withName("GlobalKTable.FlowLast"),
        Materialized.<String, Flow, KeyValueStore<Bytes, byte[]>>as("last")
            .withKeySerde(Serdes.String())
            .withValueSerde(JsonSerde.of(Flow.class))
    );

    // stream
    KStream<String, Flow> stream = builder.stream(
        kafkaAdminService.getTopicName(Flow.class),
        Consumed.with(Serdes.String(), JsonSerde.of(Flow.class, false)).withName("Stream.Flow")
    );

    // logs
    stream = KafkaStreamSourceService.logIfEnabled(log, stream, (key, value) ->
        log.trace("Flow in '{}.{}' with revision {}", value.getNamespace(), value.getId(), value.getRevision()), "Main");

    // join with the previous flow, keeping the record only if its revision is more recent
    KStream<String, ExecutorFlowLast.FlowWithPrevious> streamWithPrevious = stream
        .filter((key, value) -> value != null, Named.as("Main.notNull"))
        .selectKey((key, value) -> value.uidWithoutRevision(), Named.as("Main.selectKey"))
        .leftJoin(
            flowGlobalKTable,
            (key, value) -> key,
            (readOnlyKey, current, previous) -> {
                if (previous == null) {
                    return new ExecutorFlowLast.FlowWithPrevious(current, null);
                } else if (current.getRevision() < previous.getRevision()) {
                    return null;
                } else {
                    return new ExecutorFlowLast.FlowWithPrevious(current, previous);
                }
            },
            Named.as("Main.join")
        )
        .filter((key, value) -> value != null, Named.as("Main.joinNotNull"));

    // remove triggers
    streamWithPrevious
        .flatMap((key, value) -> {
            List<AbstractTrigger> deletedTriggers = new ArrayList<>();
            if (value.getFlow().isDeleted()) {
                deletedTriggers = ListUtils.emptyOnNull(value.getFlow().getTriggers());
            } else if (value.getPrevious() != null) {
                deletedTriggers = FlowService.findRemovedTrigger(value.getFlow(), value.getPrevious());
            }
            return deletedTriggers
                .stream()
                .map(t -> new KeyValue<>(queueService.key(Trigger.of(value.getFlow(), t)), (Trigger) null))
                .collect(Collectors.toList());
        }, Named.as("DeleteTrigger.flatMap"))
        .to(kafkaAdminService.getTopicName(Trigger.class), Produced.with(Serdes.String(), JsonSerde.of(Trigger.class)).withName("To.Trigger"));

    // send to last and don't drop deleted flows, in order to keep the last version
    streamWithPrevious
        .map((key, value) -> new KeyValue<>(value.getFlow().uidWithoutRevision(), value.getFlow()), Named.as("Main.Map"))
        .to(kafkaAdminService.getTopicName(KafkaStreamSourceService.TOPIC_FLOWLAST), Produced.with(Serdes.String(), JsonSerde.of(Flow.class)).withName("To.FlowLast"));

    return builder;
}
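For context, a minimal sketch of how a topology like this could be started with the standard Kafka Streams API; the application id, bootstrap servers, and the executorFlowLast instance are assumptions for illustration, not kestra's actual runtime wiring (standard java.util and org.apache.kafka.streams imports assumed):

// Illustrative configuration only; the property values are assumptions.
Properties properties = new Properties();
properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kestra-executor-flow-last");
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

// topology() returns the configured StreamsBuilder; build() materializes the Topology.
KafkaStreams streams = new KafkaStreams(executorFlowLast.topology().build(), properties);
streams.start();
Runtime.getRuntime().addShutdownHook(new Thread(streams::close));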