Usage of io.automatiko.engine.workflow.compiler.canonical.ProcessMetaData in the automatiko-engine project (by automatiko-io):
class ProcessCodegen, method populateSubprocessesGraphQL.
/**
 * Recursively creates GraphQL resource generators for every subprocess referenced by the
 * given process metadata.
 *
 * @param parentProcess the process that owns the subprocesses being generated
 * @param metaData metadata of the parent process; its subprocess map drives the iteration
 * @param processIdToMetadata lookup of process id to its generated metadata
 * @param processIdToModelGenerator lookup of process id to its model class generator
 * @param processExecutableModelGenerators all executable model generators produced so far
 * @param processIdToUserTaskModel lookup of process id to its user task model metadata
 * @return one resource generator per resolvable subprocess (subprocesses whose executable
 *         model generator cannot be found are silently skipped)
 */
protected List<AbstractResourceGenerator> populateSubprocessesGraphQL(WorkflowProcess parentProcess, ProcessMetaData metaData, Map<String, ProcessMetaData> processIdToMetadata, Map<String, ModelClassGenerator> processIdToModelGenerator, List<ProcessExecutableModelGenerator> processExecutableModelGenerators, Map<String, List<UserTaskModelMetaData>> processIdToUserTaskModel) {
    List<AbstractResourceGenerator> subprocesses = new ArrayList<AbstractResourceGenerator>();
    for (Entry<String, String> entry : metaData.getSubProcesses().entrySet()) {
        // entry value is the subprocess id; locate its executable model generator
        ProcessExecutableModelGenerator execModelGen = processExecutableModelGenerators.stream()
                .filter(p -> p.getProcessId().equals(entry.getValue()))
                .findFirst()
                .orElse(null);
        if (execModelGen != null) {
            WorkflowProcess workFlowProcess = execModelGen.process();
            ModelClassGenerator modelClassGenerator = processIdToModelGenerator.get(entry.getValue());
            // hoist the repeated metadata lookup - the original resolved it four times
            ProcessMetaData subProcessMetaData = processIdToMetadata.get(execModelGen.getProcessId());
            // build the generator directly; wrapping a non-null value in Optional.of(...).map(...)
            // added no null-safety and only obscured the flow
            SubprocessGraphQLResourceGenerator generator = new SubprocessGraphQLResourceGenerator(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName);
            subprocesses.add(generator.withDependencyInjection(annotator)
                    .withParentProcess(parentProcess)
                    .withUserTasks(processIdToUserTaskModel.get(execModelGen.getProcessId()))
                    .withSignals(subProcessMetaData.getSignals())
                    .withTriggers(subProcessMetaData.isStartable(), subProcessMetaData.isDynamic())
                    // recurse so nested subprocesses get their own resources as well
                    .withSubProcesses(populateSubprocessesGraphQL(workFlowProcess, subProcessMetaData, processIdToMetadata, processIdToModelGenerator, processExecutableModelGenerators, processIdToUserTaskModel)));
        }
    }
    return subprocesses;
}
Usage of io.automatiko.engine.workflow.compiler.canonical.ProcessMetaData in the automatiko-engine project (by automatiko-io):
class ProcessCodegen, method generate.
/**
 * Generates all code artifacts for the registered processes: model classes (input/output and
 * user task models), Process and ProcessInstance classes, REST/GraphQL resources, function and
 * function-flow endpoints, message consumers/producers with their data events, and OpenAPI
 * client classes. Generated sources are accumulated via {@code storeFile} and returned.
 *
 * @return the list of generated files; empty when no processes are registered
 * @throws ProcessCodegenException when executable model generation fails for a process
 */
public List<GeneratedFile> generate() {
    if (processes.isEmpty()) {
        return Collections.emptyList();
    }
    List<ProcessGenerator> ps = new ArrayList<>();
    List<ProcessInstanceGenerator> pis = new ArrayList<>();
    List<ProcessExecutableModelGenerator> processExecutableModelGenerators = new ArrayList<>();
    // REST resources
    List<AbstractResourceGenerator> rgs = new ArrayList<>();
    // GraphQL resources
    List<AbstractResourceGenerator> grapggs = new ArrayList<>();
    // Function resources
    List<FunctionGenerator> fgs = new ArrayList<>();
    // Function flow resources
    List<FunctionFlowGenerator> ffgs = new ArrayList<>();
    // message data events
    List<MessageDataEventGenerator> mdegs = new ArrayList<>();
    // message endpoints/consumers - LinkedHashSet to deduplicate while keeping insertion order
    Set<MessageConsumerGenerator> megs = new LinkedHashSet<>();
    // message producers
    List<MessageProducerGenerator> mpgs = new ArrayList<>();
    // OpenAPI clients - deduplicated, insertion order preserved
    Set<OpenAPIClientGenerator> opgs = new LinkedHashSet<>();
    List<String> publicProcesses = new ArrayList<>();
    Map<String, ModelMetaData> processIdToModel = new HashMap<>();
    Map<String, ModelClassGenerator> processIdToModelGenerator = new HashMap<>();
    Map<String, InputModelClassGenerator> processIdToInputModelGenerator = new HashMap<>();
    Map<String, OutputModelClassGenerator> processIdToOutputModelGenerator = new HashMap<>();
    Map<String, List<UserTaskModelMetaData>> processIdToUserTaskModel = new HashMap<>();
    Map<String, ProcessMetaData> processIdToMetadata = new HashMap<>();
    // the project type decides which execution style the generated model targets
    String workflowType = Process.WORKFLOW_TYPE;
    if (isFunctionFlowProject()) {
        workflowType = Process.FUNCTION_FLOW_TYPE;
    } else if (isFunctionProject()) {
        workflowType = Process.FUNCTION_TYPE;
    }
    // then we can instantiate the exec model generator
    // with the data classes that we have already resolved
    ProcessToExecModelGenerator execModelGenerator = new ProcessToExecModelGenerator(contextClassLoader, workflowType);
    // first we generate all the data classes from variable declarations
    for (Entry<String, WorkflowProcess> entry : processes.entrySet()) {
        ModelClassGenerator mcg = new ModelClassGenerator(execModelGenerator, context(), entry.getValue());
        processIdToModelGenerator.put(entry.getKey(), mcg);
        processIdToModel.put(entry.getKey(), mcg.generate());
        InputModelClassGenerator imcg = new InputModelClassGenerator(context(), entry.getValue(), workflowType);
        processIdToInputModelGenerator.put(entry.getKey(), imcg);
        OutputModelClassGenerator omcg = new OutputModelClassGenerator(context(), entry.getValue(), workflowType);
        processIdToOutputModelGenerator.put(entry.getKey(), omcg);
        context.addGenerator("ModelClassGenerator", entry.getKey(), mcg);
        context.addGenerator("InputModelClassGenerator", entry.getKey(), imcg);
        context.addGenerator("OutputModelClassGenerator", entry.getKey(), omcg);
    }
    // then we generate user task inputs and outputs if any
    for (Entry<String, WorkflowProcess> entry : processes.entrySet()) {
        UserTasksModelClassGenerator utcg = new UserTasksModelClassGenerator(entry.getValue(), context);
        processIdToUserTaskModel.put(entry.getKey(), utcg.generate());
    }
    List<String> functions = context.getBuildContext().classThatImplement(Functions.class.getCanonicalName());
    // collect all process descriptors (exec model)
    for (Entry<String, WorkflowProcess> entry : processes.entrySet()) {
        ProcessExecutableModelGenerator execModelGen = new ProcessExecutableModelGenerator(entry.getValue(), execModelGenerator);
        String packageName = entry.getValue().getPackageName();
        String id = entry.getKey();
        // add extra meta data to indicate if user task mgmt is available
        if (context.getBuildContext().isUserTaskMgmtSupported()) {
            entry.getValue().getMetaData().put("UserTaskMgmt", "true");
        }
        Set<String> classImports = ((io.automatiko.engine.workflow.process.core.WorkflowProcess) entry.getValue()).getImports();
        // BUGFIX: the guard was inverted ("!= null"), which wiped any imports a process
        // already declared and threw an NPE below for processes without imports.
        // Lazily initialize the import set only when it is absent.
        if (classImports == null) {
            classImports = new HashSet<>();
            ((io.automatiko.engine.workflow.process.core.WorkflowProcess) entry.getValue()).setImports(classImports);
        }
        classImports.add(BaseFunctions.class.getCanonicalName());
        classImports.addAll(functions);
        try {
            ProcessMetaData generate = execModelGen.generate();
            processIdToMetadata.put(id, generate);
            processExecutableModelGenerators.add(execModelGen);
            context.addProcess(id, generate);
        } catch (RuntimeException e) {
            // the cause travels with the rethrown exception; the log line gives quick context
            LOGGER.error(e.getMessage());
            throw new ProcessCodegenException(id, packageName, e);
        }
    }
    // generate Process, ProcessInstance classes and the REST resource
    for (ProcessExecutableModelGenerator execModelGen : processExecutableModelGenerators) {
        String classPrefix = StringUtils.capitalize(execModelGen.extractedProcessId());
        WorkflowProcess workFlowProcess = execModelGen.process();
        ModelClassGenerator modelClassGenerator = processIdToModelGenerator.get(execModelGen.getProcessId());
        ProcessGenerator p = new ProcessGenerator(context, workFlowProcess, execModelGen, classPrefix, modelClassGenerator.className(), applicationCanonicalName, processIdToUserTaskModel.get(execModelGen.getProcessId()), processIdToMetadata).withDependencyInjection(annotator).withPersistence(persistence);
        ProcessInstanceGenerator pi = new ProcessInstanceGenerator(workflowType, context(), execModelGen, workFlowProcess.getPackageName(), classPrefix, modelClassGenerator.generate());
        ProcessMetaData metaData = processIdToMetadata.get(execModelGen.getProcessId());
        if (isFunctionFlowProject()) {
            // function flow projects expose each process as an event-driven function
            ffgs.add(new FunctionFlowGenerator(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName).withDependencyInjection(annotator).withSignals(metaData.getSignals(), metaData.getSignalNodes()).withTriggers(metaData.getTriggers()));
            if (metaData.getTriggers() != null) {
                for (TriggerMetaData trigger : metaData.getTriggers()) {
                    if (trigger.getType().equals(TriggerMetaData.TriggerType.ProduceMessage)) {
                        MessageDataEventGenerator msgDataEventGenerator = new MessageDataEventGenerator(workFlowProcess, trigger).withDependencyInjection(annotator);
                        mdegs.add(msgDataEventGenerator);
                        mpgs.add(new MessageProducerGenerator(workflowType, context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), msgDataEventGenerator.className(), trigger).withDependencyInjection(annotator));
                    }
                }
            }
        } else if (isFunctionProject()) {
            fgs.add(new FunctionGenerator(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName).withDependencyInjection(annotator));
        } else if (isServiceProject()) {
            if (isPublic(workFlowProcess)) {
                // Creating and adding the ResourceGenerator
                resourceGeneratorFactory.create(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName).map(r -> r.withDependencyInjection(annotator).withParentProcess(null).withPersistence(persistence).withUserTasks(processIdToUserTaskModel.get(execModelGen.getProcessId())).withPathPrefix("{id}").withSignals(metaData.getSignals()).withTriggers(metaData.isStartable(), metaData.isDynamic()).withSubProcesses(populateSubprocesses(workFlowProcess, processIdToMetadata.get(execModelGen.getProcessId()), processIdToMetadata, processIdToModelGenerator, processExecutableModelGenerators, processIdToUserTaskModel))).ifPresent(rgs::add);
                if (context.getBuildContext().isGraphQLSupported()) {
                    // mirror the REST resource with a GraphQL one when the capability is present
                    GraphQLResourceGenerator graphqlGenerator = new GraphQLResourceGenerator(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName);
                    graphqlGenerator.withDependencyInjection(annotator).withParentProcess(null).withPersistence(persistence).withUserTasks(processIdToUserTaskModel.get(execModelGen.getProcessId())).withPathPrefix(CodegenUtils.version(workFlowProcess.getVersion())).withSignals(metaData.getSignals()).withTriggers(metaData.isStartable(), metaData.isDynamic()).withSubProcesses(populateSubprocessesGraphQL(workFlowProcess, processIdToMetadata.get(execModelGen.getProcessId()), processIdToMetadata, processIdToModelGenerator, processExecutableModelGenerators, processIdToUserTaskModel));
                    grapggs.add(graphqlGenerator);
                }
            }
            if (metaData.getTriggers() != null) {
                for (TriggerMetaData trigger : metaData.getTriggers()) {
                    // generate message consumers for processes with message events
                    if (isPublic(workFlowProcess) && trigger.getType().equals(TriggerMetaData.TriggerType.ConsumeMessage)) {
                        MessageDataEventGenerator msgDataEventGenerator = new MessageDataEventGenerator(workFlowProcess, trigger).withDependencyInjection(annotator);
                        mdegs.add(msgDataEventGenerator);
                        megs.add(new MessageConsumerGenerator(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName, msgDataEventGenerator.className(), trigger).withDependencyInjection(annotator).withPersistence(persistence));
                    } else if (trigger.getType().equals(TriggerMetaData.TriggerType.ProduceMessage)) {
                        MessageDataEventGenerator msgDataEventGenerator = new MessageDataEventGenerator(workFlowProcess, trigger).withDependencyInjection(annotator);
                        mdegs.add(msgDataEventGenerator);
                        mpgs.add(new MessageProducerGenerator(workflowType, context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), msgDataEventGenerator.className(), trigger).withDependencyInjection(annotator));
                    }
                }
            }
        }
        if (metaData.getOpenAPIs() != null) {
            for (OpenAPIMetaData api : metaData.getOpenAPIs()) {
                OpenAPIClientGenerator oagenerator = new OpenAPIClientGenerator(context, workFlowProcess, api).withDependencyInjection(annotator);
                opgs.add(oagenerator);
            }
        }
        moduleGenerator.addProcess(p);
        ps.add(p);
        pis.add(pi);
    }
    // emit model classes (main, input, output and user task models)
    for (ModelClassGenerator modelClassGenerator : processIdToModelGenerator.values()) {
        ModelMetaData mmd = modelClassGenerator.generate();
        storeFile(Type.MODEL, modelClassGenerator.generatedFilePath(), mmd.generate(annotator != null ? new String[] { "io.quarkus.runtime.annotations.RegisterForReflection" } : new String[0]));
    }
    for (InputModelClassGenerator modelClassGenerator : processIdToInputModelGenerator.values()) {
        ModelMetaData mmd = modelClassGenerator.generate();
        storeFile(Type.MODEL, modelClassGenerator.generatedFilePath(), mmd.generate(annotator != null ? new String[] { "io.quarkus.runtime.annotations.RegisterForReflection" } : new String[0]));
    }
    for (OutputModelClassGenerator modelClassGenerator : processIdToOutputModelGenerator.values()) {
        ModelMetaData mmd = modelClassGenerator.generate();
        storeFile(Type.MODEL, modelClassGenerator.generatedFilePath(), mmd.generate(annotator != null ? new String[] { "io.quarkus.runtime.annotations.RegisterForReflection" } : new String[0]));
    }
    for (List<UserTaskModelMetaData> utmd : processIdToUserTaskModel.values()) {
        for (UserTaskModelMetaData ut : utmd) {
            storeFile(Type.MODEL, UserTasksModelClassGenerator.generatedFilePath(ut.getInputModelClassName()), ut.generateInput());
            storeFile(Type.MODEL, UserTasksModelClassGenerator.generatedFilePath(ut.getOutputModelClassName()), ut.generateOutput());
        }
    }
    // emit endpoint and messaging artifacts
    for (AbstractResourceGenerator resourceGenerator : rgs) {
        storeFile(Type.REST, resourceGenerator.generatedFilePath(), resourceGenerator.generate());
    }
    for (AbstractResourceGenerator resourceGenerator : grapggs) {
        storeFile(Type.GRAPHQL, resourceGenerator.generatedFilePath(), resourceGenerator.generate());
    }
    for (FunctionGenerator functionGenerator : fgs) {
        storeFile(Type.FUNCTION, functionGenerator.generatedFilePath(), functionGenerator.generate());
    }
    for (FunctionFlowGenerator functionFlowGenerator : ffgs) {
        storeFile(Type.FUNCTION_FLOW, functionFlowGenerator.generatedFilePath(), functionFlowGenerator.generate());
    }
    for (MessageDataEventGenerator messageDataEventGenerator : mdegs) {
        storeFile(Type.CLASS, messageDataEventGenerator.generatedFilePath(), messageDataEventGenerator.generate());
    }
    for (MessageConsumerGenerator messageConsumerGenerator : megs) {
        storeFile(Type.MESSAGE_CONSUMER, messageConsumerGenerator.generatedFilePath(), messageConsumerGenerator.generate());
    }
    for (MessageProducerGenerator messageProducerGenerator : mpgs) {
        storeFile(Type.MESSAGE_PRODUCER, messageProducerGenerator.generatedFilePath(), messageProducerGenerator.generate());
    }
    for (OpenAPIClientGenerator openApiClientGenerator : opgs) {
        // one OpenAPI client generator may produce several classes
        openApiClientGenerator.generate();
        Map<String, String> contents = openApiClientGenerator.generatedClasses();
        for (Entry<String, String> entry : contents.entrySet()) {
            storeFile(Type.CLASS, entry.getKey().replace('.', '/') + ".java", entry.getValue());
        }
    }
    // emit Process classes plus any additional compilation units they carry
    for (ProcessGenerator p : ps) {
        storeFile(Type.PROCESS, p.generatedFilePath(), p.generate());
        p.getAdditionalClasses().forEach(cp -> {
            String packageName = cp.getPackageDeclaration().map(pd -> pd.getName().toString()).orElse("");
            String clazzName = cp.findFirst(ClassOrInterfaceDeclaration.class).map(cls -> cls.getName().toString()).get();
            String path = (packageName + "." + clazzName).replace('.', '/') + ".java";
            storeFile(Type.CLASS, path, cp.toString());
        });
    }
    for (ProcessInstanceGenerator pi : pis) {
        storeFile(Type.PROCESS_INSTANCE, pi.generatedFilePath(), pi.generate());
    }
    for (ProcessExecutableModelGenerator processGenerator : processExecutableModelGenerators) {
        if (processGenerator.isPublic()) {
            publicProcesses.add(processGenerator.extractedProcessId());
        }
    }
    return generatedFiles;
}
Usage of io.automatiko.engine.workflow.compiler.canonical.ProcessMetaData in the automatiko-engine project (by automatiko-io):
class ProcessToExecModelGeneratorTest, method testScriptAndWorkItemGeneration.
@Test
public void testScriptAndWorkItemGeneration() {
    // assemble a minimal process: start -> script action -> work item -> end
    ExecutableProcessFactory factory = ExecutableProcessFactory.createProcess("demo.orders");
    factory.variable("order", new ObjectDataType(Integer.class))
            .variable("approver", new ObjectDataType(String.class))
            .name("orders")
            .packageName("com.myspace.demo")
            .dynamic(false)
            .version("1.0");
    factory.workItemNode(1).name("Log").workName("Log").done();
    factory.actionNode(2).name("Dump order")
            .action("java", "System.out.println(\"Order has been created \" + order);").done();
    factory.endNode(3).name("end").terminate(false).done();
    factory.startNode(4).name("start").done();
    factory.connection(2, 1).connection(4, 2).connection(1, 3);
    WorkflowProcess process = factory.validate().getProcess();

    // run the exec model generator and verify the extracted metadata
    ProcessMetaData processMetadata = ProcessToExecModelGenerator.INSTANCE.generate(process);
    assertNotNull(processMetadata, "Dumper should return non null class for process");
    logger.debug(processMetadata.getGeneratedClassModel().toString());
    assertEquals("orders_1_0", processMetadata.getExtractedProcessId());
    assertEquals("demo.orders", processMetadata.getProcessId());
    assertEquals("orders", processMetadata.getProcessName());
    assertEquals("_1_0", processMetadata.getProcessVersion());
    assertEquals("com.myspace.demo.Orders_1_0Process", processMetadata.getProcessClassName());
    assertNotNull(processMetadata.getGeneratedClassModel());
    assertEquals(1, processMetadata.getWorkItems().size());
}
Usage of io.automatiko.engine.workflow.compiler.canonical.ProcessMetaData in the automatiko-engine project (by automatiko-io):
class GeneratorContext, method collectConnectedFiles.
/**
 * Expands the given set of source files with files of all processes connected to them,
 * i.e. processes defined in the same resource plus the parents of those processes.
 *
 * @param inputs source files to start from
 * @return a set containing the inputs plus every connected process source file
 */
public Set<File> collectConnectedFiles(Set<File> inputs) {
    Set<File> outcome = new LinkedHashSet<File>(inputs);
    for (File input : inputs) {
        // gather every process whose definition lives in this input file
        Set<ProcessMetaData> relatedProcesses = new LinkedHashSet<ProcessMetaData>();
        for (ProcessMetaData candidate : this.processes.values()) {
            if (isTheSameResource(input, candidate.getSource())) {
                relatedProcesses.add(candidate);
            }
        }
        collectRelatedProcesses(outcome, relatedProcesses);
        for (ProcessMetaData metadata : relatedProcesses) {
            // last check if any of the processes is used as subprocess of any other process - find parent processes
            Set<ProcessMetaData> parentProcesses = new LinkedHashSet<ProcessMetaData>();
            for (ProcessMetaData candidate : relatedProcesses) {
                if (candidate.getSubProcesses() != null
                        && candidate.getSubProcesses().containsKey(metadata.getExtractedProcessId())) {
                    parentProcesses.add(candidate);
                }
            }
            collectRelatedProcesses(outcome, parentProcesses);
        }
    }
    return outcome;
}
Aggregations