Use of org.kie.kogito.internal.process.runtime.KogitoWorkflowProcess in project kogito-runtimes by kiegroup.
From the class ProcessGenerationUtils, method execModelFromProcessFile:
/**
 * Creates a list of {@link ProcessExecutableModelGenerator} for process generators
 *
 * @param processFilePath from the test/resources classpath folder
 * @return a list of {@link ProcessExecutableModelGenerator} from the given file
 */
public static List<ProcessExecutableModelGenerator> execModelFromProcessFile(final String processFilePath) {
    final File processFile = new File(ProcessGenerationUtils.class.getResource(processFilePath).getFile());
    final List<Process> processes = parseProcesses(Collections.singleton(processFile));
    Assertions.assertThat(processes).isNotEmpty();
    final ProcessToExecModelGenerator execModelGenerator = new ProcessToExecModelGenerator(ProcessGenerationUtils.class.getClassLoader());
    final List<ProcessExecutableModelGenerator> processExecutableModelGenerators = new ArrayList<>();
    processes.forEach(p -> {
        processExecutableModelGenerators.add(new ProcessExecutableModelGenerator((KogitoWorkflowProcess) p, execModelGenerator));
    });
    return processExecutableModelGenerators;
}
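A minimal usage sketch for this helper; the classpath resource name below is hypothetical and only illustrates the call:
// Hypothetical test usage: "/hello.bpmn2" stands in for any BPMN file on the test classpath.
List<ProcessExecutableModelGenerator> generators = ProcessGenerationUtils.execModelFromProcessFile("/hello.bpmn2");
// Each generator can then produce the process metadata consumed by the codegen pipeline.
generators.forEach(ProcessExecutableModelGenerator::generate);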
Use of org.kie.kogito.internal.process.runtime.KogitoWorkflowProcess in project kogito-runtimes by kiegroup.
From the class ProcessResourceGeneratorTest, method parseProcess:
private KogitoWorkflowProcess parseProcess(String fileName) {
    Collection<Process> processes = ProcessCodegen.parseProcessFile(new FileSystemResource(new File(fileName)));
    assertThat(processes).hasSize(1);
    Process process = processes.stream().findAny().orElseThrow();
    assertThat(process).isInstanceOf(KogitoWorkflowProcess.class);
    return (KogitoWorkflowProcess) process;
}
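A hedged example of calling this test helper; the file path is an assumption added for illustration:
// Hypothetical: the BPMN path below does not refer to an actual file in the project.
KogitoWorkflowProcess process = parseProcess("src/test/resources/approval.bpmn2");
assertThat(process.getId()).isNotBlank();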
Use of org.kie.kogito.internal.process.runtime.KogitoWorkflowProcess in project kogito-runtimes by kiegroup.
From the class ProcessResourceGeneratorTest, method getResourceClassDeclaration:
private ClassOrInterfaceDeclaration getResourceClassDeclaration(KogitoBuildContext.Builder contextBuilder, String fileName) {
    KogitoWorkflowProcess process = parseProcess(fileName);
    CompilationUnit compilationUnit = getCompilationUnit(contextBuilder, process);
    Optional<ClassOrInterfaceDeclaration> classDeclaration = compilationUnit.getClassByName(process.getId() + "Resource");
    assertThat(classDeclaration).isNotEmpty();
    return classDeclaration.orElseThrow();
}
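A short sketch of how the returned declaration might be inspected further; the path and assertion are assumptions added for illustration:
// Hypothetical follow-up check: the generated <processId>Resource class should expose at least one endpoint method.
ClassOrInterfaceDeclaration resourceClass = getResourceClassDeclaration(contextBuilder, "src/test/resources/approval.bpmn2");
assertThat(resourceClass.getMethods()).isNotEmpty();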
Use of org.kie.kogito.internal.process.runtime.KogitoWorkflowProcess in project kogito-runtimes by kiegroup.
From the class ServerlessWorkflowParser, method parseProcess:
private GeneratedInfo<KogitoWorkflowProcess> parseProcess() {
    String workflowStartStateName = workflow.getStart().getStateName();
    if (workflowStartStateName == null || workflowStartStateName.trim().isEmpty()) {
        throw new IllegalArgumentException("workflow does not define a starting state");
    }
    RuleFlowProcessFactory factory = RuleFlowProcessFactory.createProcess(workflow.getId())
            .name(workflow.getName() == null ? DEFAULT_NAME : workflow.getName())
            .version(workflow.getVersion() == null ? DEFAULT_VERSION : workflow.getVersion())
            .packageName(workflow.getMetadata() != null ? workflow.getMetadata().getOrDefault("package", DEFAULT_PACKAGE) : DEFAULT_PACKAGE)
            .visibility("Public")
            .variable(DEFAULT_WORKFLOW_VAR, new ObjectDataType(JsonNode.class), ObjectMapperFactory.get().createObjectNode());
    ParserContext parserContext = new ParserContext(idGenerator, factory, context);
    Constants constants = workflow.getConstants();
    if (constants != null) {
        factory.metaData(Metadata.CONSTANTS, constants.getConstantsDef());
    }
    Collection<StateHandler<?>> handlers = workflow.getStates().stream()
            .map(state -> StateHandlerFactory.getStateHandler(state, workflow, parserContext))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .filter(state -> !state.usedForCompensation())
            .collect(Collectors.toList());
    handlers.forEach(StateHandler::handleStart);
    handlers.forEach(StateHandler::handleEnd);
    handlers.forEach(StateHandler::handleState);
    handlers.forEach(StateHandler::handleTransitions);
    handlers.forEach(StateHandler::handleErrors);
    handlers.forEach(StateHandler::handleConnections);
    if (parserContext.isCompensation()) {
        factory.metaData(Metadata.COMPENSATION, true);
        factory.addCompensationContext(workflow.getId());
    }
    return new GeneratedInfo<>(factory.validate().getProcess(), parserContext.generatedFiles());
}
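For orientation, a minimal sketch of the RuleFlowProcessFactory fluent API that this parser drives; the node ids, names, and exact fluent methods shown here are illustrative and may differ between versions:
// Hedged sketch: a trivial start-to-end process built directly with the fluent factory.
RuleFlowProcess process = RuleFlowProcessFactory.createProcess("hello.world")
        .name("HelloWorld")
        .packageName("com.example")
        .startNode(1).name("Start").done()
        .endNode(2).name("End").done()
        .connection(1, 2)
        .validate()
        .getProcess();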
Use of org.kie.kogito.internal.process.runtime.KogitoWorkflowProcess in project kogito-runtimes by kiegroup.
From the class ProcessCodegen, method internalGenerate:
@Override
protected Collection<GeneratedFile> internalGenerate() {
    List<ProcessGenerator> ps = new ArrayList<>();
    List<ProcessInstanceGenerator> pis = new ArrayList<>();
    List<ProcessExecutableModelGenerator> processExecutableModelGenerators = new ArrayList<>();
    // REST resources
    List<ProcessResourceGenerator> rgs = new ArrayList<>();
    // message endpoints/consumers
    List<MessageConsumerGenerator> megs = new ArrayList<>();
    // message producers
    List<MessageProducerGenerator> mpgs = new ArrayList<>();
    Map<String, ModelClassGenerator> processIdToModelGenerator = new HashMap<>();
    Map<String, InputModelClassGenerator> processIdToInputModelGenerator = new HashMap<>();
    Map<String, OutputModelClassGenerator> processIdToOutputModelGenerator = new HashMap<>();
    Map<String, List<UserTaskModelMetaData>> processIdToUserTaskModel = new HashMap<>();
    Map<String, ProcessMetaData> processIdToMetadata = new HashMap<>();
    OpenApiClientWorkItemIntrospector introspector = new OpenApiClientWorkItemIntrospector(this.context());
    // first we generate all the data classes from variable declarations
    for (WorkflowProcess workFlowProcess : processes.values()) {
        ModelClassGenerator mcg = new ModelClassGenerator(context(), workFlowProcess);
        processIdToModelGenerator.put(workFlowProcess.getId(), mcg);
        InputModelClassGenerator imcg = new InputModelClassGenerator(context(), workFlowProcess);
        processIdToInputModelGenerator.put(workFlowProcess.getId(), imcg);
        OutputModelClassGenerator omcg = new OutputModelClassGenerator(context(), workFlowProcess);
        processIdToOutputModelGenerator.put(workFlowProcess.getId(), omcg);
    }
    // then we generate user task inputs and outputs if any
    for (WorkflowProcess workFlowProcess : processes.values()) {
        UserTasksModelClassGenerator utcg = new UserTasksModelClassGenerator(workFlowProcess);
        processIdToUserTaskModel.put(workFlowProcess.getId(), utcg.generate());
    }
    // then we can instantiate the exec model generator
    // with the data classes that we have already resolved
    ProcessToExecModelGenerator execModelGenerator = new ProcessToExecModelGenerator(context().getClassLoader());
    // collect all process descriptors (exec model)
    for (KogitoWorkflowProcess workFlowProcess : processes.values()) {
        introspector.introspect(workFlowProcess);
        ProcessExecutableModelGenerator execModelGen = new ProcessExecutableModelGenerator(workFlowProcess, execModelGenerator);
        String packageName = workFlowProcess.getPackageName();
        String id = workFlowProcess.getId();
        try {
            ProcessMetaData generate = execModelGen.generate();
            processIdToMetadata.put(id, generate);
            processExecutableModelGenerators.add(execModelGen);
        } catch (RuntimeException e) {
            throw new ProcessCodegenException(id, packageName, e);
        }
    }
    // generate Process, ProcessInstance classes and the REST resource
    for (ProcessExecutableModelGenerator execModelGen : processExecutableModelGenerators) {
        String classPrefix = StringUtils.ucFirst(execModelGen.extractedProcessId());
        KogitoWorkflowProcess workFlowProcess = execModelGen.process();
        ModelClassGenerator modelClassGenerator = processIdToModelGenerator.get(execModelGen.getProcessId());
        ProcessGenerator p = new ProcessGenerator(context(), workFlowProcess, execModelGen, classPrefix, modelClassGenerator.className(), applicationCanonicalName());
        ProcessInstanceGenerator pi = new ProcessInstanceGenerator(workFlowProcess.getPackageName(), classPrefix, modelClassGenerator.generate());
        ProcessMetaData metaData = processIdToMetadata.get(workFlowProcess.getId());
        // Creating and adding the ResourceGenerator
        ProcessResourceGenerator processResourceGenerator = new ProcessResourceGenerator(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName());
        processResourceGenerator.withUserTasks(processIdToUserTaskModel.get(workFlowProcess.getId()))
                .withSignals(metaData.getSignals())
                .withTriggers(metaData.isStartable(), metaData.isDynamic());
        rgs.add(processResourceGenerator);
        if (metaData.getTriggers() != null) {
            for (TriggerMetaData trigger : metaData.getTriggers()) {
                // generate message consumers for processes with message start events
                if (trigger.getType().equals(TriggerMetaData.TriggerType.ConsumeMessage)) {
                    MessageConsumerGenerator messageConsumerGenerator = new MessageConsumerGenerator(context(), workFlowProcess, modelClassGenerator.className(), execModelGen.className(), applicationCanonicalName(), trigger);
                    megs.add(messageConsumerGenerator);
                    metaData.getConsumers().put(trigger.getName(), messageConsumerGenerator.compilationUnit());
                } else if (trigger.getType().equals(TriggerMetaData.TriggerType.ProduceMessage)) {
                    MessageProducerGenerator messageProducerGenerator = new MessageProducerGenerator(context(), workFlowProcess, trigger);
                    mpgs.add(messageProducerGenerator);
                    metaData.getProducers().put(trigger.getName(), messageProducerGenerator.compilationUnit());
                }
            }
        }
        processGenerators.add(p);
        ps.add(p);
        pis.add(pi);
    }
    for (ModelClassGenerator modelClassGenerator : processIdToModelGenerator.values()) {
        ModelMetaData mmd = modelClassGenerator.generate();
        storeFile(MODEL_TYPE, modelClassGenerator.generatedFilePath(), mmd.generate());
    }
    for (InputModelClassGenerator modelClassGenerator : processIdToInputModelGenerator.values()) {
        ModelMetaData mmd = modelClassGenerator.generate();
        storeFile(MODEL_TYPE, modelClassGenerator.generatedFilePath(), mmd.generate());
    }
    for (OutputModelClassGenerator modelClassGenerator : processIdToOutputModelGenerator.values()) {
        ModelMetaData mmd = modelClassGenerator.generate();
        storeFile(MODEL_TYPE, modelClassGenerator.generatedFilePath(), mmd.generate());
    }
    for (List<UserTaskModelMetaData> utmd : processIdToUserTaskModel.values()) {
        for (UserTaskModelMetaData ut : utmd) {
            storeFile(MODEL_TYPE, UserTasksModelClassGenerator.generatedFilePath(ut.getInputModelClassName()), ut.generateInput());
            storeFile(MODEL_TYPE, UserTasksModelClassGenerator.generatedFilePath(ut.getOutputModelClassName()), ut.generateOutput());
            storeFile(MODEL_TYPE, UserTasksModelClassGenerator.generatedFilePath(ut.getTaskModelClassName()), ut.generateModel());
        }
    }
    if (context().hasRESTForGenerator(this)) {
        for (ProcessResourceGenerator resourceGenerator : rgs) {
            storeFile(REST_TYPE, resourceGenerator.generatedFilePath(), resourceGenerator.generate());
            storeFile(MODEL_TYPE, UserTasksModelClassGenerator.generatedFilePath(resourceGenerator.getTaskModelFactoryClassName()), resourceGenerator.getTaskModelFactory());
        }
        // Generating the Producer classes for Dependency Injection
        StaticDependencyInjectionProducerGenerator.of(context())
                .generate()
                .entrySet()
                .forEach(entry -> storeFile(PRODUCER_TYPE, entry.getKey(), entry.getValue()));
    }
    for (MessageConsumerGenerator messageConsumerGenerator : megs) {
        storeFile(MESSAGE_CONSUMER_TYPE, messageConsumerGenerator.generatedFilePath(), messageConsumerGenerator.generate());
    }
    for (MessageProducerGenerator messageProducerGenerator : mpgs) {
        storeFile(MESSAGE_PRODUCER_TYPE, messageProducerGenerator.generatedFilePath(), messageProducerGenerator.generate());
    }
    for (ProcessGenerator p : ps) {
        storeFile(PROCESS_TYPE, p.generatedFilePath(), p.generate());
        p.getAdditionalClasses().forEach(cp -> {
            String packageName = cp.getPackageDeclaration().map(pd -> pd.getName().toString()).orElse("");
            String clazzName = cp.findFirst(ClassOrInterfaceDeclaration.class).map(cls -> cls.getName().toString()).get();
            String path = (packageName + "." + clazzName).replace('.', '/') + ".java";
            storeFile(GeneratedFileType.SOURCE, path, cp.toString());
        });
    }
    if (context().getAddonsConfig().useProcessSVG()) {
        Map<String, String> svgs = context().getContextAttribute(ContextAttributesConstants.PROCESS_AUTO_SVG_MAPPING, Map.class);
        svgs.keySet().stream().forEach(key -> storeFile(GeneratedFileType.INTERNAL_RESOURCE, "META-INF/processSVG/" + key + ".svg", svgs.get(key)));
    }
    if (context().hasRESTForGenerator(this)) {
        final ProcessCloudEventMetaFactoryGenerator topicsGenerator = new ProcessCloudEventMetaFactoryGenerator(context(), processExecutableModelGenerators);
        storeFile(REST_TYPE, topicsGenerator.generatedFilePath(), topicsGenerator.generate());
    }
    for (ProcessInstanceGenerator pi : pis) {
        storeFile(PROCESS_INSTANCE_TYPE, pi.generatedFilePath(), pi.generate());
    }
    // generate Grafana dashboards
    if (context().getAddonsConfig().usePrometheusMonitoring()) {
        Optional<String> globalDbJson = generateOperationalDashboard(GLOBAL_OPERATIONAL_DASHBOARD_TEMPLATE, "Global", context().getPropertiesMap(), "Global", context().getGAV().orElse(KogitoGAV.EMPTY_GAV), false);
        String globalDbName = buildDashboardName(context().getGAV(), "Global");
        globalDbJson.ifPresent(dashboard -> generatedFiles.addAll(DashboardGeneratedFileUtils.operational(dashboard, globalDbName + ".json")));
        for (KogitoWorkflowProcess process : processes.values()) {
            String dbName = buildDashboardName(context().getGAV(), process.getId());
            Optional<String> dbJson = generateOperationalDashboard(PROCESS_OPERATIONAL_DASHBOARD_TEMPLATE, process.getId(), context().getPropertiesMap(), process.getId(), context().getGAV().orElse(KogitoGAV.EMPTY_GAV), false);
            dbJson.ifPresent(dashboard -> generatedFiles.addAll(DashboardGeneratedFileUtils.operational(dashboard, dbName + ".json")));
        }
    }
    return generatedFiles;
}
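A hedged sketch of consuming the Collection<GeneratedFile> produced here (via the generator's public generate() entry point); the output directory and file-writing logic below are assumptions for illustration:
// Hypothetical consumer: writes each generated file beneath a chosen output directory.
Collection<GeneratedFile> files = processCodegen.generate();
for (GeneratedFile file : files) {
    Path target = Paths.get("target/generated-sources/kogito", file.relativePath());
    Files.createDirectories(target.getParent());
    Files.write(target, file.contents());
}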