Use of io.cdap.cdap.api.workflow.WorkflowNode in project cdap by caskdata.
From the class WorkflowDriver, method executeCondition:
@SuppressWarnings("unchecked")
private void executeCondition(ApplicationSpecification appSpec, final WorkflowConditionNode node,
                              InstantiatorFactory instantiator, ClassLoader classLoader,
                              WorkflowToken token) throws Exception {
  final BasicWorkflowContext context =
    new BasicWorkflowContext(workflowSpec, token, program, programOptions, cConf, metricsCollectionService,
                             datasetFramework, txClient, discoveryServiceClient, nodeStates, pluginInstantiator,
                             secureStore, secureStoreManager, messagingService, node.getConditionSpecification(),
                             metadataReader, metadataPublisher, namespaceQueryAdmin, fieldLineageWriter,
                             remoteClientFactory);
  final Iterator<WorkflowNode> iterator;
  Class<?> clz = classLoader.loadClass(node.getPredicateClassName());
  Predicate<WorkflowContext> predicate =
    instantiator.get(TypeToken.of((Class<? extends Predicate<WorkflowContext>>) clz)).create();
  if (!(predicate instanceof Condition)) {
    iterator = predicate.apply(context) ? node.getIfBranch().iterator() : node.getElseBranch().iterator();
  } else {
    final Condition workflowCondition = (Condition) predicate;
    Reflections.visit(workflowCondition, workflowCondition.getClass(),
                      new PropertyFieldSetter(node.getConditionSpecification().getProperties()),
                      new DataSetFieldSetter(context),
                      new MetricsFieldSetter(context.getMetrics()));
    TransactionControl defaultTxControl = workflowContext.getDefaultTxControl();
    try {
      // AbstractCondition implements a final initialize(context) and requires subclasses to
      // implement initialize(), whereas conditions that directly implement Condition can
      // override initialize(context).
      TransactionControl txControl = workflowCondition instanceof AbstractCondition
        ? Transactions.getTransactionControl(defaultTxControl, AbstractCondition.class,
                                             workflowCondition, "initialize")
        : Transactions.getTransactionControl(defaultTxControl, Condition.class,
                                             workflowCondition, "initialize", WorkflowContext.class);
      context.initializeProgram(workflowCondition, txControl, false);
      boolean result = context.execute(() -> workflowCondition.apply(context));
      iterator = result ? node.getIfBranch().iterator() : node.getElseBranch().iterator();
    } finally {
      TransactionControl txControl =
        Transactions.getTransactionControl(defaultTxControl, Condition.class, workflowCondition, "destroy");
      context.destroyProgram(workflowCondition, txControl, false);
    }
  }
  // If a workflow updates its token at a condition node, the update is normally persisted only after the
  // execution of the next node. The call below ensures that even if the workflow fails or crashes right
  // after a condition node, updates from the condition node are still persisted.
  workflowStateWriter.setWorkflowToken(workflowRunId, token);
  executeAll(iterator, appSpec, instantiator, classLoader, token);
}
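When the dispatch above takes the non-Condition path, the loaded class is treated as a plain Predicate<WorkflowContext> and its apply() result picks the if- or else-branch. Below is a minimal sketch of such a predicate, assuming CDAP's Predicate, Value, and WorkflowToken APIs as imported; the "records.processed" token key is purely hypothetical.

import io.cdap.cdap.api.Predicate;
import io.cdap.cdap.api.workflow.Value;
import io.cdap.cdap.api.workflow.WorkflowContext;

// Hedged sketch of a condition predicate, not code from the project.
public class NonEmptyOutputPredicate implements Predicate<WorkflowContext> {
  @Override
  public boolean apply(WorkflowContext context) {
    // Take the if-branch only when an upstream node reported a positive record count in the token.
    Value processed = context.getToken().get("records.processed");
    return processed != null && processed.getAsLong() > 0;
  }
}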
Use of io.cdap.cdap.api.workflow.WorkflowNode in project cdap by caskdata.
From the class ProgramSystemMetadataWriter, method getWorkflowNodes:
private Iterable<String> getWorkflowNodes() {
  if (ProgramType.WORKFLOW != programId.getType()) {
    return ImmutableSet.of();
  }
  Preconditions.checkArgument(programSpec instanceof WorkflowSpecification,
                              "Expected programSpec %s to be of type WorkflowSpecification", programSpec);
  WorkflowSpecification workflowSpec = (WorkflowSpecification) this.programSpec;
  Set<String> workflowNodeNames = new HashSet<>();
  for (Map.Entry<String, WorkflowNode> entry : workflowSpec.getNodeIdMap().entrySet()) {
    WorkflowNode workflowNode = entry.getValue();
    WorkflowNodeType type = workflowNode.getType();
    // Fork nodes have integers as node ids. Ignore them in system metadata.
    if (WorkflowNodeType.FORK == type) {
      continue;
    }
    workflowNodeNames.add(entry.getKey());
  }
  return workflowNodeNames;
}
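The same filter can be written as a stream over the node-id map; this is only an equivalent sketch using the APIs already shown above (getNodeIdMap, getType) plus java.util.stream.Collectors, not the project's code.

// Equivalent sketch: collect the names of all non-fork nodes from the node-id map.
Set<String> nonForkNodeNames = workflowSpec.getNodeIdMap().entrySet().stream()
  .filter(entry -> WorkflowNodeType.FORK != entry.getValue().getType())
  .map(Map.Entry::getKey)
  .collect(Collectors.toSet());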
Use of io.cdap.cdap.api.workflow.WorkflowNode in project cdap by caskdata.
From the class WorkflowSpecificationCodec, method deserialize:
@Override
public WorkflowSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
  throws JsonParseException {
  JsonObject jsonObj = json.getAsJsonObject();
  String className = jsonObj.get("className").getAsString();
  String name = jsonObj.get("name").getAsString();
  String description = jsonObj.get("description").getAsString();
  Map<String, Plugin> plugins = deserializeMap(jsonObj.get("plugins"), context, Plugin.class);
  Map<String, String> properties = deserializeMap(jsonObj.get("properties"), context, String.class);
  List<WorkflowNode> nodes = deserializeList(jsonObj.get("nodes"), context, WorkflowNode.class);
  Map<String, DatasetCreationSpec> localDatasetSpec =
    deserializeMap(jsonObj.get("localDatasetSpecs"), context, DatasetCreationSpec.class);
  return new WorkflowSpecification(className, name, description, properties, nodes, localDatasetSpec, plugins);
}
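A hedged sketch of how a custom codec like this is typically wired into Gson. The companion WorkflowNodeCodec registration is an assumption about what polymorphic WorkflowNode handling would need, and "jsonString" is a placeholder; neither is shown in the snippet above.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.cdap.cdap.api.workflow.WorkflowNode;
import io.cdap.cdap.api.workflow.WorkflowSpecification;

// Sketch only: register the codec so Gson delegates WorkflowSpecification (de)serialization to it.
Gson gson = new GsonBuilder()
  .registerTypeAdapter(WorkflowSpecification.class, new WorkflowSpecificationCodec())
  // Assumption: WorkflowNode is polymorphic (action/fork/condition) and likely needs its own codec;
  // "WorkflowNodeCodec" is a placeholder name here.
  .registerTypeAdapter(WorkflowNode.class, new WorkflowNodeCodec())
  .create();
// jsonString is assumed to hold a previously serialized WorkflowSpecification.
WorkflowSpecification spec = gson.fromJson(jsonString, WorkflowSpecification.class);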
Use of io.cdap.cdap.api.workflow.WorkflowNode in project cdap by cdapio.
From the class ApplicationVerificationStage, method verifyWorkflowFork:
private void verifyWorkflowFork(ApplicationSpecification appSpec, WorkflowSpecification workflowSpec,
                                WorkflowNode node, Set<String> existingNodeNames) {
  WorkflowForkNode forkNode = (WorkflowForkNode) node;
  Preconditions.checkNotNull(forkNode.getBranches(),
                             String.format("Fork is added in the Workflow '%s' without any branches",
                                           workflowSpec.getName()));
  for (List<WorkflowNode> branch : forkNode.getBranches()) {
    verifyWorkflowNodeList(appSpec, workflowSpec, branch, existingNodeNames);
  }
}
Use of io.cdap.cdap.api.workflow.WorkflowNode in project cdap by cdapio.
From the class DefaultStore, method recordCompletedWorkflow:
private void recordCompletedWorkflow(AppMetadataStore metaStore, WorkflowTable workflowTable,
                                     WorkflowId workflowId, String runId)
  throws IOException, TableNotFoundException {
  RunRecordDetail runRecord = metaStore.getRun(workflowId.run(runId));
  if (runRecord == null) {
    return;
  }
  ApplicationId app = workflowId.getParent();
  ApplicationSpecification appSpec = getApplicationSpec(metaStore, app);
  if (appSpec == null || appSpec.getWorkflows() == null
      || appSpec.getWorkflows().get(workflowId.getProgram()) == null) {
    LOG.warn("Missing ApplicationSpecification for {}, "
             + "potentially caused by application removal right after stopping workflow {}", app, workflowId);
    return;
  }
  boolean workFlowNodeFailed = false;
  WorkflowSpecification workflowSpec = appSpec.getWorkflows().get(workflowId.getProgram());
  Map<String, WorkflowNode> nodeIdMap = workflowSpec.getNodeIdMap();
  List<WorkflowTable.ProgramRun> programRunsList = new ArrayList<>();
  for (Map.Entry<String, String> entry : runRecord.getProperties().entrySet()) {
    if (!("workflowToken".equals(entry.getKey())
          || "runtimeArgs".equals(entry.getKey())
          || "workflowNodeState".equals(entry.getKey()))) {
      WorkflowActionNode workflowNode = (WorkflowActionNode) nodeIdMap.get(entry.getKey());
      ProgramType programType = ProgramType.valueOfSchedulableType(workflowNode.getProgram().getProgramType());
      ProgramId innerProgram = app.program(programType, entry.getKey());
      RunRecordDetail innerProgramRun = metaStore.getRun(innerProgram.run(entry.getValue()));
      if (innerProgramRun != null && innerProgramRun.getStatus().equals(ProgramRunStatus.COMPLETED)) {
        Long stopTs = innerProgramRun.getStopTs();
        // Since the program is completed, the stop ts cannot be null.
        if (stopTs == null) {
          LOG.warn("Since the program has completed, expected its stop time to not be null. "
                   + "Not writing workflow completed record for Program = {}, Workflow = {}, Run = {}",
                   innerProgram, workflowId, runRecord);
          workFlowNodeFailed = true;
          break;
        }
        programRunsList.add(new WorkflowTable.ProgramRun(entry.getKey(), entry.getValue(), programType,
                                                         stopTs - innerProgramRun.getStartTs()));
      } else {
        workFlowNodeFailed = true;
        break;
      }
    }
  }
  if (workFlowNodeFailed) {
    return;
  }
  workflowTable.write(workflowId, runRecord, programRunsList);
}