Use of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata.
The class WorkflowNodeCreator, method createWorkflowCustomActionNode.
static WorkflowNode createWorkflowCustomActionNode(CustomAction action, Id.Namespace deployNamespace,
                                                   Id.Artifact artifactId, ArtifactRepository artifactRepository,
                                                   PluginInstantiator pluginInstantiator) {
  Preconditions.checkArgument(action != null, "CustomAction is null.");
  CustomActionSpecification spec = DefaultCustomActionConfigurer.configureAction(
    action, deployNamespace, artifactId, artifactRepository, pluginInstantiator);
  return new WorkflowActionNode(spec.getName(), spec);
}
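For context, here is a minimal sketch of the kind of CustomAction such a node wraps. It assumes CDAP's AbstractCustomAction base class; the class name and its body are hypothetical, purely for illustration.

import co.cask.cdap.api.customaction.AbstractCustomAction;

// Hypothetical custom action; DefaultCustomActionConfigurer.configureAction(...)
// would derive its CustomActionSpecification from this configure() call.
public class NotifyAction extends AbstractCustomAction {
  @Override
  protected void configure() {
    setName("NotifyAction");
    setDescription("Sends a notification when the workflow reaches this node.");
  }

  @Override
  public void run() throws Exception {
    // Action logic goes here; the name set above becomes the WorkflowActionNode name.
  }
}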
Use of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata.
The class WorkflowNodeCreator, method createWorkflowCustomActionNode (deprecated overload).
@Deprecated
static WorkflowNode createWorkflowCustomActionNode(WorkflowAction action) {
  Preconditions.checkArgument(action != null, "WorkflowAction is null.");
  WorkflowActionSpecification spec = DefaultWorkflowActionConfigurer.configureAction(action);
  return new WorkflowActionNode(spec.getName(), spec);
}
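This overload exists for the legacy WorkflowAction API. A sketch of a legacy-style action it would accept, assuming the old AbstractWorkflowAction base class (the class name and body are hypothetical):

import co.cask.cdap.api.workflow.AbstractWorkflowAction;

// Legacy-style action; this API was superseded by CustomAction, which is why
// the overload above is marked @Deprecated.
public class LegacyNotifyAction extends AbstractWorkflowAction {
  @Override
  public void run() {
    // Old-style action logic; note the legacy run() declares no checked exceptions.
  }
}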
Use of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata.
The class ApplicationVerificationStage, method verifyWorkflowAction.
private void verifyWorkflowAction(ApplicationSpecification appSpec, WorkflowNode node) {
  WorkflowActionNode actionNode = (WorkflowActionNode) node;
  ScheduleProgramInfo program = actionNode.getProgram();
  switch (program.getProgramType()) {
    case MAPREDUCE:
      Preconditions.checkArgument(appSpec.getMapReduce().containsKey(program.getProgramName()),
        String.format("MapReduce program '%s' is not configured with the Application.", program.getProgramName()));
      break;
    case SPARK:
      Preconditions.checkArgument(appSpec.getSpark().containsKey(program.getProgramName()),
        String.format("Spark program '%s' is not configured with the Application.", program.getProgramName()));
      break;
    case CUSTOM_ACTION:
      // no-op: custom actions carry their own specification and need no separate program registration
      break;
    default:
      throw new RuntimeException(String.format("Unknown Program '%s' in the Workflow.", program.getProgramName()));
  }
}
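To see what makes this verification pass, consider a sketch of an application that both registers a MapReduce program and references it from a workflow. All names here (PurchaseApp, PurchaseHistoryBuilder, PurchaseHistoryWorkflow) are hypothetical:

import co.cask.cdap.api.app.AbstractApplication;

// Verification succeeds because the MapReduce program the workflow refers to
// by name is also registered with the application.
public class PurchaseApp extends AbstractApplication {
  @Override
  public void configure() {
    setName("PurchaseApp");
    addMapReduce(new PurchaseHistoryBuilder());    // hypothetical MapReduce program
    addWorkflow(new PurchaseHistoryWorkflow());    // hypothetical workflow that runs it by name
  }
}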
Use of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata.
The class DistributedWorkflowProgramRunner, method findDriverResources.
/**
 * Returns the {@link Resources} requirement for the workflow runnable, deduced from the
 * driver resource requirements of the Spark and MapReduce programs inside the workflow.
 */
private Resources findDriverResources(Map<String, SparkSpecification> sparkSpecs,
                                      Map<String, MapReduceSpecification> mrSpecs,
                                      WorkflowSpecification spec) {
  // Find the resource requirements of the workflow, with 768MB as the minimum:
  // the largest memory and cores across all Spark and MapReduce programs inside the workflow.
  Resources resources = new Resources(768);
  for (WorkflowNode node : spec.getNodeIdMap().values()) {
    if (WorkflowNodeType.ACTION == node.getType()) {
      ScheduleProgramInfo programInfo = ((WorkflowActionNode) node).getProgram();
      SchedulableProgramType programType = programInfo.getProgramType();
      if (programType == SchedulableProgramType.SPARK || programType == SchedulableProgramType.MAPREDUCE) {
        // The program spec shouldn't be null, otherwise the Workflow is not valid
        Resources driverResources;
        if (programType == SchedulableProgramType.SPARK) {
          driverResources = sparkSpecs.get(programInfo.getProgramName()).getClientResources();
        } else {
          driverResources = mrSpecs.get(programInfo.getProgramName()).getDriverResources();
        }
        if (driverResources != null) {
          resources = max(resources, driverResources);
        }
      }
    }
  }
  return resources;
}
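The max(...) helper is not shown in this snippet. A plausible implementation, assuming co.cask.cdap.api.Resources exposes getMemoryMB() and getVirtualCores() (a sketch, not necessarily the project's actual code):

// Takes the component-wise maximum of two resource requirements.
private Resources max(Resources a, Resources b) {
  int memoryMB = Math.max(a.getMemoryMB(), b.getMemoryMB());
  int cores = Math.max(a.getVirtualCores(), b.getVirtualCores());
  return new Resources(memoryMB, cores);
}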
Use of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata.
The class DefaultStore, method recordCompletedWorkflow.
private void recordCompletedWorkflow(AppMetadataStore metaStore, WorkflowDataset workflowDataset,
                                     WorkflowId workflowId, String runId) {
  RunRecordMeta runRecord = metaStore.getRun(workflowId.run(runId));
  if (runRecord == null) {
    return;
  }
  ApplicationId app = workflowId.getParent();
  ApplicationSpecification appSpec = getApplicationSpec(metaStore, app);
  if (appSpec == null || appSpec.getWorkflows() == null
      || appSpec.getWorkflows().get(workflowId.getProgram()) == null) {
    LOG.warn("Missing ApplicationSpecification for {}, potentially caused by application removal " +
             "right after stopping workflow {}", app, workflowId);
    return;
  }
  boolean workflowNodeFailed = false;
  WorkflowSpecification workflowSpec = appSpec.getWorkflows().get(workflowId.getProgram());
  Map<String, WorkflowNode> nodeIdMap = workflowSpec.getNodeIdMap();
  List<WorkflowDataset.ProgramRun> programRunsList = new ArrayList<>();
  for (Map.Entry<String, String> entry : runRecord.getProperties().entrySet()) {
    // Skip the reserved keys; every other property maps a workflow node id to an inner program run id.
    if (!("workflowToken".equals(entry.getKey()) || "runtimeArgs".equals(entry.getKey())
          || "workflowNodeState".equals(entry.getKey()))) {
      WorkflowActionNode workflowNode = (WorkflowActionNode) nodeIdMap.get(entry.getKey());
      ProgramType programType = ProgramType.valueOfSchedulableType(workflowNode.getProgram().getProgramType());
      ProgramId innerProgram = app.program(programType, entry.getKey());
      RunRecordMeta innerProgramRun = metaStore.getRun(innerProgram.run(entry.getValue()));
      if (innerProgramRun != null && innerProgramRun.getStatus().equals(ProgramRunStatus.COMPLETED)) {
        Long stopTs = innerProgramRun.getStopTs();
        // Since the program is completed, the stop ts cannot be null
        if (stopTs == null) {
          LOG.warn("Since the program has completed, expected its stop time to not be null. " +
                   "Not writing workflow completed record for Program = {}, Workflow = {}, Run = {}",
                   innerProgram, workflowId, runRecord);
          workflowNodeFailed = true;
          break;
        }
        programRunsList.add(new WorkflowDataset.ProgramRun(entry.getKey(), entry.getValue(), programType,
                                                           stopTs - innerProgramRun.getStartTs()));
      } else {
        workflowNodeFailed = true;
        break;
      }
    }
  }
  if (workflowNodeFailed) {
    return;
  }
  workflowDataset.write(workflowId, runRecord, programRunsList);
}
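One readability refactoring this method suggests: the three reserved property keys could be extracted into a constant so the skip condition reads directly. A sketch, under the assumption that Guava's ImmutableSet is available on the classpath (as the use of Preconditions above implies):

import com.google.common.collect.ImmutableSet;
import java.util.Set;

// Reserved run-record property keys that do not name workflow nodes.
private static final Set<String> NON_NODE_KEYS =
  ImmutableSet.of("workflowToken", "runtimeArgs", "workflowNodeState");

private static boolean isNodeProperty(String key) {
  return !NON_NODE_KEYS.contains(key);
}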