Use of io.cdap.cdap.api.workflow.Workflow in project cdap by caskdata.
The class WorkflowDriver, method initializeWorkflow.
@SuppressWarnings("unchecked")
private Workflow initializeWorkflow() throws Exception {
  // Load the workflow class through the program class loader and validate it before casting
  Class<?> clz = Class.forName(workflowSpec.getClassName(), true, program.getClassLoader());
  if (!Workflow.class.isAssignableFrom(clz)) {
    throw new IllegalStateException(String.format("%s is not a Workflow.", clz));
  }
  Class<? extends Workflow> workflowClass = (Class<? extends Workflow>) clz;
  final Workflow workflow = new InstantiatorFactory(false).get(TypeToken.of(workflowClass)).create();

  // Set metrics
  Reflections.visit(workflow, workflow.getClass(), new MetricsFieldSetter(workflowContext.getMetrics()));

  // Only workflows that implement ProgramLifecycle get the initialize call below
  if (!(workflow instanceof ProgramLifecycle)) {
    return workflow;
  }

  final TransactionControl txControl =
    Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class,
                                       workflow, "initialize", WorkflowContext.class);
  basicWorkflowToken.setCurrentNode(workflowSpec.getName());
  workflowContext.setState(new ProgramState(ProgramStatus.INITIALIZING, null));
  workflowContext.initializeProgram((ProgramLifecycle) workflow, txControl, false);
  workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken);
  return workflow;
}
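initializeWorkflow boils down to a load-validate-instantiate sequence plus an optional lifecycle hook. Below is a minimal, self-contained sketch of that same pattern using only the JDK; Task, LifecycleAware, SimpleTask, and loadTask are illustrative stand-ins, not CDAP types.

import java.lang.reflect.Constructor;

// Illustrative stand-ins for Workflow / ProgramLifecycle (not CDAP types)
interface Task { void run(); }
interface LifecycleAware { void initialize(); }

class SimpleTask implements Task, LifecycleAware {
  @Override public void initialize() { System.out.println("initialized"); }
  @Override public void run() { System.out.println("running"); }
}

public class ReflectiveLoader {
  @SuppressWarnings("unchecked")
  static Task loadTask(String className, ClassLoader classLoader) throws Exception {
    // Load through the supplied class loader, mirroring Class.forName(..., true, program.getClassLoader())
    Class<?> clz = Class.forName(className, true, classLoader);
    // Validate before casting, like the Workflow.class.isAssignableFrom(clz) guard
    if (!Task.class.isAssignableFrom(clz)) {
      throw new IllegalStateException(String.format("%s is not a Task.", clz));
    }
    Constructor<? extends Task> ctor = ((Class<? extends Task>) clz).getDeclaredConstructor();
    ctor.setAccessible(true);
    Task task = ctor.newInstance();
    // Optional lifecycle hook, analogous to the ProgramLifecycle check in initializeWorkflow
    if (task instanceof LifecycleAware) {
      ((LifecycleAware) task).initialize();
    }
    return task;
  }

  public static void main(String[] args) throws Exception {
    Task task = loadTask(SimpleTask.class.getName(), ReflectiveLoader.class.getClassLoader());
    task.run();
  }
}

Validating with isAssignableFrom before the unchecked cast is what keeps a ClassCastException from surfacing later at an arbitrary call site.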
Use of io.cdap.cdap.api.workflow.Workflow in project cdap by caskdata.
The class DistributedWorkflowProgramRunner, method setupLaunchConfig.
@Override
protected void setupLaunchConfig(ProgramLaunchConfig launchConfig, Program program, ProgramOptions options,
                                 CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
  WorkflowSpecification spec = program.getApplicationSpecification().getWorkflows().get(program.getName());
  List<ClassAcceptor> acceptors = new ArrayList<>();
  acceptors.add(launchConfig.getClassAcceptor());

  // Only interested in MapReduce and Spark nodes, because CUSTOM_ACTION types run inside the driver
  Set<SchedulableProgramType> runnerTypes = EnumSet.of(SchedulableProgramType.MAPREDUCE, SchedulableProgramType.SPARK);
  Iterable<ScheduleProgramInfo> programInfos = spec.getNodeIdMap().values().stream()
    .filter(WorkflowActionNode.class::isInstance)
    .map(WorkflowActionNode.class::cast)
    .map(WorkflowActionNode::getProgram)
    .filter(programInfo -> runnerTypes.contains(programInfo.getProgramType()))::iterator;

  // Can't use Stream.forEach as we want to preserve the IOException being thrown
  for (ScheduleProgramInfo programInfo : programInfos) {
    ProgramType programType = ProgramType.valueOfSchedulableType(programInfo.getProgramType());
    ProgramRunner runner = programRunnerFactory.create(programType);
    try {
      if (runner instanceof DistributedProgramRunner) {
        // Call setupLaunchConfig with the corresponding program.
        // Need to construct a new ProgramOptions with the scope extracted for the given program.
        ProgramId programId = program.getId().getParent().program(programType, programInfo.getProgramName());
        Map<String, String> programUserArgs = RuntimeArguments.extractScope(programId.getType().getScope(),
                                                                            programId.getProgram(),
                                                                            options.getUserArguments().asMap());
        ProgramOptions programOptions = new SimpleProgramOptions(programId, options.getArguments(),
                                                                 new BasicArguments(programUserArgs));
        ((DistributedProgramRunner) runner).setupLaunchConfig(launchConfig,
                                                              Programs.create(cConf, program, programId, runner),
                                                              programOptions, cConf, hConf, tempDir);
        acceptors.add(launchConfig.getClassAcceptor());
      }
    } finally {
      if (runner instanceof Closeable) {
        Closeables.closeQuietly((Closeable) runner);
      }
    }
  }

  // Combine all class acceptors collected from the child program runners
  launchConfig.setClassAcceptor(new AndClassAcceptor(acceptors));

  // Find out the default resource requirements based on the programs inside the workflow;
  // give the workflow driver at least 768 MB of container memory
  Map<String, Resources> runnablesResources = Maps.transformValues(launchConfig.getRunnables(), this::getResources);
  Resources defaultResources = maxResources(new Resources(768),
                                            findDriverResources(spec.getNodes(), runnablesResources));

  // Clear and set the runnable for the workflow driver
  launchConfig.clearRunnables();
  // Extract scoped runtime arguments that are only meant for the workflow, not for child nodes
  Map<String, String> runtimeArgs = RuntimeArguments.extractScope("task", "workflow", options.getUserArguments().asMap());
  launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), 1, runtimeArgs,
                           defaultResources, 0);
}
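Both calls to RuntimeArguments.extractScope above rely on a scoped-argument naming convention of the form <scope>.<name>.<key>. The sketch below illustrates that convention with a hypothetical helper, extractScopeDemo; it is not the CDAP implementation, and the real override semantics of RuntimeArguments.extractScope may differ in detail.

import java.util.LinkedHashMap;
import java.util.Map;

public class ScopedArgsDemo {
  // Hypothetical semantics: all arguments pass through unchanged, and keys carrying the
  // matching "<scope>.<name>." prefix additionally appear with the prefix stripped,
  // overriding any same-named unprefixed key. Not the actual CDAP implementation.
  static Map<String, String> extractScopeDemo(String scope, String name, Map<String, String> args) {
    String prefix = scope + "." + name + ".";
    Map<String, String> result = new LinkedHashMap<>(args);
    args.forEach((k, v) -> {
      if (k.startsWith(prefix)) {
        result.put(k.substring(prefix.length()), v);
      }
    });
    return result;
  }

  public static void main(String[] args) {
    Map<String, String> userArgs = new LinkedHashMap<>();
    userArgs.put("input.path", "/data/in");                    // visible to every node
    userArgs.put("task.workflow.max.retries", "3");            // targeted at the workflow driver
    userArgs.put("mapreduce.Tokenizer.mapper.memory", "1024"); // targeted at one MapReduce node
    // Mirrors RuntimeArguments.extractScope("task", "workflow", ...) in setupLaunchConfig
    System.out.println(extractScopeDemo("task", "workflow", userArgs));
    // => {input.path=/data/in, task.workflow.max.retries=3,
    //     mapreduce.Tokenizer.mapper.memory=1024, max.retries=3}
  }
}

Under this convention an operator can aim an argument at the driver alone (task.workflow.max.retries) or at a single child program, while unscoped arguments such as input.path stay visible everywhere.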