Example usage of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata, from the class ProgramClient, method getWorkflowCurrent.
/**
 * Get the current run information for the Workflow based on the runid
 *
 * @param workflowId ID of the workflow
 * @param runId ID of the run for which the details are to be returned
 * @return list of {@link WorkflowActionNode} currently running for the given runid
 * @throws IOException if a network error occurred
 * @throws NotFoundException if the application, workflow, or runid could not be found
 * @throws UnauthenticatedException if the request is not authorized successfully in the gateway server
 * @throws UnauthorizedException if the caller is not authorized to access the workflow run
 */
public List<WorkflowActionNode> getWorkflowCurrent(WorkflowId workflowId, String runId) throws IOException, NotFoundException, UnauthenticatedException, UnauthorizedException {
String path = String.format("/apps/%s/workflows/%s/runs/%s/current", workflowId.getApplication(), workflowId.getProgram(), runId);
URL url = config.resolveNamespacedURLV3(workflowId.getNamespaceId(), path);
// HTTP_NOT_FOUND is passed as an allowed response code so restClient returns it instead of throwing,
// letting us raise a typed NotFoundException for the specific workflow run below.
HttpResponse response = restClient.execute(HttpMethod.GET, url, config.getAccessToken(), HttpURLConnection.HTTP_NOT_FOUND);
if (response.getResponseCode() == HttpURLConnection.HTTP_NOT_FOUND) {
throw new NotFoundException(workflowId.run(runId));
}
// Deserialize the JSON body into a List<WorkflowActionNode>; TypeToken captures the generic type.
ObjectResponse<List<WorkflowActionNode>> objectResponse = ObjectResponse.fromJsonBody(response, new TypeToken<List<WorkflowActionNode>>() {
}.getType(), GSON);
return objectResponse.getResponseObject();
}
Example usage of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata, from the class GetWorkflowCurrentRunCommand, method perform.
/**
 * Resolves the target workflow run from the CLI arguments, fetches the nodes currently
 * executing in that run, and renders them as a table on the given output stream.
 *
 * @param arguments parsed CLI arguments; expects "<app>.<workflow>" plus a run id
 * @param output stream the resulting table is rendered to
 * @throws Exception if the argument is malformed or the client call fails
 */
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  String[] idParts = arguments.get(elementType.getArgumentName().toString()).split("\\.");
  ApplicationId appId = cliConfig.getCurrentNamespace().app(idParts[0]);
  // Guard clause: only element types backed by a program type support "current run" queries.
  if (elementType.getProgramType() == null) {
    throw new IllegalArgumentException("Unrecognized program element type for current runs: " + elementType);
  }
  // The argument must carry both the application and the workflow name.
  if (idParts.length < 2) {
    throw new CommandInputError(this);
  }
  String workflowName = idParts[1];
  String runId = arguments.get(ArgumentName.RUN_ID.toString());
  List<WorkflowActionNode> nodes = programClient.getWorkflowCurrent(appId.workflow(workflowName), runId);
  // One table row per currently-running node: id, program name, program type.
  RowMaker<WorkflowActionNode> rowMaker = new RowMaker<WorkflowActionNode>() {
    @Override
    public List<?> makeRow(WorkflowActionNode workflowNode) {
      return Lists.newArrayList(workflowNode.getNodeId(), workflowNode.getProgram().getProgramName(), workflowNode.getProgram().getProgramType());
    }
  };
  Table table = Table.builder().setHeader("node id", "program name", "program type").setRows(nodes, rowMaker).build();
  cliConfig.getTableRenderer().render(cliConfig, output, table);
}
Example usage of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata, from the class WorkflowVerificationTest, method verifyWorkflowWithLocalDatasetSpecification.
/**
 * Verifies the node structure and local dataset specifications of the
 * "WorkflowWithLocalDatasets" workflow within the given application specification.
 *
 * <p>Uses {@code assertEquals(expected, actual)} rather than {@code assertTrue(x == y)} /
 * {@code assertTrue(a.equals(b))} so that a failing assertion reports the expected and
 * actual values instead of a bare boolean.
 *
 * @param appSpec the application specification containing the workflow under test
 */
private void verifyWorkflowWithLocalDatasetSpecification(ApplicationSpecification appSpec) {
  WorkflowSpecification spec = appSpec.getWorkflows().get("WorkflowWithLocalDatasets");
  List<WorkflowNode> nodes = spec.getNodes();
  Assert.assertEquals(2, nodes.size());
  // First node: a MapReduce action named "MR1".
  WorkflowNode node = nodes.get(0);
  Assert.assertEquals(WorkflowNodeType.ACTION, node.getType());
  WorkflowActionNode actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.MAPREDUCE, "MR1"), actionNode.getProgram());
  // Second node: a Spark action named "SP1".
  node = nodes.get(1);
  Assert.assertEquals(WorkflowNodeType.ACTION, node.getType());
  actionNode = (WorkflowActionNode) node;
  Assert.assertEquals(new ScheduleProgramInfo(SchedulableProgramType.SPARK, "SP1"), actionNode.getProgram());
  // Verify each of the five declared local datasets and their properties.
  Map<String, DatasetCreationSpec> localDatasetSpecs = spec.getLocalDatasetSpecs();
  Assert.assertEquals(5, localDatasetSpecs.size());
  DatasetCreationSpec datasetCreationSpec = localDatasetSpecs.get("mytable");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());
  datasetCreationSpec = localDatasetSpecs.get("myfile");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());
  datasetCreationSpec = localDatasetSpecs.get("myfile_with_properties");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("prop_value", datasetCreationSpec.getProperties().getProperties().get("prop_key"));
  datasetCreationSpec = localDatasetSpecs.get("mytablefromtype");
  Assert.assertEquals(Table.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals(0, datasetCreationSpec.getProperties().getProperties().size());
  datasetCreationSpec = localDatasetSpecs.get("myfilefromtype");
  Assert.assertEquals(FileSet.class.getName(), datasetCreationSpec.getTypeName());
  Assert.assertEquals("another_prop_value", datasetCreationSpec.getProperties().getProperties().get("another_prop_key"));
  // Check if the application specification has correct modules
  Map<String, String> datasetModules = appSpec.getDatasetModules();
  Assert.assertEquals(2, datasetModules.size());
  Assert.assertTrue(datasetModules.containsKey(FileSet.class.getName()));
  Assert.assertTrue(datasetModules.containsKey(Table.class.getName()));
}
Example usage of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata, from the class DistributedWorkflowProgramRunner, method setupLaunchConfig.
/**
 * Sets up the launch configuration for a distributed workflow run. Delegates to the
 * MapReduce/Spark program runners (one per type present in the workflow) so their class
 * acceptors are combined, then registers the single workflow-driver runnable.
 *
 * @param launchConfig launch configuration being populated
 * @param program the workflow program being launched
 * @param options program options; runtime arguments are passed to the driver runnable
 * @param cConf CDAP configuration
 * @param hConf Hadoop configuration
 * @param tempDir temporary directory for launch artifacts
 * @throws IOException if delegate setup fails
 */
@Override
protected void setupLaunchConfig(LaunchConfig launchConfig, Program program, ProgramOptions options, CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
WorkflowSpecification spec = program.getApplicationSpecification().getWorkflows().get(program.getName());
List<ClassAcceptor> acceptors = new ArrayList<>();
// Only interested in MapReduce and Spark nodes
Set<SchedulableProgramType> runnerTypes = EnumSet.of(SchedulableProgramType.MAPREDUCE, SchedulableProgramType.SPARK);
for (WorkflowActionNode node : Iterables.filter(spec.getNodeIdMap().values(), WorkflowActionNode.class)) {
// For each type, we only need one node to setup the launch context
// remove() doubles as the "seen already?" check: it returns false once the type was consumed.
ScheduleProgramInfo programInfo = node.getProgram();
if (!runnerTypes.remove(programInfo.getProgramType())) {
continue;
}
// Find the ProgramRunner of the given type and setup the launch context
ProgramType programType = ProgramType.valueOfSchedulableType(programInfo.getProgramType());
ProgramRunner runner = programRunnerFactory.create(programType);
try {
if (runner instanceof DistributedProgramRunner) {
// Call setupLaunchConfig with the corresponding program
ProgramId programId = program.getId().getParent().program(programType, programInfo.getProgramName());
((DistributedProgramRunner) runner).setupLaunchConfig(launchConfig, Programs.create(cConf, program, programId, runner), options, cConf, hConf, tempDir);
acceptors.add(launchConfig.getClassAcceptor());
}
} finally {
// Runners created here are owned locally; close them quietly even if setup threw.
if (runner instanceof Closeable) {
Closeables.closeQuietly((Closeable) runner);
}
}
}
// Set the class acceptor
launchConfig.setClassAcceptor(new AndClassAcceptor(acceptors));
// Clear and set the runnable for the workflow driver
// The workflow itself runs as a single driver runnable; delegates above only contributed acceptors.
launchConfig.clearRunnables();
Resources defaultResources = findDriverResources(program.getApplicationSpecification().getSpark(), program.getApplicationSpecification().getMapReduce(), spec);
launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), 1, options.getArguments().asMap(), defaultResources, 0);
}
Example usage of co.cask.cdap.api.workflow.WorkflowActionNode in project cdap by caskdata, from the class WorkflowDriver, method executeNode.
/**
 * Executes a single workflow node by dispatching on its node type: action nodes run as a
 * custom action or as a scheduled program, fork and condition nodes recurse into their
 * respective execution paths.
 *
 * @param appSpec the application specification the workflow belongs to
 * @param node the workflow node to execute
 * @param instantiator factory used to instantiate custom actions
 * @param classLoader class loader for user classes
 * @param token workflow token; its current node is updated before execution
 * @throws Exception if execution of the node fails
 */
private void executeNode(ApplicationSpecification appSpec, WorkflowNode node, InstantiatorFactory instantiator, ClassLoader classLoader, WorkflowToken token) throws Exception {
  // Record which node is executing before dispatching.
  ((BasicWorkflowToken) token).setCurrentNode(node.getNodeId());
  WorkflowNodeType type = node.getType();
  if (type == WorkflowNodeType.ACTION) {
    WorkflowActionNode action = (WorkflowActionNode) node;
    if (SchedulableProgramType.CUSTOM_ACTION == action.getProgram().getProgramType()) {
      executeCustomAction(action, instantiator, classLoader, token);
    } else {
      executeAction(action, token);
    }
  } else if (type == WorkflowNodeType.FORK) {
    executeFork(appSpec, (WorkflowForkNode) node, instantiator, classLoader, token);
  } else if (type == WorkflowNodeType.CONDITION) {
    executeCondition(appSpec, (WorkflowConditionNode) node, instantiator, classLoader, token);
  }
  // Any other node type is a deliberate no-op, matching the original switch default.
}
Aggregations