Use of co.cask.cdap.proto.WorkflowNodeStateDetail in project cdap by caskdata.
In class DefaultStoreTest, method testWorkflowNodeState:
@Test
public void testWorkflowNodeState() throws Exception {
  String namespaceName = "namespace1";
  String appName = "app1";
  String workflowName = "workflow1";
  String mapReduceName = "mapReduce1";
  String sparkName = "spark1";

  ApplicationId appId = Ids.namespace(namespaceName).app(appName);
  ProgramId mapReduceProgram = appId.mr(mapReduceName);
  ProgramId sparkProgram = appId.spark(sparkName);

  long currentTime = System.currentTimeMillis();
  String workflowRunId = RunIds.generate(currentTime).getId();
  ProgramRunId workflowRun = appId.workflow(workflowName).run(workflowRunId);

  // start Workflow
  store.setStart(workflowRun.getParent(), workflowRun.getRun(), currentTime);

  // start MapReduce as a part of Workflow
  Map<String, String> systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, mapReduceName,
                                                   ProgramOptionConstants.WORKFLOW_NAME, workflowName,
                                                   ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId mapReduceRunId = RunIds.generate(currentTime + 10);
  store.setStart(mapReduceProgram, mapReduceRunId.getId(), currentTime + 10, null,
                 ImmutableMap.<String, String>of(), systemArgs);

  // stop the MapReduce program
  store.setStop(mapReduceProgram, mapReduceRunId.getId(), currentTime + 50, ProgramRunStatus.COMPLETED);

  // start Spark program as a part of Workflow
  systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, sparkName,
                               ProgramOptionConstants.WORKFLOW_NAME, workflowName,
                               ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId sparkRunId = RunIds.generate(currentTime + 60);
  store.setStart(sparkProgram, sparkRunId.getId(), currentTime + 60, null,
                 ImmutableMap.<String, String>of(), systemArgs);

  // stop the Spark program with failure
  NullPointerException npe = new NullPointerException("dataset not found");
  IllegalArgumentException iae = new IllegalArgumentException("illegal argument", npe);
  store.setStop(sparkProgram, sparkRunId.getId(), currentTime + 100, ProgramRunStatus.FAILED, new BasicThrowable(iae));

  // stop Workflow
  store.setStop(workflowRun.getParent(), workflowRun.getRun(), currentTime + 110, ProgramRunStatus.FAILED);

  List<WorkflowNodeStateDetail> nodeStateDetails = store.getWorkflowNodeStates(workflowRun);
  Map<String, WorkflowNodeStateDetail> workflowNodeStates = new HashMap<>();
  for (WorkflowNodeStateDetail nodeStateDetail : nodeStateDetails) {
    workflowNodeStates.put(nodeStateDetail.getNodeId(), nodeStateDetail);
  }
  Assert.assertEquals(2, workflowNodeStates.size());

  // the MapReduce node completed successfully, so it carries no failure cause
  WorkflowNodeStateDetail nodeStateDetail = workflowNodeStates.get(mapReduceName);
  Assert.assertEquals(mapReduceName, nodeStateDetail.getNodeId());
  Assert.assertEquals(NodeStatus.COMPLETED, nodeStateDetail.getNodeStatus());
  Assert.assertEquals(mapReduceRunId.getId(), nodeStateDetail.getRunId());
  Assert.assertNull(nodeStateDetail.getFailureCause());

  // the Spark node failed, and its failure cause preserves the full exception chain
  nodeStateDetail = workflowNodeStates.get(sparkName);
  Assert.assertEquals(sparkName, nodeStateDetail.getNodeId());
  Assert.assertEquals(NodeStatus.FAILED, nodeStateDetail.getNodeStatus());
  Assert.assertEquals(sparkRunId.getId(), nodeStateDetail.getRunId());
  BasicThrowable failureCause = nodeStateDetail.getFailureCause();
  Assert.assertNotNull(failureCause);
  Assert.assertEquals("illegal argument", failureCause.getMessage());
  Assert.assertEquals("java.lang.IllegalArgumentException", failureCause.getClassName());
  failureCause = failureCause.getCause();
  Assert.assertNotNull(failureCause);
  Assert.assertEquals("dataset not found", failureCause.getMessage());
  Assert.assertEquals("java.lang.NullPointerException", failureCause.getClassName());
  Assert.assertNull(failureCause.getCause());
}
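For quick reference, here is a minimal, store-independent sketch of the same cause-chain inspection. It assumes only the WorkflowNodeStateDetail(nodeId, NodeStatus, runId, failureCause) constructor visible in the AppMetadataStore snippet further down, plus the getters exercised by this test; "some-run-id" is a placeholder, and this is not code taken from the project.

// Minimal sketch: build a node state for a failed node and walk its cause chain.
BasicThrowable cause = new BasicThrowable(
  new IllegalArgumentException("illegal argument", new NullPointerException("dataset not found")));
WorkflowNodeStateDetail detail =
  new WorkflowNodeStateDetail("spark1", NodeStatus.FAILED, "some-run-id", cause);
for (BasicThrowable t = detail.getFailureCause(); t != null; t = t.getCause()) {
  // prints "java.lang.IllegalArgumentException: illegal argument", then
  // "java.lang.NullPointerException: dataset not found"
  System.out.println(t.getClassName() + ": " + t.getMessage());
}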
Use of co.cask.cdap.proto.WorkflowNodeStateDetail in project cdap by caskdata.
In class RemoteRuntimeStoreTest, method testWorkflowMethods:
@Test
public void testWorkflowMethods() {
  ProgramId workflowId = new ProgramId(Id.Namespace.DEFAULT.getId(), "test_app", ProgramType.WORKFLOW, "test_workflow");
  long stopTime = System.currentTimeMillis() / 1000;
  long startTime = stopTime - 20;
  String pid = RunIds.generate(startTime * 1000).getId();
  String twillRunId = "twill_run_id";

  Map<String, String> runtimeArgs = ImmutableMap.of();
  Map<String, String> properties = ImmutableMap.of("runtimeArgs", GSON.toJson(runtimeArgs));
  Map<String, String> systemArgs = ImmutableMap.of();

  RunRecordMeta initialRunRecord = new RunRecordMeta(pid, startTime, null, ProgramRunStatus.RUNNING, properties,
                                                     systemArgs, twillRunId);
  runtimeStore.setStart(workflowId, pid, startTime, twillRunId, runtimeArgs, systemArgs);
  Assert.assertEquals(initialRunRecord, store.getRun(workflowId, pid));

  ProgramId mapreduceId = new ProgramId(workflowId.getNamespace(), workflowId.getApplication(),
                                        ProgramType.MAPREDUCE, "test_mr");
  String mapreducePid = RunIds.generate(startTime * 1000).getId();
  // these system properties just have to be set on the system arguments of the program
  // in order for it to be recognized as a program running inside a workflow node
  Map<String, String> mrSystemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, "test_node_id",
                                                     ProgramOptionConstants.WORKFLOW_NAME, workflowId.getProgram(),
                                                     ProgramOptionConstants.WORKFLOW_RUN_ID, pid);
  runtimeStore.setStart(mapreduceId, mapreducePid, startTime, twillRunId, runtimeArgs, mrSystemArgs);

  BasicThrowable failureCause = new BasicThrowable(new IllegalArgumentException("failure", new RuntimeException("oops")));
  runtimeStore.setStop(mapreduceId, mapreducePid, stopTime, ProgramRunStatus.FAILED, failureCause);
  runtimeStore.setStop(workflowId, pid, stopTime, ProgramRunStatus.FAILED);

  RunRecordMeta completedWorkflowRecord = store.getRun(workflowId, pid);
  // we're not comparing properties, since the runtime (such as starting/stopping inner programs) modifies them
  Assert.assertEquals(pid, completedWorkflowRecord.getPid());
  Assert.assertEquals(initialRunRecord.getStartTs(), completedWorkflowRecord.getStartTs());
  Assert.assertEquals((Long) stopTime, completedWorkflowRecord.getStopTs());
  Assert.assertEquals(ProgramRunStatus.FAILED, completedWorkflowRecord.getStatus());
  Assert.assertEquals(twillRunId, completedWorkflowRecord.getTwillRunId());
  Assert.assertEquals(systemArgs, completedWorkflowRecord.getSystemArgs());

  // test that the BasicThrowable was serialized properly by RemoteRuntimeStore
  ProgramRunId workflowRunId = workflowId.run(pid);
  List<WorkflowNodeStateDetail> workflowNodeStates = store.getWorkflowNodeStates(workflowRunId);
  Assert.assertEquals(1, workflowNodeStates.size());
  WorkflowNodeStateDetail workflowNodeStateDetail = workflowNodeStates.get(0);
  Assert.assertEquals("test_node_id", workflowNodeStateDetail.getNodeId());
  Assert.assertEquals(mapreducePid, workflowNodeStateDetail.getRunId());
  Assert.assertEquals(NodeStatus.FAILED, workflowNodeStateDetail.getNodeStatus());
  Assert.assertEquals(failureCause, workflowNodeStateDetail.getFailureCause());
}
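The final assertion relies on BasicThrowable having value equality, which is what lets it survive the trip through the remote store. A minimal sketch of that round trip, assuming a plain Gson instance suffices for this POJO (the actual RemoteRuntimeStore wiring may register additional type adapters):

// Sketch only: round-trip a BasicThrowable through JSON and check value equality.
Gson gson = new Gson();
BasicThrowable original = new BasicThrowable(new IllegalArgumentException("failure", new RuntimeException("oops")));
String json = gson.toJson(original);
BasicThrowable roundTripped = gson.fromJson(json, BasicThrowable.class);
// value equality is what the test's final assertEquals depends on
Assert.assertEquals(original, roundTripped);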
Use of co.cask.cdap.proto.WorkflowNodeStateDetail in project cdap by caskdata.
In class WorkflowHttpHandlerTest, method testWorkflowScopedArguments:
@Category(XSlowTests.class)
@Test
public void testWorkflowScopedArguments() throws Exception {
  String workflowRunIdProperty = "workflowrunid";
  HttpResponse response = deploy(WorkflowAppWithScopedParameters.class, Constants.Gateway.API_VERSION_3_TOKEN,
                                 TEST_NAMESPACE2);
  Assert.assertEquals(200, response.getStatusLine().getStatusCode());

  ProgramId programId = Ids.namespace(TEST_NAMESPACE2).app(WorkflowAppWithScopedParameters.APP_NAME)
    .workflow(WorkflowAppWithScopedParameters.ONE_WORKFLOW);

  Map<String, String> runtimeArguments = Maps.newHashMap();
  runtimeArguments.put("debug", "true");
  runtimeArguments.put("mapreduce.*.debug", "false");
  runtimeArguments.put("mapreduce.OneMR.debug", "true");
  runtimeArguments.put("input.path", createInput("ProgramInput"));
  runtimeArguments.put("mapreduce.OneMR.input.path", createInput("OneMRInput"));
  runtimeArguments.put("mapreduce.OneMR.logical.start.time", "1234567890000");
  runtimeArguments.put("mapreduce.AnotherMR.input.path", createInput("AnotherMRInput"));
  runtimeArguments.put("spark.*.input.path", createInput("SparkInput"));
  runtimeArguments.put("output.path", new File(tmpFolder.newFolder(), "ProgramOutput").getAbsolutePath());
  runtimeArguments.put("mapreduce.OneMR.output.path", new File(tmpFolder.newFolder(), "OneMROutput").getAbsolutePath());
  runtimeArguments.put("spark.AnotherSpark.output.path",
                       new File(tmpFolder.newFolder(), "AnotherSparkOutput").getAbsolutePath());
  runtimeArguments.put("mapreduce.*.processing.time", "1HR");
  runtimeArguments.put("dataset.Purchase.cache.seconds", "30");
  runtimeArguments.put("dataset.UserProfile.schema.property", "constant");
  runtimeArguments.put("dataset.unknown.dataset", "false");
  runtimeArguments.put("dataset.*.read.timeout", "60");
  setAndTestRuntimeArgs(programId.toId(), runtimeArguments);

  // Start the workflow
  startProgram(programId.toId());
  waitState(programId.toId(), ProgramStatus.RUNNING.name());

  // Wait until we have a run record
  verifyProgramRuns(programId.toId(), "running");
  List<RunRecord> workflowHistoryRuns = getProgramRuns(programId.toId(), "running");
  String workflowRunId = workflowHistoryRuns.get(0).getPid();

  Id.Program mr1ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME,
                                            ProgramType.MAPREDUCE, WorkflowAppWithScopedParameters.ONE_MR);
  waitState(mr1ProgramId, ProgramStatus.RUNNING.name());
  List<RunRecord> oneMRHistoryRuns = getProgramRuns(mr1ProgramId, "running");

  // an inner program cannot be stopped directly; the attempt must fail with a 400
  String expectedMessage = String.format("Cannot stop the program '%s' started by the Workflow run '%s'. "
                                           + "Please stop the Workflow.",
                                         new Id.Run(mr1ProgramId, oneMRHistoryRuns.get(0).getPid()), workflowRunId);
  stopProgram(mr1ProgramId, oneMRHistoryRuns.get(0).getPid(), 400, expectedMessage);

  verifyProgramRuns(programId.toId(), "completed");
  workflowHistoryRuns = getProgramRuns(programId.toId(), "completed");
  oneMRHistoryRuns = getProgramRuns(mr1ProgramId, "completed");

  Id.Program mr2ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME,
                                            ProgramType.MAPREDUCE, WorkflowAppWithScopedParameters.ANOTHER_MR);
  List<RunRecord> anotherMRHistoryRuns = getProgramRuns(mr2ProgramId, "completed");

  Id.Program spark1ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME,
                                               ProgramType.SPARK, WorkflowAppWithScopedParameters.ONE_SPARK);
  List<RunRecord> oneSparkHistoryRuns = getProgramRuns(spark1ProgramId, "completed");

  Id.Program spark2ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME,
                                               ProgramType.SPARK, WorkflowAppWithScopedParameters.ANOTHER_SPARK);
  List<RunRecord> anotherSparkHistoryRuns = getProgramRuns(spark2ProgramId, "completed");

  Assert.assertEquals(1, workflowHistoryRuns.size());
  Assert.assertEquals(1, oneMRHistoryRuns.size());
  Assert.assertEquals(1, anotherMRHistoryRuns.size());
  Assert.assertEquals(1, oneSparkHistoryRuns.size());
  Assert.assertEquals(1, anotherSparkHistoryRuns.size());

  Map<String, String> workflowRunRecordProperties = workflowHistoryRuns.get(0).getProperties();
  Map<String, String> oneMRRunRecordProperties = oneMRHistoryRuns.get(0).getProperties();
  Map<String, String> anotherMRRunRecordProperties = anotherMRHistoryRuns.get(0).getProperties();
  Map<String, String> oneSparkRunRecordProperties = oneSparkHistoryRuns.get(0).getProperties();
  Map<String, String> anotherSparkRunRecordProperties = anotherSparkHistoryRuns.get(0).getProperties();

  // each inner run record points back to the workflow run that started it
  Assert.assertNotNull(oneMRRunRecordProperties.get(workflowRunIdProperty));
  Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), oneMRRunRecordProperties.get(workflowRunIdProperty));
  Assert.assertNotNull(anotherMRRunRecordProperties.get(workflowRunIdProperty));
  Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), anotherMRRunRecordProperties.get(workflowRunIdProperty));
  Assert.assertNotNull(oneSparkRunRecordProperties.get(workflowRunIdProperty));
  Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), oneSparkRunRecordProperties.get(workflowRunIdProperty));
  Assert.assertNotNull(anotherSparkRunRecordProperties.get(workflowRunIdProperty));
  Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), anotherSparkRunRecordProperties.get(workflowRunIdProperty));

  // and the workflow run record maps each node to the run id of its inner program
  Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ONE_MR),
                      oneMRHistoryRuns.get(0).getPid());
  Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ONE_SPARK),
                      oneSparkHistoryRuns.get(0).getPid());
  Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ANOTHER_MR),
                      anotherMRHistoryRuns.get(0).getPid());
  Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ANOTHER_SPARK),
                      anotherSparkHistoryRuns.get(0).getPid());

  // Get Workflow node states
  Map<String, WorkflowNodeStateDetail> nodeStates = getWorkflowNodeStates(programId, workflowHistoryRuns.get(0).getPid());
  Assert.assertNotNull(nodeStates);
  Assert.assertEquals(5, nodeStates.size());

  WorkflowNodeStateDetail mrNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ONE_MR);
  Assert.assertNotNull(mrNodeState);
  Assert.assertEquals(WorkflowAppWithScopedParameters.ONE_MR, mrNodeState.getNodeId());
  Assert.assertEquals(oneMRHistoryRuns.get(0).getPid(), mrNodeState.getRunId());

  mrNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ANOTHER_MR);
  Assert.assertNotNull(mrNodeState);
  Assert.assertEquals(WorkflowAppWithScopedParameters.ANOTHER_MR, mrNodeState.getNodeId());
  Assert.assertEquals(anotherMRHistoryRuns.get(0).getPid(), mrNodeState.getRunId());

  WorkflowNodeStateDetail sparkNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ONE_SPARK);
  Assert.assertNotNull(sparkNodeState);
  Assert.assertEquals(WorkflowAppWithScopedParameters.ONE_SPARK, sparkNodeState.getNodeId());
  Assert.assertEquals(oneSparkHistoryRuns.get(0).getPid(), sparkNodeState.getRunId());

  sparkNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ANOTHER_SPARK);
  Assert.assertNotNull(sparkNodeState);
  Assert.assertEquals(WorkflowAppWithScopedParameters.ANOTHER_SPARK, sparkNodeState.getNodeId());
  Assert.assertEquals(anotherSparkHistoryRuns.get(0).getPid(), sparkNodeState.getRunId());

  WorkflowNodeStateDetail oneActionNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ONE_ACTION);
  Assert.assertNotNull(oneActionNodeState);
  Assert.assertEquals(WorkflowAppWithScopedParameters.ONE_ACTION, oneActionNodeState.getNodeId());
}
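The runtime arguments above follow CDAP's scoped-argument convention: a program-specific prefix such as mapreduce.OneMR. overrides a wildcard prefix such as mapreduce.*., which in turn overrides the unprefixed global value. A hypothetical resolver illustrating that precedence; resolveForProgram is not a CDAP API, just a sketch of the convention this test exercises:

// Hypothetical helper, not a CDAP API: resolves one argument for a given
// program type/name using the specific > wildcard > global precedence.
static String resolveForProgram(Map<String, String> args, String programType, String programName, String key) {
  String specific = args.get(programType + "." + programName + "." + key);
  if (specific != null) {
    return specific;
  }
  String wildcard = args.get(programType + ".*." + key);
  if (wildcard != null) {
    return wildcard;
  }
  return args.get(key);
}

With the arguments above, resolveForProgram(args, "mapreduce", "OneMR", "debug") yields "true" (specific), resolveForProgram(args, "mapreduce", "AnotherMR", "debug") yields "false" (wildcard), and resolveForProgram(args, "spark", "OneSpark", "debug") yields "true" (global fallback).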
Use of co.cask.cdap.proto.WorkflowNodeStateDetail in project cdap by caskdata.
In class AppMetadataStore, method addWorkflowNodeState:
private void addWorkflowNodeState(ProgramId programId, String pid, Map<String, String> systemArgs,
                                  ProgramRunStatus status, @Nullable BasicThrowable failureCause) {
  String workflowNodeId = systemArgs.get(ProgramOptionConstants.WORKFLOW_NODE_ID);
  String workflowName = systemArgs.get(ProgramOptionConstants.WORKFLOW_NAME);
  String workflowRun = systemArgs.get(ProgramOptionConstants.WORKFLOW_RUN_ID);

  ApplicationId appId = Ids.namespace(programId.getNamespace()).app(programId.getApplication());
  ProgramRunId workflowRunId = appId.workflow(workflowName).run(workflowRun);

  // Node states will be stored with the following key:
  // workflowNodeState.namespace.app.WORKFLOW.workflowName.workflowRun.workflowNodeId
  MDSKey key = getProgramKeyBuilder(TYPE_WORKFLOW_NODE_STATE, workflowRunId.getParent())
    .add(workflowRun)
    .add(workflowNodeId)
    .build();
  WorkflowNodeStateDetail nodeStateDetail = new WorkflowNodeStateDetail(workflowNodeId,
                                                                        ProgramRunStatus.toNodeStatus(status),
                                                                        pid, failureCause);
  write(key, nodeStateDetail);

  // Get the run record of the Workflow which started this program
  key = getProgramKeyBuilder(TYPE_RUN_RECORD_STARTED, workflowRunId.getParent()).add(workflowRunId.getRun()).build();
  RunRecordMeta record = get(key, RunRecordMeta.class);
  if (record != null) {
    // Update the parent Workflow run record by adding the node id and program run id to its properties
    Map<String, String> properties = record.getProperties();
    properties.put(workflowNodeId, pid);
    write(key, new RunRecordMeta(record, properties));
  }
}
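A small illustration of the row-key layout described in the comment above, using a plain list of parts rather than MDSKey (which encodes them in its own binary format); the bracketed values are placeholders:

// Illustration only: the real code builds a binary MDSKey, not a dotted string.
List<String> keyParts = Arrays.asList(
  "workflowNodeState",           // TYPE_WORKFLOW_NODE_STATE
  "<namespace>", "<app>",        // namespace and application
  "WORKFLOW", "<workflowName>",  // program type and workflow name
  "<workflowRunId>",             // run of the parent workflow
  "<workflowNodeId>");           // node within the workflow
// One row per (workflow run, node), so a scan of the prefix up to the run id
// can return the latest state of every node in that run.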
Use of co.cask.cdap.proto.WorkflowNodeStateDetail in project cdap by caskdata.
In class WorkflowClientTestRun, method testWorkflowClient:
@Test
public void testWorkflowClient() throws Exception {
  String keyValueTableType = "co.cask.cdap.api.dataset.lib.KeyValueTable";
  String filesetType = "co.cask.cdap.api.dataset.lib.FileSet";
  String outputPath = new File(TMP_FOLDER.newFolder(), "output").getAbsolutePath();
  Map<String, String> runtimeArgs = ImmutableMap.of("inputPath", createInput("input"),
                                                    "outputPath", outputPath,
                                                    "dataset.*.keep.local", "true");
  final WorkflowId workflowId = NamespaceId.DEFAULT.app(AppWithWorkflow.NAME).workflow(AppWithWorkflow.SampleWorkflow.NAME);

  programClient.start(workflowId, false, runtimeArgs);
  programClient.waitForStatus(workflowId, ProgramStatus.STOPPED, 60, TimeUnit.SECONDS);
  Tasks.waitFor(1, new Callable<Integer>() {
    @Override
    public Integer call() throws Exception {
      return programClient.getProgramRuns(workflowId, ProgramRunStatus.COMPLETED.name(), 0, Long.MAX_VALUE, 10).size();
    }
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  List<RunRecord> workflowRuns = programClient.getProgramRuns(workflowId, ProgramRunStatus.COMPLETED.name(), 0,
                                                              Long.MAX_VALUE, 10);
  Assert.assertEquals(1, workflowRuns.size());
  String runId = workflowRuns.get(0).getPid();
  ProgramRunId workflowRunId = workflowId.run(runId);

  // Invalid test scenarios
  try {
    ProgramId nonExistentWorkflowId = new ProgramId(NamespaceId.DEFAULT.getNamespace(), AppWithWorkflow.NAME,
                                                    ProgramType.WORKFLOW, "NonExistentWorkflow");
    ProgramRunId nonExistentWorkflowRun = nonExistentWorkflowId.run(runId);
    workflowClient.getWorkflowToken(nonExistentWorkflowRun);
    Assert.fail("Should not find a workflow token for a non-existing workflow");
  } catch (NotFoundException expected) {
    // expected
  }
  try {
    ProgramRunId invalidRunId = workflowId.run(RunIds.generate().getId());
    workflowClient.getWorkflowToken(invalidRunId);
    Assert.fail("Should not find a workflow token for a random run id");
  } catch (NotFoundException expected) {
    // expected
  }

  // Valid test scenarios
  WorkflowTokenDetail workflowToken = workflowClient.getWorkflowToken(workflowRunId);
  Assert.assertEquals(5, workflowToken.getTokenData().size());
  workflowToken = workflowClient.getWorkflowToken(workflowRunId, WorkflowToken.Scope.SYSTEM);
  Assert.assertTrue(workflowToken.getTokenData().size() > 0);

  workflowToken = workflowClient.getWorkflowToken(workflowRunId, "start_time");
  Map<String, List<WorkflowTokenDetail.NodeValueDetail>> tokenData = workflowToken.getTokenData();
  Assert.assertEquals(AppWithWorkflow.WordCountMapReduce.NAME, tokenData.get("start_time").get(0).getNode());
  Assert.assertTrue(Long.parseLong(tokenData.get("start_time").get(0).getValue()) < System.currentTimeMillis());

  workflowToken = workflowClient.getWorkflowToken(workflowRunId, WorkflowToken.Scope.USER, "action_type");
  tokenData = workflowToken.getTokenData();
  Assert.assertEquals(AppWithWorkflow.WordCountMapReduce.NAME, tokenData.get("action_type").get(0).getNode());
  Assert.assertEquals("MapReduce", tokenData.get("action_type").get(0).getValue());

  String nodeName = AppWithWorkflow.SampleWorkflow.FIRST_ACTION;
  WorkflowTokenNodeDetail workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, nodeName);
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE,
                      workflowTokenAtNode.getTokenDataAtNode().get(AppWithWorkflow.DummyAction.TOKEN_KEY));
  workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, nodeName, WorkflowToken.Scope.SYSTEM);
  Assert.assertEquals(0, workflowTokenAtNode.getTokenDataAtNode().size());
  workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, nodeName,
                                                              AppWithWorkflow.DummyAction.TOKEN_KEY);
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE,
                      workflowTokenAtNode.getTokenDataAtNode().get(AppWithWorkflow.DummyAction.TOKEN_KEY));

  String reduceOutputRecordsCounter = "org.apache.hadoop.mapreduce.TaskCounter.REDUCE_OUTPUT_RECORDS";
  workflowTokenAtNode = workflowClient.getWorkflowTokenAtNode(workflowRunId, AppWithWorkflow.WordCountMapReduce.NAME,
                                                              WorkflowToken.Scope.SYSTEM, reduceOutputRecordsCounter);
  Assert.assertEquals(6, Integer.parseInt(workflowTokenAtNode.getTokenDataAtNode().get(reduceOutputRecordsCounter)));

  Map<String, DatasetSpecificationSummary> localDatasetSummaries = workflowClient.getWorkflowLocalDatasets(workflowRunId);
  Assert.assertEquals(2, localDatasetSummaries.size());
  DatasetSpecificationSummary keyValueTableSummary = new DatasetSpecificationSummary("MyTable." + runId,
                                                                                     keyValueTableType,
                                                                                     ImmutableMap.of("foo", "bar"));
  Assert.assertEquals(keyValueTableSummary, localDatasetSummaries.get("MyTable"));
  DatasetSpecificationSummary filesetSummary = new DatasetSpecificationSummary("MyFile." + runId, filesetType,
                                                                               ImmutableMap.of("anotherFoo", "anotherBar"));
  Assert.assertEquals(filesetSummary, localDatasetSummaries.get("MyFile"));

  workflowClient.deleteWorkflowLocalDatasets(workflowRunId);
  localDatasetSummaries = workflowClient.getWorkflowLocalDatasets(workflowRunId);
  Assert.assertEquals(0, localDatasetSummaries.size());

  Map<String, WorkflowNodeStateDetail> nodeStates = workflowClient.getWorkflowNodeStates(workflowRunId);
  Assert.assertEquals(3, nodeStates.size());
  WorkflowNodeStateDetail nodeState = nodeStates.get(AppWithWorkflow.SampleWorkflow.FIRST_ACTION);
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.FIRST_ACTION, nodeState.getNodeId());
  Assert.assertEquals(NodeStatus.COMPLETED, nodeState.getNodeStatus());
  nodeState = nodeStates.get(AppWithWorkflow.SampleWorkflow.SECOND_ACTION);
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.SECOND_ACTION, nodeState.getNodeId());
  Assert.assertEquals(NodeStatus.COMPLETED, nodeState.getNodeStatus());
  nodeState = nodeStates.get(AppWithWorkflow.SampleWorkflow.WORD_COUNT_MR);
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.WORD_COUNT_MR, nodeState.getNodeId());
  Assert.assertEquals(NodeStatus.COMPLETED, nodeState.getNodeStatus());
}
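Since getWorkflowNodeStates returns a map keyed by node id, the trailing assertions can be expressed once over all nodes. A sketch (not project code) using only the client call and getters exercised above:

// Sketch: verify every node in a finished workflow run reached COMPLETED,
// surfacing any recorded failure cause in the assertion message.
Map<String, WorkflowNodeStateDetail> states = workflowClient.getWorkflowNodeStates(workflowRunId);
for (Map.Entry<String, WorkflowNodeStateDetail> entry : states.entrySet()) {
  WorkflowNodeStateDetail state = entry.getValue();
  String failure = state.getFailureCause() == null ? "none" : state.getFailureCause().getMessage();
  Assert.assertEquals("node " + entry.getKey() + " (failure cause: " + failure + ")",
                      NodeStatus.COMPLETED, state.getNodeStatus());
}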