Example usage of co.cask.cdap.proto.RunRecord in the cdap project (by caskdata):
class WorkflowHttpHandlerTest, method testWorkflowRuns.
/**
 * Exercises the run-state transitions of a Workflow through the HTTP handler:
 * two concurrent runs stopped -> "KILLED", a run unblocked via a done-file -> "COMPLETED",
 * and a run started with "ThrowError" -> "FAILED".
 *
 * Fix: the original busy-wait loops had no timeout and could hang the build forever if a
 * run never started its action; they are now bounded by a deadline and fail loudly instead.
 */
@Test
public void testWorkflowRuns() throws Exception {
  String appName = "WorkflowAppWithErrorRuns";
  String workflowName = "WorkflowWithErrorRuns";

  HttpResponse response =
    deploy(WorkflowAppWithErrorRuns.class, Constants.Gateway.API_VERSION_3_TOKEN, TEST_NAMESPACE2);
  Assert.assertEquals(200, response.getStatusLine().getStatusCode());

  Id.Program programId = Id.Program.from(TEST_NAMESPACE2, appName, ProgramType.WORKFLOW, workflowName);

  // Test the "KILLED" state of the Workflow.
  File instance1File = new File(tmpFolder.newFolder() + "/instance1.file");
  File instance2File = new File(tmpFolder.newFolder() + "/instance2.file");
  File doneFile = new File(tmpFolder.newFolder() + "/done.file");

  // Start the first Workflow run.
  Map<String, String> propertyMap = ImmutableMap.of("simple.action.file", instance1File.getAbsolutePath(),
                                                    "simple.action.donefile", doneFile.getAbsolutePath());
  startProgram(programId, propertyMap);

  // Start another Workflow run.
  propertyMap = ImmutableMap.of("simple.action.file", instance2File.getAbsolutePath(),
                                "simple.action.donefile", doneFile.getAbsolutePath());
  startProgram(programId, propertyMap);

  // Wait (bounded) until the actions in both Workflow runs have started, which each run
  // signals by creating its instance file.
  long deadline = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(2);
  while (!(instance1File.exists() && instance2File.exists())) {
    Assert.assertTrue("Timed out waiting for both Workflow runs to start their actions",
                      System.currentTimeMillis() < deadline);
    TimeUnit.MILLISECONDS.sleep(50);
  }

  // Verify that there are two runs of the Workflow currently running.
  List<RunRecord> historyRuns = getProgramRuns(programId, "running");
  Assert.assertEquals(2, historyRuns.size());

  // Stop both Workflow runs.
  String runId = historyRuns.get(0).getPid();
  stopProgram(programId, runId, 200);
  runId = historyRuns.get(1).getPid();
  stopProgram(programId, runId, 200);

  // Verify both runs should be marked "KILLED".
  // NOTE(review): the count argument appears to be a 0-based index (1 => two runs) — confirm
  // against verifyProgramRuns.
  verifyProgramRuns(programId, "killed", 1);

  // Test the "COMPLETE" state of the Workflow.
  File instanceFile = new File(tmpFolder.newFolder() + "/instance.file");
  propertyMap = ImmutableMap.of("simple.action.file", instanceFile.getAbsolutePath(),
                                "simple.action.donefile", doneFile.getAbsolutePath());
  startProgram(programId, propertyMap);

  // Wait (bounded) for this run's action to start.
  deadline = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(2);
  while (!instanceFile.exists()) {
    Assert.assertTrue("Timed out waiting for the Workflow run to start its action",
                      System.currentTimeMillis() < deadline);
    TimeUnit.MILLISECONDS.sleep(50);
  }

  // Verify that currently only one run of the Workflow should be running.
  historyRuns = getProgramRuns(programId, "running");
  Assert.assertEquals(1, historyRuns.size());

  // Creating the done file unblocks the action so the run can finish.
  Assert.assertTrue(doneFile.createNewFile());

  // Verify that Workflow should move to "COMPLETED" state.
  verifyProgramRuns(programId, "completed");

  // Test the "FAILED" state of the program.
  propertyMap = ImmutableMap.of("ThrowError", "true");
  startProgram(programId, propertyMap);

  // Verify that the Workflow should be marked as "FAILED".
  verifyProgramRuns(programId, "failed");
}
Example usage of co.cask.cdap.proto.RunRecord in the cdap project (by caskdata):
class WorkflowHttpHandlerTest, method testWorkflowForkFailure.
/**
 * Verifies failure propagation inside a Workflow fork: the branch told (via runtime
 * argument) to throw fails, the parallel branch is killed, and the Workflow run fails.
 */
@Ignore
@Test
public void testWorkflowForkFailure() throws Exception {
  // Deploy an application containing workflow with fork. Fork executes MapReduce programs
  // 'FirstMapReduce' and 'SecondMapReduce' in parallel. Workflow is started with runtime argument
  // "mapreduce.SecondMapReduce.throw.exception", so that the MapReduce program 'SecondMapReduce'
  // fails. This causes the 'FirstMapReduce' program to get killed and Workflow is marked as failed.
  Assert.assertEquals(200, deploy(WorkflowFailureInForkApp.class).getStatusLine().getStatusCode());

  Id.Application application = Id.Application.from(Id.Namespace.DEFAULT, WorkflowFailureInForkApp.NAME);
  Id.Workflow workflow = Id.Workflow.from(application, WorkflowFailureInForkApp.WorkflowWithFailureInFork.NAME);
  Id.Program firstMapReduce =
    Id.Program.from(application, ProgramType.MAPREDUCE, WorkflowFailureInForkApp.FIRST_MAPREDUCE_NAME);
  Id.Program secondMapReduce =
    Id.Program.from(application, ProgramType.MAPREDUCE, WorkflowFailureInForkApp.SECOND_MAPREDUCE_NAME);

  String outputDir = new File(tmpFolder.newFolder(), "output").getAbsolutePath();
  File syncFile = new File(tmpFolder.newFolder() + "/sync.file");
  File waitFile = new File(tmpFolder.newFolder() + "/wait.file");

  Map<String, String> runtimeArgs = ImmutableMap.<String, String>builder()
    .put("inputPath", createInput("testWorkflowForkFailureInput"))
    .put("outputPath", outputDir)
    .put("sync.file", syncFile.getAbsolutePath())
    .put("wait.file", waitFile.getAbsolutePath())
    .put("mapreduce." + WorkflowFailureInForkApp.SECOND_MAPREDUCE_NAME + ".throw.exception", "true")
    .build();
  startProgram(workflow, runtimeArgs);

  waitState(workflow, ProgramStatus.RUNNING.name());
  waitState(workflow, ProgramStatus.STOPPED.name());
  verifyProgramRuns(workflow, "failed");

  // The non-throwing branch was killed; the throwing branch failed.
  List<RunRecord> killedRuns = getProgramRuns(firstMapReduce, ProgramRunStatus.KILLED.name());
  Assert.assertEquals(1, killedRuns.size());
  List<RunRecord> failedRuns = getProgramRuns(secondMapReduce, ProgramRunStatus.FAILED.name());
  Assert.assertEquals(1, failedRuns.size());
}
Example usage of co.cask.cdap.proto.RunRecord in the cdap project (by caskdata):
class WorkflowHttpHandlerTest, method testWorkflowScopedArguments.
// Verifies scoped runtime arguments ("mapreduce.<name>.*", "spark.*.*", "dataset.*.*") on a
// Workflow, and that run records of child MapReduce/Spark programs cross-link with the parent
// Workflow run: each child carries a "workflowrunid" property pointing at the Workflow run, and
// the Workflow run's properties map each node name to the child's run id. Also checks that a
// child program started by a Workflow cannot be stopped directly (400), and inspects the
// Workflow's per-node state after completion.
@Category(XSlowTests.class)
@Test
public void testWorkflowScopedArguments() throws Exception {
String workflowRunIdProperty = "workflowrunid";
HttpResponse response = deploy(WorkflowAppWithScopedParameters.class, Constants.Gateway.API_VERSION_3_TOKEN, TEST_NAMESPACE2);
Assert.assertEquals(200, response.getStatusLine().getStatusCode());
ProgramId programId = Ids.namespace(TEST_NAMESPACE2).app(WorkflowAppWithScopedParameters.APP_NAME).workflow(WorkflowAppWithScopedParameters.ONE_WORKFLOW);
// Mix of global, wildcard-scoped and program-scoped arguments; more specific scopes
// (e.g. "mapreduce.OneMR.debug") are expected to win over wildcards ("mapreduce.*.debug").
Map<String, String> runtimeArguments = Maps.newHashMap();
runtimeArguments.put("debug", "true");
runtimeArguments.put("mapreduce.*.debug", "false");
runtimeArguments.put("mapreduce.OneMR.debug", "true");
runtimeArguments.put("input.path", createInput("ProgramInput"));
runtimeArguments.put("mapreduce.OneMR.input.path", createInput("OneMRInput"));
runtimeArguments.put("mapreduce.OneMR.logical.start.time", "1234567890000");
runtimeArguments.put("mapreduce.AnotherMR.input.path", createInput("AnotherMRInput"));
runtimeArguments.put("spark.*.input.path", createInput("SparkInput"));
runtimeArguments.put("output.path", new File(tmpFolder.newFolder(), "ProgramOutput").getAbsolutePath());
runtimeArguments.put("mapreduce.OneMR.output.path", new File(tmpFolder.newFolder(), "OneMROutput").getAbsolutePath());
runtimeArguments.put("spark.AnotherSpark.output.path", new File(tmpFolder.newFolder(), "AnotherSparkOutput").getAbsolutePath());
runtimeArguments.put("mapreduce.*.processing.time", "1HR");
runtimeArguments.put("dataset.Purchase.cache.seconds", "30");
runtimeArguments.put("dataset.UserProfile.schema.property", "constant");
runtimeArguments.put("dataset.unknown.dataset", "false");
runtimeArguments.put("dataset.*.read.timeout", "60");
setAndTestRuntimeArgs(programId.toId(), runtimeArguments);
// Start the workflow
startProgram(programId.toId());
waitState(programId.toId(), ProgramStatus.RUNNING.name());
// Wait until we have a run record
verifyProgramRuns(programId.toId(), "running");
List<RunRecord> workflowHistoryRuns = getProgramRuns(programId.toId(), "running");
String workflowRunId = workflowHistoryRuns.get(0).getPid();
Id.Program mr1ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME, ProgramType.MAPREDUCE, WorkflowAppWithScopedParameters.ONE_MR);
waitState(mr1ProgramId, ProgramStatus.RUNNING.name());
List<RunRecord> oneMRHistoryRuns = getProgramRuns(mr1ProgramId, "running");
// A child program started by a Workflow must not be stoppable on its own: expect HTTP 400
// with this exact error message.
String expectedMessage = String.format("Cannot stop the program '%s' started by the Workflow run '%s'. " + "Please stop the Workflow.", new Id.Run(mr1ProgramId, oneMRHistoryRuns.get(0).getPid()), workflowRunId);
stopProgram(mr1ProgramId, oneMRHistoryRuns.get(0).getPid(), 400, expectedMessage);
// Let the Workflow and all of its child programs run to completion, then collect the
// completed run records for the Workflow and each child.
verifyProgramRuns(programId.toId(), "completed");
workflowHistoryRuns = getProgramRuns(programId.toId(), "completed");
oneMRHistoryRuns = getProgramRuns(mr1ProgramId, "completed");
Id.Program mr2ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME, ProgramType.MAPREDUCE, WorkflowAppWithScopedParameters.ANOTHER_MR);
List<RunRecord> anotherMRHistoryRuns = getProgramRuns(mr2ProgramId, "completed");
Id.Program spark1ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME, ProgramType.SPARK, WorkflowAppWithScopedParameters.ONE_SPARK);
List<RunRecord> oneSparkHistoryRuns = getProgramRuns(spark1ProgramId, "completed");
Id.Program spark2ProgramId = Id.Program.from(TEST_NAMESPACE2, WorkflowAppWithScopedParameters.APP_NAME, ProgramType.SPARK, WorkflowAppWithScopedParameters.ANOTHER_SPARK);
List<RunRecord> anotherSparkHistoryRuns = getProgramRuns(spark2ProgramId, "completed");
// Exactly one completed run of the Workflow and of each child program.
Assert.assertEquals(1, workflowHistoryRuns.size());
Assert.assertEquals(1, oneMRHistoryRuns.size());
Assert.assertEquals(1, anotherMRHistoryRuns.size());
Assert.assertEquals(1, oneSparkHistoryRuns.size());
Assert.assertEquals(1, anotherSparkHistoryRuns.size());
Map<String, String> workflowRunRecordProperties = workflowHistoryRuns.get(0).getProperties();
Map<String, String> oneMRRunRecordProperties = oneMRHistoryRuns.get(0).getProperties();
Map<String, String> anotherMRRunRecordProperties = anotherMRHistoryRuns.get(0).getProperties();
Map<String, String> oneSparkRunRecordProperties = oneSparkHistoryRuns.get(0).getProperties();
Map<String, String> anotherSparkRunRecordProperties = anotherSparkHistoryRuns.get(0).getProperties();
// Child -> parent link: every child run record carries the Workflow run id.
Assert.assertNotNull(oneMRRunRecordProperties.get(workflowRunIdProperty));
Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), oneMRRunRecordProperties.get(workflowRunIdProperty));
Assert.assertNotNull(anotherMRRunRecordProperties.get(workflowRunIdProperty));
Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), anotherMRRunRecordProperties.get(workflowRunIdProperty));
Assert.assertNotNull(oneSparkRunRecordProperties.get(workflowRunIdProperty));
Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), oneSparkRunRecordProperties.get(workflowRunIdProperty));
Assert.assertNotNull(anotherSparkRunRecordProperties.get(workflowRunIdProperty));
Assert.assertEquals(workflowHistoryRuns.get(0).getPid(), anotherSparkRunRecordProperties.get(workflowRunIdProperty));
// Parent -> child link: the Workflow run record maps each node name to the child run id.
Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ONE_MR), oneMRHistoryRuns.get(0).getPid());
Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ONE_SPARK), oneSparkHistoryRuns.get(0).getPid());
Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ANOTHER_MR), anotherMRHistoryRuns.get(0).getPid());
Assert.assertEquals(workflowRunRecordProperties.get(WorkflowAppWithScopedParameters.ANOTHER_SPARK), anotherSparkHistoryRuns.get(0).getPid());
// Get Workflow node states
Map<String, WorkflowNodeStateDetail> nodeStates = getWorkflowNodeStates(programId, workflowHistoryRuns.get(0).getPid());
Assert.assertNotNull(nodeStates);
// 5 nodes: two MapReduce, two Spark, one custom action.
Assert.assertEquals(5, nodeStates.size());
WorkflowNodeStateDetail mrNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ONE_MR);
Assert.assertNotNull(mrNodeState);
Assert.assertEquals(WorkflowAppWithScopedParameters.ONE_MR, mrNodeState.getNodeId());
Assert.assertEquals(oneMRHistoryRuns.get(0).getPid(), mrNodeState.getRunId());
mrNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ANOTHER_MR);
Assert.assertNotNull(mrNodeState);
Assert.assertEquals(WorkflowAppWithScopedParameters.ANOTHER_MR, mrNodeState.getNodeId());
Assert.assertEquals(anotherMRHistoryRuns.get(0).getPid(), mrNodeState.getRunId());
WorkflowNodeStateDetail sparkNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ONE_SPARK);
Assert.assertNotNull(sparkNodeState);
Assert.assertEquals(WorkflowAppWithScopedParameters.ONE_SPARK, sparkNodeState.getNodeId());
Assert.assertEquals(oneSparkHistoryRuns.get(0).getPid(), sparkNodeState.getRunId());
sparkNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ANOTHER_SPARK);
Assert.assertNotNull(sparkNodeState);
Assert.assertEquals(WorkflowAppWithScopedParameters.ANOTHER_SPARK, sparkNodeState.getNodeId());
Assert.assertEquals(anotherSparkHistoryRuns.get(0).getPid(), sparkNodeState.getRunId());
WorkflowNodeStateDetail oneActionNodeState = nodeStates.get(WorkflowAppWithScopedParameters.ONE_ACTION);
Assert.assertNotNull(oneActionNodeState);
Assert.assertEquals(WorkflowAppWithScopedParameters.ONE_ACTION, oneActionNodeState.getNodeId());
}
Example usage of co.cask.cdap.proto.RunRecord in the cdap project (by caskdata):
class WorkflowHttpHandlerTest, method verifyMultipleConcurrentRuns.
/**
 * Asserts that exactly two runs of the given Workflow are running concurrently, and that
 * each run's current node is the SimpleAction of ConcurrentWorkflowApp.
 */
private void verifyMultipleConcurrentRuns(Id.Program workflowId) throws Exception {
  verifyProgramRuns(workflowId, ProgramRunStatus.RUNNING.name(), 1);
  List<RunRecord> runningRuns = getProgramRuns(workflowId, "running");
  Assert.assertEquals(2, runningRuns.size());

  // Both concurrent runs must report the same single current node.
  for (RunRecord run : runningRuns) {
    HttpResponse statusResponse = getWorkflowCurrentStatus(workflowId, run.getPid());
    Assert.assertEquals(200, statusResponse.getStatusLine().getStatusCode());
    String body = EntityUtils.toString(statusResponse.getEntity());
    List<WorkflowActionNode> currentNodes = GSON.fromJson(body, LIST_WORKFLOWACTIONNODE_TYPE);
    Assert.assertEquals(1, currentNodes.size());
    Assert.assertEquals(ConcurrentWorkflowApp.SimpleAction.class.getSimpleName(),
                        currentNodes.get(0).getProgram().getProgramName());
  }
}
Example usage of co.cask.cdap.proto.RunRecord in the cdap project (by caskdata):
class ProgramLifecycleServiceTest, method testInvalidFlowRunRecord.
// Verifies that ProgramLifecycleService.validateAndCorrectRunningRunRecords repairs a run
// record that claims RUNNING while no live runtime exists: a flow run is started, killed via
// its controller, then its store record is deliberately reset to RUNNING; the correction pass
// must mark that orphaned record FAILED.
@Test
public void testInvalidFlowRunRecord() throws Exception {
// Create App with Flow and the deploy
HttpResponse response = deploy(WordCountApp.class, Constants.Gateway.API_VERSION_3_TOKEN, TEST_NAMESPACE1);
Assert.assertEquals(200, response.getStatusLine().getStatusCode());
final Id.Program wordcountFlow1 = Id.Program.from(TEST_NAMESPACE1, "WordCountApp", ProgramType.FLOW, "WordCountFlow");
// flow is stopped initially
Assert.assertEquals("STOPPED", getProgramStatus(wordcountFlow1));
// start a flow and check the status
startProgram(wordcountFlow1);
waitState(wordcountFlow1, ProgramRunStatus.RUNNING.toString());
// Wait until we have a run record
Tasks.waitFor(1, new Callable<Integer>() {
@Override
public Integer call() throws Exception {
return getProgramRuns(wordcountFlow1, ProgramRunStatus.RUNNING.toString()).size();
}
}, 5, TimeUnit.SECONDS);
// Get the RunRecord
List<RunRecord> runRecords = getProgramRuns(wordcountFlow1, ProgramRunStatus.RUNNING.toString());
Assert.assertEquals(1, runRecords.size());
final RunRecord rr = runRecords.get(0);
// Check the RunRecords status
Assert.assertEquals(ProgramRunStatus.RUNNING, rr.getStatus());
// Lets set the runtime info to off
// Kill the live run through its controller so no runtime remains for this run id.
RuntimeInfo runtimeInfo = runtimeService.lookup(wordcountFlow1.toEntityId(), RunIds.fromString(rr.getPid()));
ProgramController programController = runtimeInfo.getController();
programController.stop();
// Verify that the status of that run is KILLED
Tasks.waitFor(ProgramRunStatus.KILLED, new Callable<ProgramRunStatus>() {
@Override
public ProgramRunStatus call() throws Exception {
RunRecordMeta runRecord = store.getRun(wordcountFlow1.toEntityId(), rr.getPid());
return runRecord == null ? null : runRecord.getStatus();
}
}, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Use the store manipulate state to be RUNNING
// Re-writing the start time flips the record back to RUNNING even though the program is dead,
// producing the invalid state this test is about.
long now = System.currentTimeMillis();
long nowSecs = TimeUnit.MILLISECONDS.toSeconds(now);
store.setStart(wordcountFlow1.toEntityId(), rr.getPid(), nowSecs);
// Now check again via Store to assume data store is wrong.
RunRecord runRecordMeta = store.getRun(wordcountFlow1.toEntityId(), rr.getPid());
Assert.assertNotNull(runRecordMeta);
Assert.assertEquals(ProgramRunStatus.RUNNING, runRecordMeta.getStatus());
// Verify there is NO FAILED run record for the application
runRecords = getProgramRuns(wordcountFlow1, ProgramRunStatus.FAILED.toString());
Assert.assertEquals(0, runRecords.size());
// Lets fix it
Set<String> processedInvalidRunRecordIds = Sets.newHashSet();
programLifecycleService.validateAndCorrectRunningRunRecords(ProgramType.FLOW, processedInvalidRunRecordIds);
// Verify there is one FAILED run record for the application
runRecords = getProgramRuns(wordcountFlow1, ProgramRunStatus.FAILED.toString());
Assert.assertEquals(1, runRecords.size());
Assert.assertEquals(ProgramRunStatus.FAILED, runRecords.get(0).getStatus());
}
Aggregations