Use of io.cdap.cdap.proto.id.WorkflowId in project cdap by caskdata.
The class WorkflowStatsSLAHttpHandlerTest, method testCompare.
@Test
public void testCompare() throws Exception {
  deploy(WorkflowApp.class, 200);
  String workflowName = "FunWorkflow";
  String mapreduceName = "ClassicWordCount";
  String sparkName = "SparkWorkflowTest";
  WorkflowId workflowProgram = WORKFLOW_APP.workflow(workflowName);
  ProgramId mapreduceProgram = WORKFLOW_APP.mr(mapreduceName);
  ProgramId sparkProgram = WORKFLOW_APP.spark(sparkName);
  ArtifactId artifactId = WORKFLOW_APP.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();
  List<RunId> workflowRunIdList = setupRuns(workflowProgram, mapreduceProgram, sparkProgram, store, 2, artifactId);
  RunId workflowRun1 = workflowRunIdList.get(0);
  RunId workflowRun2 = workflowRunIdList.get(1);
  String request = String.format("%s/namespaces/%s/apps/%s/workflows/%s/runs/%s/compare?other-run-id=%s",
                                 Constants.Gateway.API_VERSION_3, Id.Namespace.DEFAULT.getId(),
                                 WorkflowApp.class.getSimpleName(), workflowProgram.getProgram(),
                                 workflowRun1.getId(), workflowRun2.getId());
  HttpResponse response = doGet(request);
  Collection<WorkflowStatsComparison.ProgramNodes> workflowStatistics =
    readResponse(response, new TypeToken<Collection<WorkflowStatsComparison.ProgramNodes>>() { }.getType());
  Assert.assertNotNull(workflowStatistics.iterator().next());
  Assert.assertEquals(2, workflowStatistics.size());
  for (WorkflowStatsComparison.ProgramNodes node : workflowStatistics) {
    if (node.getProgramType() == ProgramType.MAPREDUCE) {
      Assert.assertEquals(38L, (long) node.getWorkflowProgramDetailsList().get(0).getMetrics()
        .get(TaskCounter.MAP_INPUT_RECORDS.name()));
    }
  }
}
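The compare request above is addressed entirely through the parts of a WorkflowId. A minimal sketch of building the same path from a WorkflowId; the helper name buildComparePath is illustrative and not part of the CDAP test class:

// Sketch only: derives the compare URL used in testCompare() from a WorkflowId.
// "buildComparePath" is a hypothetical helper, not taken from the source.
private static String buildComparePath(WorkflowId workflow, String runId, String otherRunId) {
  return String.format("%s/namespaces/%s/apps/%s/workflows/%s/runs/%s/compare?other-run-id=%s",
                       Constants.Gateway.API_VERSION_3, workflow.getNamespace(),
                       workflow.getApplication(), workflow.getProgram(), runId, otherRunId);
}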
Use of io.cdap.cdap.proto.id.WorkflowId in project cdap by caskdata.
The class WorkflowHttpHandlerTest, method testWorkflowToken.
@Test
@SuppressWarnings("ConstantConditions")
public void testWorkflowToken() throws Exception {
  deploy(AppWithWorkflow.class, 200);
  Id.Application appId = Id.Application.from(Id.Namespace.DEFAULT, AppWithWorkflow.NAME);
  final Id.Workflow workflowId = Id.Workflow.from(appId, AppWithWorkflow.SampleWorkflow.NAME);
  String outputPath = new File(tmpFolder.newFolder(), "output").getAbsolutePath();
  startProgram(workflowId, ImmutableMap.of("inputPath", createInput("input"), "outputPath", outputPath));
  Tasks.waitFor(1, () -> getProgramRuns(workflowId, ProgramRunStatus.COMPLETED).size(), 60, TimeUnit.SECONDS);
  List<RunRecord> programRuns = getProgramRuns(workflowId, ProgramRunStatus.COMPLETED);
  Assert.assertEquals(1, programRuns.size());
  RunRecord runRecord = programRuns.get(0);
  String pid = runRecord.getPid();
  // Verify the entire workflow token
  WorkflowTokenDetail workflowTokenDetail = getWorkflowToken(workflowId, pid, null, null);
  List<WorkflowTokenDetail.NodeValueDetail> nodeValueDetails =
    workflowTokenDetail.getTokenData().get(AppWithWorkflow.DummyAction.TOKEN_KEY);
  Assert.assertEquals(2, nodeValueDetails.size());
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.FIRST_ACTION, nodeValueDetails.get(0).getNode());
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.SECOND_ACTION, nodeValueDetails.get(1).getNode());
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, nodeValueDetails.get(0).getValue());
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, nodeValueDetails.get(1).getValue());
  // Verify the entire workflow token by passing in the scope and key in the request
  workflowTokenDetail = getWorkflowToken(workflowId, pid, WorkflowToken.Scope.USER, AppWithWorkflow.DummyAction.TOKEN_KEY);
  nodeValueDetails = workflowTokenDetail.getTokenData().get(AppWithWorkflow.DummyAction.TOKEN_KEY);
  Assert.assertEquals(2, nodeValueDetails.size());
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.FIRST_ACTION, nodeValueDetails.get(0).getNode());
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.SECOND_ACTION, nodeValueDetails.get(1).getNode());
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, nodeValueDetails.get(0).getValue());
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, nodeValueDetails.get(1).getValue());
  // Get workflow-level tokens
  WorkflowTokenNodeDetail nodeDetail =
    getWorkflowToken(workflowId, pid, AppWithWorkflow.SampleWorkflow.NAME, WorkflowToken.Scope.USER, null);
  Map<String, String> tokenData = nodeDetail.getTokenDataAtNode();
  Assert.assertEquals(2, tokenData.size());
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.INITIALIZE_TOKEN_VALUE,
                      tokenData.get(AppWithWorkflow.SampleWorkflow.INITIALIZE_TOKEN_KEY));
  Assert.assertEquals(AppWithWorkflow.SampleWorkflow.DESTROY_TOKEN_SUCCESS_VALUE,
                      tokenData.get(AppWithWorkflow.SampleWorkflow.DESTROY_TOKEN_KEY));
  // Verify the workflow token at a given node
  WorkflowTokenNodeDetail tokenAtNode =
    getWorkflowToken(workflowId, pid, AppWithWorkflow.SampleWorkflow.FIRST_ACTION, null, null);
  Map<String, String> tokenDataAtNode = tokenAtNode.getTokenDataAtNode();
  Assert.assertEquals(1, tokenDataAtNode.size());
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, tokenDataAtNode.get(AppWithWorkflow.DummyAction.TOKEN_KEY));
  // Verify the workflow token at a given node by passing in a scope and a key
  tokenAtNode = getWorkflowToken(workflowId, pid, AppWithWorkflow.SampleWorkflow.FIRST_ACTION,
                                 WorkflowToken.Scope.USER, AppWithWorkflow.DummyAction.TOKEN_KEY);
  tokenDataAtNode = tokenAtNode.getTokenDataAtNode();
  Assert.assertEquals(1, tokenDataAtNode.size());
  Assert.assertEquals(AppWithWorkflow.DummyAction.TOKEN_VALUE, tokenDataAtNode.get(AppWithWorkflow.DummyAction.TOKEN_KEY));
}
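The test above still addresses the workflow through the legacy Id.Workflow class. A minimal sketch, reusing the application and workflow names from the test, of the equivalent new-style io.cdap.cdap.proto.id.WorkflowId:

// Sketch: the same workflow expressed as a WorkflowId entity rather than the legacy Id.Workflow.
WorkflowId newStyleWorkflowId = NamespaceId.DEFAULT
  .app(AppWithWorkflow.NAME)
  .workflow(AppWithWorkflow.SampleWorkflow.NAME);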
Use of io.cdap.cdap.proto.id.WorkflowId in project cdap by caskdata.
The class WorkflowHttpHandlerTest, method testWorkflowForkFailure.
@Ignore
@Test
public void testWorkflowForkFailure() throws Exception {
  // Deploy an application containing a workflow with a fork. The fork executes the MapReduce programs
  // 'FirstMapReduce' and 'SecondMapReduce' in parallel. The workflow is started with the runtime argument
  // "mapreduce.SecondMapReduce.throw.exception" so that the MapReduce program 'SecondMapReduce' fails.
  // This causes the 'FirstMapReduce' program to be killed and the workflow to be marked as failed.
  deploy(WorkflowFailureInForkApp.class, 200);
  Id.Application appId = Id.Application.from(Id.Namespace.DEFAULT, WorkflowFailureInForkApp.NAME);
  Id.Workflow workflowId = Id.Workflow.from(appId, WorkflowFailureInForkApp.WorkflowWithFailureInFork.NAME);
  Id.Program firstMRId = Id.Program.from(appId, ProgramType.MAPREDUCE, WorkflowFailureInForkApp.FIRST_MAPREDUCE_NAME);
  Id.Program secondMRId = Id.Program.from(appId, ProgramType.MAPREDUCE, WorkflowFailureInForkApp.SECOND_MAPREDUCE_NAME);
  String outputPath = new File(tmpFolder.newFolder(), "output").getAbsolutePath();
  File fileToSync = new File(tmpFolder.newFolder() + "/sync.file");
  File fileToWait = new File(tmpFolder.newFolder() + "/wait.file");
  startProgram(workflowId, ImmutableMap.of(
    "inputPath", createInput("testWorkflowForkFailureInput"),
    "outputPath", outputPath,
    "sync.file", fileToSync.getAbsolutePath(),
    "wait.file", fileToWait.getAbsolutePath(),
    "mapreduce." + WorkflowFailureInForkApp.SECOND_MAPREDUCE_NAME + ".throw.exception", "true"));
  waitState(workflowId, ProgramStatus.RUNNING.name());
  waitState(workflowId, ProgramStatus.STOPPED.name());
  verifyProgramRuns(workflowId, ProgramRunStatus.FAILED);
  List<RunRecord> mapReduceProgramRuns = getProgramRuns(firstMRId, ProgramRunStatus.KILLED);
  Assert.assertEquals(1, mapReduceProgramRuns.size());
  mapReduceProgramRuns = getProgramRuns(secondMRId, ProgramRunStatus.FAILED);
  Assert.assertEquals(1, mapReduceProgramRuns.size());
}
Use of io.cdap.cdap.proto.id.WorkflowId in project cdap by caskdata.
The class DefaultStore, method setStop.
@Override
public void setStop(ProgramRunId id, long endTime, ProgramRunStatus runStatus, BasicThrowable failureCause, byte[] sourceId) {
  Preconditions.checkArgument(runStatus != null, "Run state of program run should be defined");
  TransactionRunners.run(transactionRunner, context -> {
    AppMetadataStore metaStore = getAppMetadataStore(context);
    metaStore.recordProgramStop(id, endTime, runStatus, failureCause, sourceId);
    // This block has been added so that completed workflow runs can be logged to the workflow dataset
    WorkflowId workflowId = new WorkflowId(id.getParent().getParent(), id.getProgram());
    if (id.getType() == ProgramType.WORKFLOW && runStatus == ProgramRunStatus.COMPLETED) {
      WorkflowTable workflowTable = getWorkflowTable(context);
      recordCompletedWorkflow(metaStore, workflowTable, workflowId, id.getRun());
    }
    // todo: delete old history data
  });
}
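The WorkflowId in setStop is built by walking the entity hierarchy of the ProgramRunId. A minimal sketch with illustrative values, assuming the five-argument ProgramRunId constructor; none of the names below are taken from the source:

// Sketch only: the run id's parent is the ProgramId, whose parent is the ApplicationId.
ProgramRunId run = new ProgramRunId("default", "WorkflowApp", ProgramType.WORKFLOW, "FunWorkflow", "run-1234");
ProgramId program = run.getParent();      // default.WorkflowApp.workflow.FunWorkflow
ApplicationId app = program.getParent();  // default.WorkflowApp
WorkflowId workflow = new WorkflowId(app, run.getProgram());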
Use of io.cdap.cdap.proto.id.WorkflowId in project cdap by caskdata.
The class DataPipelineTest, method deployPipelineWithSchedule.
private WorkflowManager deployPipelineWithSchedule(String pipelineName, Engine engine, String triggeringPipelineName,
                                                   ArgumentMapping key1Mapping, String expectedKey1Value,
                                                   PluginPropertyMapping key2Mapping, String expectedKey2Value) throws Exception {
  String tableName = "actionScheduleTable" + pipelineName + engine;
  String sourceName = "macroActionWithScheduleInput-" + pipelineName + engine;
  String sinkName = "macroActionWithScheduleOutput-" + pipelineName + engine;
  String key1 = key1Mapping.getTarget();
  String key2 = key2Mapping.getTarget();
  ETLBatchConfig etlConfig = ETLBatchConfig.builder()
    .addStage(new ETLStage("action1", MockAction.getPlugin(tableName, "row1", "column1", String.format("${%s}", key1))))
    .addStage(new ETLStage("action2", MockAction.getPlugin(tableName, "row2", "column2", String.format("${%s}", key2))))
    .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
    .addStage(new ETLStage("filter1", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key1))))
    .addStage(new ETLStage("filter2", StringValueFilterTransform.getPlugin("name", String.format("${%s}", key2))))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
    .addConnection("action1", "action2")
    .addConnection("action2", "source")
    .addConnection("source", "filter1")
    .addConnection("filter1", "filter2")
    .addConnection("filter2", "sink")
    .setEngine(engine)
    .build();
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationId appId = NamespaceId.DEFAULT.app(pipelineName);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  // there should be only two programs - one workflow and one mapreduce/spark
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  // Use expectedKey1Value and expectedKey2Value as the names of two of the records, so that only the
  // record "samuel" passes both string-value filters and reaches the sink.
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordKey1Value = StructuredRecord.builder(schema).set("name", expectedKey1Value).build();
  StructuredRecord recordKey2Value = StructuredRecord.builder(schema).set("name", expectedKey2Value).build();
  // write the three records to the source
  DataSetManager<Table> inputManager = getDataset(sourceName);
  MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordKey1Value, recordKey2Value));
  String defaultNamespace = NamespaceId.DEFAULT.getNamespace();
  // Use properties from the triggering pipeline as values for the runtime arguments key1 and key2
  TriggeringPropertyMapping propertyMapping =
    new TriggeringPropertyMapping(ImmutableList.of(key1Mapping), ImmutableList.of(key2Mapping));
  ProgramStatusTrigger completeTrigger =
    new ProgramStatusTrigger(new WorkflowId(defaultNamespace, triggeringPipelineName, SmartWorkflow.NAME),
                             ImmutableSet.of(ProgramStatus.COMPLETED));
  ScheduleId scheduleId = appId.schedule("completeSchedule");
  appManager.addSchedule(new ScheduleDetail(scheduleId.getNamespace(), scheduleId.getApplication(),
                                            scheduleId.getVersion(), scheduleId.getSchedule(), "",
                                            new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW, SmartWorkflow.NAME),
                                            ImmutableMap.of(SmartWorkflow.TRIGGERING_PROPERTIES_MAPPING, GSON.toJson(propertyMapping)),
                                            completeTrigger, ImmutableList.of(),
                                            Schedulers.JOB_QUEUE_TIMEOUT_MILLIS, null, null));
  appManager.enableSchedule(scheduleId);
  return appManager.getWorkflowManager(SmartWorkflow.NAME);
}
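A hypothetical call to the helper above, just to show the shape of its arguments. The ArgumentMapping and PluginPropertyMapping constructor argument orders are assumptions, the names are illustrative, and the waitForRun call is only a sketch of how a test might then exercise the schedule:

// Hypothetical usage (constructor signatures and values are assumptions, not taken from the source):
ArgumentMapping key1Mapping = new ArgumentMapping("triggering.arg", "key1");
PluginPropertyMapping key2Mapping = new PluginPropertyMapping("action1", "value", "key2");
WorkflowManager triggeredWorkflow = deployPipelineWithSchedule(
  "triggeredPipeline", Engine.MAPREDUCE, "triggeringPipeline",
  key1Mapping, "expectedValue1", key2Mapping, "expectedValue2");
// Completing a run of the triggering pipeline fires the ProgramStatusTrigger on its SmartWorkflow,
// which in turn starts the triggered pipeline's workflow.
triggeredWorkflow.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);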