Example use of co.cask.cdap.logging.context.WorkflowLoggingContext in project cdap by caskdata: the generateLogs method of the MockLogReader class.
/**
 * Populates the mock log reader with logs for a fixed set of programs spanning
 * the default and test namespaces, covering several run statuses, and finally
 * adds the Workflow-specific logs.
 *
 * @throws InterruptedException if log generation is interrupted
 */
public void generateLogs() throws InterruptedException {
    String defaultNs = NamespaceId.DEFAULT.getEntityName();
    String testNs = TEST_NAMESPACE_ID.getNamespace();

    // Logs for flow testFlow1 of app testApp2 (default namespace, RUNNING)
    generateLogs(new FlowletLoggingContext(defaultNs, "testApp2", "testFlow1", "testFlowlet1", "", ""),
                 NamespaceId.DEFAULT.app("testApp2").flow("testFlow1"),
                 ProgramRunStatus.RUNNING);
    // Logs for mapreduce testMapReduce1 of app testApp3 (default namespace, SUSPENDED)
    generateLogs(new MapReduceLoggingContext(defaultNs, "testApp3", "testMapReduce1", ""),
                 NamespaceId.DEFAULT.app("testApp3").mr("testMapReduce1"),
                 ProgramRunStatus.SUSPENDED);
    // Logs for service testService1 of app testApp4 (default namespace, RUNNING)
    generateLogs(new UserServiceLoggingContext(defaultNs, "testApp4", "testService1", "test1", "", ""),
                 NamespaceId.DEFAULT.app("testApp4").service("testService1"),
                 ProgramRunStatus.RUNNING);
    // Logs for mapreduce testMapReduce1 of app testTemplate1 (test namespace, COMPLETED)
    generateLogs(new MapReduceLoggingContext(testNs, "testTemplate1", "testMapReduce1", ""),
                 TEST_NAMESPACE_ID.app("testTemplate1").mr("testMapReduce1"),
                 ProgramRunStatus.COMPLETED);
    // Logs for flow testFlow1 of app testApp1 (test namespace, COMPLETED)
    generateLogs(new FlowletLoggingContext(testNs, "testApp1", "testFlow1", "testFlowlet1", "", ""),
                 TEST_NAMESPACE_ID.app("testApp1").flow("testFlow1"),
                 ProgramRunStatus.COMPLETED);
    // Logs for service testService1 of app testApp4 (test namespace, KILLED)
    generateLogs(new UserServiceLoggingContext(testNs, "testApp4", "testService1", "test1", "", ""),
                 TEST_NAMESPACE_ID.app("testApp4").service("testService1"),
                 ProgramRunStatus.KILLED);
    // Logs for workflow testWorkflow1 run testRun1 (test namespace, COMPLETED)
    generateLogs(new WorkflowLoggingContext(testNs, "testTemplate1", "testWorkflow1", "testRun1"),
                 TEST_NAMESPACE_ID.app("testTemplate1").workflow("testWorkflow1"),
                 ProgramRunStatus.COMPLETED);
    // Logs for workflow testWorkflow1 run testRun2 (default namespace, COMPLETED)
    generateLogs(new WorkflowLoggingContext(defaultNs, "testTemplate1", "testWorkflow1", "testRun2"),
                 NamespaceId.DEFAULT.app("testTemplate1").workflow("testWorkflow1"),
                 ProgramRunStatus.COMPLETED);

    generateWorkflowLogs();
}
Example use of co.cask.cdap.logging.context.WorkflowLoggingContext in project cdap by caskdata: the generateWorkflowLogs method of the MockLogReader class.
/**
 * Generates logs for one Workflow run together with the MapReduce and Spark
 * programs launched by that run, registering each run record in
 * {@code runRecordMap} before producing its logs. Emits a second batch of
 * Workflow-level logs at the end so the Workflow's log stream brackets the
 * child-program logs.
 */
private void generateWorkflowLogs() {
    // The Workflow run itself, started at t = 10s
    ProgramId workflowId = SOME_WORKFLOW_APP.workflow(SOME_WORKFLOW);
    RunId wfRun = RunIds.generate();
    setStartAndRunning(workflowId.run(wfRun.getId()), TimeUnit.SECONDS.toMillis(10));
    runRecordMap.put(workflowId, store.getRun(workflowId.run(wfRun.getId())));
    WorkflowLoggingContext workflowContext =
        new WorkflowLoggingContext(workflowId.getNamespace(), workflowId.getApplication(),
                                   workflowId.getProgram(), wfRun.getId());
    generateWorkflowRunLogs(workflowContext);

    // MapReduce program launched by the Workflow run above, started at t = 20s.
    // System args tie the child run back to the parent Workflow run.
    ProgramId mrId = SOME_WORKFLOW_APP.mr(SOME_MAPREDUCE);
    RunId mrRun = RunIds.generate();
    Map<String, String> mrSystemArgs = ImmutableMap.of(
        ProgramOptionConstants.WORKFLOW_NODE_ID, SOME_MAPREDUCE,
        ProgramOptionConstants.WORKFLOW_NAME, SOME_WORKFLOW,
        ProgramOptionConstants.WORKFLOW_RUN_ID, wfRun.getId());
    setStartAndRunning(mrId.run(mrRun.getId()), TimeUnit.SECONDS.toMillis(20),
                       new HashMap<>(), mrSystemArgs);
    runRecordMap.put(mrId, store.getRun(mrId.run(mrRun.getId())));
    generateWorkflowRunLogs(new WorkflowProgramLoggingContext(
        workflowId.getNamespace(), workflowId.getApplication(), workflowId.getProgram(),
        wfRun.getId(), ProgramType.MAPREDUCE, SOME_MAPREDUCE, mrRun.getId()));

    // Spark program launched by the same Workflow run, started at t = 40s
    ProgramId sparkId = SOME_WORKFLOW_APP.spark(SOME_SPARK);
    RunId sparkRun = RunIds.generate();
    Map<String, String> sparkSystemArgs = ImmutableMap.of(
        ProgramOptionConstants.WORKFLOW_NODE_ID, SOME_SPARK,
        ProgramOptionConstants.WORKFLOW_NAME, SOME_WORKFLOW,
        ProgramOptionConstants.WORKFLOW_RUN_ID, wfRun.getId());
    setStartAndRunning(sparkId.run(sparkRun.getId()), TimeUnit.SECONDS.toMillis(40),
                       new HashMap<>(), sparkSystemArgs);
    runRecordMap.put(sparkId, store.getRun(sparkId.run(sparkRun.getId())));
    generateWorkflowRunLogs(new WorkflowProgramLoggingContext(
        workflowId.getNamespace(), workflowId.getApplication(), workflowId.getProgram(),
        wfRun.getId(), ProgramType.SPARK, SOME_SPARK, sparkRun.getId()));

    // Trailing Workflow-level logs, emitted after both child programs
    generateWorkflowRunLogs(workflowContext);
}
Aggregations