Usage of io.cdap.cdap.logging.context.WorkflowLoggingContext in the cdap project (caskdata): class MockLogReader, method generateWorkflowLogs.
/**
 * Generates logs for one Workflow run, plus logs for the MapReduce and Spark
 * programs launched by that run, and records each run in {@code runRecordMap}.
 */
private void generateWorkflowLogs() {
  ProgramId workflowProgram = SOME_WORKFLOW_APP.workflow(SOME_WORKFLOW);
  RunId wfRun = RunIds.generate(TimeUnit.SECONDS.toMillis(10));
  setStartAndRunning(workflowProgram.run(wfRun.getId()));
  runRecordMap.put(workflowProgram, store.getRun(workflowProgram.run(wfRun.getId())));
  WorkflowLoggingContext wfLoggingContext =
      new WorkflowLoggingContext(workflowProgram.getNamespace(), workflowProgram.getApplication(),
                                 workflowProgram.getProgram(), wfRun.getId());
  generateWorkflowRunLogs(wfLoggingContext);

  // Logs for the MapReduce program started by the Workflow run above
  ProgramId mrProgram = SOME_WORKFLOW_APP.mr(SOME_MAPREDUCE);
  RunId mrRun = RunIds.generate(TimeUnit.SECONDS.toMillis(20));
  Map<String, String> mrSystemArgs =
      ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, SOME_MAPREDUCE,
                      ProgramOptionConstants.WORKFLOW_NAME, SOME_WORKFLOW,
                      ProgramOptionConstants.WORKFLOW_RUN_ID, wfRun.getId());
  setStartAndRunning(mrProgram.run(mrRun.getId()), new HashMap<>(), mrSystemArgs);
  runRecordMap.put(mrProgram, store.getRun(mrProgram.run(mrRun.getId())));
  generateWorkflowRunLogs(
      new WorkflowProgramLoggingContext(workflowProgram.getNamespace(),
                                        workflowProgram.getApplication(),
                                        workflowProgram.getProgram(), wfRun.getId(),
                                        ProgramType.MAPREDUCE, SOME_MAPREDUCE, mrRun.getId()));

  // Logs for the Spark program started by the same Workflow run
  ProgramId sparkProgram = SOME_WORKFLOW_APP.spark(SOME_SPARK);
  RunId sparkRun = RunIds.generate(TimeUnit.SECONDS.toMillis(40));
  Map<String, String> sparkSystemArgs =
      ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, SOME_SPARK,
                      ProgramOptionConstants.WORKFLOW_NAME, SOME_WORKFLOW,
                      ProgramOptionConstants.WORKFLOW_RUN_ID, wfRun.getId());
  setStartAndRunning(sparkProgram.run(sparkRun.getId()), new HashMap<>(), sparkSystemArgs);
  runRecordMap.put(sparkProgram, store.getRun(sparkProgram.run(sparkRun.getId())));
  generateWorkflowRunLogs(
      new WorkflowProgramLoggingContext(workflowProgram.getNamespace(),
                                        workflowProgram.getApplication(),
                                        workflowProgram.getProgram(), wfRun.getId(),
                                        ProgramType.SPARK, SOME_SPARK, sparkRun.getId()));

  // A second batch of logs for the Workflow itself
  generateWorkflowRunLogs(wfLoggingContext);
}
Usage of io.cdap.cdap.logging.context.WorkflowLoggingContext in the cdap project (caskdata): class LogsServiceMainTest, method getLoggingEvents.
/**
 * Builds six INFO/DEBUG logging events for a fixed Workflow run, with messages
 * "0".."5" and timestamps spread over the last second.
 */
private ImmutableList<ILoggingEvent> getLoggingEvents() {
  WorkflowLoggingContext ctx =
      new WorkflowLoggingContext("default", "app1", "myworkflow",
                                 "82b25495-35a2-11e9-b987-acde48001122");
  long base = System.currentTimeMillis();
  // Message index i pairs with its level and its offset (ms) before 'base'.
  Level[] levels = {Level.INFO, Level.INFO, Level.INFO, Level.DEBUG, Level.INFO, Level.INFO};
  long[] offsets = {1000, 900, 700, 600, 500, 100};
  ImmutableList.Builder<ILoggingEvent> events = ImmutableList.builder();
  for (int i = 0; i < offsets.length; i++) {
    events.add(createLoggingEvent(levels[i], Integer.toString(i), base - offsets[i], ctx));
  }
  return events.build();
}
Usage of io.cdap.cdap.logging.context.WorkflowLoggingContext in the cdap project (caskdata): class MockLogReader, method generateLogs.
/**
 * Populates the mock log store with runs in assorted terminal/active states across
 * the default and test namespaces, then generates the Workflow-family logs.
 */
public void generateLogs() throws InterruptedException {
// Add logs for app testApp2, worker testWorker1 (RUNNING, default namespace)
generateLogs(new WorkerLoggingContext(NamespaceId.DEFAULT.getEntityName(), "testApp2", "testWorker1", "", ""), NamespaceId.DEFAULT.app("testApp2").worker("testWorker1"), ProgramRunStatus.RUNNING);
// Add logs for app testApp3, mapreduce testMapReduce1 (SUSPENDED, default namespace)
generateLogs(new MapReduceLoggingContext(NamespaceId.DEFAULT.getEntityName(), "testApp3", "testMapReduce1", ""), NamespaceId.DEFAULT.app("testApp3").mr("testMapReduce1"), ProgramRunStatus.SUSPENDED);
// Add logs for app testApp4, service testService1 (RUNNING, default namespace)
generateLogs(new UserServiceLoggingContext(NamespaceId.DEFAULT.getEntityName(), "testApp4", "testService1", "test1", "", ""), NamespaceId.DEFAULT.app("testApp4").service("testService1"), ProgramRunStatus.RUNNING);
// Add logs for app testTemplate1, mapreduce testMapReduce1 (COMPLETED, test namespace)
generateLogs(new MapReduceLoggingContext(TEST_NAMESPACE_ID.getNamespace(), "testTemplate1", "testMapReduce1", ""), TEST_NAMESPACE_ID.app("testTemplate1").mr("testMapReduce1"), ProgramRunStatus.COMPLETED);
// Add logs for app testApp1, worker testWorker1 (COMPLETED, test namespace)
generateLogs(new WorkerLoggingContext(TEST_NAMESPACE_ID.getNamespace(), "testApp1", "testWorker1", "", ""), TEST_NAMESPACE_ID.app("testApp1").worker("testWorker1"), ProgramRunStatus.COMPLETED);
// Add logs for app testApp4, service testService1 (KILLED, test namespace)
generateLogs(new UserServiceLoggingContext(TEST_NAMESPACE_ID.getNamespace(), "testApp4", "testService1", "test1", "", ""), TEST_NAMESPACE_ID.app("testApp4").service("testService1"), ProgramRunStatus.KILLED);
// Add logs for testWorkflow1 run testRun1 (COMPLETED, test namespace)
generateLogs(new WorkflowLoggingContext(TEST_NAMESPACE_ID.getNamespace(), "testTemplate1", "testWorkflow1", "testRun1"), TEST_NAMESPACE_ID.app("testTemplate1").workflow("testWorkflow1"), ProgramRunStatus.COMPLETED);
// Add logs for testWorkflow1 run testRun2 (COMPLETED, default namespace)
generateLogs(new WorkflowLoggingContext(NamespaceId.DEFAULT.getEntityName(), "testTemplate1", "testWorkflow1", "testRun2"), NamespaceId.DEFAULT.app("testTemplate1").workflow("testWorkflow1"), ProgramRunStatus.COMPLETED);
// Workflow + child MapReduce/Spark logs for the dedicated Workflow app
generateWorkflowLogs();
}
Aggregations