Use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata: class DefaultStoreTest, method testLogProgramRunHistory.
@Test
public void testLogProgramRunHistory() {
  Map<String, String> noRuntimeArgsProps =
    ImmutableMap.of("runtimeArgs", GSON.toJson(ImmutableMap.<String, String>of()));

  // record a finished Workflow
  ProgramId programId = new ProgramId("account1", "application1", ProgramType.WORKFLOW, "wf1");
  long now = System.currentTimeMillis();
  long startTimeSecs = TimeUnit.MILLISECONDS.toSeconds(now);
  RunId run1 = RunIds.generate(now - 20000);
  ArtifactId artifactId = programId.getNamespaceId().artifact("testArtifact", "1.0").toApiArtifactId();
  setStartAndRunning(programId.run(run1.getId()), artifactId);
  store.setStop(programId.run(run1.getId()), startTimeSecs - 10,
                ProgramController.State.ERROR.getRunStatus(),
                AppFabricTestHelper.createSourceId(++sourceId));

  // record another finished Workflow
  RunId run2 = RunIds.generate(now - 10000);
  setStartAndRunning(programId.run(run2.getId()), artifactId);
  store.setStop(programId.run(run2.getId()), startTimeSecs - 5,
                ProgramController.State.COMPLETED.getRunStatus(),
                AppFabricTestHelper.createSourceId(++sourceId));

  // record a suspended Workflow
  RunId run21 = RunIds.generate(now - 7500);
  setStartAndRunning(programId.run(run21.getId()), artifactId);
  store.setSuspend(programId.run(run21.getId()), AppFabricTestHelper.createSourceId(++sourceId), -1);

  // record a Workflow that has not finished
  RunId run3 = RunIds.generate(now);
  setStartAndRunning(programId.run(run3.getId()), artifactId);

  // for a RunRecordDetail that has not yet completed, getStopTs should return null
  RunRecordDetail runRecord = store.getRun(programId.run(run3.getId()));
  Assert.assertNotNull(runRecord);
  Assert.assertNull(runRecord.getStopTs());

  // record a run of a different program
  ProgramId programId2 = new ProgramId("account1", "application1", ProgramType.WORKFLOW, "wf2");
  RunId run4 = RunIds.generate(now - 5000);
  setStartAndRunning(programId2.run(run4.getId()), artifactId);
  store.setStop(programId2.run(run4.getId()), startTimeSecs - 4,
                ProgramController.State.COMPLETED.getRunStatus(),
                AppFabricTestHelper.createSourceId(++sourceId));

  // record a run for a different account
  setStartAndRunning(new ProgramId("account2", "application1", ProgramType.WORKFLOW, "wf1").run(run3.getId()),
                     artifactId);

  // a dedicated "get" method on the DefaultStore interface would be better for this, but there isn't one
  Map<ProgramRunId, RunRecordDetail> successHistorymap =
    store.getRuns(programId, ProgramRunStatus.COMPLETED, 0, Long.MAX_VALUE, Integer.MAX_VALUE);
  Map<ProgramRunId, RunRecordDetail> failureHistorymap =
    store.getRuns(programId, ProgramRunStatus.FAILED, startTimeSecs - 20, startTimeSecs - 10, Integer.MAX_VALUE);
  Assert.assertEquals(failureHistorymap,
                      store.getRuns(programId, ProgramRunStatus.FAILED, 0, Long.MAX_VALUE, Integer.MAX_VALUE));
  Map<ProgramRunId, RunRecordDetail> suspendedHistorymap =
    store.getRuns(programId, ProgramRunStatus.SUSPENDED, startTimeSecs - 20, startTimeSecs, Integer.MAX_VALUE);

  // only finished + succeeded runs should be returned
  Assert.assertEquals(1, successHistorymap.size());
  // only finished + failed runs should be returned
  Assert.assertEquals(1, failureHistorymap.size());
  // only suspended runs should be returned
  Assert.assertEquals(1, suspendedHistorymap.size());

  // records should be sorted by start time, latest to earliest
  RunRecordDetail run = successHistorymap.values().iterator().next();
  Assert.assertEquals(startTimeSecs - 10, run.getStartTs());
  Assert.assertEquals(Long.valueOf(startTimeSecs - 5), run.getStopTs());
  Assert.assertEquals(ProgramController.State.COMPLETED.getRunStatus(), run.getStatus());

  run = failureHistorymap.values().iterator().next();
  Assert.assertEquals(startTimeSecs - 20, run.getStartTs());
  Assert.assertEquals(Long.valueOf(startTimeSecs - 10), run.getStopTs());
  Assert.assertEquals(ProgramController.State.ERROR.getRunStatus(), run.getStatus());

  run = suspendedHistorymap.values().iterator().next();
  Assert.assertEquals(run21.getId(), run.getPid());
  Assert.assertEquals(ProgramController.State.SUSPENDED.getRunStatus(), run.getStatus());

  // assert all history
  Map<ProgramRunId, RunRecordDetail> allHistorymap =
    store.getRuns(programId, ProgramRunStatus.ALL, startTimeSecs - 20, startTimeSecs + 1, Integer.MAX_VALUE);
  Assert.assertEquals(allHistorymap.toString(), 4, allHistorymap.size());

  // assert running programs
  Map<ProgramRunId, RunRecordDetail> runningHistorymap =
    store.getRuns(programId, ProgramRunStatus.RUNNING, startTimeSecs, startTimeSecs + 1, 100);
  Assert.assertEquals(1, runningHistorymap.size());
  Assert.assertEquals(runningHistorymap, store.getRuns(programId, ProgramRunStatus.RUNNING, 0, Long.MAX_VALUE, 100));

  // get a run record for a running program
  RunRecordDetail expectedRunning = runningHistorymap.values().iterator().next();
  Assert.assertNotNull(expectedRunning);
  RunRecordDetail actualRunning = store.getRun(programId.run(expectedRunning.getPid()));
  Assert.assertEquals(expectedRunning, actualRunning);

  // get a run record for a completed run
  RunRecordDetail expectedCompleted = successHistorymap.values().iterator().next();
  Assert.assertNotNull(expectedCompleted);
  RunRecordDetail actualCompleted = store.getRun(programId.run(expectedCompleted.getPid()));
  Assert.assertEquals(expectedCompleted, actualCompleted);

  // get a run record for a suspended run
  RunRecordDetail expectedSuspended = suspendedHistorymap.values().iterator().next();
  Assert.assertNotNull(expectedSuspended);
  RunRecordDetail actualSuspended = store.getRun(programId.run(expectedSuspended.getPid()));
  Assert.assertEquals(expectedSuspended, actualSuspended);

  ProgramRunCluster emptyCluster = new ProgramRunCluster(ProgramRunClusterStatus.PROVISIONED, null, 0);

  // record a Workflow that starts but encounters an error before it runs
  RunId run7 = RunIds.generate(now);
  Map<String, String> emptyArgs = ImmutableMap.of();
  setStart(programId.run(run7.getId()), emptyArgs, emptyArgs, artifactId);
  store.setStop(programId.run(run7.getId()), startTimeSecs + 1,
                ProgramController.State.ERROR.getRunStatus(),
                AppFabricTestHelper.createSourceId(++sourceId));
  RunRecordDetail expectedRunRecord7 = RunRecordDetail.builder()
    .setProgramRunId(programId.run(run7))
    .setStartTime(startTimeSecs)
    .setStopTime(startTimeSecs + 1)
    .setStatus(ProgramRunStatus.FAILED)
    .setProperties(noRuntimeArgsProps)
    .setCluster(emptyCluster)
    .setArtifactId(artifactId)
    .setSourceId(AppFabricTestHelper.createSourceId(sourceId))
    .build();
  RunRecordDetail actualRecord7 = store.getRun(programId.run(run7.getId()));
  Assert.assertEquals(expectedRunRecord7, actualRecord7);

  // record a Workflow that starts and suspends before it runs
  RunId run8 = RunIds.generate(now);
  setStart(programId.run(run8.getId()), emptyArgs, emptyArgs, artifactId);
  store.setSuspend(programId.run(run8.getId()), AppFabricTestHelper.createSourceId(++sourceId), -1);
  RunRecordDetail expectedRunRecord8 = RunRecordDetail.builder()
    .setProgramRunId(programId.run(run8))
    .setStartTime(startTimeSecs)
    .setStatus(ProgramRunStatus.SUSPENDED)
    .setProperties(noRuntimeArgsProps)
    .setCluster(emptyCluster)
    .setArtifactId(artifactId)
    .setSourceId(AppFabricTestHelper.createSourceId(sourceId))
    .build();
  RunRecordDetail actualRecord8 = store.getRun(programId.run(run8.getId()));
  Assert.assertEquals(expectedRunRecord8, actualRecord8);

  // record a Workflow that is killed while suspended
  RunId run9 = RunIds.generate(now);
  setStartAndRunning(programId.run(run9.getId()), artifactId);
  store.setSuspend(programId.run(run9.getId()), AppFabricTestHelper.createSourceId(++sourceId), -1);
  store.setStop(programId.run(run9.getId()), startTimeSecs + 5, ProgramRunStatus.KILLED,
                AppFabricTestHelper.createSourceId(++sourceId));
  RunRecordDetail expectedRunRecord9 = RunRecordDetail.builder()
    .setProgramRunId(programId.run(run9))
    .setStartTime(startTimeSecs)
    .setRunTime(startTimeSecs + 1)
    .setStopTime(startTimeSecs + 5)
    .setStatus(ProgramRunStatus.KILLED)
    .setProperties(noRuntimeArgsProps)
    .setCluster(emptyCluster)
    .setArtifactId(artifactId)
    .setSourceId(AppFabricTestHelper.createSourceId(sourceId))
    .build();
  RunRecordDetail actualRecord9 = store.getRun(programId.run(run9.getId()));
  Assert.assertEquals(expectedRunRecord9, actualRecord9);

  // a non-existent run record should give null
  Assert.assertNull(store.getRun(programId.run(UUID.randomUUID().toString())));

  // searching for history in the wrong time range should give no results
  Assert.assertTrue(store.getRuns(programId, ProgramRunStatus.COMPLETED,
                                  startTimeSecs - 5000, startTimeSecs - 2000, Integer.MAX_VALUE).isEmpty());
  Assert.assertTrue(store.getRuns(programId, ProgramRunStatus.ALL,
                                  startTimeSecs - 5000, startTimeSecs - 2000, Integer.MAX_VALUE).isEmpty());
}
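The setStart and setStartAndRunning helpers used throughout this test are not shown on this page. Below is a minimal sketch of what they might look like, assuming the store exposes setProvisioning, setProvisioned, setStart and setRunning lifecycle methods with the signatures used here; those signatures are assumptions taken from how the helpers are called, not confirmed by the snippet. store and sourceId are the test class's own fields.

// Hypothetical helpers, reconstructed from how they are called above. The exact
// Store method names and signatures (setProvisioning/setProvisioned/setStart/setRunning)
// are assumptions and may differ from the real DefaultStoreTest.
private void setStartAndRunning(ProgramRunId id, ArtifactId artifactId) {
  setStartAndRunning(id, ImmutableMap.of(), ImmutableMap.of(), artifactId);
}

private void setStartAndRunning(ProgramRunId id, Map<String, String> runtimeArgs,
                                Map<String, String> systemArgs, ArtifactId artifactId) {
  // the start time is the timestamp encoded in the run id itself
  long startTimeSecs = RunIds.getTime(RunIds.fromString(id.getRun()), TimeUnit.SECONDS);
  setStart(id, runtimeArgs, systemArgs, artifactId);
  store.setRunning(id, startTimeSecs + 1, null, AppFabricTestHelper.createSourceId(++sourceId));
}

private void setStart(ProgramRunId id, Map<String, String> runtimeArgs,
                      Map<String, String> systemArgs, ArtifactId artifactId) {
  store.setProvisioning(id, runtimeArgs, systemArgs, AppFabricTestHelper.createSourceId(++sourceId), artifactId);
  store.setProvisioned(id, 0, AppFabricTestHelper.createSourceId(++sourceId));
  store.setStart(id, null, systemArgs, AppFabricTestHelper.createSourceId(++sourceId));
}

Read this way, the expected records above line up: the start time comes from the timestamp encoded in the run id, and the setRunTime(startTimeSecs + 1) in expectedRunRecord9 mirrors the run time passed to setRunning.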
Use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata: class DefaultStoreTest, method testWorkflowNodeState.
@Test
public void testWorkflowNodeState() {
  String namespaceName = "namespace1";
  String appName = "app1";
  String workflowName = "workflow1";
  String mapReduceName = "mapReduce1";
  String sparkName = "spark1";
  ApplicationId appId = Ids.namespace(namespaceName).app(appName);
  ProgramId mapReduceProgram = appId.mr(mapReduceName);
  ProgramId sparkProgram = appId.spark(sparkName);
  long currentTime = System.currentTimeMillis();
  String workflowRunId = RunIds.generate(currentTime).getId();
  ProgramRunId workflowRun = appId.workflow(workflowName).run(workflowRunId);
  ArtifactId artifactId = appId.getParent().artifact("testArtifact", "1.0").toApiArtifactId();

  // start Workflow
  setStartAndRunning(workflowRun, artifactId);

  // start MapReduce as a part of Workflow
  Map<String, String> systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, mapReduceName,
                                                   ProgramOptionConstants.WORKFLOW_NAME, workflowName,
                                                   ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId mapReduceRunId = RunIds.generate(currentTime + 10);
  setStartAndRunning(mapReduceProgram.run(mapReduceRunId.getId()), ImmutableMap.of(), systemArgs, artifactId);

  // stop the MapReduce program
  store.setStop(mapReduceProgram.run(mapReduceRunId.getId()), currentTime + 50, ProgramRunStatus.COMPLETED,
                AppFabricTestHelper.createSourceId(++sourceId));

  // start Spark program as a part of Workflow
  systemArgs = ImmutableMap.of(ProgramOptionConstants.WORKFLOW_NODE_ID, sparkName,
                               ProgramOptionConstants.WORKFLOW_NAME, workflowName,
                               ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId sparkRunId = RunIds.generate(currentTime + 60);
  setStartAndRunning(sparkProgram.run(sparkRunId.getId()), ImmutableMap.of(), systemArgs, artifactId);

  // stop the Spark program with failure
  NullPointerException npe = new NullPointerException("dataset not found");
  IllegalArgumentException iae = new IllegalArgumentException("illegal argument", npe);
  store.setStop(sparkProgram.run(sparkRunId.getId()), currentTime + 100, ProgramRunStatus.FAILED,
                new BasicThrowable(iae), AppFabricTestHelper.createSourceId(++sourceId));

  // stop Workflow
  store.setStop(workflowRun, currentTime + 110, ProgramRunStatus.FAILED,
                AppFabricTestHelper.createSourceId(++sourceId));

  List<WorkflowNodeStateDetail> nodeStateDetails = store.getWorkflowNodeStates(workflowRun);
  Map<String, WorkflowNodeStateDetail> workflowNodeStates = new HashMap<>();
  for (WorkflowNodeStateDetail nodeStateDetail : nodeStateDetails) {
    workflowNodeStates.put(nodeStateDetail.getNodeId(), nodeStateDetail);
  }
  Assert.assertEquals(2, workflowNodeStates.size());

  WorkflowNodeStateDetail nodeStateDetail = workflowNodeStates.get(mapReduceName);
  Assert.assertEquals(mapReduceName, nodeStateDetail.getNodeId());
  Assert.assertEquals(NodeStatus.COMPLETED, nodeStateDetail.getNodeStatus());
  Assert.assertEquals(mapReduceRunId.getId(), nodeStateDetail.getRunId());
  Assert.assertNull(nodeStateDetail.getFailureCause());

  nodeStateDetail = workflowNodeStates.get(sparkName);
  Assert.assertEquals(sparkName, nodeStateDetail.getNodeId());
  Assert.assertEquals(NodeStatus.FAILED, nodeStateDetail.getNodeStatus());
  Assert.assertEquals(sparkRunId.getId(), nodeStateDetail.getRunId());
  BasicThrowable failureCause = nodeStateDetail.getFailureCause();
  Assert.assertNotNull(failureCause);
  Assert.assertEquals("illegal argument", failureCause.getMessage());
  Assert.assertEquals(IllegalArgumentException.class.getName(), failureCause.getClassName());
  failureCause = failureCause.getCause();
  Assert.assertNotNull(failureCause);
  Assert.assertEquals("dataset not found", failureCause.getMessage());
  Assert.assertEquals(NullPointerException.class.getName(), failureCause.getClassName());
  Assert.assertNull(failureCause.getCause());
}
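The lookup pattern above (fetch all node states of the workflow run, then index them by node id) is easy to factor into a small helper. Here is a sketch that reuses only the calls shown in this test; the helper name getNodeState is hypothetical.

// Hypothetical helper: index the node states of a workflow run by node id and
// return the detail for one node. Uses only store.getWorkflowNodeStates, which
// is shown in the test above.
private WorkflowNodeStateDetail getNodeState(ProgramRunId workflowRun, String nodeId) {
  Map<String, WorkflowNodeStateDetail> byNodeId = new HashMap<>();
  for (WorkflowNodeStateDetail detail : store.getWorkflowNodeStates(workflowRun)) {
    byNodeId.put(detail.getNodeId(), detail);
  }
  return byNodeId.get(nodeId);
}

With such a helper, the assertions reduce to calls like Assert.assertEquals(NodeStatus.FAILED, getNodeState(workflowRun, sparkName).getNodeStatus()).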
Use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata: class DefaultStoreTest, method testRunningInRangeSimple.
@Test
public void testRunningInRangeSimple() {
  NamespaceId ns = new NamespaceId("d");
  ProgramRunId run1 = ns.app("a1").program(ProgramType.SERVICE, "f1").run(RunIds.generate(20000).getId());
  ProgramRunId run2 = ns.app("a2").program(ProgramType.MAPREDUCE, "f2").run(RunIds.generate(10000).getId());
  ProgramRunId run3 = ns.app("a3").program(ProgramType.WORKER, "f3").run(RunIds.generate(40000).getId());
  ProgramRunId run4 = ns.app("a4").program(ProgramType.SERVICE, "f4").run(RunIds.generate(70000).getId());
  ProgramRunId run5 = ns.app("a5").program(ProgramType.SPARK, "f5").run(RunIds.generate(30000).getId());
  ProgramRunId run6 = ns.app("a6").program(ProgramType.WORKFLOW, "f6").run(RunIds.generate(60000).getId());
  ArtifactId artifactId = ns.artifact("testArtifact", "1.0").toApiArtifactId();
  writeStartRecord(run1, artifactId);
  writeStartRecord(run2, artifactId);
  writeStartRecord(run3, artifactId);
  writeStartRecord(run4, artifactId);
  writeStartRecord(run5, artifactId);
  writeStartRecord(run6, artifactId);

  Assert.assertEquals(runsToTime(run1, run2), runIdsToTime(store.getRunningInRange(1, 30)));
  Assert.assertEquals(runsToTime(run1, run2, run5, run3), runIdsToTime(store.getRunningInRange(30, 50)));
  Assert.assertEquals(runsToTime(run1, run2, run3, run4, run5, run6), runIdsToTime(store.getRunningInRange(1, 71)));
  Assert.assertEquals(runsToTime(run1, run2, run3, run4, run5, run6), runIdsToTime(store.getRunningInRange(50, 71)));
  Assert.assertEquals(ImmutableSet.of(), runIdsToTime(store.getRunningInRange(1, 10)));

  writeStopRecord(run1, 45000);
  writeStopRecord(run3, 55000);
  writeSuspendedRecord(run5);

  Assert.assertEquals(runsToTime(run2, run3, run4, run5, run6), runIdsToTime(store.getRunningInRange(50, 71)));
}
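The helpers writeStartRecord, writeStopRecord, writeSuspendedRecord, runsToTime and runIdsToTime are not shown on this page. The two conversion helpers could be sketched as below, assuming RunIds.fromString and RunIds.getTime are available to recover the timestamp embedded in a run id, and assuming getRunningInRange returns a set of RunId values; both points are inferred from how the helpers are called above, not confirmed by the snippet.

// Hypothetical sketches of the conversion helpers used in the assertions above.
// Both map runs to the second-granularity timestamps encoded in their run ids,
// so the two sides of each assertEquals compare as plain Set<Long> values.
// Assumes java.util.Arrays and java.util.stream.Collectors are imported.
private Set<Long> runsToTime(ProgramRunId... runs) {
  return Arrays.stream(runs)
    .map(run -> RunIds.getTime(RunIds.fromString(run.getRun()), TimeUnit.SECONDS))
    .collect(Collectors.toSet());
}

private Set<Long> runIdsToTime(Set<RunId> runIds) {
  return runIds.stream()
    .map(runId -> RunIds.getTime(runId, TimeUnit.SECONDS))
    .collect(Collectors.toSet());
}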
Use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata: class AppMetadataStoreTest, method addProgramCount.
private List<ProgramRunId> addProgramCount(ProgramId programId, int count) throws Exception {
  List<ProgramRunId> runIds = new ArrayList<>();
  for (int i = 0; i < count; i++) {
    RunId runId = RunIds.generate(i * 1000);
    ProgramRunId run = programId.run(runId);
    runIds.add(run);
    TransactionRunners.run(transactionRunner, context -> {
      AppMetadataStore store = AppMetadataStore.create(context);
      recordProvisionAndStart(run, store);
    });
  }
  return runIds;
}
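The recordProvisionAndStart helper called inside the transaction is not shown here. Based on the recordProgram* calls used in testGetActiveRuns below, a plausible sketch follows; SINGLETON_PROFILE_MAP, ARTIFACT_ID and the sourceId counter are the test class's own fields, and the exact argument values are assumptions.

// Hypothetical sketch: drive a run from PROVISIONING through PROVISIONED to
// STARTING, mirroring the recordProgram* sequence used elsewhere on this page.
// Argument values (node count, twill run id) are assumptions.
private void recordProvisionAndStart(ProgramRunId runId, AppMetadataStore store) throws IOException {
  store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                  AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
  store.recordProgramProvisioned(runId, 1, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
  store.recordProgramStart(runId, UUID.randomUUID().toString(), Collections.emptyMap(),
                           AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
}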
Use of io.cdap.cdap.proto.id.ProgramRunId in project cdap by caskdata: class AppMetadataStoreTest, method testGetActiveRuns.
@Test
public void testGetActiveRuns() throws Exception {
  // write a run record for each state, for two programs in two apps in two namespaces
  String app1 = "app1";
  String app2 = "app2";
  String program1 = "prog1";
  String program2 = "prog2";
  Collection<NamespaceId> namespaces = Arrays.asList(new NamespaceId("ns1"), new NamespaceId("ns2"));
  Collection<ApplicationId> apps = namespaces.stream()
    .flatMap(ns -> Stream.of(ns.app(app1), ns.app(app2)))
    .collect(Collectors.toList());
  Collection<ProgramId> programs = apps.stream()
    .flatMap(app -> Stream.of(app.mr(program1), app.mr(program2)))
    .collect(Collectors.toList());

  for (ProgramId programId : programs) {
    TransactionRunners.run(transactionRunner, context -> {
      AppMetadataStore store = AppMetadataStore.create(context);

      // one run in pending state
      ProgramRunId runId = programId.run(RunIds.generate());
      store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                      AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);

      // one run in starting state
      runId = programId.run(RunIds.generate());
      store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                      AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
      store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      store.recordProgramStart(runId, UUID.randomUUID().toString(), Collections.emptyMap(),
                               AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));

      // one run in running state
      runId = programId.run(RunIds.generate());
      store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                      AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
      store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      String twillRunId = UUID.randomUUID().toString();
      store.recordProgramStart(runId, twillRunId, Collections.emptyMap(),
                               AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      store.recordProgramRunning(runId, System.currentTimeMillis(), twillRunId,
                                 AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));

      // one run in suspended state
      runId = programId.run(RunIds.generate());
      store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                      AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
      store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      twillRunId = UUID.randomUUID().toString();
      store.recordProgramStart(runId, twillRunId, Collections.emptyMap(),
                               AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      store.recordProgramRunning(runId, System.currentTimeMillis(), twillRunId,
                                 AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      store.recordProgramSuspend(runId, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()),
                                 System.currentTimeMillis());

      // one run in stopping state
      runId = programId.run(RunIds.generate());
      store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                      AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
      store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      twillRunId = UUID.randomUUID().toString();
      store.recordProgramStart(runId, twillRunId, Collections.emptyMap(),
                               AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      store.recordProgramRunning(runId, System.currentTimeMillis(), twillRunId,
                                 AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      store.recordProgramStopping(runId, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()),
                                  System.currentTimeMillis(), System.currentTimeMillis() + 1000);

      // one run in each stopped (end) state
      for (ProgramRunStatus runStatus : ProgramRunStatus.values()) {
        if (!runStatus.isEndState()) {
          continue;
        }
        runId = programId.run(RunIds.generate());
        store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                        AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
        store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
        twillRunId = UUID.randomUUID().toString();
        store.recordProgramStart(runId, twillRunId, Collections.emptyMap(),
                                 AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
        store.recordProgramStop(runId, System.currentTimeMillis(), runStatus, null,
                                AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      }
    });
  }

  Set<ProgramRunStatus> activeStates = new HashSet<>();
  activeStates.add(ProgramRunStatus.PENDING);
  activeStates.add(ProgramRunStatus.STARTING);
  activeStates.add(ProgramRunStatus.RUNNING);
  activeStates.add(ProgramRunStatus.SUSPENDED);
  activeStates.add(ProgramRunStatus.STOPPING);

  // test the instance-level and namespace-level methods
  TransactionRunners.run(transactionRunner, context -> {
    AppMetadataStore store = AppMetadataStore.create(context);
    Map<ProgramId, Set<ProgramRunStatus>> allExpected = new HashMap<>();
    Map<ProgramId, Set<ProgramRunStatus>> allActual = new HashMap<>();

    // check active runs per namespace
    for (NamespaceId namespace : namespaces) {
      Map<ProgramRunId, RunRecordDetail> activeRuns = store.getActiveRuns(namespace);

      // we expect five active runs per program, with four programs in each namespace
      Map<ProgramId, Set<ProgramRunStatus>> expected = new HashMap<>();
      expected.put(namespace.app(app1).mr(program1), activeStates);
      expected.put(namespace.app(app1).mr(program2), activeStates);
      expected.put(namespace.app(app2).mr(program1), activeStates);
      expected.put(namespace.app(app2).mr(program2), activeStates);

      Map<ProgramId, Set<ProgramRunStatus>> actual = new HashMap<>();
      actual.put(namespace.app(app1).mr(program1), new HashSet<>());
      actual.put(namespace.app(app1).mr(program2), new HashSet<>());
      actual.put(namespace.app(app2).mr(program1), new HashSet<>());
      actual.put(namespace.app(app2).mr(program2), new HashSet<>());
      allActual.putAll(actual);

      for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : activeRuns.entrySet()) {
        ProgramId programId = activeRun.getKey().getParent();
        Assert.assertTrue("Unexpected program returned: " + programId,
                          actual.containsKey(activeRun.getKey().getParent()));
        actual.get(programId).add(activeRun.getValue().getStatus());
      }
      Assert.assertEquals(expected, actual);
      allExpected.putAll(expected);
    }

    // test the instance-level method
    for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : store.getActiveRuns(x -> true).entrySet()) {
      ProgramId programId = activeRun.getKey().getParent();
      Assert.assertTrue("Unexpected program returned: " + programId,
                        allActual.containsKey(activeRun.getKey().getParent()));
      allActual.get(programId).add(activeRun.getValue().getStatus());
    }
    Assert.assertEquals(allExpected, allActual);

    // test the count methods
    Assert.assertEquals(store.getActiveRuns(x -> true).size(), store.countActiveRuns(null));
    Assert.assertEquals(store.getActiveRuns(x -> true).size(), store.countActiveRuns(100));
    Assert.assertEquals(2, store.countActiveRuns(2));
  });

  // check active runs per app
  for (ApplicationId app : apps) {
    TransactionRunners.run(transactionRunner, context -> {
      AppMetadataStore store = AppMetadataStore.create(context);
      Map<ProgramRunId, RunRecordDetail> activeRuns = store.getActiveRuns(app);

      // we expect five active runs per program, with two programs in each app
      Map<ProgramId, Set<ProgramRunStatus>> expected = new HashMap<>();
      expected.put(app.mr(program1), activeStates);
      expected.put(app.mr(program2), activeStates);

      Map<ProgramId, Set<ProgramRunStatus>> actual = new HashMap<>();
      actual.put(app.mr(program1), new HashSet<>());
      actual.put(app.mr(program2), new HashSet<>());
      for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : activeRuns.entrySet()) {
        ProgramId programId = activeRun.getKey().getParent();
        Assert.assertTrue("Unexpected program returned: " + programId,
                          actual.containsKey(activeRun.getKey().getParent()));
        actual.get(programId).add(activeRun.getValue().getStatus());
      }
      Assert.assertEquals(expected, actual);
    });
  }

  // check active runs per program
  for (ProgramId program : programs) {
    TransactionRunners.run(transactionRunner, context -> {
      AppMetadataStore store = AppMetadataStore.create(context);
      Map<ProgramRunId, RunRecordDetail> activeRuns = store.getActiveRuns(program);
      Set<ProgramRunStatus> actual = new HashSet<>();
      for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : activeRuns.entrySet()) {
        Assert.assertEquals(program, activeRun.getKey().getParent());
        actual.add(activeRun.getValue().getStatus());
      }
      Assert.assertEquals(activeStates, actual);
    });
  }
}
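The provisioning, provisioned, start, running sequence repeats several times in this test. For readability it could be factored into a helper; the sketch below reuses only the calls shown above, while the helper itself (recordRunningRun) and its return value are hypothetical.

// Hypothetical helper factoring out the repeated lifecycle sequence above:
// record one new run of the given program and drive it into the RUNNING state.
private ProgramRunId recordRunningRun(AppMetadataStore store, ProgramId programId) throws IOException {
  ProgramRunId runId = programId.run(RunIds.generate());
  store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP,
                                  AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
  store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
  String twillRunId = UUID.randomUUID().toString();
  store.recordProgramStart(runId, twillRunId, Collections.emptyMap(),
                           AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
  store.recordProgramRunning(runId, System.currentTimeMillis(), twillRunId,
                             AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
  return runId;
}

With such a helper, the "one run in running state" block above would collapse to a single recordRunningRun(store, programId) call, and the suspended and stopping cases would only add their final recordProgramSuspend or recordProgramStopping call.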