Usage example of io.cdap.cdap.proto.id.ProgramId in the cdap project (caskdata), taken from the class DefaultStoreTest, method testWorkflowNodeState.
@Test
public void testWorkflowNodeState() {
  // Identifiers for a workflow that drives a MapReduce node and a Spark node.
  String workflowName = "workflow1";
  String mrNodeId = "mapReduce1";
  String sparkNodeId = "spark1";
  ApplicationId appId = Ids.namespace("namespace1").app("app1");
  ProgramId mrProgram = appId.mr(mrNodeId);
  ProgramId sparkProgram = appId.spark(sparkNodeId);

  long now = System.currentTimeMillis();
  String workflowRunId = RunIds.generate(now).getId();
  ProgramRunId workflowRun = appId.workflow(workflowName).run(workflowRunId);
  ArtifactId artifactId = appId.getParent().artifact("testArtifact", "1.0").toApiArtifactId();

  // The workflow itself starts first.
  setStartAndRunning(workflowRun, artifactId);

  // Run the MapReduce node as part of the workflow and let it complete successfully.
  Map<String, String> systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, mrNodeId,
      ProgramOptionConstants.WORKFLOW_NAME, workflowName,
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId mrRunId = RunIds.generate(now + 10);
  setStartAndRunning(mrProgram.run(mrRunId.getId()), ImmutableMap.of(), systemArgs, artifactId);
  store.setStop(mrProgram.run(mrRunId.getId()), now + 50, ProgramRunStatus.COMPLETED,
                AppFabricTestHelper.createSourceId(++sourceId));

  // Run the Spark node as part of the workflow and fail it with a nested exception chain.
  systemArgs = ImmutableMap.of(
      ProgramOptionConstants.WORKFLOW_NODE_ID, sparkNodeId,
      ProgramOptionConstants.WORKFLOW_NAME, workflowName,
      ProgramOptionConstants.WORKFLOW_RUN_ID, workflowRunId);
  RunId sparkRunId = RunIds.generate(now + 60);
  setStartAndRunning(sparkProgram.run(sparkRunId.getId()), ImmutableMap.of(), systemArgs, artifactId);
  NullPointerException npe = new NullPointerException("dataset not found");
  IllegalArgumentException iae = new IllegalArgumentException("illegal argument", npe);
  store.setStop(sparkProgram.run(sparkRunId.getId()), now + 100, ProgramRunStatus.FAILED,
                new BasicThrowable(iae), AppFabricTestHelper.createSourceId(++sourceId));

  // Finally mark the workflow run itself as FAILED.
  store.setStop(workflowRun, now + 110, ProgramRunStatus.FAILED,
                AppFabricTestHelper.createSourceId(++sourceId));

  // Index the persisted node states by node id for lookup.
  Map<String, WorkflowNodeStateDetail> statesByNode = new HashMap<>();
  for (WorkflowNodeStateDetail detail : store.getWorkflowNodeStates(workflowRun)) {
    statesByNode.put(detail.getNodeId(), detail);
  }
  Assert.assertEquals(2, statesByNode.size());

  // The MapReduce node completed cleanly with no failure cause recorded.
  WorkflowNodeStateDetail mrState = statesByNode.get(mrNodeId);
  Assert.assertEquals(mrNodeId, mrState.getNodeId());
  Assert.assertEquals(NodeStatus.COMPLETED, mrState.getNodeStatus());
  Assert.assertEquals(mrRunId.getId(), mrState.getRunId());
  Assert.assertNull(mrState.getFailureCause());

  // The Spark node failed, and the full cause chain (IAE -> NPE) was preserved.
  WorkflowNodeStateDetail sparkState = statesByNode.get(sparkNodeId);
  Assert.assertEquals(sparkNodeId, sparkState.getNodeId());
  Assert.assertEquals(NodeStatus.FAILED, sparkState.getNodeStatus());
  Assert.assertEquals(sparkRunId.getId(), sparkState.getRunId());
  BasicThrowable cause = sparkState.getFailureCause();
  Assert.assertNotNull(cause);
  Assert.assertEquals("illegal argument", cause.getMessage());
  Assert.assertEquals(IllegalArgumentException.class.getName(), cause.getClassName());
  cause = cause.getCause();
  Assert.assertNotNull(cause);
  Assert.assertEquals("dataset not found", cause.getMessage());
  Assert.assertEquals(NullPointerException.class.getName(), cause.getClassName());
  Assert.assertNull(cause.getCause());
}
Usage example of io.cdap.cdap.proto.id.ProgramId in the cdap project (caskdata), taken from the class ProgramLifecycleHttpHandlerTest, method testStartProgramWithDisabledProfile.
@Test
public void testStartProgramWithDisabledProfile() throws Exception {
  // Create a profile and immediately disable it; starting a program with a
  // disabled profile must be rejected.
  ProfileId profileId = new NamespaceId(TEST_NAMESPACE1).profile("MyProfile");
  Profile profile = new Profile("MyProfile", Profile.NATIVE.getLabel(),
                                Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(),
                                Profile.NATIVE.getProvisioner());
  putProfile(profileId, profile, 200);
  disableProfile(profileId, 200);

  // Deploy the app; its workflow should be stopped initially.
  deploy(AppWithWorkflow.class, 200, Constants.Gateway.API_VERSION_3_TOKEN, TEST_NAMESPACE1);
  ProgramId programId = new NamespaceId(TEST_NAMESPACE1)
      .app(AppWithWorkflow.NAME)
      .workflow(AppWithWorkflow.SampleWorkflow.NAME);
  Assert.assertEquals(STOPPED, getProgramStatus(programId));

  // Starting with the disabled profile responds 409 and leaves the workflow stopped.
  startProgram(programId,
               Collections.singletonMap(SystemArguments.PROFILE_NAME, profileId.getScopedName()),
               409);
  Assert.assertEquals(STOPPED, getProgramStatus(programId));

  // The native profile is always enabled, so the start succeeds; the run then
  // fails because the required runtime arguments are not supplied.
  int failedRuns = getProgramRuns(programId, ProgramRunStatus.FAILED).size();
  startProgram(programId,
               Collections.singletonMap(SystemArguments.PROFILE_NAME, ProfileId.NATIVE.getScopedName()),
               200);
  // Wait until exactly one more FAILED run shows up.
  Tasks.waitFor(failedRuns + 1, () -> getProgramRuns(programId, ProgramRunStatus.FAILED).size(),
                60, TimeUnit.SECONDS);
}
Usage example of io.cdap.cdap.proto.id.ProgramId in the cdap project (caskdata), taken from the class AppMetadataStoreTest, method testGetActiveRuns.
// Verifies AppMetadataStore active-run queries at instance, namespace, app, and program scope.
// For each of 8 programs (2 namespaces x 2 apps x 2 programs) it records one run per
// possible state — the 5 active states (PENDING, STARTING, RUNNING, SUSPENDED, STOPPING)
// plus one run for every terminal state — and checks that only active runs are returned.
@Test
public void testGetActiveRuns() throws Exception {
// write a run record for each state for two programs in two apps in two namespaces
String app1 = "app1";
String app2 = "app2";
String program1 = "prog1";
String program2 = "prog2";
Collection<NamespaceId> namespaces = Arrays.asList(new NamespaceId("ns1"), new NamespaceId("ns2"));
Collection<ApplicationId> apps = namespaces.stream().flatMap(ns -> Stream.of(ns.app(app1), ns.app(app2))).collect(Collectors.toList());
Collection<ProgramId> programs = apps.stream().flatMap(app -> Stream.of(app.mr(program1), app.mr(program2))).collect(Collectors.toList());
for (ProgramId programId : programs) {
TransactionRunners.run(transactionRunner, context -> {
AppMetadataStore store = AppMetadataStore.create(context);
// one run in pending state (provisioning recorded, nothing further)
ProgramRunId runId = programId.run(RunIds.generate());
store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
// one run in starting state (provisioned and started, but not yet running)
runId = programId.run(RunIds.generate());
store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
store.recordProgramStart(runId, UUID.randomUUID().toString(), Collections.emptyMap(), AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
// one run in running state
runId = programId.run(RunIds.generate());
store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
String twillRunId = UUID.randomUUID().toString();
store.recordProgramStart(runId, twillRunId, Collections.emptyMap(), AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
store.recordProgramRunning(runId, System.currentTimeMillis(), twillRunId, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
// one in suspended state (running run that was then suspended)
runId = programId.run(RunIds.generate());
store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
twillRunId = UUID.randomUUID().toString();
store.recordProgramStart(runId, twillRunId, Collections.emptyMap(), AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
store.recordProgramRunning(runId, System.currentTimeMillis(), twillRunId, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
store.recordProgramSuspend(runId, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), System.currentTimeMillis());
// one run in stopping state (running run with a stop requested)
runId = programId.run(RunIds.generate());
store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
twillRunId = UUID.randomUUID().toString();
store.recordProgramStart(runId, twillRunId, Collections.emptyMap(), AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
store.recordProgramRunning(runId, System.currentTimeMillis(), twillRunId, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
store.recordProgramStopping(runId, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), System.currentTimeMillis(), System.currentTimeMillis() + 1000);
// one run in each stopped (terminal) state; these must NOT appear in active-run queries
for (ProgramRunStatus runStatus : ProgramRunStatus.values()) {
if (!runStatus.isEndState()) {
continue;
}
runId = programId.run(RunIds.generate());
store.recordProgramProvisioning(runId, Collections.emptyMap(), SINGLETON_PROFILE_MAP, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()), ARTIFACT_ID);
store.recordProgramProvisioned(runId, 3, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
twillRunId = UUID.randomUUID().toString();
store.recordProgramStart(runId, twillRunId, Collections.emptyMap(), AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
store.recordProgramStop(runId, System.currentTimeMillis(), runStatus, null, AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
}
});
}
// the full set of non-terminal states every program is expected to report
Set<ProgramRunStatus> activeStates = new HashSet<>();
activeStates.add(ProgramRunStatus.PENDING);
activeStates.add(ProgramRunStatus.STARTING);
activeStates.add(ProgramRunStatus.RUNNING);
activeStates.add(ProgramRunStatus.SUSPENDED);
activeStates.add(ProgramRunStatus.STOPPING);
// test the instance level method and namespace level method
TransactionRunners.run(transactionRunner, context -> {
AppMetadataStore store = AppMetadataStore.create(context);
Map<ProgramId, Set<ProgramRunStatus>> allExpected = new HashMap<>();
Map<ProgramId, Set<ProgramRunStatus>> allActual = new HashMap<>();
// check active runs per namespace
for (NamespaceId namespace : namespaces) {
Map<ProgramRunId, RunRecordDetail> activeRuns = store.getActiveRuns(namespace);
// we expect 5 active runs per program (one per active state), with 4 programs in each namespace
Map<ProgramId, Set<ProgramRunStatus>> expected = new HashMap<>();
expected.put(namespace.app(app1).mr(program1), activeStates);
expected.put(namespace.app(app1).mr(program2), activeStates);
expected.put(namespace.app(app2).mr(program1), activeStates);
expected.put(namespace.app(app2).mr(program2), activeStates);
Map<ProgramId, Set<ProgramRunStatus>> actual = new HashMap<>();
actual.put(namespace.app(app1).mr(program1), new HashSet<>());
actual.put(namespace.app(app1).mr(program2), new HashSet<>());
actual.put(namespace.app(app2).mr(program1), new HashSet<>());
actual.put(namespace.app(app2).mr(program2), new HashSet<>());
allActual.putAll(actual);
// bucket each returned run's status under its program, rejecting unknown programs
for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : activeRuns.entrySet()) {
ProgramId programId = activeRun.getKey().getParent();
Assert.assertTrue("Unexpected program returned: " + programId, actual.containsKey(activeRun.getKey().getParent()));
actual.get(programId).add(activeRun.getValue().getStatus());
}
Assert.assertEquals(expected, actual);
allExpected.putAll(expected);
}
// test the instance level method (no namespace filter — everything at once)
for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : store.getActiveRuns(x -> true).entrySet()) {
ProgramId programId = activeRun.getKey().getParent();
Assert.assertTrue("Unexpected program returned: " + programId, allActual.containsKey(activeRun.getKey().getParent()));
allActual.get(programId).add(activeRun.getValue().getStatus());
}
Assert.assertEquals(allExpected, allActual);
// test the count-all method; a limit caps the count at that limit
Assert.assertEquals(store.getActiveRuns(x -> true).size(), store.countActiveRuns(null));
Assert.assertEquals(store.getActiveRuns(x -> true).size(), store.countActiveRuns(100));
Assert.assertEquals(2, store.countActiveRuns(2));
});
// check active runs per app
for (ApplicationId app : apps) {
TransactionRunners.run(transactionRunner, context -> {
AppMetadataStore store = AppMetadataStore.create(context);
Map<ProgramRunId, RunRecordDetail> activeRuns = store.getActiveRuns(app);
// we expect 5 active runs per program (one per active state), with 2 programs in each app
Map<ProgramId, Set<ProgramRunStatus>> expected = new HashMap<>();
expected.put(app.mr(program1), activeStates);
expected.put(app.mr(program2), activeStates);
Map<ProgramId, Set<ProgramRunStatus>> actual = new HashMap<>();
actual.put(app.mr(program1), new HashSet<>());
actual.put(app.mr(program2), new HashSet<>());
for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : activeRuns.entrySet()) {
ProgramId programId = activeRun.getKey().getParent();
Assert.assertTrue("Unexpected program returned: " + programId, actual.containsKey(activeRun.getKey().getParent()));
actual.get(programId).add(activeRun.getValue().getStatus());
}
Assert.assertEquals(expected, actual);
});
}
// check active runs per program: each program reports exactly the 5 active states
for (ProgramId program : programs) {
TransactionRunners.run(transactionRunner, context -> {
AppMetadataStore store = AppMetadataStore.create(context);
Map<ProgramRunId, RunRecordDetail> activeRuns = store.getActiveRuns(program);
Set<ProgramRunStatus> actual = new HashSet<>();
for (Map.Entry<ProgramRunId, RunRecordDetail> activeRun : activeRuns.entrySet()) {
Assert.assertEquals(program, activeRun.getKey().getParent());
actual.add(activeRun.getValue().getStatus());
}
Assert.assertEquals(activeStates, actual);
});
}
}
Usage example of io.cdap.cdap.proto.id.ProgramId in the cdap project (caskdata), taken from the class AppMetadataStoreTest, method testScanRunningInRangeWithBatch.
@Test
public void testScanRunningInRangeWithBatch() throws Exception {
  // Persist 100 completed runs at strictly increasing times and remember each
  // run's start time (milliseconds) for verifying the range scans below.
  TreeSet<Long> startTimes = new TreeSet<>();
  for (int i = 0; i < 100; ++i) {
    ApplicationId app = NamespaceId.DEFAULT.app("app" + i);
    ProgramId program = app.program(ProgramType.values()[i % ProgramType.values().length], "program" + i);
    RunId runId = RunIds.generate(runIdTime.incrementAndGet());
    ProgramRunId programRunId = program.run(runId);
    startTimes.add(RunIds.getTime(runId, TimeUnit.MILLISECONDS));
    // Effectively-final copy of the loop index for use inside the lambda below.
    int statusIndex = i;
    // Each persistence call needs a fresh, monotonically increasing sourceId.
    TransactionRunners.run(transactionRunner, context -> {
      AppMetadataStore metadataStore = AppMetadataStore.create(context);
      // Provision + start, then mark running and finally stopped with a rotating end status.
      recordProvisionAndStart(programRunId, metadataStore);
      metadataStore.recordProgramRunning(programRunId, RunIds.getTime(runId, TimeUnit.SECONDS) + 1, null,
          AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
      metadataStore.recordProgramStop(programRunId, RunIds.getTime(runId, TimeUnit.SECONDS),
          STOP_STATUSES.get(statusIndex % STOP_STATUSES.size()), null,
          AppFabricTestHelper.createSourceId(sourceId.incrementAndGet()));
    });
  }
  // Full scan covers everything.
  runScan(startTimes, 0, Long.MAX_VALUE);
  // In all assertions below, TreeSet.subSet and the metadata-store scan both treat
  // the start time as inclusive and the end time as exclusive.
  runScan(startTimes.subSet(30 * 10000L, 90 * 10000L),
      TimeUnit.MILLISECONDS.toSeconds(30 * 10000), TimeUnit.MILLISECONDS.toSeconds(90 * 10000));
  runScan(startTimes.subSet(90 * 10000L, 101 * 10000L),
      TimeUnit.MILLISECONDS.toSeconds(90 * 10000), TimeUnit.MILLISECONDS.toSeconds(101 * 10000));
  // Entirely after the recorded range: empty result.
  runScan(startTimes.subSet(101 * 10000L, 200 * 10000L),
      TimeUnit.MILLISECONDS.toSeconds(101 * 10000), TimeUnit.MILLISECONDS.toSeconds(200 * 10000));
  // Identical start and end time: empty result.
  runScan(startTimes.subSet(31 * 10000L, 31 * 10000L),
      TimeUnit.MILLISECONDS.toSeconds(31 * 10000), TimeUnit.MILLISECONDS.toSeconds(31 * 10000));
  // A single unit between start and end time.
  runScan(startTimes.subSet(30 * 10000L, 31 * 10000L),
      TimeUnit.MILLISECONDS.toSeconds(30 * 10000), TimeUnit.MILLISECONDS.toSeconds(31 * 10000));
  // Entirely before the recorded range: empty result.
  runScan(startTimes.subSet(1000L, 10000L),
      TimeUnit.MILLISECONDS.toSeconds(1000), TimeUnit.MILLISECONDS.toSeconds(10000));
}
Usage example of io.cdap.cdap.proto.id.ProgramId in the cdap project (caskdata), taken from the class AppMetadataStoreTest, method testStoppingStatusPersistence.
@Test
public void testStoppingStatusPersistence() {
  // A run that has been asked to stop must read back with STOPPING status.
  ProgramId program = NamespaceId.DEFAULT.app("app").program(ProgramType.WORKFLOW, "program");
  RunId runId = RunIds.generate(runIdTime.incrementAndGet());
  ProgramRunId programRunId = program.run(runId);
  TransactionRunners.run(transactionRunner, context -> {
    AppMetadataStore metadataStore = AppMetadataStore.create(context);
    recordProgramStopping(runId, programRunId, metadataStore);
    RunRecordDetail record = metadataStore.getRun(programRunId);
    Assert.assertEquals(ProgramRunStatus.STOPPING, record.getStatus());
  });
}
End of aggregated usage examples for io.cdap.cdap.proto.id.ProgramId.