Use of co.cask.cdap.test.WorkflowManager in project cdap by caskdata.
The class AuthorizationTest, method testScheduleAuth.
@Test
public void testScheduleAuth() throws Exception {
  createAuthNamespace();
  ApplicationId appId = AUTH_NAMESPACE.app(AppWithSchedule.class.getSimpleName());
  Map<EntityId, Set<Action>> neededPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
    .put(appId, EnumSet.of(Action.ADMIN))
    .put(AUTH_NAMESPACE.artifact(AppWithSchedule.class.getSimpleName(), "1.0-SNAPSHOT"), EnumSet.of(Action.ADMIN))
    .put(AUTH_NAMESPACE.dataset(AppWithSchedule.INPUT_NAME), EnumSet.of(Action.ADMIN))
    .put(AUTH_NAMESPACE.dataset(AppWithSchedule.OUTPUT_NAME), EnumSet.of(Action.ADMIN))
    .put(AUTH_NAMESPACE.datasetType(ObjectStore.class.getName()), EnumSet.of(Action.ADMIN))
    .build();
  setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges);
  ApplicationManager appManager = deployApplication(AUTH_NAMESPACE, AppWithSchedule.class);
  String workflowName = AppWithSchedule.SampleWorkflow.class.getSimpleName();
  ProgramId workflowID = new ProgramId(AUTH_NAMESPACE.getNamespace(), AppWithSchedule.class.getSimpleName(),
                                       ProgramType.WORKFLOW, workflowName);
  cleanUpEntities.add(workflowID);
  final WorkflowManager workflowManager = appManager.getWorkflowManager(workflowName);
  ScheduleManager scheduleManager = workflowManager.getSchedule(AppWithSchedule.EVERY_HOUR_SCHEDULE);
  // switch to BOB
  SecurityRequestContext.setUserId(BOB.getName());
  // try to resume the schedule as BOB. It should fail since BOB does not have EXECUTE on the program
  try {
    scheduleManager.resume();
    Assert.fail("Resuming schedule should have failed since BOB does not have EXECUTE on the program");
  } catch (UnauthorizedException e) {
    // expected
  }
  // BOB should also not be able to see the status of the schedule
  try {
    scheduleManager.status(HttpURLConnection.HTTP_FORBIDDEN);
    Assert.fail("Getting schedule status should have failed since BOB does not have any privilege on the program");
  } catch (UnauthorizedException e) {
    // expected
  }
  // give BOB READ permission on the workflow
  grantAndAssertSuccess(workflowID, BOB, EnumSet.of(Action.READ));
  // switch to BOB
  SecurityRequestContext.setUserId(BOB.getName());
  // try to resume the schedule as BOB. It should fail since BOB has READ but not EXECUTE on the workflow
  try {
    scheduleManager.resume();
    Assert.fail("Resuming schedule should have failed since BOB does not have EXECUTE on the program");
  } catch (UnauthorizedException e) {
    // expected
  }
  // but BOB should be able to get the schedule status now
  Assert.assertEquals(ProgramScheduleStatus.SUSPENDED.name(), scheduleManager.status(HttpURLConnection.HTTP_OK));
  // give BOB EXECUTE permission on the workflow
  grantAndAssertSuccess(workflowID, BOB, EnumSet.of(Action.EXECUTE));
  // switch to BOB
  SecurityRequestContext.setUserId(BOB.getName());
  // try to resume the schedule. This should pass and the workflow should run
  scheduleManager.resume();
  Assert.assertEquals(ProgramScheduleStatus.SCHEDULED.name(), scheduleManager.status(HttpURLConnection.HTTP_OK));
  // suspend the schedule so that it does not start running again
  scheduleManager.suspend();
  Assert.assertEquals(ProgramScheduleStatus.SUSPENDED.name(), scheduleManager.status(HttpURLConnection.HTTP_OK));
  ScheduleId scheduleId = new ScheduleId(appId.getNamespace(), appId.getApplication(), appId.getVersion(),
                                         "testSchedule");
  ScheduleDetail scheduleDetail = new ScheduleDetail(AUTH_NAMESPACE.getNamespace(),
                                                     AppWithSchedule.class.getSimpleName(), "1.0-SNAPSHOT",
                                                     "testSchedule", "Something 2",
                                                     new ScheduleProgramInfo(SchedulableProgramType.WORKFLOW,
                                                                             workflowName),
                                                     Collections.<String, String>emptyMap(),
                                                     new TimeTrigger("*/1 * * * *"),
                                                     Collections.<Constraint>emptyList(),
                                                     TimeUnit.HOURS.toMillis(6), null);
  try {
    addSchedule(scheduleId, scheduleDetail);
    Assert.fail("Adding schedule should fail since BOB does not have ADMIN on the app");
  } catch (UnauthorizedException e) {
    // expected
  }
  // grant BOB ADMIN on the app
  grantAndAssertSuccess(appId, BOB, EnumSet.of(Action.ADMIN));
  // add schedule should succeed
  addSchedule(scheduleId, scheduleDetail);
  Assert.assertEquals(ProgramScheduleStatus.SUSPENDED.name(),
                      workflowManager.getSchedule(scheduleId.getSchedule()).status(HttpURLConnection.HTTP_OK));
  // update schedule should succeed
  updateSchedule(scheduleId, scheduleDetail);
  Assert.assertEquals(ProgramScheduleStatus.SUSPENDED.name(),
                      workflowManager.getSchedule(scheduleId.getSchedule()).status(HttpURLConnection.HTTP_OK));
  // revoke ADMIN from BOB
  getAuthorizer().revoke(Authorizable.fromEntityId(appId), BOB, EnumSet.of(Action.ADMIN));
  try {
    // delete schedule should fail since we revoked the ADMIN privilege from BOB
    deleteSchedule(scheduleId);
    Assert.fail("Deleting schedule should fail since BOB does not have ADMIN on the app");
  } catch (UnauthorizedException e) {
    // expected
  }
  try {
    updateSchedule(scheduleId, scheduleDetail);
    Assert.fail("Updating schedule should fail since BOB does not have ADMIN on the app");
  } catch (UnauthorizedException e) {
    // expected
  }
  // grant BOB ADMIN on the app again
  grantAndAssertSuccess(appId, BOB, EnumSet.of(Action.ADMIN));
  deleteSchedule(scheduleId);
  workflowManager.getSchedule(scheduleId.getSchedule()).status(HttpURLConnection.HTTP_NOT_FOUND);
  // switch back to ALICE
  SecurityRequestContext.setUserId(ALICE.getName());
}
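The try/fail/catch blocks above repeat the same expect-UnauthorizedException pattern five times. A minimal sketch of a helper that would factor this out (the AuthCheck interface and the assertUnauthorized name are hypothetical, not part of the CDAP test framework):

  // Hypothetical helper, shown for illustration only.
  private interface AuthCheck {
    void run() throws Exception;
  }

  private static void assertUnauthorized(String failureMessage, AuthCheck check) throws Exception {
    try {
      check.run();
      Assert.fail(failureMessage);
    } catch (UnauthorizedException e) {
      // expected
    }
  }

With that in place, each negative check collapses to a single call, for example assertUnauthorized("Resuming schedule should have failed since BOB does not have EXECUTE on the program", scheduleManager::resume).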
Use of co.cask.cdap.test.WorkflowManager in project cdap by caskdata.
The class SparkTest, method testSparkProgramStatusSchedule.
@Test
public void testSparkProgramStatusSchedule() throws Exception {
  ApplicationManager appManager = deploy(TestSparkApp.class);
  ScheduleId scheduleId = new ScheduleId(NamespaceId.DEFAULT.getNamespace(), TestSparkApp.class.getSimpleName(),
                                         "schedule");
  appManager.enableSchedule(scheduleId);
  // Start the upstream program
  appManager.getSparkManager(TestSparkApp.ScalaClassicSpark.class.getSimpleName()).start();
  // Wait for the downstream workflow to complete
  WorkflowManager workflowManager =
    appManager.getWorkflowManager(TestSparkApp.TriggeredWorkflow.class.getSimpleName());
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  // Run again with the Kryo serializer
  appManager.getSparkManager(TestSparkApp.ScalaClassicSpark.class.getSimpleName())
    .start(Collections.singletonMap("spark.serializer", "org.apache.spark.serializer.KryoSerializer"));
  // Wait for the downstream workflow to complete again
  workflowManager.waitForRuns(ProgramRunStatus.COMPLETED, 2, 5, TimeUnit.MINUTES);
}
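For context, the "schedule" this test enables is a program-status trigger declared in TestSparkApp. A sketch of what such a declaration typically looks like in Application.configure() with CDAP's ScheduleBuilder API, assuming the trigger fires when the upstream Spark program completes (the exact declaration in TestSparkApp may differ):

  // Sketch: wire ScalaClassicSpark's COMPLETED status to TriggeredWorkflow.
  schedule(buildSchedule("schedule", ProgramType.WORKFLOW, TriggeredWorkflow.class.getSimpleName())
             .triggerOnProgramStatus(ProgramType.SPARK, ScalaClassicSpark.class.getSimpleName(),
                                     ProgramStatus.COMPLETED));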
Use of co.cask.cdap.test.WorkflowManager in project cdap by caskdata.
The class WikipediaPipelineAppTest, method test.
@Test
@Category(XSlowTests.class)
public void test() throws Exception {
  WikipediaPipelineApp.WikipediaAppConfig appConfig = new WikipediaPipelineApp.WikipediaAppConfig();
  AppRequest<WikipediaPipelineApp.WikipediaAppConfig> appRequest = new AppRequest<>(ARTIFACT_SUMMARY, appConfig);
  ApplicationManager appManager = deployApplication(APP_ID, appRequest);
  // Set up input streams with test data
  createTestData();
  WorkflowManager workflowManager = appManager.getWorkflowManager(WikipediaPipelineWorkflow.NAME);
  // Test with the default threshold. The workflow should not proceed beyond the first condition.
  testWorkflow(workflowManager, appConfig, 1);
  // Test with a reduced threshold, so the workflow proceeds beyond the first predicate
  testWorkflow(workflowManager, appConfig, 2, 1);
  // Test k-means clustering
  appConfig = new WikipediaPipelineApp.WikipediaAppConfig("kmeans");
  appRequest = new AppRequest<>(ARTIFACT_SUMMARY, appConfig);
  appManager = deployApplication(APP_ID, appRequest);
  workflowManager = appManager.getWorkflowManager(WikipediaPipelineWorkflow.NAME);
  testWorkflow(workflowManager, appConfig, 3, 1);
}
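The testWorkflow helper is defined elsewhere in WikipediaPipelineAppTest and is not shown in this excerpt. A plausible sketch of its shape, assuming the integer arguments are the expected number of completed runs and an optional reduced threshold passed as a runtime argument (the argument name min.pages.threshold is an assumption):

  // Hypothetical reconstruction; the real helper in WikipediaPipelineAppTest may differ.
  private void testWorkflow(WorkflowManager workflowManager, WikipediaPipelineApp.WikipediaAppConfig config,
                            int expectedRuns) throws Exception {
    testWorkflow(workflowManager, config, expectedRuns, null);
  }

  private void testWorkflow(WorkflowManager workflowManager, WikipediaPipelineApp.WikipediaAppConfig config,
                            int expectedRuns, Integer threshold) throws Exception {
    Map<String, String> args = new HashMap<>();
    if (threshold != null) {
      // Runtime argument name is an assumption for illustration.
      args.put("min.pages.threshold", String.valueOf(threshold));
    }
    workflowManager.start(args);
    workflowManager.waitForRuns(ProgramRunStatus.COMPLETED, expectedRuns, 5, TimeUnit.MINUTES);
  }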
Use of co.cask.cdap.test.WorkflowManager in project cdap by caskdata.
The class TestFrameworkTestRun, method executeWorkflow.
private String executeWorkflow(ApplicationManager applicationManager, Map<String, String> additionalParams,
                               int expectedComplete) throws Exception {
  WorkflowManager wfManager = applicationManager.getWorkflowManager(WorkflowAppWithLocalDatasets.WORKFLOW_NAME);
  Map<String, String> runtimeArgs = new HashMap<>();
  File waitFile = new File(TMP_FOLDER.newFolder(), "/wait.file");
  File doneFile = new File(TMP_FOLDER.newFolder(), "/done.file");
  runtimeArgs.put("input.path", "input");
  runtimeArgs.put("output.path", "output");
  runtimeArgs.put("wait.file", waitFile.getAbsolutePath());
  runtimeArgs.put("done.file", doneFile.getAbsolutePath());
  runtimeArgs.putAll(additionalParams);
  wfManager.start(runtimeArgs);
  // Wait until the custom action in the Workflow is triggered
  while (!waitFile.exists()) {
    TimeUnit.MILLISECONDS.sleep(50);
  }
  // Now the Workflow should have RUNNING status. Get its run id.
  List<RunRecord> history = wfManager.getHistory(ProgramRunStatus.RUNNING);
  Assert.assertEquals(1, history.size());
  String runId = history.get(0).getPid();
  // Get the local datasets for this Workflow run
  DataSetManager<KeyValueTable> localDataset =
    getDataset(testSpace.dataset(WorkflowAppWithLocalDatasets.WORDCOUNT_DATASET + "." + runId));
  Assert.assertEquals("2", Bytes.toString(localDataset.get().read("text")));
  DataSetManager<FileSet> fileSetDataset =
    getDataset(testSpace.dataset(WorkflowAppWithLocalDatasets.CSV_FILESET_DATASET + "." + runId));
  Assert.assertNotNull(fileSetDataset.get());
  // Local datasets should not exist at the namespace level
  localDataset = getDataset(testSpace.dataset(WorkflowAppWithLocalDatasets.WORDCOUNT_DATASET));
  Assert.assertNull(localDataset.get());
  fileSetDataset = getDataset(testSpace.dataset(WorkflowAppWithLocalDatasets.CSV_FILESET_DATASET));
  Assert.assertNull(fileSetDataset.get());
  // Verify that the workflow is still running before we signal it to continue
  history = wfManager.getHistory(ProgramRunStatus.RUNNING);
  Assert.assertEquals(1, history.size());
  // Signal the Workflow to continue
  doneFile.createNewFile();
  // Wait for the workflow to finish
  wfManager.waitForRuns(ProgramRunStatus.COMPLETED, expectedComplete, 1, TimeUnit.MINUTES);
  Map<String, WorkflowNodeStateDetail> nodeStateDetailMap = wfManager.getWorkflowNodeStates(runId);
  Map<String, String> workflowMetricsContext = new HashMap<>();
  workflowMetricsContext.put(Constants.Metrics.Tag.NAMESPACE, testSpace.getNamespace());
  workflowMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());
  workflowMetricsContext.put(Constants.Metrics.Tag.WORKFLOW, WorkflowAppWithLocalDatasets.WORKFLOW_NAME);
  workflowMetricsContext.put(Constants.Metrics.Tag.RUN_ID, runId);
  Map<String, String> writerContext = new HashMap<>(workflowMetricsContext);
  writerContext.put(Constants.Metrics.Tag.NODE,
                    WorkflowAppWithLocalDatasets.LocalDatasetWriter.class.getSimpleName());
  Assert.assertEquals(2, getMetricsManager().getTotalMetric(writerContext, "user.num.lines"));
  // spark metrics in the workflow context
  Map<String, String> wfSparkMetricsContext = new HashMap<>(workflowMetricsContext);
  wfSparkMetricsContext.put(Constants.Metrics.Tag.NODE, "JavaSparkCSVToSpaceConverter");
  Assert.assertEquals(2, getMetricsManager().getTotalMetric(wfSparkMetricsContext, "user.num.lines"));
  // check in the spark context
  Map<String, String> sparkMetricsContext = new HashMap<>();
  sparkMetricsContext.put(Constants.Metrics.Tag.NAMESPACE, testSpace.getNamespace());
  sparkMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());
  sparkMetricsContext.put(Constants.Metrics.Tag.SPARK, "JavaSparkCSVToSpaceConverter");
  sparkMetricsContext.put(Constants.Metrics.Tag.RUN_ID,
                          nodeStateDetailMap.get("JavaSparkCSVToSpaceConverter").getRunId());
  Assert.assertEquals(2, getMetricsManager().getTotalMetric(sparkMetricsContext, "user.num.lines"));
  Map<String, String> appMetricsContext = new HashMap<>();
  appMetricsContext.put(Constants.Metrics.Tag.NAMESPACE, testSpace.getNamespace());
  appMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());
  // the app metrics context should have the sum of the custom action and spark metrics
  Assert.assertEquals(4, getMetricsManager().getTotalMetric(appMetricsContext, "user.num.lines"));
  Map<String, String> wfMRMetricsContext = new HashMap<>(workflowMetricsContext);
  wfMRMetricsContext.put(Constants.Metrics.Tag.NODE, "WordCount");
  Assert.assertEquals(7, getMetricsManager().getTotalMetric(wfMRMetricsContext, "user.num.words"));
  // MapReduce metrics context
  Map<String, String> mrMetricsContext = new HashMap<>();
  mrMetricsContext.put(Constants.Metrics.Tag.NAMESPACE, testSpace.getNamespace());
  mrMetricsContext.put(Constants.Metrics.Tag.APP, applicationManager.getInfo().getName());
  mrMetricsContext.put(Constants.Metrics.Tag.MAPREDUCE, "WordCount");
  mrMetricsContext.put(Constants.Metrics.Tag.RUN_ID, nodeStateDetailMap.get("WordCount").getRunId());
  Assert.assertEquals(7, getMetricsManager().getTotalMetric(mrMetricsContext, "user.num.words"));
  final Map<String, String> readerContext = new HashMap<>(workflowMetricsContext);
  readerContext.put(Constants.Metrics.Tag.NODE, "readerAction");
  Tasks.waitFor(6L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      return getMetricsManager().getTotalMetric(readerContext, "user.unique.words");
    }
  }, 60, TimeUnit.SECONDS);
  return runId;
}
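One caveat in the method above: the waitFile polling loop spins indefinitely if the custom action never creates the file. A bounded variant, reusing the Tasks.waitFor utility that the same method already uses for the metrics assertion, could look like this (a sketch, not part of the original test):

  // Sketch: wait for the custom action's signal file with a timeout
  // instead of looping without bound.
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() {
      return waitFile.exists();
    }
  }, 60, TimeUnit.SECONDS);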
Use of co.cask.cdap.test.WorkflowManager in project cdap by caskdata.
The class TestFrameworkTestRun, method testWorkflowStatus.
@Test
public void testWorkflowStatus() throws Exception {
  ApplicationManager appManager = deployApplication(WorkflowStatusTestApp.class);
  File workflowSuccess = new File(TMP_FOLDER.newFolder() + "/workflow.success");
  File actionSuccess = new File(TMP_FOLDER.newFolder() + "/action.success");
  File workflowKilled = new File(TMP_FOLDER.newFolder() + "/workflow.killed");
  File firstFile = new File(TMP_FOLDER.newFolder() + "/first");
  File firstFileDone = new File(TMP_FOLDER.newFolder() + "/first.done");
  WorkflowManager workflowManager = appManager.getWorkflowManager(WorkflowStatusTestApp.WORKFLOW_NAME);
  workflowManager.start(ImmutableMap.of("workflow.success.file", workflowSuccess.getAbsolutePath(),
                                        "action.success.file", actionSuccess.getAbsolutePath(),
                                        "throw.exception", "true"));
  workflowManager.waitForRun(ProgramRunStatus.FAILED, 1, TimeUnit.MINUTES);
  // Since the action and the workflow failed, the files should not exist
  Assert.assertFalse(workflowSuccess.exists());
  Assert.assertFalse(actionSuccess.exists());
  workflowManager.start(ImmutableMap.of("workflow.success.file", workflowSuccess.getAbsolutePath(),
                                        "action.success.file", actionSuccess.getAbsolutePath()));
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
  Assert.assertTrue(workflowSuccess.exists());
  Assert.assertTrue(actionSuccess.exists());
  // Test the killed status
  workflowManager.start(ImmutableMap.of("workflow.killed.file", workflowKilled.getAbsolutePath(),
                                        "first.file", firstFile.getAbsolutePath(),
                                        "first.done.file", firstFileDone.getAbsolutePath(),
                                        "test.killed", "true"));
  verifyFileExists(Lists.newArrayList(firstFile));
  workflowManager.stop();
  workflowManager.waitForRun(ProgramRunStatus.KILLED, 1, TimeUnit.MINUTES);
  Assert.assertTrue(workflowKilled.exists());
}
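verifyFileExists is another helper from TestFrameworkTestRun that is not shown here. A plausible sketch, assuming it blocks until every listed file appears (the 60-second timeout is an assumption):

  // Hypothetical reconstruction of the verifyFileExists helper referenced above;
  // the real implementation may differ.
  private void verifyFileExists(final List<File> fileList) throws Exception {
    for (final File file : fileList) {
      Tasks.waitFor(true, new Callable<Boolean>() {
        @Override
        public Boolean call() {
          return file.exists();
        }
      }, 60, TimeUnit.SECONDS);
    }
  }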