Use of co.cask.cdap.internal.pipeline.StageContext in project cdap by caskdata.
From class ProgramGenerationStageTest, method testProgramGenerationForToyApp:
@Test
public void testProgramGenerationForToyApp() throws Exception {
  cConf.set(Constants.AppFabric.OUTPUT_DIR, "programs");
  LocationFactory lf = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  // Have to do this since we are not going through the route of create namespace -> deploy application;
  // in real scenarios, the namespace directory would already be created.
  Location namespaceLocation = lf.create(DefaultId.APPLICATION.getNamespace());
  Locations.mkdirsIfNotExists(namespaceLocation);
  LocationFactory jarLf = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  Location appArchive = AppJarHelper.createDeploymentJar(jarLf, ToyApp.class);
  ApplicationSpecification appSpec = Specifications.from(new ToyApp());
  ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
  ApplicationSpecification newSpec = adapter.fromJson(adapter.toJson(appSpec));
  ProgramGenerationStage pgmStage = new ProgramGenerationStage(new NoOpAuthorizer(), new AuthenticationTestContext());
  // Can do better here - fixed right now to run the test.
  pgmStage.process(new StageContext(Object.class));
  pgmStage.process(new ApplicationDeployable(NamespaceId.DEFAULT.artifact("ToyApp", "1.0"), appArchive,
                                             DefaultId.APPLICATION, newSpec, null, ApplicationDeployScope.USER));
  Assert.assertTrue(true);
}
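The call sequence above, process(new StageContext(Object.class)) followed by process(...) with the typed payload, is the pattern both examples on this page exercise. Below is a minimal, self-contained sketch of that two-step pattern; the names (SimpleContext, TypedStage) and method bodies are illustrative assumptions, not CDAP's actual pipeline API.

// Sketch only: a stage first receives a context object, then the typed payload.
// SimpleContext and TypedStage are hypothetical stand-ins for the CDAP classes.
public final class TypedStageSketch {

  // Stand-in for a pipeline context carrying an upstream result.
  static final class SimpleContext {
    private final Object upstream;
    SimpleContext(Object upstream) { this.upstream = upstream; }
    Object getUpstream() { return upstream; }
  }

  // A stage parameterized on the payload type it accepts.
  abstract static class TypedStage<T> {
    private SimpleContext context;

    // First call: remember the context so later processing can read upstream data.
    void process(SimpleContext context) {
      this.context = context;
    }

    // Second call: handle the typed payload, mirroring
    // pgmStage.process(new ApplicationDeployable(...)) in the test above.
    abstract void process(T payload) throws Exception;

    SimpleContext getContext() { return context; }
  }

  public static void main(String[] args) throws Exception {
    TypedStage<String> stage = new TypedStage<String>() {
      @Override
      void process(String payload) {
        System.out.println("processing " + payload + " with context " + getContext().getUpstream());
      }
    };
    stage.process(new SimpleContext(Object.class)); // bind the context first
    stage.process("ToyApp");                        // then process the typed input
  }
}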
Use of co.cask.cdap.internal.pipeline.StageContext in project cdap by caskdata.
From class SystemMetadataWriterStageTest, method testWorkflowTags:
@Test
public void testWorkflowTags() throws Exception {
  String appName = WorkflowAppWithFork.class.getSimpleName();
  ApplicationId appId = NamespaceId.DEFAULT.app(appName);
  String workflowName = WorkflowAppWithFork.WorkflowWithFork.class.getSimpleName();
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact(appId.getApplication(), "1.0");
  ApplicationWithPrograms appWithPrograms = createAppWithWorkflow(artifactId, appId, workflowName);
  WorkflowSpecification workflowSpec = appWithPrograms.getSpecification().getWorkflows().get(workflowName);
  SystemMetadataWriterStage systemMetadataWriterStage = new SystemMetadataWriterStage(metadataStore);
  StageContext stageContext = new StageContext(Object.class);
  systemMetadataWriterStage.process(stageContext);
  systemMetadataWriterStage.process(appWithPrograms);
  // Verify that the workflow is not tagged with the fork node names.
  Set<String> workflowSystemTags = metadataStore.getTags(MetadataScope.SYSTEM, appId.workflow(workflowName));
  Sets.SetView<String> intersection = Sets.intersection(workflowSystemTags, getWorkflowForkNodes(workflowSpec));
  Assert.assertTrue("Workflows should not be tagged with fork node names, but found the following fork nodes "
                      + "in the workflow's system tags: " + intersection, intersection.isEmpty());
  // Verify that metadata was added for the workflow's schedule.
  Map<String, String> metadataProperties = metadataStore.getMetadata(MetadataScope.SYSTEM, appId).getProperties();
  Assert.assertEquals(WorkflowAppWithFork.SCHED_NAME + ":testDescription",
                      metadataProperties.get("schedule:" + WorkflowAppWithFork.SCHED_NAME));
}
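For reference, here is a minimal, self-contained sketch of the two assertions used above: a Guava Sets.intersection disjointness check on the tags, and a lookup of the schedule metadata property. The tag, fork-node, and schedule values are made up for illustration; only the assertion pattern mirrors the test.

// Sketch only: the literal sets and map below are hypothetical data standing in
// for what metadataStore would return in the real test.
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Map;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;

public class TagAssertionSketchTest {

  @Test
  public void tagsAndScheduleMetadata() {
    Set<String> workflowSystemTags = ImmutableSet.of("WorkflowWithFork", "workflow");
    Set<String> forkNodeNames = ImmutableSet.of("fork-node-1", "fork-node-2");

    // Disjointness check: Sets.intersection returns a view that is empty
    // when no fork node name leaked into the system tags.
    Sets.SetView<String> intersection = Sets.intersection(workflowSystemTags, forkNodeNames);
    Assert.assertTrue("Unexpected fork nodes in system tags: " + intersection, intersection.isEmpty());

    // Schedule metadata check: property key "schedule:<name>", value "<name>:<description>".
    Map<String, String> metadataProperties =
        ImmutableMap.of("schedule:DailySchedule", "DailySchedule:testDescription");
    Assert.assertEquals("DailySchedule:testDescription", metadataProperties.get("schedule:DailySchedule"));
  }
}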