use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class SimpleQueueSpecificationGeneratorTest method testQueueSpecificationGenWithWordCount.
@Test
public void testQueueSpecificationGenWithWordCount() throws Exception {
  ApplicationSpecification appSpec = Specifications.from(new WordCountApp());
  ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
  ApplicationSpecification newSpec = adapter.fromJson(adapter.toJson(appSpec));
  QueueSpecificationGenerator generator = new SimpleQueueSpecificationGenerator(NamespaceId.DEFAULT.app(newSpec.getName()));
  table = generator.create(newSpec.getFlows().values().iterator().next());
  Assert.assertEquals(get(FlowletConnection.Type.STREAM, "text", "StreamSource").iterator().next().getQueueName().toString(),
                      String.format("stream:///%s/text", TEST_NAMESPACE_ID));
  Assert.assertEquals(get(FlowletConnection.Type.FLOWLET, "StreamSource", "Tokenizer").iterator().next().getQueueName().toString(),
                      String.format("queue:///%s/WordCountApp/WordCountFlow/StreamSource/queue", Id.Namespace.DEFAULT.getId()));
  Assert.assertEquals(1, get(FlowletConnection.Type.FLOWLET, "Tokenizer", "CountByField").size());
}
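The test relies on a table field and a get(...) helper that are defined elsewhere in SimpleQueueSpecificationGeneratorTest and are not shown above. A minimal sketch of what they might look like, assuming the generator returns a Guava Table keyed by source node and target flowlet; the field type, the Node constructor, and the lookup logic are assumptions, not copied from the source:

  // Hypothetical members of the test class (com.google.common.collect.Table is Guava's Table).
  // The QueueSpecificationGenerator.Node constructor used below is assumed for illustration.
  private Table<QueueSpecificationGenerator.Node, String, Set<QueueSpecification>> table;

  // Returns the queue specifications for a connection of the given type from 'source' to 'target'.
  private Set<QueueSpecification> get(FlowletConnection.Type sourceType, String source, String target) {
    return table.get(new QueueSpecificationGenerator.Node(sourceType, source), target);
  }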
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class MapReduceContextConfigTest method testManyMacrosInAppSpec.
@Test
public void testManyMacrosInAppSpec() {
  Configuration hConf = new Configuration();
  MapReduceContextConfig cfg = new MapReduceContextConfig(hConf);
  StringBuilder appCfg = new StringBuilder();
  for (int i = 0; i < 100; i++) {
    appCfg.append("${").append(i).append("}");
    hConf.setInt(String.valueOf(i), i);
  }
  ApplicationSpecification appSpec = new DefaultApplicationSpecification(
    "name", "desc", appCfg.toString(),
    new ArtifactId("artifact", new ArtifactVersion("1.0.0"), ArtifactScope.USER),
    Collections.<String, StreamSpecification>emptyMap(),
    Collections.<String, String>emptyMap(),
    Collections.<String, DatasetCreationSpec>emptyMap(),
    Collections.<String, FlowSpecification>emptyMap(),
    Collections.<String, MapReduceSpecification>emptyMap(),
    Collections.<String, SparkSpecification>emptyMap(),
    Collections.<String, WorkflowSpecification>emptyMap(),
    Collections.<String, ServiceSpecification>emptyMap(),
    Collections.<String, ScheduleCreationSpec>emptyMap(),
    Collections.<String, WorkerSpecification>emptyMap(),
    Collections.<String, Plugin>emptyMap());
  cfg.setApplicationSpecification(appSpec);
  Assert.assertEquals(appSpec.getConfiguration(), cfg.getApplicationSpecification().getConfiguration());
}
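The hundred ${i} macros matter because Hadoop's Configuration performs variable substitution on plain get() calls, so a stored value containing ${...} patterns can come back altered unless it is read raw; the test presumably guards the spec's configuration string against exactly that. A small standalone sketch of the underlying Hadoop behaviour, independent of any CDAP class:

  // org.apache.hadoop.conf.Configuration expands ${...} references on get(),
  // while getRaw() returns the literal stored value.
  Configuration conf = new Configuration();
  conf.setInt("7", 7);
  conf.set("key", "${7}");
  System.out.println(conf.get("key"));     // prints "7" (macro expanded)
  System.out.println(conf.getRaw("key"));  // prints "${7}" (literal value)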
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class WorkflowVerificationTest method testGoodWorkflow.
@Test
public void testGoodWorkflow() throws Exception {
  ApplicationSpecification appSpec = Specifications.from(new GoodWorkflowApp());
  verifyGoodWorkflowSpecifications(appSpec);
  verifyAnotherGoodWorkflowSpecification(appSpec);
  verifyWorkflowWithLocalDatasetSpecification(appSpec);
  ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
  ApplicationSpecification newSpec = adapter.fromJson(adapter.toJson(appSpec));
  verifyGoodWorkflowSpecifications(newSpec);
  verifyAnotherGoodWorkflowSpecification(newSpec);
  verifyWorkflowWithLocalDatasetSpecification(newSpec);
}
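The three verify* helpers are private methods of WorkflowVerificationTest and are not reproduced here. A hypothetical sketch of the kind of checks such a helper performs; the workflow name and the specific assertions are placeholders, not taken from the source:

  // Hypothetical sketch only; the real helpers assert much more structural detail.
  private void verifyGoodWorkflowSpecifications(ApplicationSpecification appSpec) {
    WorkflowSpecification workflowSpec = appSpec.getWorkflows().get("GoodWorkflow");  // assumed workflow name
    Assert.assertNotNull(workflowSpec);
    Assert.assertFalse(workflowSpec.getNodes().isEmpty());  // the workflow defines at least one node
  }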
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class DefaultStoreTest method testWorkerInstances.
@Test
public void testWorkerInstances() throws Exception {
  ApplicationSpecification spec = Specifications.from(new AppWithWorker());
  ApplicationId appId = NamespaceId.DEFAULT.app(spec.getName());
  store.addApplication(appId, spec);
  ProgramId programId = appId.worker(AppWithWorker.WORKER);
  int instancesFromSpec = spec.getWorkers().get(AppWithWorker.WORKER).getInstances();
  Assert.assertEquals(1, instancesFromSpec);
  int instances = store.getWorkerInstances(programId);
  Assert.assertEquals(instancesFromSpec, instances);
  store.setWorkerInstances(programId, 9);
  instances = store.getWorkerInstances(programId);
  Assert.assertEquals(9, instances);
}
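The assertion that instancesFromSpec equals 1 implies AppWithWorker configures its worker with a single instance (or relies on the default). A hypothetical sketch of such an application using the standard AbstractApplication and AbstractWorker APIs; the class body and the value of the WORKER constant are assumptions, not copied from the real test app:

  // Hypothetical sketch; the real AppWithWorker test application may differ.
  public class AppWithWorker extends AbstractApplication {
    public static final String WORKER = "TableWriter";  // assumed worker name

    @Override
    public void configure() {
      setName("AppWithWorker");
      setDescription("Application with a single worker");
      addWorker(new TableWriter());
    }

    public static class TableWriter extends AbstractWorker {
      @Override
      public void configure() {
        setName(WORKER);
      }

      @Override
      public void run() {
        // worker logic omitted
      }
    }
  }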
use of co.cask.cdap.api.app.ApplicationSpecification in project cdap by caskdata.
the class DefaultStoreTest method testRunsLimit.
@Test
public void testRunsLimit() throws Exception {
  ApplicationSpecification spec = Specifications.from(new AllProgramsApp());
  ApplicationId appId = new ApplicationId("testRunsLimit", spec.getName());
  store.addApplication(appId, spec);
  ProgramId flowProgramId = new ProgramId("testRunsLimit", spec.getName(), ProgramType.FLOW, "NoOpFlow");
  Assert.assertNotNull(store.getApplication(appId));
  long now = System.currentTimeMillis();
  ProgramRunId flowProgramRunId = flowProgramId.run(RunIds.generate());
  setStartAndRunning(flowProgramRunId, now - 3000);
  store.setStop(flowProgramRunId, now - 100, ProgramController.State.COMPLETED.getRunStatus(), AppFabricTestHelper.createSourceId(++sourceId));
  setStartAndRunning(flowProgramId.run(RunIds.generate()), now - 2000);
  // Even though there are two separate run records (one completed and one still active),
  // only one should be returned by the query because a limit of 1 is passed in.
  Map<ProgramRunId, RunRecordMeta> historymap = store.getRuns(flowProgramId, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, 1);
  Assert.assertEquals(1, historymap.size());
}
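setStartAndRunning and the sourceId counter are members of DefaultStoreTest that are not shown above. A rough, hypothetical sketch of what they do; the exact Store methods and their signatures vary between CDAP versions, so the calls below are assumptions for illustration only:

  // Hypothetical helper: records the run as started and then transitions it to RUNNING,
  // stamping each update with a fresh source id. Store method names/signatures are assumed.
  private long sourceId;

  private void setStartAndRunning(ProgramRunId id, long startTime) {
    store.setStart(id, null, Collections.<String, String>emptyMap(), AppFabricTestHelper.createSourceId(++sourceId));
    store.setRunning(id, startTime + 1, null, AppFabricTestHelper.createSourceId(++sourceId));
  }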