Use of co.cask.cdap.proto.artifact.AppRequest in project cdap by caskdata.
In class ApplicationClientTestRun, method testAppUpdate:
@Test
public void testAppUpdate() throws Exception {
  String artifactName = "cfg-programs";
  ArtifactId artifactIdV1 = NamespaceId.DEFAULT.artifact(artifactName, "1.0.0");
  ArtifactId artifactIdV2 = NamespaceId.DEFAULT.artifact(artifactName, "2.0.0");
  ApplicationId appId = NamespaceId.DEFAULT.app("ProgramsApp");
  artifactClient.add(NamespaceId.DEFAULT, artifactName,
                     Files.newInputStreamSupplier(createAppJarFile(ConfigurableProgramsApp.class)), "1.0.0");
  artifactClient.add(NamespaceId.DEFAULT, artifactName,
                     Files.newInputStreamSupplier(createAppJarFile(ConfigurableProgramsApp2.class)), "2.0.0");
  try {
    // deploy the app with just the worker
    ConfigurableProgramsApp.Programs conf =
      new ConfigurableProgramsApp.Programs(null, "worker1", "stream1", "dataset1");
    AppRequest<ConfigurableProgramsApp.Programs> request =
      new AppRequest<>(new ArtifactSummary(artifactIdV1.getArtifact(), artifactIdV1.getVersion()), conf);
    appClient.deploy(appId, request);

    // should only have the worker
    Assert.assertTrue(appClient.listPrograms(appId, ProgramType.FLOW).isEmpty());
    Assert.assertEquals(1, appClient.listPrograms(appId, ProgramType.WORKER).size());

    // update to use just the flow
    conf = new ConfigurableProgramsApp.Programs("flow1", null, "stream1", "dataset1");
    request = new AppRequest<>(new ArtifactSummary(artifactIdV1.getArtifact(), artifactIdV1.getVersion()), conf);
    appClient.update(appId, request);

    // should only have the flow
    Assert.assertTrue(appClient.listPrograms(appId, ProgramType.WORKER).isEmpty());
    Assert.assertEquals(1, appClient.listPrograms(appId, ProgramType.FLOW).size());

    // check nonexistent app is not found
    try {
      appClient.update(NamespaceId.DEFAULT.app("ghost"), request);
      Assert.fail();
    } catch (NotFoundException e) {
      // expected
    }

    // check different artifact name is invalid
    request = new AppRequest<>(new ArtifactSummary("ghost", artifactIdV1.getVersion()), conf);
    try {
      appClient.update(appId, request);
      Assert.fail();
    } catch (BadRequestException e) {
      // expected
    }

    // check nonexistent artifact is not found
    request = new AppRequest<>(new ArtifactSummary(artifactIdV1.getArtifact(), "0.0.1"), conf);
    try {
      appClient.update(appId, request);
      Assert.fail();
    } catch (NotFoundException e) {
      // expected
    }

    // update the artifact version. This version uses a different app class that can add a service.
    ConfigurableProgramsApp2.Programs conf2 =
      new ConfigurableProgramsApp2.Programs(null, null, "stream1", "dataset1", "service2");
    AppRequest<ConfigurableProgramsApp2.Programs> request2 =
      new AppRequest<>(new ArtifactSummary(artifactIdV2.getArtifact(), artifactIdV2.getVersion()), conf2);
    appClient.update(appId, request2);

    // should only have a single service
    Assert.assertTrue(appClient.listPrograms(appId, ProgramType.WORKER).isEmpty());
    Assert.assertTrue(appClient.listPrograms(appId, ProgramType.FLOW).isEmpty());
    Assert.assertEquals(1, appClient.listPrograms(appId, ProgramType.SERVICE).size());
  } finally {
    appClient.delete(appId);
    appClient.waitForDeleted(appId, 30, TimeUnit.SECONDS);
    artifactClient.delete(artifactIdV1);
    artifactClient.delete(artifactIdV2);
  }
}
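The test above captures the core AppRequest pattern: pair an ArtifactSummary (artifact name plus version) with a typed config object, then deploy or update against an ApplicationId. Below is a minimal sketch of just the update step, using only calls that appear in the test; the class and helper names are hypothetical, and the ArtifactSummary import path is an assumption (it moved between co.cask.cdap.proto.artifact and co.cask.cdap.api.artifact across CDAP versions).

import co.cask.cdap.api.artifact.ArtifactSummary; // assumed path, see note above
import co.cask.cdap.client.ApplicationClient;
import co.cask.cdap.proto.artifact.AppRequest;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.ArtifactId;

public final class RedeployExample {
  // Rebuilds the AppRequest against a new artifact version and updates the app in place.
  // Per the test above, update throws NotFoundException if the app or artifact is missing
  // and BadRequestException if the artifact name differs from the deployed one.
  static <T> void redeployWithNewArtifact(ApplicationClient appClient, ApplicationId appId,
                                          ArtifactId newArtifact, T config) throws Exception {
    AppRequest<T> request = new AppRequest<>(
        new ArtifactSummary(newArtifact.getArtifact(), newArtifact.getVersion()), config);
    appClient.update(appId, request);
  }
}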
Use of co.cask.cdap.proto.artifact.AppRequest in project cdap by caskdata.
In class DataQualityAppTest, method testMeanContentLength:
@Test
public void testMeanContentLength() throws Exception {
  Map<String, Set<String>> testMap = new HashMap<>();
  Set<String> testSet = new HashSet<>();
  testSet.add("Mean");
  testMap.put("content_length", testSet);
  DataQualityApp.DataQualityConfig config =
    new DataQualityApp.DataQualityConfig(WORKFLOW_SCHEDULE_MINUTES, getStreamSource(), "avg", testMap);
  ApplicationId appId = NamespaceId.DEFAULT.app("newApp2");
  AppRequest<DataQualityApp.DataQualityConfig> appRequest =
    new AppRequest<>(new ArtifactSummary(appArtifact.getArtifact(), appArtifact.getVersion()), config);
  ApplicationManager applicationManager = deployApplication(appId, appRequest);
  MapReduceManager mrManager = applicationManager.getMapReduceManager("FieldAggregator").start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 180, TimeUnit.SECONDS);
  ServiceManager serviceManager = applicationManager.getServiceManager(DataQualityService.SERVICE_NAME).start();
  serviceManager.waitForStatus(true);

  /* Test for aggregationsGetter handler */
  URL url = new URL(serviceManager.getServiceURL(),
                    "v1/sources/logStream/fields/content_length/aggregations/Mean/timeseries");
  HttpResponse httpResponse = HttpRequests.execute(HttpRequest.get(url).build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, httpResponse.getResponseCode());
  String response = httpResponse.getResponseBodyAsString();
  List<TimestampValue> tsValueListActual = GSON.fromJson(response, TOKEN_TYPE_LIST_TIMESTAMP_VALUE);
  TimestampValue firstTimestampValue = tsValueListActual.get(0);
  Assert.assertEquals(256.0, firstTimestampValue.getValue());
  serviceManager.stop();
  serviceManager.waitForRun(ProgramRunStatus.KILLED, 180, TimeUnit.SECONDS);
}
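The service assertions follow a round trip this page repeats: resolve the base URL from the ServiceManager, issue a GET with CDAP's common-http client, check the status code, and decode the JSON body with Gson. A compact sketch of just that round trip; the class and method names are hypothetical, while the HttpRequests/HttpResponse calls are exactly the ones used above.

import java.lang.reflect.Type;
import java.net.HttpURLConnection;
import java.net.URL;
import com.google.gson.Gson;
import co.cask.common.http.HttpRequest;
import co.cask.common.http.HttpRequests;
import co.cask.common.http.HttpResponse;

final class ServiceQuery {
  private static final Gson GSON = new Gson();

  // GETs base + path, fails fast on a non-200 status, and decodes the JSON body.
  static <T> T getJson(URL base, String path, Type type) throws Exception {
    HttpResponse response = HttpRequests.execute(HttpRequest.get(new URL(base, path)).build());
    if (response.getResponseCode() != HttpURLConnection.HTTP_OK) {
      throw new IllegalStateException("unexpected status " + response.getResponseCode());
    }
    return GSON.fromJson(response.getResponseBodyAsString(), type);
  }
}

With the test's TOKEN_TYPE_LIST_TIMESTAMP_VALUE as the Type argument, the timeseries check above reduces to a single call against serviceManager.getServiceURL().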
Use of co.cask.cdap.proto.artifact.AppRequest in project cdap by caskdata.
In class DataQualityAppTest, method testTotals:
@Test
public void testTotals() throws Exception {
  Map<String, Set<String>> testMap = new HashMap<>();
  Set<String> testSet = new HashSet<>();
  testSet.add("DiscreteValuesHistogram");
  testMap.put("content_length", testSet);
  testMap.put("status", testSet);
  testMap.put("request_time", testSet);
  DataQualityApp.DataQualityConfig config =
    new DataQualityApp.DataQualityConfig(WORKFLOW_SCHEDULE_MINUTES, getStreamSource(), "histogram", testMap);
  ApplicationId appId = NamespaceId.DEFAULT.app("newApp3");
  AppRequest<DataQualityApp.DataQualityConfig> appRequest =
    new AppRequest<>(new ArtifactSummary(appArtifact.getArtifact(), appArtifact.getVersion()), config);
  ApplicationManager applicationManager = deployApplication(appId, appRequest);
  MapReduceManager mrManager = applicationManager.getMapReduceManager("FieldAggregator").start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 180, TimeUnit.SECONDS);
  Map<String, Integer> expectedMap = new HashMap<>();
  expectedMap.put("256", 3);

  /* Test for the aggregationsGetter handler */
  ServiceManager serviceManager = applicationManager.getServiceManager(DataQualityService.SERVICE_NAME).start();
  serviceManager.waitForStatus(true);
  URL url = new URL(serviceManager.getServiceURL(),
                    "v1/sources/logStream/fields/content_length/aggregations/DiscreteValuesHistogram/totals");
  HttpResponse httpResponse = HttpRequests.execute(HttpRequest.get(url).build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, httpResponse.getResponseCode());
  String response = httpResponse.getResponseBodyAsString();
  Map<String, Integer> histogramMap = GSON.fromJson(response, TOKEN_TYPE_MAP_STRING_INTEGER);
  Assert.assertEquals(expectedMap, histogramMap);

  /* Test for the fieldsGetter handler */
  url = new URL(serviceManager.getServiceURL(), "v1/sources/logStream/fields");
  httpResponse = HttpRequests.execute(HttpRequest.get(url).build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, httpResponse.getResponseCode());
  response = httpResponse.getResponseBodyAsString();
  Set<FieldDetail> outputSet = GSON.fromJson(response, TOKEN_TYPE_SET_FIELD_DETAIL);
  Set<FieldDetail> expectedSet = new HashSet<>();
  AggregationTypeValue aggregationTypeValue = new AggregationTypeValue("DiscreteValuesHistogram", true);
  Set<AggregationTypeValue> aggregationTypeValuesList = Sets.newHashSet(aggregationTypeValue);
  expectedSet.add(new FieldDetail("content_length", aggregationTypeValuesList));
  expectedSet.add(new FieldDetail("request_time", aggregationTypeValuesList));
  expectedSet.add(new FieldDetail("status", aggregationTypeValuesList));
  Assert.assertEquals(expectedSet, outputSet);

  /* Test for the aggregationTypesGetter handler */
  url = new URL(serviceManager.getServiceURL(), "v1/sources/logStream/fields/content_length");
  httpResponse = HttpRequests.execute(HttpRequest.get(url).build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, httpResponse.getResponseCode());
  response = httpResponse.getResponseBodyAsString();
  List<AggregationTypeValue> expectedAggregationTypeValuesList = new ArrayList<>();
  List<AggregationTypeValue> outputAggregationTypeValuesList =
    GSON.fromJson(response, TOKEN_TYPE_LIST_AGGREGATION_TYPE_VALUES);
  expectedAggregationTypeValuesList.add(new AggregationTypeValue("DiscreteValuesHistogram", true));
  Assert.assertEquals(expectedAggregationTypeValuesList, outputAggregationTypeValuesList);
  serviceManager.stop();
  serviceManager.waitForRun(ProgramRunStatus.KILLED, 180, TimeUnit.SECONDS);
}
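The totals check decodes the histogram body with the test's TOKEN_TYPE_MAP_STRING_INTEGER token. For reference, a self-contained sketch of how such a Gson type token and decode step fit together; the class and constant names here are illustrative, and only Gson itself is assumed.

import java.lang.reflect.Type;
import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

final class HistogramDecode {
  private static final Gson GSON = new Gson();
  // Mirrors the role of the test's TOKEN_TYPE_MAP_STRING_INTEGER constant.
  private static final Type MAP_STRING_INTEGER = new TypeToken<Map<String, Integer>>() { }.getType();

  // Decodes a body like {"256": 3} into a Map<String, Integer>.
  static Map<String, Integer> decode(String jsonBody) {
    return GSON.fromJson(jsonBody, MAP_STRING_INTEGER);
  }
}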
Use of co.cask.cdap.proto.artifact.AppRequest in project cdap by caskdata.
In class ETLWorkerTest, method testOneSourceOneSink:
@Test
@Category(SlowTests.class)
public void testOneSourceOneSink() throws Exception {
  Schema schema = Schema.recordOf("test",
      Schema.Field.of("id", Schema.of(Schema.Type.STRING)),
      Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  List<StructuredRecord> input = new ArrayList<>();
  input.add(StructuredRecord.builder(schema).set("id", "123").set("name", "samuel").build());
  input.add(StructuredRecord.builder(schema).set("id", "456").set("name", "jackson").build());
  File tmpDir = TMP_FOLDER.newFolder();
  ETLRealtimeConfig etlConfig = ETLRealtimeConfig.builder()
      .addStage(new ETLStage("source", MockSource.getPlugin(input)))
      .addStage(new ETLStage("sink", MockSink.getPlugin(tmpDir)))
      .addConnection("source", "sink")
      .build();
  ApplicationId appId = NamespaceId.DEFAULT.app("simpleApp");
  AppRequest<ETLRealtimeConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  WorkerManager workerManager = appManager.getWorkerManager(ETLWorker.NAME);
  workerManager.start();
  workerManager.waitForStatus(true, 10, 1);
  try {
    List<StructuredRecord> written = MockSink.getRecords(tmpDir, 0, 10, TimeUnit.SECONDS);
    Assert.assertEquals(input, written);
  } finally {
    stopWorker(workerManager);
  }
  validateMetric(2, appId, "source.records.out");
  validateMetric(2, appId, "sink.records.in");
}
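The pipeline here is the smallest possible realtime topology: one source stage, one sink stage, one connection. A sketch of that wiring as a reusable helper; the builder calls (addStage, addConnection, build) are exactly the ones the test uses, while the import paths are assumptions about the cdap-etl module layout of this era and should be checked against the actual project.

import java.io.File;
import java.util.List;
import co.cask.cdap.api.data.format.StructuredRecord;
import co.cask.cdap.etl.mock.realtime.MockSink;     // assumed path
import co.cask.cdap.etl.mock.realtime.MockSource;   // assumed path
import co.cask.cdap.etl.proto.v2.ETLRealtimeConfig; // assumed path
import co.cask.cdap.etl.proto.v2.ETLStage;          // assumed path

final class PipelineConfigs {
  // Builds the one-source/one-sink config used by testOneSourceOneSink.
  static ETLRealtimeConfig sourceToSink(List<StructuredRecord> input, File sinkDir) {
    return ETLRealtimeConfig.builder()
        .addStage(new ETLStage("source", MockSource.getPlugin(input)))
        .addStage(new ETLStage("sink", MockSink.getPlugin(sinkDir)))
        .addConnection("source", "sink")
        .build();
  }
}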
Use of co.cask.cdap.proto.artifact.AppRequest in project cdap by caskdata.
In class ETLWorkerTest, method testLookup:
@Test
public void testLookup() throws Exception {
  addDatasetInstance(KeyValueTable.class.getName(), "lookupTable");
  DataSetManager<KeyValueTable> lookupTable = getDataset("lookupTable");
  lookupTable.get().write("Bob".getBytes(Charsets.UTF_8), "123".getBytes(Charsets.UTF_8));
  lookupTable.flush();
  File outDir = TMP_FOLDER.newFolder();
  ETLRealtimeConfig etlConfig = ETLRealtimeConfig.builder()
      .addStage(new ETLStage("source", LookupSource.getPlugin(ImmutableSet.of("Bob", "Bill"), "lookupTable")))
      .addStage(new ETLStage("sink", MockSink.getPlugin(outDir)))
      .addConnection("source", "sink")
      .build();
  ApplicationId appId = NamespaceId.DEFAULT.app("lookupTestApp");
  AppRequest<ETLRealtimeConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  WorkerManager workerManager = appManager.getWorkerManager(ETLWorker.NAME);
  workerManager.start();
  workerManager.waitForStatus(true, 10, 1);
  Schema schema = Schema.recordOf("bobbill",
      Schema.Field.of("Bob", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
      Schema.Field.of("Bill", Schema.nullableOf(Schema.of(Schema.Type.STRING))));
  List<StructuredRecord> expected = new ArrayList<>();
  expected.add(StructuredRecord.builder(schema).set("Bob", "123").build());
  try {
    List<StructuredRecord> actual = MockSink.getRecords(outDir, 0, 10, TimeUnit.SECONDS);
    Assert.assertEquals(expected, actual);
  } finally {
    stopWorker(workerManager);
  }
  validateMetric(1, appId, "source.records.out");
  validateMetric(1, appId, "sink.records.in");
}
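One detail worth noting in testLookup: both fields in the expected schema are declared with Schema.nullableOf, so a record that sets only "Bob" still conforms to the two-field schema, which is why the single-record expected list matches. A standalone sketch of that nullable-field pattern, using the same Schema and StructuredRecord calls as the test (the class and method names are illustrative):

import co.cask.cdap.api.data.format.StructuredRecord;
import co.cask.cdap.api.data.schema.Schema;

final class NullableFieldExample {
  // Builds a record that sets only one of two nullable fields; "Bill" stays null.
  static StructuredRecord bobOnly() {
    Schema schema = Schema.recordOf("bobbill",
        Schema.Field.of("Bob", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
        Schema.Field.of("Bill", Schema.nullableOf(Schema.of(Schema.Type.STRING))));
    return StructuredRecord.builder(schema).set("Bob", "123").build();
  }
}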