Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
From the class ServiceArtifactTestRun, the method testServiceArtifact:
@Test
public void testServiceArtifact() throws Exception {
  ApplicationManager appManager = deployWithArtifact(ServiceArtifactApp.class, artifactJar);
  ServiceManager serviceManager = appManager.getServiceManager("artifact").start();
  URL serviceURL = serviceManager.getServiceURL(30, TimeUnit.SECONDS);
  Assert.assertNotNull(serviceURL);

  URL listURL = serviceURL.toURI().resolve("list").toURL();
  try (Reader reader = new InputStreamReader(listURL.openStream(), StandardCharsets.UTF_8)) {
    List<ArtifactInfo> artifacts = new Gson().fromJson(reader, new TypeToken<List<ArtifactInfo>>() { }.getType());
    // It should have the test app, and two plugin artifacts
    Assert.assertEquals(3, artifacts.size());
    Assert.assertTrue(artifacts.stream().anyMatch(info -> info.getName().equals(ServiceArtifactApp.class.getSimpleName())));
    Assert.assertTrue(artifacts.stream().anyMatch(info -> info.getName().equals("dummybase")));
    Assert.assertTrue(artifacts.stream().anyMatch(info -> info.getName().equals("dummy")));
  }

  URL loadURL = serviceURL.toURI().resolve("load?parent=dummybase&plugin=dummy&class=" + DummyPlugin.class.getName()).toURL();
  HttpURLConnection urlConn = (HttpURLConnection) loadURL.openConnection();
  Assert.assertEquals(200, urlConn.getResponseCode());
  try (Reader reader = new InputStreamReader(urlConn.getInputStream(), StandardCharsets.UTF_8)) {
    Assert.assertEquals(DummyPlugin.class.getName(), CharStreams.toString(reader));
  }

  serviceManager.stop();
  serviceManager.waitForStatus(false);
}
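Both endpoint calls above repeat the same open-connection-and-read pattern. A minimal helper sketch, assuming only the JDK and Guava classes the test already uses (the name readContent is illustrative, not part of the original test):

// Hypothetical helper: issues a GET, asserts a 200 response, and returns the body as a UTF-8 string.
private String readContent(URL url) throws IOException {
  HttpURLConnection urlConn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(200, urlConn.getResponseCode());
  try (Reader reader = new InputStreamReader(urlConn.getInputStream(), StandardCharsets.UTF_8)) {
    return CharStreams.toString(reader);
  } finally {
    urlConn.disconnect();
  }
}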
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
From the class BatchStreamIntegrationTestRun, the method submitAndVerifyStreamBatchJob:
private void submitAndVerifyStreamBatchJob(Class<? extends AbstractApplication> appClass, String streamWriter,
                                           String mapReduceName, int timeout) throws Exception {
  ApplicationManager applicationManager = deployApplication(appClass);
  StreamManager streamManager = getStreamManager(streamWriter);
  verifyStreamBatchJob(streamManager, applicationManager, mapReduceName, timeout);
}
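The verifyStreamBatchJob helper is not included in this excerpt. A plausible sketch of its shape, assuming it feeds the stream, runs the named MapReduce, and checks an app-specific output dataset; the dataset name "results" and the expected contents below are placeholders, not taken from the original:

// Hypothetical sketch of the missing helper; the actual verification depends on the app under test.
private void verifyStreamBatchJob(StreamManager streamManager, ApplicationManager applicationManager,
                                  String mapReduceName, int timeout) throws Exception {
  // Send a few events into the stream that the MapReduce job reads as input.
  for (int i = 0; i < 10; i++) {
    streamManager.send(String.valueOf(i));
  }
  // Run the MapReduce program and wait for it to complete within the given timeout (in seconds).
  MapReduceManager mrManager = applicationManager.getMapReduceManager(mapReduceName).start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, timeout, TimeUnit.SECONDS);
  // An output check for a hypothetical KeyValueTable dataset named "results".
  DataSetManager<KeyValueTable> output = getDataset("results");
  Assert.assertNotNull(output.get().read(Bytes.toBytes("0")));
}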
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
From the class MapReduceServiceIntegrationTestRun, the method test:
@Test
public void test() throws Exception {
  ApplicationManager applicationManager = deployApplication(TestMapReduceServiceIntegrationApp.class);
  ServiceManager serviceManager = applicationManager.getServiceManager(TestMapReduceServiceIntegrationApp.SERVICE_NAME).start();
  serviceManager.waitForStatus(true);

  DataSetManager<MyKeyValueTableDefinition.KeyValueTable> inDataSet = getDataset(TestMapReduceServiceIntegrationApp.INPUT_DATASET);
  inDataSet.get().write("key1", "Two words");
  inDataSet.get().write("key2", "Plus three words");
  inDataSet.flush();

  MapReduceManager mrManager = applicationManager.getMapReduceManager(TestMapReduceServiceIntegrationApp.MR_NAME).start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 180, TimeUnit.SECONDS);

  DataSetManager<MyKeyValueTableDefinition.KeyValueTable> outDataSet = getDataset(TestMapReduceServiceIntegrationApp.OUTPUT_DATASET);
  MyKeyValueTableDefinition.KeyValueTable results = outDataSet.get();
  String total = results.get(TestMapReduceServiceIntegrationApp.SQUARED_TOTAL_WORDS_COUNT);
  // The two input values contain 2 + 3 = 5 words in total, so the squared total word count is 25.
  Assert.assertEquals(25, Integer.parseInt(total));
}
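Unlike the first example, this test starts a service but never stops it. A test that needs a clean shutdown could reuse the teardown shown in testServiceArtifact; a sketch using only calls that already appear in these examples:

// Optional teardown, not in the original test: stop the service and wait until it is no longer running.
serviceManager.stop();
serviceManager.waitForStatus(false);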
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
From the class SparkMetricsIntegrationTestRun, the method testSparkMetrics:
@Test
public void testSparkMetrics() throws Exception {
  ApplicationManager applicationManager = deployApplication(TestSparkMetricsIntegrationApp.class);
  SparkManager sparkManager = applicationManager.getSparkManager(TestSparkMetricsIntegrationApp.APP_SPARK_NAME).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 120, TimeUnit.SECONDS);

  List<RunRecord> history = sparkManager.getHistory(ProgramRunStatus.COMPLETED);
  Assert.assertEquals(1, history.size());

  // Wait for the metrics to get updated
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return getSparkMetric(TestSparkMetricsIntegrationApp.APP_NAME, TestSparkMetricsIntegrationApp.APP_SPARK_NAME,
                            "system.driver.BlockManager.memory.remainingMem_MB") > 0;
    }
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);

  Tasks.waitFor(2L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      return getSparkMetric(TestSparkMetricsIntegrationApp.APP_NAME, TestSparkMetricsIntegrationApp.APP_SPARK_NAME,
                            "user.more.than.30");
    }
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}
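On Java 8, the two anonymous Callable blocks can be collapsed into lambdas. A sketch assuming the same Tasks.waitFor overloads and the getSparkMetric helper used above, with behavior unchanged:

// Equivalent polling written with lambdas.
Tasks.waitFor(true, () -> getSparkMetric(TestSparkMetricsIntegrationApp.APP_NAME,
                                         TestSparkMetricsIntegrationApp.APP_SPARK_NAME,
                                         "system.driver.BlockManager.memory.remainingMem_MB") > 0,
              10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
Tasks.waitFor(2L, () -> getSparkMetric(TestSparkMetricsIntegrationApp.APP_NAME,
                                       TestSparkMetricsIntegrationApp.APP_SPARK_NAME,
                                       "user.more.than.30"),
              10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);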
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
From the class SparkStreamIntegrationTestRun, the method testSparkWithStream:
@Test
public void testSparkWithStream() throws Exception {
  ApplicationManager applicationManager = deployApplication(TestSparkStreamIntegrationApp.class);
  StreamManager streamManager = getStreamManager("testStream");
  for (int i = 0; i < 50; i++) {
    streamManager.send(String.valueOf(i));
  }

  SparkManager sparkManager = applicationManager.getSparkManager("SparkStreamProgram").start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 120, TimeUnit.SECONDS);

  // The Spark job simply turns every stream event body into key/value pairs, with key == value.
  DataSetManager<KeyValueTable> datasetManager = getDataset("result");
  verifyDatasetResult(datasetManager);
}
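The verifyDatasetResult helper is not shown. Given the comment that the job writes key == value for every stream event, a plausible check, sketched under the assumption that the 50 numeric bodies sent above are the expected keys:

// Hypothetical sketch of the missing verification helper: every value sent should map back to itself.
private void verifyDatasetResult(DataSetManager<KeyValueTable> datasetManager) {
  KeyValueTable results = datasetManager.get();
  for (int i = 0; i < 50; i++) {
    byte[] key = Bytes.toBytes(String.valueOf(i));
    Assert.assertArrayEquals(key, results.read(key));
  }
}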