use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
the class SparkPageRankAppTest method test.
@Test
public void test() throws Exception {
  // Deploy the SparkPageRankApp
  ApplicationManager appManager = deployApplication(SparkPageRankApp.class);
  // Send stream events to the backlink URL Stream
  StreamManager streamManager = getStreamManager(SparkPageRankApp.BACKLINK_URL_STREAM);
  streamManager.send(Joiner.on(" ").join(URL_1, URL_2));
  streamManager.send(Joiner.on(" ").join(URL_1, URL_3));
  streamManager.send(Joiner.on(" ").join(URL_2, URL_1));
  streamManager.send(Joiner.on(" ").join(URL_3, URL_1));
  // Start the service
  ServiceManager serviceManager = appManager.getServiceManager(SparkPageRankApp.SERVICE_HANDLERS).start();
  // Wait for the service to start, since the Spark program needs it
  serviceManager.waitForStatus(true);
  // Start the SparkPageRankProgram
  SparkManager sparkManager = appManager.getSparkManager(SparkPageRankApp.PageRankSpark.class.getSimpleName()).start();
  sparkManager.waitForFinish(60, TimeUnit.SECONDS);
  // Run RanksCounter, which counts the number of pages for each page rank
  MapReduceManager mapReduceManager = appManager.getMapReduceManager(SparkPageRankApp.RanksCounter.class.getSimpleName()).start();
  mapReduceManager.waitForFinish(3, TimeUnit.MINUTES);
  // Query for the rank of URL_1 and verify it
  URL url = new URL(serviceManager.getServiceURL(15, TimeUnit.SECONDS),
                    SparkPageRankApp.SparkPageRankServiceHandler.RANKS_PATH);
  HttpRequest request = HttpRequest.post(url)
    .withBody("{\"" + SparkPageRankApp.SparkPageRankServiceHandler.URL_KEY + "\":\"" + URL_1 + "\"}")
    .build();
  HttpResponse response = HttpRequests.execute(request);
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Assert.assertEquals(RANK, response.getResponseBodyAsString());
  // Request the total number of pages for a page rank and verify it
  url = new URL(serviceManager.getServiceURL(15, TimeUnit.SECONDS),
                SparkPageRankApp.SparkPageRankServiceHandler.TOTAL_PAGES_PATH + "/" + RANK);
  response = HttpRequests.execute(HttpRequest.get(url).build());
  Assert.assertEquals(TOTAL_PAGES, response.getResponseBodyAsString());
}
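The constants URL_1, URL_2, URL_3, RANK and TOTAL_PAGES referenced above are fields of the test class and are not part of this snippet. A minimal sketch of how they might be declared; the names come from the code above, but the literal values are assumptions for illustration:

  private static final String URL_1 = "http://example.com/page1";    // assumed sample URL
  private static final String URL_2 = "http://example.com/page10";   // assumed sample URL
  private static final String URL_3 = "http://example.com/page100";  // assumed sample URL
  private static final String RANK = "14";        // assumed: rank string the RANKS_PATH handler returns
  private static final String TOTAL_PAGES = "1";  // assumed: count the TOTAL_PAGES_PATH handler returns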
use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
the class HelloWorldTest method test.
@Test
public void test() throws Exception {
  // Deploy the HelloWorld application
  ApplicationManager appManager = deployApplication(HelloWorld.class);
  // Start WhoFlow
  FlowManager flowManager = appManager.getFlowManager("WhoFlow").start();
  flowManager.waitForStatus(true);
  // Send stream events to the "who" Stream
  StreamManager streamManager = getStreamManager("who");
  streamManager.send("1");
  streamManager.send("2");
  streamManager.send("3");
  streamManager.send("4");
  streamManager.send("5");
  try {
    // Wait for the last Flowlet to process 5 events, or at most 5 seconds
    RuntimeMetrics metrics = flowManager.getFlowletMetrics("saver");
    metrics.waitForProcessed(5, 5, TimeUnit.SECONDS);
  } finally {
    flowManager.stop();
    Assert.assertFalse(flowManager.isRunning());
  }
  // Start the Greeting service and use it
  ServiceManager serviceManager = appManager.getServiceManager(HelloWorld.Greeting.SERVICE_NAME).start();
  // Wait for the service to start
  serviceManager.waitForStatus(true);
  URL url = new URL(serviceManager.getServiceURL(), "greet");
  HttpURLConnection connection = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
  String response;
  try {
    response = new String(ByteStreams.toByteArray(connection.getInputStream()), Charsets.UTF_8);
  } finally {
    connection.disconnect();
  }
  Assert.assertEquals("Hello 5!", response);
}
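For comparison, the same "greet" call can be made with the HttpRequests helper used in the SparkPageRankAppTest above, which avoids the manual connection handling. A sketch, assuming the same serviceManager is in scope:

  // Same request and assertions as above, via the HttpRequests utility
  URL greetUrl = new URL(serviceManager.getServiceURL(), "greet");
  HttpResponse greetResponse = HttpRequests.execute(HttpRequest.get(greetUrl).build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, greetResponse.getResponseCode());
  Assert.assertEquals("Hello 5!", greetResponse.getResponseBodyAsString());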
use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
the class SparkTestRun method testSparkFork.
@Test
public void testSparkFork() throws Exception {
  ApplicationManager appManager = deploy(TestSparkApp.class);
  File barrierDir = TMP_FOLDER.newFolder();
  final WorkflowManager workflowManager = appManager
    .getWorkflowManager(TestSparkApp.ForkSparkWorkflow.class.getSimpleName())
    .start(Collections.singletonMap("barrier.dir", barrierDir.getAbsolutePath()));
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
}
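Beyond waitForRun, the run history can be inspected after completion. A sketch, assuming a getHistory() method is available on the test framework's program managers in this CDAP version (an assumption; it is not shown in the snippet):

  // Assumed API: returns the run records of this workflow
  List<RunRecord> history = workflowManager.getHistory();
  Assert.assertEquals(1, history.size());
  Assert.assertEquals(ProgramRunStatus.COMPLETED, history.get(0).getStatus());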
use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
the class SparkTestRun method testTransaction.
@Test
public void testTransaction() throws Exception {
  ApplicationManager applicationManager = deploy(TestSparkApp.class);
  StreamManager streamManager = getStreamManager("SparkStream");
  // Write some sentences to the stream
  streamManager.send("red fox");
  streamManager.send("brown fox");
  streamManager.send("grey fox");
  streamManager.send("brown bear");
  streamManager.send("black bear");
  // Run the Spark program
  SparkManager sparkManager = applicationManager.getSparkManager(TransactionSpark.class.getSimpleName());
  sparkManager.start(ImmutableMap.of(
    "source.stream", "SparkStream",
    "keyvalue.table", "KeyValueTable",
    "result.all.dataset", "SparkResult",
    "result.threshold", "2",
    "result.threshold.dataset", "SparkThresholdResult"));
  // Verify the result from the dataset before the Spark program terminates
  final DataSetManager<KeyValueTable> resultManager = getDataset("SparkThresholdResult");
  final KeyValueTable resultTable = resultManager.get();
  // Expect the threshold result dataset, with threshold >= 2, to contain [brown, fox, bear]
  Tasks.waitFor(ImmutableSet.of("brown", "fox", "bear"), new Callable<Set<String>>() {
    @Override
    public Set<String> call() throws Exception {
      // Flush to start a new transaction, so that the scan sees the latest writes
      resultManager.flush();
      LOG.info("Reading from threshold result");
      try (CloseableIterator<KeyValue<byte[], byte[]>> itor = resultTable.scan(null, null)) {
        return ImmutableSet.copyOf(Iterators.transform(itor, new Function<KeyValue<byte[], byte[]>, String>() {
          @Override
          public String apply(KeyValue<byte[], byte[]> input) {
            String word = Bytes.toString(input.getKey());
            LOG.info("{}, {}", word, Bytes.toInt(input.getValue()));
            return word;
          }
        }));
      }
    }
  }, 3, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);
  sparkManager.stop();
  sparkManager.waitForRun(ProgramRunStatus.KILLED, 60, TimeUnit.SECONDS);
}
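The test only asserts on the threshold dataset; the all-results dataset named by "result.all.dataset" could be verified the same way. A sketch, assuming TransactionSpark writes a per-word count into SparkResult keyed by word (the expected count is an assumption derived from the five sentences sent above):

  DataSetManager<KeyValueTable> allResults = getDataset("SparkResult");
  // "fox" appears in three of the five sentences sent to the stream
  Assert.assertEquals(3, Bytes.toInt(allResults.get().read("fox")));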
use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
the class SparkTestRun method testSparkWithObjectStore.
@Test
public void testSparkWithObjectStore() throws Exception {
  ApplicationManager applicationManager = deploy(SparkAppUsingObjectStore.class);
  DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
  prepareInputData(keysManager);
  SparkManager sparkManager = applicationManager.getSparkManager(CharCountProgram.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
  DataSetManager<KeyValueTable> countManager = getDataset("count");
  checkOutputData(countManager);
  // Validate that the "totals" table emitted metrics:
  // one read + one write in beforeSubmit(), plus an increment (= read + write) in main -> 4
  Tasks.waitFor(4L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = getMetricsManager().query(
        new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                            "system." + Constants.Metrics.Name.Dataset.OP_COUNT,
                            AggregationFunction.SUM,
                            ImmutableMap.of(
                              Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(),
                              Constants.Metrics.Tag.APP, SparkAppUsingObjectStore.class.getSimpleName(),
                              Constants.Metrics.Tag.SPARK, CharCountProgram.class.getSimpleName(),
                              Constants.Metrics.Tag.DATASET, "totals"),
                            Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 10L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
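prepareInputData and checkOutputData are private helpers of the test class and are not part of this snippet. A plausible sketch of the input side, assuming CharCountProgram counts characters per stored string (the sample keys and values are assumptions):

  private void prepareInputData(DataSetManager<ObjectStore<String>> manager) throws Exception {
    ObjectStore<String> keys = manager.get();
    keys.write(Bytes.toBytes("hello"), "hello");  // assumed sample entry
    keys.write(Bytes.toBytes("world"), "world");  // assumed sample entry
    manager.flush();  // commit the transaction so the Spark program sees the writes
  }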