Example usage of co.cask.cdap.test.StreamManager in the cdap project (caskdata): class BatchStreamIntegrationTestRun, method submitAndVerifyStreamBatchJob.
/**
 * Deploys the given application, obtains a manager for the named stream,
 * and delegates to verifyStreamBatchJob to run and validate the MapReduce job.
 */
private void submitAndVerifyStreamBatchJob(Class<? extends AbstractApplication> appClass, String streamWriter, String mapReduceName, int timeout) throws Exception {
  ApplicationManager appManager = deployApplication(appClass);
  StreamManager writerStream = getStreamManager(streamWriter);
  verifyStreamBatchJob(writerStream, appManager, mapReduceName, timeout);
}
Example usage of co.cask.cdap.test.StreamManager in the cdap project (caskdata): class TestFrameworkTestRun, method testBatchStreamUpload.
@Test
public void testBatchStreamUpload() throws Exception {
  StreamManager batchStream = getStreamManager("batchStream");
  batchStream.createStream();

  // CSV lines; each line of the uploaded file becomes one stream event.
  String[] events = {
    "this,is,some",
    "test,csv,data",
    "that,can,be,used,to,test",
    "batch,upload,capability",
    "for,streams in testbase"
  };

  // Write the events, one per line, to a temporary file.
  File testData = TEMP_FOLDER.newFile("test-stream-data.txt");
  try (FileWriter fileWriter = new FileWriter(testData);
       BufferedWriter out = new BufferedWriter(fileWriter)) {
    for (String event : events) {
      out.write(String.format("%s\n", event));
    }
  }

  // Batch upload the file containing the 5 events written above.
  // (The original comment incorrectly claimed 10 events.)
  batchStream.send(testData, "text/csv");

  // Verify each line arrived as a separate event, in order.
  List<StreamEvent> uploadedEvents = batchStream.getEvents(0, System.currentTimeMillis(), 100);
  Assert.assertEquals(events.length, uploadedEvents.size());
  for (int i = 0; i < events.length; i++) {
    Assert.assertEquals(events[i], Bytes.toString(uploadedEvents.get(i).getBody()));
  }
}
Example usage of co.cask.cdap.test.StreamManager in the cdap project (caskdata): class TestFrameworkTestRun, method testMultiInput.
@Category(XSlowTests.class)
@Test(timeout = 240000)
public void testMultiInput() throws Exception {
  // Deploy the join app and start the flow that merges three input streams.
  ApplicationManager appManager = deployApplication(JoinMultiStreamApp.class);
  FlowManager flow = appManager.getFlowManager("JoinMultiFlow").start();

  // Send one event to each of the three input streams (s1, s2, s3).
  for (int i = 1; i <= 3; i++) {
    getStreamManager("s" + i).send("testing " + i);
  }

  // Wait until the Terminal flowlet has processed all three events.
  RuntimeMetrics terminalMetrics = flow.getFlowletMetrics("Terminal");
  terminalMetrics.waitForProcessed(3, 60, TimeUnit.SECONDS);
  TimeUnit.SECONDS.sleep(1);

  // Start the query service and verify each input event is retrievable.
  ServiceManager queryService = appManager.getServiceManager("QueryService").start();
  queryService.waitForStatus(true, 2, 1);
  URL serviceURL = queryService.getServiceURL();
  Gson gson = new Gson();
  for (int i = 1; i <= 3; i++) {
    Assert.assertEquals("testing " + i, gson.fromJson(callServiceGet(serviceURL, "input" + i), String.class));
  }
}
Example usage of co.cask.cdap.test.StreamManager in the cdap project (caskdata): class TestFrameworkTestRun, method testApp.
// todo: passing stream name as a workaround for not cleaning up streams during reset()
/**
 * Deploys the given word-count app, sends 100 events to the named stream,
 * then verifies the flow, service, and MapReduce results end to end.
 */
private void testApp(Class<? extends Application> app, String streamName) throws Exception {
  ApplicationManager applicationManager = deployApplication(app);
  FlowManager flowManager = applicationManager.getFlowManager("WordCountFlow").start();

  // Send 100 events; each carries a "title" header and a 3-token body.
  StreamManager streamManager = getStreamManager(streamName);
  for (int i = 0; i < 100; i++) {
    streamManager.send(ImmutableMap.of("title", "title " + i), "testing message " + i);
  }

  // Check the flowlet metrics: 500 processed items expected, no exceptions.
  RuntimeMetrics flowletMetrics = flowManager.getFlowletMetrics("CountByField");
  flowletMetrics.waitForProcessed(500, 10, TimeUnit.SECONDS);
  Assert.assertEquals(0L, flowletMetrics.getException());

  // Query the word-frequency service for the "<stream>:testing" key.
  ServiceManager serviceManager = applicationManager.getServiceManager("WordFrequency").start();
  serviceManager.waitForStatus(true, 2, 1);
  Type resultType = new TypeToken<Map<String, Long>>() { }.getType();
  Map<String, Long> result = new Gson().fromJson(callServiceGet(serviceManager.getServiceURL(), "wordfreq/" + streamName + ":testing"), resultType);
  Assert.assertNotNull(result);
  Assert.assertEquals(100L, result.get(streamName + ":testing").longValue());

  // Check the service metrics: at least one request served, no exceptions.
  RuntimeMetrics serviceMetrics = serviceManager.getMetrics();
  serviceMetrics.waitForProcessed(1, 5, TimeUnit.SECONDS);
  Assert.assertEquals(0L, serviceMetrics.getException());

  // Run the MapReduce job over the dataset; every event has 5 tokens.
  MapReduceManager mrManager = applicationManager.getMapReduceManager("countTotal").start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 1800L, TimeUnit.SECONDS);
  long totalCount = Long.valueOf(callServiceGet(serviceManager.getServiceURL(), "total"));
  Assert.assertEquals(5 * 100L, totalCount);

  // Run the MapReduce job that reads directly from the stream.
  // The stream MR only consumes the body (3 tokens per event), not the header.
  mrManager = applicationManager.getMapReduceManager("countFromStream").start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 120L, TimeUnit.SECONDS);
  totalCount = Long.valueOf(callServiceGet(serviceManager.getServiceURL(), "stream_total"));
  Assert.assertEquals(3 * 100L, totalCount);

  // Verify the dataset contents. The original code fetched the dataset a second
  // time "to test the deprecated version of getDataset()", but that second call
  // was byte-identical to the first, so the redundant duplicate was removed.
  DataSetManager<MyKeyValueTableDefinition.KeyValueTable> mydatasetManager = getDataset("mydataset");
  Assert.assertEquals(100L, Long.valueOf(mydatasetManager.get().get("title:title")).longValue());
}
Example usage of co.cask.cdap.test.StreamManager in the cdap project (caskdata): class TestFrameworkTestRun, method testAppWithPlugin.
@Test
public void testAppWithPlugin() throws Exception {
// Register the app artifact plus a separate plugin artifact that extends it,
// then deploy the app from the artifact rather than from a class.
ArtifactId artifactId = NamespaceId.DEFAULT.artifact("app-with-plugin", "1.0.0-SNAPSHOT");
addAppArtifact(artifactId, AppWithPlugin.class);
ArtifactId pluginArtifactId = NamespaceId.DEFAULT.artifact("test-plugin", "1.0.0-SNAPSHOT");
addPluginArtifact(pluginArtifactId, artifactId, ToStringPlugin.class);
ApplicationId appId = NamespaceId.DEFAULT.app("AppWithPlugin");
AppRequest createRequest = new AppRequest(new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion()));
ApplicationManager appManager = deployApplication(appId, createRequest);
// Run the worker to completion, then confirm a COMPLETED run record appears.
final WorkerManager workerManager = appManager.getWorkerManager(AppWithPlugin.WORKER);
workerManager.start();
workerManager.waitForStatus(false, 5, 1);
Tasks.waitFor(false, new Callable<Boolean>() {

    @Override
    public Boolean call() throws Exception {
        return workerManager.getHistory(ProgramRunStatus.COMPLETED).isEmpty();
    }
}, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);
// Exercise the service once, stop it, and wait for a KILLED run record.
final ServiceManager serviceManager = appManager.getServiceManager(AppWithPlugin.SERVICE);
serviceManager.start();
serviceManager.waitForStatus(true, 1, 10);
URL serviceURL = serviceManager.getServiceURL(5, TimeUnit.SECONDS);
callServiceGet(serviceURL, "dummy");
serviceManager.stop();
serviceManager.waitForStatus(false, 1, 10);
Tasks.waitFor(false, new Callable<Boolean>() {

    @Override
    public Boolean call() throws Exception {
        return serviceManager.getHistory(ProgramRunStatus.KILLED).isEmpty();
    }
}, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);
// Run the workflow to completion and verify the value its plugin wrote.
WorkflowManager workflowManager = appManager.getWorkflowManager(AppWithPlugin.WORKFLOW);
workflowManager.start();
workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
List<RunRecord> runRecords = workflowManager.getHistory();
Assert.assertNotEquals(ProgramRunStatus.FAILED, runRecords.get(0).getStatus());
DataSetManager<KeyValueTable> workflowTableManager = getDataset(AppWithPlugin.WORKFLOW_TABLE);
String value = Bytes.toString(workflowTableManager.get().read("val"));
Assert.assertEquals(AppWithPlugin.TEST, value);
// Check that the workflow's destroy() emitted its user metric for this run.
Map<String, String> workflowTags = ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getNamespace(), Constants.Metrics.Tag.APP, "AppWithPlugin", Constants.Metrics.Tag.WORKFLOW, AppWithPlugin.WORKFLOW, Constants.Metrics.Tag.RUN_ID, runRecords.get(0).getPid());
getMetricsManager().waitForTotalMetricCount(workflowTags, String.format("user.destroy.%s", AppWithPlugin.WORKFLOW), 1, 60, TimeUnit.SECONDS);
// Testing Spark Plugins. First send some data to stream for the Spark program to process
StreamManager streamManager = getStreamManager(AppWithPlugin.SPARK_STREAM);
for (int i = 0; i < 5; i++) {
streamManager.send("Message " + i);
}
SparkManager sparkManager = appManager.getSparkManager(AppWithPlugin.SPARK).start();
sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);
// Verify the Spark result.
// Each row key and each cell value are "Message <i> <TEST>", one row per event.
DataSetManager<Table> dataSetManager = getDataset(AppWithPlugin.SPARK_TABLE);
Table table = dataSetManager.get();
try (Scanner scanner = table.scan(null, null)) {
for (int i = 0; i < 5; i++) {
Row row = scanner.next();
Assert.assertNotNull(row);
String expected = "Message " + i + " " + AppWithPlugin.TEST;
Assert.assertEquals(expected, Bytes.toString(row.getRow()));
Assert.assertEquals(expected, Bytes.toString(row.get(expected)));
}
// There shouldn't be any more rows in the table.
Assert.assertNull(scanner.next());
}
}
Aggregations