Use of co.cask.cdap.test.StreamManager in project cdap by caskdata.
The class WebAnalyticsTest, method testWebAnalytics:
@Test
public void testWebAnalytics() throws Exception {
  // Deploy the Application
  ApplicationManager appManager = deployApplication(WebAnalytics.class);
  // Start the Flow
  FlowManager flowManager = appManager.getFlowManager("WebAnalyticsFlow").start();
  // Send events to the Stream
  StreamManager streamManager = getStreamManager("log");
  BufferedReader reader = new BufferedReader(
      new InputStreamReader(getClass().getResourceAsStream("/access.log"), "UTF-8"));
  int lines = 0;
  try {
    String line = reader.readLine();
    while (line != null) {
      streamManager.send(line);
      lines++;
      line = reader.readLine();
    }
  } finally {
    reader.close();
  }
  // Wait for the flow to process all data
  RuntimeMetrics flowletMetrics = flowManager.getFlowletMetrics("UniqueVisitor");
  flowletMetrics.waitForProcessed(lines, 10, TimeUnit.SECONDS);
  // Verify the unique count
  UniqueVisitCount uniqueVisitCount = this.<UniqueVisitCount>getDataset("UniqueVisitCount").get();
  Assert.assertEquals(3L, uniqueVisitCount.getCount("192.168.12.72"));
}
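The deployApplication, getStreamManager, and getDataset calls above are not defined in the test itself; CDAP example tests inherit them from the framework's JUnit base class. A minimal harness sketch, assuming the test extends co.cask.cdap.test.TestBase (the sendOneEvent body and its sample log line are illustrative, not part of the original test):

import co.cask.cdap.test.ApplicationManager;
import co.cask.cdap.test.StreamManager;
import co.cask.cdap.test.TestBase;
import org.junit.Test;

// TestBase starts an in-memory CDAP instance and supplies the
// deploy/stream/dataset helper methods used throughout this page.
public class WebAnalyticsTest extends TestBase {

  @Test
  public void sendOneEvent() throws Exception {
    ApplicationManager appManager = deployApplication(WebAnalytics.class);
    StreamManager streamManager = getStreamManager("log");
    // A single Apache-style access log line (illustrative)
    streamManager.send("192.168.12.72 - - [01/Feb/2015:06:47:10 +0000] \"GET /index.html HTTP/1.1\" 200 225");
  }
}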
Use of co.cask.cdap.test.StreamManager in project cdap by caskdata.
The class SparkKMeansAppTest, method test:
@Test
public void test() throws Exception {
  // Deploy the Application
  ApplicationManager appManager = deployApplication(SparkKMeansApp.class);
  // Start the Flow
  FlowManager flowManager = appManager.getFlowManager("PointsFlow").start();
  // Send a few points to the stream:
  // one cluster around (0, 500, 0) and another around (100, 0, 0)
  StreamManager streamManager = getStreamManager("pointsStream");
  for (int i = 0; i < 100; i++) {
    double diff = Math.random() / 100;
    streamManager.send(String.format("%f %f %f", diff, 500 + diff, diff));
    streamManager.send(String.format("%f %f %f", 100 + diff, diff, diff));
  }
  // Wait for the events to be processed, or at most 10 seconds
  RuntimeMetrics metrics = flowManager.getFlowletMetrics("reader");
  metrics.waitForProcessed(200, 10, TimeUnit.SECONDS);
  // Start a Spark Program
  SparkManager sparkManager = appManager.getSparkManager("SparkKMeansProgram").start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
  flowManager.stop();
  // Start CentersService
  ServiceManager serviceManager = appManager.getServiceManager(SparkKMeansApp.CentersService.SERVICE_NAME).start();
  // Wait for service startup
  serviceManager.waitForStatus(true);
  // Request data and verify it
  String response = requestService(new URL(serviceManager.getServiceURL(15, TimeUnit.SECONDS), "centers/0"));
  String[] coordinates = response.split(",");
  int x0 = (int) Double.parseDouble(coordinates[0]);
  int y0 = (int) Double.parseDouble(coordinates[1]);
  int z0 = (int) Double.parseDouble(coordinates[2]);
  response = requestService(new URL(serviceManager.getServiceURL(15, TimeUnit.SECONDS), "centers/1"));
  coordinates = response.split(",");
  int x1 = (int) Double.parseDouble(coordinates[0]);
  int y1 = (int) Double.parseDouble(coordinates[1]);
  int z1 = (int) Double.parseDouble(coordinates[2]);
  // One cluster should be around (0, 500, 0) and the other around (100, 0, 0)
  if (x0 == 100) {
    Assert.assertEquals(0, y0);
    Assert.assertEquals(0, z0);
    Assert.assertEquals(0, x1);
    Assert.assertEquals(500, y1);
    Assert.assertEquals(0, z1);
  } else {
    Assert.assertEquals(0, x0);
    Assert.assertEquals(500, y0);
    Assert.assertEquals(0, z0);
    Assert.assertEquals(100, x1);
    Assert.assertEquals(0, y1);
    Assert.assertEquals(0, z1);
  }
  // Request data by an invalid index and verify the response
  URL url = new URL(serviceManager.getServiceURL(15, TimeUnit.SECONDS), "centers/10");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  try {
    Assert.assertEquals(HttpURLConnection.HTTP_NO_CONTENT, conn.getResponseCode());
  } finally {
    conn.disconnect();
  }
}
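Both SparkKMeansAppTest variants on this page call a requestService helper that the snippets do not show. A plausible sketch using only standard Java (ByteArrayOutputStream, InputStream, and StandardCharsets come from java.io and java.nio.charset), assuming the service returns the centroid as a UTF-8 body and 200 OK for a valid index:

// Hypothetical helper; the real test class defines its own version.
private String requestService(URL url) throws IOException {
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  try {
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    // Read the whole response body into a string
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (InputStream in = conn.getInputStream()) {
      byte[] buf = new byte[4096];
      int n;
      while ((n = in.read(buf)) != -1) {
        out.write(buf, 0, n);
      }
    }
    return new String(out.toByteArray(), StandardCharsets.UTF_8);
  } finally {
    conn.disconnect();
  }
}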
Use of co.cask.cdap.test.StreamManager in project cdap by caskdata.
The class StreamConversionTest, method testStreamConversion:
@Test
public void testStreamConversion() throws Exception {
  // Deploy the StreamConversionApp application
  ApplicationManager appManager = deployApplication(StreamConversionApp.class);
  // Send some data to the events stream
  StreamManager streamManager = getStreamManager("events");
  streamManager.send("15");
  streamManager.send("16");
  streamManager.send("17");
  // Record the current time. Add 1 in case the stream events are added with the same timestamp as the current time.
  final long startTime = System.currentTimeMillis() + 1;
  // Run the MapReduce
  MapReduceManager mapReduceManager = appManager.getMapReduceManager("StreamConversionMapReduce")
      .start(ImmutableMap.of("logical.start.time", Long.toString(startTime)));
  mapReduceManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  // Verify the single partition in the file set
  DataSetManager<TimePartitionedFileSet> fileSetManager = getDataset("converted");
  Assert.assertNotNull(fileSetManager.get().getPartitionByTime(startTime));
  Calendar calendar = Calendar.getInstance();
  calendar.setTimeInMillis(startTime);
  int year = calendar.get(Calendar.YEAR);
  int month = calendar.get(Calendar.MONTH) + 1;
  int day = calendar.get(Calendar.DAY_OF_MONTH);
  int hour = calendar.get(Calendar.HOUR_OF_DAY);
  int minute = calendar.get(Calendar.MINUTE);
  // Query with SQL
  Connection connection = getQueryClient();
  ResultSet results = connection
      .prepareStatement("SELECT year, month, day, hour, minute FROM dataset_converted WHERE body = '17'")
      .executeQuery();
  // Should return only one row, with correct time fields
  Assert.assertTrue(results.next());
  Assert.assertEquals(year, results.getInt(1));
  Assert.assertEquals(month, results.getInt(2));
  Assert.assertEquals(day, results.getInt(3));
  Assert.assertEquals(hour, results.getInt(4));
  Assert.assertEquals(minute, results.getInt(5));
  Assert.assertFalse(results.next());
}
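The query step above leaves the Connection and ResultSet from getQueryClient() open. A tighter variant of just that step (java.sql.PreparedStatement assumed imported), relying on getQueryClient() returning a standard java.sql.Connection as it does here, closes everything with try-with-resources:

try (Connection connection = getQueryClient();
     PreparedStatement stmt = connection.prepareStatement(
         "SELECT year, month, day, hour, minute FROM dataset_converted WHERE body = '17'");
     ResultSet results = stmt.executeQuery()) {
  // Should return only one row, with correct time fields
  Assert.assertTrue(results.next());
  Assert.assertEquals(year, results.getInt(1));
  Assert.assertEquals(month, results.getInt(2));
  Assert.assertEquals(day, results.getInt(3));
  Assert.assertEquals(hour, results.getInt(4));
  Assert.assertEquals(minute, results.getInt(5));
  Assert.assertFalse(results.next());
}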
Use of co.cask.cdap.test.StreamManager in project cdap by caskdata.
The class SparkKMeansAppTest, method test:
@Test
public void test() throws Exception {
  // Deploy the Application
  ApplicationManager appManager = deployApplication(SparkKMeansApp.class);
  // Start the Flow
  FlowManager flowManager = appManager.getFlowManager("PointsFlow").start();
  // Send a few points to the stream
  StreamManager streamManager = getStreamManager("pointsStream");
  streamManager.send("10.6 519.2 110.3");
  streamManager.send("10.6 518.1 110.1");
  streamManager.send("10.6 519.6 109.9");
  streamManager.send("10.6 517.9 108.9");
  streamManager.send("10.7 518 109.2");
  // Wait for all five events to be processed, or at most 5 seconds
  RuntimeMetrics metrics = flowManager.getFlowletMetrics("reader");
  metrics.waitForProcessed(5, 5, TimeUnit.SECONDS);
  // Start a Spark Program
  SparkManager sparkManager = appManager.getSparkManager("SparkKMeansProgram").start();
  sparkManager.waitForFinish(60, TimeUnit.SECONDS);
  flowManager.stop();
  // Start CentersService
  ServiceManager serviceManager = appManager.getServiceManager(SparkKMeansApp.CentersService.SERVICE_NAME).start();
  // Wait for service startup
  serviceManager.waitForStatus(true);
  // Request data and verify it
  String response = requestService(new URL(serviceManager.getServiceURL(15, TimeUnit.SECONDS), "centers/1"));
  String[] coordinates = response.split(",");
  Assert.assertEquals(3, coordinates.length);
  for (String coordinate : coordinates) {
    double value = Double.parseDouble(coordinate);
    Assert.assertTrue(value > 0);
  }
  // Request data by an invalid index and verify the response
  URL url = new URL(serviceManager.getServiceURL(15, TimeUnit.SECONDS), "centers/10");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  try {
    Assert.assertEquals(HttpURLConnection.HTTP_NO_CONTENT, conn.getResponseCode());
  } finally {
    conn.disconnect();
  }
}
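Note the API drift between the two SparkKMeansAppTest variants: this one blocks with sparkManager.waitForFinish(60, TimeUnit.SECONDS), while the first variant uses the run-status API available on newer CDAP releases:

// Equivalent wait on CDAP versions that expose ProgramRunStatus
sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);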
Use of co.cask.cdap.test.StreamManager in project cdap by caskdata.
The class SparkTestRun, method testTransaction:
@Test
public void testTransaction() throws Exception {
  ApplicationManager applicationManager = deploy(TestSparkApp.class);
  StreamManager streamManager = getStreamManager("SparkStream");
  // Write some sentences to the stream
  streamManager.send("red fox");
  streamManager.send("brown fox");
  streamManager.send("grey fox");
  streamManager.send("brown bear");
  streamManager.send("black bear");
  // Run the Spark program
  SparkManager sparkManager = applicationManager.getSparkManager(TransactionSpark.class.getSimpleName());
  sparkManager.start(ImmutableMap.of(
      "source.stream", "SparkStream",
      "keyvalue.table", "KeyValueTable",
      "result.all.dataset", "SparkResult",
      "result.threshold", "2",
      "result.threshold.dataset", "SparkThresholdResult"));
  // Verify the result dataset before the Spark program terminates
  final DataSetManager<KeyValueTable> resultManager = getDataset("SparkThresholdResult");
  final KeyValueTable resultTable = resultManager.get();
  // Expect the threshold result dataset, with threshold >= 2, to contain [brown, fox, bear]
  Tasks.waitFor(ImmutableSet.of("brown", "fox", "bear"), new Callable<Set<String>>() {

    @Override
    public Set<String> call() throws Exception {
      // Flush to start a new transaction
      resultManager.flush();
      LOG.info("Reading from threshold result");
      try (CloseableIterator<KeyValue<byte[], byte[]>> itor = resultTable.scan(null, null)) {
        return ImmutableSet.copyOf(Iterators.transform(itor, new Function<KeyValue<byte[], byte[]>, String>() {

          @Override
          public String apply(KeyValue<byte[], byte[]> input) {
            String word = Bytes.toString(input.getKey());
            LOG.info("{}, {}", word, Bytes.toInt(input.getValue()));
            return word;
          }
        }));
      }
    }
  }, 3, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);
  sparkManager.stop();
  sparkManager.waitForRun(ProgramRunStatus.KILLED, 60, TimeUnit.SECONDS);
}
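Because Callable and Guava's Function are both single-method interfaces, the polling block in testTransaction collapses into lambdas on Java 8. A sketch of the same wait with unchanged semantics (logging dropped for brevity):

Tasks.waitFor(ImmutableSet.of("brown", "fox", "bear"), () -> {
  // Flush so the scan runs in a fresh transaction and sees the latest writes
  resultManager.flush();
  try (CloseableIterator<KeyValue<byte[], byte[]>> itor = resultTable.scan(null, null)) {
    return ImmutableSet.copyOf(
        Iterators.transform(itor, input -> Bytes.toString(input.getKey())));
  }
}, 3, TimeUnit.MINUTES, 1, TimeUnit.SECONDS);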