Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.
The class SparkTest, method testSparkProgramStatusSchedule.
@Test
public void testSparkProgramStatusSchedule() throws Exception {
  ApplicationManager appManager = deploy(TestSparkApp.class);
  ScheduleId scheduleId = new ScheduleId(NamespaceId.DEFAULT.getNamespace(),
                                         TestSparkApp.class.getSimpleName(), "schedule");
  appManager.enableSchedule(scheduleId);
  WorkflowManager workflowManager =
    appManager.getWorkflowManager(TestSparkApp.TriggeredWorkflow.class.getSimpleName());
  int numRuns = workflowManager.getHistory(ProgramRunStatus.COMPLETED).size();
  // Start the upstream Spark program; its completion triggers the scheduled workflow
  SparkManager sparkManager =
    appManager.getSparkManager(TestSparkApp.ScalaClassicSpark.class.getSimpleName());
  sparkManager.start();
  // Wait for the downstream workflow to complete
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  // Run the upstream program again, this time with the Kryo serializer
  sparkManager.start(Collections.singletonMap("spark.serializer",
                                              "org.apache.spark.serializer.KryoSerializer"));
  // Wait for the downstream workflow to complete a second time
  workflowManager.waitForRuns(ProgramRunStatus.COMPLETED, numRuns + 2, 5, TimeUnit.MINUTES);
}
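SparkManager inherits its lifecycle helpers from ProgramManager, so the upstream runs can be awaited directly as well. A minimal sketch; the run count and timeout here are illustrative, not from the original test:

  // Optionally wait for both upstream Spark runs themselves to complete
  sparkManager.waitForRuns(ProgramRunStatus.COMPLETED, 2, 5, TimeUnit.MINUTES);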
Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.
The class SparkMetricsIntegrationTestRun, method testSparkMetrics.
@Test
public void testSparkMetrics() throws Exception {
  ApplicationManager applicationManager = deployApplication(TestSparkMetricsIntegrationApp.class);
  SparkManager sparkManager =
    applicationManager.getSparkManager(TestSparkMetricsIntegrationApp.APP_SPARK_NAME).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 120, TimeUnit.SECONDS);
  List<RunRecord> history = sparkManager.getHistory(ProgramRunStatus.COMPLETED);
  Assert.assertEquals(1, history.size());
  // Wait for the metrics to get updated
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return getSparkMetric(TestSparkMetricsIntegrationApp.APP_NAME,
                            TestSparkMetricsIntegrationApp.APP_SPARK_NAME,
                            "system.driver.BlockManager.memory.remainingMem_MB") > 0;
    }
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
  Tasks.waitFor(2L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      return getSparkMetric(TestSparkMetricsIntegrationApp.APP_NAME,
                            TestSparkMetricsIntegrationApp.APP_SPARK_NAME, "user.more.than.30");
    }
  }, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}
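Callable is a functional interface, so the two polling blocks above can be written more compactly with lambdas. A behavior-preserving sketch of the first one:

  // Poll every 100 ms, for up to 10 s, until the driver reports remaining block-manager memory
  Tasks.waitFor(true,
                () -> getSparkMetric(TestSparkMetricsIntegrationApp.APP_NAME,
                                     TestSparkMetricsIntegrationApp.APP_SPARK_NAME,
                                     "system.driver.BlockManager.memory.remainingMem_MB") > 0,
                10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);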
Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.
The class SparkServiceIntegrationTestRun, method testSparkWithService.
@Test
public void testSparkWithService() throws Exception {
  ApplicationManager applicationManager = deployApplication(TestSparkServiceIntegrationApp.class);
  startService(applicationManager);
  SparkManager sparkManager = applicationManager
    .getSparkManager(TestSparkServiceIntegrationApp.SparkServiceProgram.class.getSimpleName())
    .start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 120, TimeUnit.SECONDS);
  DataSetManager<KeyValueTable> datasetManager = getDataset("result");
  KeyValueTable results = datasetManager.get();
  // The Spark program squares each key 1..5; verify the stored values
  for (int i = 1; i <= 5; i++) {
    byte[] key = String.valueOf(i).getBytes(Charsets.UTF_8);
    Assert.assertEquals(i * i, Integer.parseInt(Bytes.toString(results.read(key))));
  }
}
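The startService helper is not shown in this excerpt. A plausible minimal implementation using the same test framework; the SERVICE_NAME constant and the timeout are assumptions for illustration:

  private void startService(ApplicationManager applicationManager) throws Exception {
    // Start the companion service and wait until it reports RUNNING
    // (SERVICE_NAME is a hypothetical constant naming the service in the app)
    ServiceManager serviceManager =
      applicationManager.getServiceManager(TestSparkServiceIntegrationApp.SERVICE_NAME).start();
    serviceManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
  }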
Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.
The class Spark2Test, method testScalaSparkWithObjectStore.
@Test
public void testScalaSparkWithObjectStore() throws Exception {
  ApplicationManager applicationManager = deploy(NamespaceId.DEFAULT, SparkAppUsingObjectStore.class);
  DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
  prepareInputData(keysManager);
  SparkManager sparkManager =
    applicationManager.getSparkManager(ScalaCharCountProgram.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
  sparkManager.waitForStopped(60, TimeUnit.SECONDS);
  DataSetManager<KeyValueTable> countManager = getDataset("count");
  checkOutputData(countManager);
}
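Note that waitForStopped only waits for the run to terminate, whether it succeeded or failed. If the test should fail on an unsuccessful run, the run history can be checked explicitly, as in the metrics test above; a small sketch:

  // Assert the run actually completed successfully rather than failing or being killed
  Assert.assertEquals(1, sparkManager.getHistory(ProgramRunStatus.COMPLETED).size());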
Use of io.cdap.cdap.test.SparkManager in project cdap by caskdata.
The class Spark2Test, method testSpark2Service.
@Test
public void testSpark2Service() throws Exception {
  ApplicationManager applicationManager = deploy(NamespaceId.DEFAULT, Spark2TestApp.class);
  SparkManager manager =
    applicationManager.getSparkManager(ScalaSparkServiceProgram.class.getSimpleName()).start();
  // Block until the Spark-hosted service announces its endpoint
  URL url = manager.getServiceURL(5, TimeUnit.MINUTES);
  Assert.assertNotNull(url);
  // GET request to sum n numbers
  URL sumURL = url.toURI()
    .resolve("sum?n=" + Joiner.on("&n=").join(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)).toURL();
  HttpURLConnection urlConn = (HttpURLConnection) sumURL.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_OK, urlConn.getResponseCode());
  try (InputStream is = urlConn.getInputStream()) {
    // 1 + 2 + ... + 10 = 55
    Assert.assertEquals(55, Integer.parseInt(new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8)));
  }
}
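Unlike the batch examples above, this Spark program hosts a service and keeps running after the assertions pass. A hedged cleanup sketch using the same ProgramManager helpers seen earlier; the timeout is illustrative:

  // Stop the long-running Spark service program and wait for it to shut down
  manager.stop();
  manager.waitForStopped(60, TimeUnit.SECONDS);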