Use of co.cask.cdap.test.WorkerManager in project cdap by caskdata.
From the class ETLWorkerTest, method testOneSourceOneSink.
@Test
@Category(SlowTests.class)
public void testOneSourceOneSink() throws Exception {
  Schema schema = Schema.recordOf("test",
                                  Schema.Field.of("id", Schema.of(Schema.Type.STRING)),
                                  Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  List<StructuredRecord> input = new ArrayList<>();
  input.add(StructuredRecord.builder(schema).set("id", "123").set("name", "samuel").build());
  input.add(StructuredRecord.builder(schema).set("id", "456").set("name", "jackson").build());

  File tmpDir = TMP_FOLDER.newFolder();
  ETLRealtimeConfig etlConfig = ETLRealtimeConfig.builder()
    .addStage(new ETLStage("source", MockSource.getPlugin(input)))
    .addStage(new ETLStage("sink", MockSink.getPlugin(tmpDir)))
    .addConnection("source", "sink")
    .build();

  ApplicationId appId = NamespaceId.DEFAULT.app("simpleApp");
  AppRequest<ETLRealtimeConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationManager appManager = deployApplication(appId, appRequest);

  WorkerManager workerManager = appManager.getWorkerManager(ETLWorker.NAME);
  workerManager.start();
  workerManager.waitForStatus(true, 10, 1);
  try {
    List<StructuredRecord> written = MockSink.getRecords(tmpDir, 0, 10, TimeUnit.SECONDS);
    Assert.assertEquals(input, written);
  } finally {
    stopWorker(workerManager);
  }

  validateMetric(2, appId, "source.records.out");
  validateMetric(2, appId, "sink.records.in");
}
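The stopWorker helper called in the finally block is defined elsewhere in ETLWorkerTest and is not part of this snippet. A minimal sketch of what such a helper could look like, assuming it only needs to stop the worker and then wait until the worker reports as stopped (the retry and timeout values mirror the waitForStatus calls shown above):

  // Hypothetical helper: stop the worker and wait until it is no longer running.
  private void stopWorker(WorkerManager workerManager) throws Exception {
    workerManager.stop();
    // Same retry/timeout style as the waitForStatus(true, 10, 1) call used after start().
    workerManager.waitForStatus(false, 10, 1);
  }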
Use of co.cask.cdap.test.WorkerManager in project cdap by caskdata.
From the class ETLWorkerTest, method testLookup.
@Test
public void testLookup() throws Exception {
  addDatasetInstance(KeyValueTable.class.getName(), "lookupTable");
  DataSetManager<KeyValueTable> lookupTable = getDataset("lookupTable");
  lookupTable.get().write("Bob".getBytes(Charsets.UTF_8), "123".getBytes(Charsets.UTF_8));
  lookupTable.flush();

  File outDir = TMP_FOLDER.newFolder();
  ETLRealtimeConfig etlConfig = ETLRealtimeConfig.builder()
    .addStage(new ETLStage("source", LookupSource.getPlugin(ImmutableSet.of("Bob", "Bill"), "lookupTable")))
    .addStage(new ETLStage("sink", MockSink.getPlugin(outDir)))
    .addConnection("source", "sink")
    .build();

  ApplicationId appId = NamespaceId.DEFAULT.app("lookupTestApp");
  AppRequest<ETLRealtimeConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationManager appManager = deployApplication(appId, appRequest);

  WorkerManager workerManager = appManager.getWorkerManager(ETLWorker.NAME);
  workerManager.start();
  workerManager.waitForStatus(true, 10, 1);

  Schema schema = Schema.recordOf("bobbill",
                                  Schema.Field.of("Bob", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
                                  Schema.Field.of("Bill", Schema.nullableOf(Schema.of(Schema.Type.STRING))));
  List<StructuredRecord> expected = new ArrayList<>();
  expected.add(StructuredRecord.builder(schema).set("Bob", "123").build());

  try {
    List<StructuredRecord> actual = MockSink.getRecords(outDir, 0, 10, TimeUnit.SECONDS);
    Assert.assertEquals(expected, actual);
  } finally {
    stopWorker(workerManager);
  }

  validateMetric(1, appId, "source.records.out");
  validateMetric(1, appId, "sink.records.in");
}
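validateMetric is another ETLWorkerTest helper whose body is not shown here. A plausible sketch, assuming it queries the pipeline's user metrics through the test framework's MetricsManager under the application's namespace and app tags (the exact tag set, metric prefix, and timeout are assumptions, not taken from the source):

  // Hypothetical helper: wait until a user metric for this app reaches the expected count.
  private void validateMetric(long expected, ApplicationId appId, String metric) throws Exception {
    Map<String, String> tags = ImmutableMap.of(
      Constants.Metrics.Tag.NAMESPACE, appId.getNamespace(),
      Constants.Metrics.Tag.APP, appId.getApplication());
    getMetricsManager().waitForTotalMetricCount(tags, "user." + metric, expected, 20, TimeUnit.SECONDS);
  }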
Use of co.cask.cdap.test.WorkerManager in project cdap by caskdata.
From the class ETLWorkerTest, method testEmptyProperties.
@Test
public void testEmptyProperties() throws Exception {
  // Set properties to null to test if ETLTemplate can handle it.
  ETLRealtimeConfig etlConfig = ETLRealtimeConfig.builder()
    .addStage(new ETLStage("source", MockSource.getPlugin(null)))
    .addStage(new ETLStage("sink", MockSink.getPlugin(null)))
    .addConnection("source", "sink")
    .setInstances(2)
    .build();

  ApplicationId appId = NamespaceId.DEFAULT.app("emptyTest");
  AppRequest<ETLRealtimeConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  Assert.assertNotNull(appManager);

  WorkerManager workerManager = appManager.getWorkerManager(ETLWorker.NAME);
  workerManager.start();
  workerManager.waitForStatus(true, 10, 1);
  try {
    Assert.assertEquals(2, workerManager.getInstances());
  } finally {
    stopWorker(workerManager);
  }
}
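Besides asserting the initial instance count from setInstances(2), the same manager can be used to rescale a running worker. A short, hypothetical follow-up along these lines (not part of the original test), assuming WorkerManager's setInstances is available in this version of the test framework:

  // Hypothetical follow-up: scale the running worker and verify the new count.
  workerManager.setInstances(3);
  Assert.assertEquals(3, workerManager.getInstances());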
Use of co.cask.cdap.test.WorkerManager in project cdap by caskdata.
From the class AuthorizationTest, method testWorkerStreamAuth.
@Test
@Category(SlowTests.class)
public void testWorkerStreamAuth() throws Exception {
  createAuthNamespace();
  Authorizer authorizer = getAuthorizer();
  setUpPrivilegeToDeployStreamAuthApp();

  StreamId streamId = AUTH_NAMESPACE.stream(StreamAuthApp.STREAM);
  Map<EntityId, Set<Action>> additionalPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
    .put(streamId, EnumSet.of(Action.READ, Action.WRITE))
    .put(AUTH_NAMESPACE.app(StreamAuthApp.APP).worker(StreamAuthApp.WORKER), EnumSet.of(Action.EXECUTE))
    .build();
  setUpPrivilegeAndRegisterForDeletion(ALICE, additionalPrivileges);

  ApplicationManager appManager = deployApplication(AUTH_NAMESPACE, StreamAuthApp.class);
  WorkerManager workerManager = appManager.getWorkerManager(StreamAuthApp.WORKER);
  workerManager.start();
  workerManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);

  StreamManager streamManager = getStreamManager(AUTH_NAMESPACE.stream(StreamAuthApp.STREAM));
  Assert.assertEquals(5, streamManager.getEvents(0, Long.MAX_VALUE, Integer.MAX_VALUE).size());

  // Now revoke write permission for Alice on that stream
  authorizer.revoke(Authorizable.fromEntityId(streamId), ALICE, EnumSet.of(Action.WRITE));
  workerManager.start();
  workerManager.waitForRuns(ProgramRunStatus.FAILED, 1, 60, TimeUnit.SECONDS);
  Assert.assertEquals(5, streamManager.getEvents(0, Long.MAX_VALUE, Integer.MAX_VALUE).size());

  appManager.delete();
}
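setUpPrivilegeAndRegisterForDeletion is a test-class helper whose body is not shown in this snippet. Presumably it grants each requested privilege to the principal via the Authorizer (mirroring the revoke call above) and records the entities so the privileges can be cleaned up after the test. A rough sketch under that assumption; the grant signature is inferred from the revoke call, and the cleanup collection is hypothetical:

  // Hypothetical sketch: grant each requested action to the principal and track it for later cleanup.
  private void setUpPrivilegeAndRegisterForDeletion(Principal principal,
                                                    Map<EntityId, Set<Action>> privileges) throws Exception {
    for (Map.Entry<EntityId, Set<Action>> entry : privileges.entrySet()) {
      getAuthorizer().grant(Authorizable.fromEntityId(entry.getKey()), principal, entry.getValue());
      // Assumed bookkeeping collection in the test class, used to revoke privileges on teardown.
      cleanUpEntities.add(entry.getKey());
    }
  }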
Use of co.cask.cdap.test.WorkerManager in project cdap by caskdata.
From the class TestFrameworkTestRun, method testAppWithPlugin.
@Test
public void testAppWithPlugin() throws Exception {
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact("app-with-plugin", "1.0.0-SNAPSHOT");
  addAppArtifact(artifactId, AppWithPlugin.class);
  ArtifactId pluginArtifactId = NamespaceId.DEFAULT.artifact("test-plugin", "1.0.0-SNAPSHOT");
  addPluginArtifact(pluginArtifactId, artifactId, ToStringPlugin.class);

  ApplicationId appId = NamespaceId.DEFAULT.app("AppWithPlugin");
  AppRequest createRequest = new AppRequest(new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion()));
  ApplicationManager appManager = deployApplication(appId, createRequest);

  final WorkerManager workerManager = appManager.getWorkerManager(AppWithPlugin.WORKER);
  workerManager.start();
  workerManager.waitForStatus(false, 5, 1);
  Tasks.waitFor(false, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return workerManager.getHistory(ProgramRunStatus.COMPLETED).isEmpty();
    }
  }, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);

  final ServiceManager serviceManager = appManager.getServiceManager(AppWithPlugin.SERVICE);
  serviceManager.start();
  serviceManager.waitForStatus(true, 1, 10);
  URL serviceURL = serviceManager.getServiceURL(5, TimeUnit.SECONDS);
  callServiceGet(serviceURL, "dummy");
  serviceManager.stop();
  serviceManager.waitForStatus(false, 1, 10);
  Tasks.waitFor(false, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return serviceManager.getHistory(ProgramRunStatus.KILLED).isEmpty();
    }
  }, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);

  WorkflowManager workflowManager = appManager.getWorkflowManager(AppWithPlugin.WORKFLOW);
  workflowManager.start();
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  List<RunRecord> runRecords = workflowManager.getHistory();
  Assert.assertNotEquals(ProgramRunStatus.FAILED, runRecords.get(0).getStatus());
  DataSetManager<KeyValueTable> workflowTableManager = getDataset(AppWithPlugin.WORKFLOW_TABLE);
  String value = Bytes.toString(workflowTableManager.get().read("val"));
  Assert.assertEquals(AppWithPlugin.TEST, value);

  Map<String, String> workflowTags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getNamespace(),
    Constants.Metrics.Tag.APP, "AppWithPlugin",
    Constants.Metrics.Tag.WORKFLOW, AppWithPlugin.WORKFLOW,
    Constants.Metrics.Tag.RUN_ID, runRecords.get(0).getPid());
  getMetricsManager().waitForTotalMetricCount(workflowTags,
                                              String.format("user.destroy.%s", AppWithPlugin.WORKFLOW),
                                              1, 60, TimeUnit.SECONDS);

  // Testing Spark Plugins. First send some data to stream for the Spark program to process
  StreamManager streamManager = getStreamManager(AppWithPlugin.SPARK_STREAM);
  for (int i = 0; i < 5; i++) {
    streamManager.send("Message " + i);
  }

  SparkManager sparkManager = appManager.getSparkManager(AppWithPlugin.SPARK).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);

  // Verify the Spark result.
  DataSetManager<Table> dataSetManager = getDataset(AppWithPlugin.SPARK_TABLE);
  Table table = dataSetManager.get();
  try (Scanner scanner = table.scan(null, null)) {
    for (int i = 0; i < 5; i++) {
      Row row = scanner.next();
      Assert.assertNotNull(row);
      String expected = "Message " + i + " " + AppWithPlugin.TEST;
      Assert.assertEquals(expected, Bytes.toString(row.getRow()));
      Assert.assertEquals(expected, Bytes.toString(row.get(expected)));
    }
    // There shouldn't be any more rows in the table.
    Assert.assertNull(scanner.next());
  }
}
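callServiceGet, used against the service above, is also a helper defined outside this snippet. A minimal sketch, assuming it simply issues an HTTP GET against the given path under the service URL and returns the response body (names and behavior here are assumptions for illustration):

  // Hypothetical helper: GET <serviceURL>/<path> and return the first line of the response body.
  private String callServiceGet(URL serviceURL, String path) throws IOException {
    HttpURLConnection connection = (HttpURLConnection) new URL(serviceURL, path).openConnection();
    try (BufferedReader reader =
           new BufferedReader(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
      return reader.readLine();
    } finally {
      connection.disconnect();
    }
  }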