Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.
From the class AuthorizationTest, the method addDummyData:
private void addDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  KeyValueTable inputTable = tableManager.get();
  inputTable.write("hello", "world");
  tableManager.flush();
}
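For context, a read-side counterpart makes the round trip explicit. This is a minimal sketch, not part of the original test class; assertDummyData is a hypothetical helper, while KeyValueTable.read(String) and Bytes.toString are existing CDAP/JUnit calls.

// Hypothetical counterpart to addDummyData (not in the original test): reads the value back.
private void assertDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  // read(String) returns the stored bytes, or null if the key is absent
  Assert.assertEquals("world", Bytes.toString(tableManager.get().read("hello")));
}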
Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.
From the class AuthorizationTest, the method testCrossNSDatasetAccessFromFlowlet:
private void testCrossNSDatasetAccessFromFlowlet(final FlowManager flowManager) throws Exception {
  NamespaceMeta outputDatasetNS = new NamespaceMeta.Builder().setName("outputNS").build();
  getNamespaceAdmin().create(outputDatasetNS);
  addDatasetInstance(outputDatasetNS.getNamespaceId().dataset("store"), "keyValueTable");
  // switch to BOB
  SecurityRequestContext.setUserId(BOB.getName());
  Map<String, String> args = ImmutableMap.of(
    CrossNsDatasetAccessApp.OUTPUT_DATASET_NS, outputDatasetNS.getNamespaceId().getNamespace(),
    CrossNsDatasetAccessApp.OUTPUT_DATASET_NAME, "store");
  // Running the flow as BOB should fail, since the flow writes to a dataset in another
  // namespace which is not accessible to BOB.
  flowManager.start(args);
  // wait for the flow to be running
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return flowManager.isRunning();
    }
  }, 120, TimeUnit.SECONDS);
  // The above is a runtime failure after the flow starts, because the flow cannot use the
  // dataset in the other namespace. Since the failure means no metrics are emitted, we cannot
  // check whether the flow tried to process anything; instead, stop the flow and verify that
  // the output dataset is empty.
  flowManager.stop();
  SecurityRequestContext.setUserId(ALICE.getName());
  assertDatasetIsEmpty(outputDatasetNS.getNamespaceId(), "store");
  // give BOB permission to write to the dataset in the other namespace
  grantAndAssertSuccess(outputDatasetNS.getNamespaceId().dataset("store"), BOB, EnumSet.of(Action.WRITE));
  // switch back to BOB to run the flow again
  SecurityRequestContext.setUserId(BOB.getName());
  // running the flow should now succeed and write data to the other namespace
  flowManager.start(args);
  flowManager.getFlowletMetrics("saver").waitForProcessed(10, 30, TimeUnit.SECONDS);
  // Switch back to ALICE and verify the data. Checking the data (rather than the run record)
  // is sufficient here: if the flow had failed to write, we would not see any data.
  SecurityRequestContext.setUserId(ALICE.getName());
  DataSetManager<KeyValueTable> dataSetManager = getDataset(outputDatasetNS.getNamespaceId().dataset("store"));
  KeyValueTable results = dataSetManager.get();
  for (int i = 0; i < 10; i++) {
    byte[] key = String.valueOf(i).getBytes(Charsets.UTF_8);
    Assert.assertArrayEquals(key, results.read(key));
  }
  flowManager.stop();
  getNamespaceAdmin().delete(outputDatasetNS.getNamespaceId());
}
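The method above relies on an assertDatasetIsEmpty helper that is defined elsewhere in the test class. A minimal sketch of what such a helper could look like, assuming a full-table scan via KeyValueTable.scan(null, null):

// Hypothetical sketch of the assertDatasetIsEmpty helper; the real test class defines its own.
private void assertDatasetIsEmpty(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> manager = getDataset(namespaceId.dataset(datasetName));
  // scan(null, null) iterates over the whole table; an empty table yields no entries
  try (CloseableIterator<KeyValue<byte[], byte[]>> rows = manager.get().scan(null, null)) {
    Assert.assertFalse("expected dataset '" + datasetName + "' to be empty", rows.hasNext());
  }
}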
Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.
From the class AdminAppTestRun, the method testAdminFlow:
@Test
public void testAdminFlow() throws Exception {
  // start the flow and wait for it to be running
  FlowManager flowManager = appManager.getFlowManager(AdminApp.FLOW_NAME).start();
  try {
    flowManager.waitForRun(ProgramRunStatus.RUNNING, 5, TimeUnit.MINUTES);
    // send some events to the stream
    StreamManager streamManager = getStreamManager("events");
    streamManager.send("aa ab bc aa bc");
    streamManager.send("xx xy aa ab aa");
    // wait for the flow to process them
    flowManager.getFlowletMetrics("counter").waitForProcessed(10, 30, TimeUnit.SECONDS);
    // validate that the flow created tables for a, b, and x, and that the counts are correct
    DataSetManager<KeyValueTable> aManager = getDataset("counters_a");
    Assert.assertNotNull(aManager.get());
    Assert.assertEquals(4L, Bytes.toLong(aManager.get().read("aa")));
    Assert.assertEquals(2L, Bytes.toLong(aManager.get().read("ab")));
    DataSetManager<KeyValueTable> bManager = getDataset("counters_b");
    Assert.assertNotNull(bManager.get());
    Assert.assertEquals(2L, Bytes.toLong(bManager.get().read("bc")));
    DataSetManager<KeyValueTable> xManager = getDataset("counters_x");
    Assert.assertNotNull(xManager.get());
    Assert.assertEquals(1L, Bytes.toLong(xManager.get().read("xx")));
    Assert.assertEquals(1L, Bytes.toLong(xManager.get().read("xy")));
  } finally {
    flowManager.stop();
  }
  flowManager.waitForRun(ProgramRunStatus.KILLED, 30, TimeUnit.SECONDS);
  // the flowlet's destroy() deletes all the tables - validate that they are gone
  Assert.assertNull(getDataset("counters_a").get());
  Assert.assertNull(getDataset("counters_b").get());
  Assert.assertNull(getDataset("counters_x").get());
}
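The final assertions depend on the flowlet dropping its tables on shutdown. As a rough illustration only, a destroy() hook along these lines could do that, assuming the flowlet context exposes CDAP's Admin interface via getAdmin(); the actual AdminApp flowlet may be implemented differently.

// Hypothetical sketch, not the actual AdminApp code: drop the counter tables on destroy().
@Override
public void destroy() {
  Admin admin = getContext().getAdmin();  // assumes the context exposes getAdmin()
  try {
    for (String name : new String[] { "counters_a", "counters_b", "counters_x" }) {
      if (admin.datasetExists(name)) {
        admin.dropDataset(name);
      }
    }
  } catch (DatasetManagementException e) {
    throw new RuntimeException("failed to drop counter datasets", e);
  }
}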
Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.
From the class AuthorizationTest, the method testSparkStreamAuth:
@Test
@Category(SlowTests.class)
public void testSparkStreamAuth() throws Exception {
  createAuthNamespace();
  Authorizer authorizer = getAuthorizer();
  StreamId streamId = AUTH_NAMESPACE.stream(StreamAuthApp.STREAM);
  ApplicationManager appManager = deployApplication(AUTH_NAMESPACE, StreamAuthApp.class);
  // after the deploy, change ALICE from ALL to ADMIN on the namespace
  authorizer.revoke(AUTH_NAMESPACE, ALICE, EnumSet.allOf(Action.class));
  authorizer.grant(AUTH_NAMESPACE, ALICE, EnumSet.of(Action.ADMIN));
  StreamManager streamManager = getStreamManager(AUTH_NAMESPACE.stream(StreamAuthApp.STREAM));
  streamManager.send("Hello");
  final SparkManager sparkManager = appManager.getSparkManager(StreamAuthApp.SPARK);
  sparkManager.start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
  DataSetManager<KeyValueTable> kvManager = getDataset(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE));
  try (KeyValueTable kvTable = kvManager.get()) {
    byte[] value = kvTable.read("Hello");
    Assert.assertArrayEquals(Bytes.toBytes("Hello"), value);
  }
  streamManager.send("World");
  // remove ALICE's READ permission on the stream by revoking all actions and granting
  // back everything except READ
  authorizer.revoke(streamId, ALICE, EnumSet.allOf(Action.class));
  authorizer.grant(streamId, ALICE, EnumSet.of(Action.WRITE, Action.ADMIN, Action.EXECUTE));
  sparkManager.start();
  sparkManager.waitForRun(ProgramRunStatus.FAILED, 1, TimeUnit.MINUTES);
  kvManager = getDataset(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE));
  try (KeyValueTable kvTable = kvManager.get()) {
    byte[] value = kvTable.read("World");
    Assert.assertNull(value);
  }
  // grant ALICE READ permission on the stream; now the Spark job should run successfully
  authorizer.grant(streamId, ALICE, ImmutableSet.of(Action.READ));
  sparkManager.start();
  sparkManager.waitForRuns(ProgramRunStatus.COMPLETED, 2, 1, TimeUnit.MINUTES);
  kvManager = getDataset(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE));
  try (KeyValueTable kvTable = kvManager.get()) {
    byte[] value = kvTable.read("World");
    Assert.assertArrayEquals(Bytes.toBytes("World"), value);
  }
  appManager.delete();
  assertNoAccess(AUTH_NAMESPACE.app(StreamAuthApp.APP));
}
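The revoke-everything-then-grant-back pattern above is how the test removes a single action. A small hypothetical helper could make that intent explicit; removeAction is not part of the Authorizer API, and EnumSet.complementOf supplies "all actions except the removed one":

// Hypothetical helper (not in the Authorizer API): remove one action by revoking all
// actions and granting back the complement.
private void removeAction(Authorizer authorizer, EntityId entity, Principal principal, Action toRemove)
  throws Exception {
  authorizer.revoke(entity, principal, EnumSet.allOf(Action.class));
  authorizer.grant(entity, principal, EnumSet.complementOf(EnumSet.of(toRemove)));
}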
Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.
From the class MapReduceStreamInputTestRun, the method test:
@Test
public void test() throws Exception {
  ApplicationManager applicationManager = deployApplication(AppWithMapReduceUsingStream.class);
  Schema schema = new Schema.Parser().parse(AppWithMapReduceUsingStream.SCHEMA.toString());
  StreamManager streamManager = getStreamManager("mrStream");
  streamManager.send(createEvent(schema, "YHOO", 100, 10.0f));
  streamManager.send(createEvent(schema, "YHOO", 10, 10.1f));
  streamManager.send(createEvent(schema, "YHOO", 13, 9.9f));
  float yhooTotal = 100 * 10.0f + 10 * 10.1f + 13 * 9.9f;
  streamManager.send(createEvent(schema, "AAPL", 5, 300.0f));
  streamManager.send(createEvent(schema, "AAPL", 3, 298.34f));
  streamManager.send(createEvent(schema, "AAPL", 50, 305.23f));
  streamManager.send(createEvent(schema, "AAPL", 1000, 284.13f));
  float aaplTotal = 5 * 300.0f + 3 * 298.34f + 50 * 305.23f + 1000 * 284.13f;
  MapReduceManager mrManager = applicationManager.getMapReduceManager("BodyTracker").start();
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 180, TimeUnit.SECONDS);
  KeyValueTable pricesDS = (KeyValueTable) getDataset("prices").get();
  float yhooVal = Bytes.toFloat(pricesDS.read(Bytes.toBytes("YHOO")));
  float aaplVal = Bytes.toFloat(pricesDS.read(Bytes.toBytes("AAPL")));
  // compare within a small tolerance to allow for floating-point rounding
  Assert.assertTrue(Math.abs(yhooTotal - yhooVal) < 0.0000001);
  Assert.assertTrue(Math.abs(aaplTotal - aaplVal) < 0.0000001);
}
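createEvent is a helper defined elsewhere in the test class; the snippet only shows its call sites. A plausible sketch, assuming the stream schema is an Avro record and that the field names are ticker, count, and price (both the names and the use of Avro's binary encoding are assumptions here):

// Hypothetical sketch of the createEvent helper: builds an Avro record matching the
// stream schema and serializes it with Avro's binary encoder. Field names are assumed.
private byte[] createEvent(Schema schema, String ticker, int count, float price) throws IOException {
  GenericRecord record = new GenericRecordBuilder(schema)
    .set("ticker", ticker)
    .set("count", count)
    .set("price", price)
    .build();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
  encoder.flush();
  return out.toByteArray();
}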