use of io.cdap.cdap.api.dataset.lib.KeyValueTable in project cdap by cdapio.
the class Spark2Test method testScalaSparkWithObjectStore.
@Test
public void testScalaSparkWithObjectStore() throws Exception {
  ApplicationManager applicationManager = deploy(NamespaceId.DEFAULT, SparkAppUsingObjectStore.class);
  DataSetManager<ObjectStore<String>> keysManager = getDataset("keys");
  prepareInputData(keysManager);
  SparkManager sparkManager = applicationManager.getSparkManager(ScalaCharCountProgram.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.RUNNING, 10, TimeUnit.SECONDS);
  sparkManager.waitForStopped(60, TimeUnit.SECONDS);
  DataSetManager<KeyValueTable> countManager = getDataset("count");
  checkOutputData(countManager);
}
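The prepareInputData and checkOutputData helpers are defined elsewhere in Spark2Test and are not shown on this page. A minimal sketch of what they might look like, assuming the "keys" ObjectStore holds the input strings and the Spark program writes a count for each key into the "count" KeyValueTable; the method names mirror the calls above, but the bodies are an assumption:

// Hypothetical sketch: seed the "keys" ObjectStore with a few strings.
private void prepareInputData(DataSetManager<ObjectStore<String>> manager) {
  ObjectStore<String> keys = manager.get();
  keys.write(Bytes.toBytes("cdap"), "cdap");
  keys.write(Bytes.toBytes("spark"), "spark");
  // flush() makes the writes visible to the Spark program started afterwards
  manager.flush();
}

// Hypothetical sketch: verify the Spark program recorded a count for each key.
private void checkOutputData(DataSetManager<KeyValueTable> manager) {
  KeyValueTable count = manager.get();
  Assert.assertNotNull(count.read("cdap"));
  Assert.assertNotNull(count.read("spark"));
}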
use of io.cdap.cdap.api.dataset.lib.KeyValueTable in project cdap by cdapio.
the class AuthorizationTest method testCrossNSDatasetAccessFromService.
private void testCrossNSDatasetAccessFromService(ServiceManager serviceManager) throws Exception {
  NamespaceMeta outputDatasetNS = new NamespaceMeta.Builder().setName("outputNS").build();
  NamespaceId outputDatasetNSId = outputDatasetNS.getNamespaceId();
  DatasetId datasetId = outputDatasetNSId.dataset("store");
  Map<EntityId, Set<? extends Permission>> neededPrivileges =
    ImmutableMap.<EntityId, Set<? extends Permission>>builder()
      .put(outputDatasetNSId, EnumSet.of(StandardPermission.GET, StandardPermission.CREATE, StandardPermission.DELETE))
      .put(datasetId, EnumSet.of(StandardPermission.CREATE, StandardPermission.GET, StandardPermission.DELETE))
      .put(outputDatasetNSId.datasetType("keyValueTable"), EnumSet.of(StandardPermission.UPDATE))
      .build();
  setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges);
  getNamespaceAdmin().create(outputDatasetNS);
  addDatasetInstance(datasetId, "keyValueTable");
  // switch to BOB
  SecurityRequestContext.setUserId(BOB.getName());
  Map<String, String> args = ImmutableMap.of(
    CrossNsDatasetAccessApp.OUTPUT_DATASET_NS, outputDatasetNS.getNamespaceId().getNamespace(),
    CrossNsDatasetAccessApp.OUTPUT_DATASET_NAME, "store");
  // Start the service as BOB
  serviceManager.start(args);
  // The call to the service should fail because BOB does not have permission on the namespace passed in args
  URL url = new URL(serviceManager.getServiceURL(5, TimeUnit.SECONDS), "write/data");
  HttpResponse response = executeAuthenticated(HttpRequest.put(url));
  Assert.assertEquals(500, response.getResponseCode());
  // This is a hack that works around the fact that we cannot properly catch exceptions in the service handler.
  // TODO: Figure out a way to stop checking error messages.
  Assert.assertTrue("Wrong message " + response.getResponseBodyAsString(),
                    response.getResponseBodyAsString().contains("'" + BOB + "' has insufficient privileges"));
  serviceManager.stop();
  serviceManager.waitForStopped(10, TimeUnit.SECONDS);
  SecurityRequestContext.setUserId(ALICE.getName());
  assertDatasetIsEmpty(outputDatasetNS.getNamespaceId(), "store");
  // Give BOB permission to write to the dataset in the other namespace
  grantAndAssertSuccess(datasetId, BOB, EnumSet.of(StandardPermission.GET, StandardPermission.UPDATE));
  // switch back to BOB to run the service again
  SecurityRequestContext.setUserId(BOB.getName());
  // Writing data to the other namespace should succeed now
  serviceManager.start(args);
  for (int i = 0; i < 10; i++) {
    url = new URL(serviceManager.getServiceURL(5, TimeUnit.SECONDS), "write/" + i);
    response = executeAuthenticated(HttpRequest.put(url));
    Assert.assertEquals(200, response.getResponseCode());
  }
  serviceManager.stop();
  serviceManager.waitForStopped(10, TimeUnit.SECONDS);
  // switch back to ALICE and verify that the data was written
  SecurityRequestContext.setUserId(ALICE.getName());
  DataSetManager<KeyValueTable> dataSetManager = getDataset(outputDatasetNS.getNamespaceId().dataset("store"));
  KeyValueTable results = dataSetManager.get();
  for (int i = 0; i < 10; i++) {
    byte[] key = String.valueOf(i).getBytes(Charsets.UTF_8);
    Assert.assertArrayEquals(key, results.read(key));
  }
  getNamespaceAdmin().delete(outputDatasetNS.getNamespaceId());
}
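The assertDatasetIsEmpty helper referenced above is part of the surrounding test class and not shown here. A possible sketch, assuming it scans the KeyValueTable in the given namespace and asserts that no rows come back; the helper name matches the call above, but the body is an assumption:

// Hypothetical sketch of assertDatasetIsEmpty: scan the whole table and expect no entries.
private void assertDatasetIsEmpty(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> manager = getDataset(namespaceId.dataset(datasetName));
  try (CloseableIterator<KeyValue<byte[], byte[]>> scanner = manager.get().scan(null, null)) {
    Assert.assertFalse(scanner.hasNext());
  }
}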
use of io.cdap.cdap.api.dataset.lib.KeyValueTable in project cdap by cdapio.
the class AuthorizationTest method verifyDummyData.
private void verifyDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> outTableManager = getDataset(namespaceId.dataset(datasetName));
  KeyValueTable outputTable = outTableManager.get();
  Assert.assertEquals("world", Bytes.toString(outputTable.read("hello")));
}
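A test would normally seed this entry before verifying it. A minimal sketch of a counterpart writer, assuming a KeyValueTable obtained through a DataSetManager; the helper name is illustrative and not part of AuthorizationTest:

// Illustrative counterpart: write the "hello" -> "world" entry that verifyDummyData checks.
private void writeDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  tableManager.get().write("hello", "world");
  // flush() makes the write visible to later getDataset() calls in the test
  tableManager.flush();
}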
use of io.cdap.cdap.api.dataset.lib.KeyValueTable in project cdap by cdapio.
the class ObjectStoreDatasetTest method testWithCustomClassLoader.
@Test
public void testWithCustomClassLoader() throws Exception {
  DatasetId kv = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("kv");
  // create a dummy class loader that records the name of the last class it loaded
  final AtomicReference<String> lastClassLoaded = new AtomicReference<>(null);
  ClassLoader loader = new ClassLoader() {

    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
      lastClassLoaded.set(name);
      return super.loadClass(name);
    }
  };
  dsFrameworkUtil.createInstance("keyValueTable", kv, DatasetProperties.EMPTY);
  KeyValueTable kvTable = dsFrameworkUtil.getInstance(kv);
  Type type = Custom.class;
  TypeRepresentation typeRep = new TypeRepresentation(type);
  Schema schema = new ReflectionSchemaGenerator().generate(type);
  final ObjectStoreDataset<Custom> objectStore = new ObjectStoreDataset<>("kv", kvTable, typeRep, schema, loader);
  TransactionExecutor txnl = dsFrameworkUtil.newInMemoryTransactionExecutor(objectStore);
  // Need to call this to actually load the Custom class, because the Custom class is no longer loaded in the
  // ObjectStoreDataset constructor, but rather lazily when it's actually needed.
  objectStore.getRecordType();
  txnl.execute(new TransactionExecutor.Subroutine() {

    @Override
    public void apply() throws Exception {
      objectStore.write("dummy", new Custom(382, Lists.newArrayList("blah")));
    }
  });
  // verify the class name was recorded (that is, the dummy class loader was used)
  txnl.execute(new TransactionExecutor.Subroutine() {

    @Override
    public void apply() throws Exception {
      Assert.assertEquals(Custom.class.getName(), lastClassLoaded.get());
    }
  });
  txnl.execute(new TransactionExecutor.Subroutine() {

    @Override
    public void apply() throws Exception {
      deleteAndVerify(objectStore, Bytes.toBytes("dummy"));
    }
  });
  dsFrameworkUtil.deleteInstance(kv);
}
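The Custom class and the deleteAndVerify helper come from the surrounding test code and are not shown on this page. A plausible sketch of the Custom bean, assuming only fields that ReflectionSchemaGenerator can handle; the field names are hypothetical, and only the constructor shape (an int and a List<String>) is implied by the call above:

// Hypothetical sketch of the Custom bean written to the ObjectStoreDataset above.
public class Custom {
  private final int id;
  private final List<String> names;

  public Custom(int id, List<String> names) {
    this.id = id;
    this.names = names;
  }
}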
use of io.cdap.cdap.api.dataset.lib.KeyValueTable in project cdap by cdapio.
the class MapReduceProgramRunnerTest method testFailureInInit.
private void testFailureInInit(final String expected, ApplicationWithPrograms app, Class<?> programClass, Map<String, String> args) throws Exception {
  // We want to verify that when a mapreduce fails during initialize(), especially
  // if an input or output format provider fails to produce its configuration, the
  // writes by that initialize() method are rolled back. (Background: prior to
  // CDAP-7476, the input/output format provider was called *after* initialize()
  // returned, and therefore that transaction may have been committed already.)
  // (1) initialize the table with a known value
  datasetCache.newTransactionContext();
  final KeyValueTable kvTable = datasetCache.getDataset("recorder");
  Transactions.createTransactionExecutor(txExecutorFactory, datasetCache.getTransactionAwares()).execute(new TransactionExecutor.Subroutine() {

    @Override
    public void apply() {
      // seed the table with initialized=false
      kvTable.write("initialized", "false");
    }
  });
  // (2) run the job
  runProgram(app, programClass, args, false);
  // (3) verify results
  Transactions.createTransactionExecutor(txExecutorFactory, datasetCache.getTransactionAwares()).execute(new TransactionExecutor.Subroutine() {

    @Override
    public void apply() {
      // the table should contain the expected value for "initialized"
      Assert.assertEquals(expected, Bytes.toString(kvTable.read("initialized")));
    }
  });
  datasetCache.dismissTransactionContext();
}
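For context, a program exercised by this test would write to the "recorder" table from initialize() and then fail before the MapReduce job is launched, so that the write is rolled back. A hypothetical minimal example of such a program, not the actual class used by MapReduceProgramRunnerTest, assuming CDAP's AbstractMapReduce API:

// Hypothetical MapReduce whose initialize() writes to the "recorder" KeyValueTable and then fails.
// Because initialize() throws, its transaction should be rolled back and the write discarded.
public class FailingInInitMapReduce extends AbstractMapReduce {

  @Override
  public void initialize() throws Exception {
    KeyValueTable recorder = getContext().getDataset("recorder");
    recorder.write("initialized", "true");
    throw new IllegalStateException("failing in initialize() on purpose");
  }
}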