Use of io.cdap.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
The class DefaultDatasetTypeService, method deployDefaultModules.
private void deployDefaultModules() throws Exception {
  // adding default modules to be available in dataset manager service
  for (Map.Entry<String, DatasetModule> module : defaultModules.entrySet()) {
    try {
      // NOTE: we assume default modules are always in classpath, hence passing null for jar location
      // NOTE: we add default modules in the system namespace
      DatasetModuleId defaultModule = NamespaceId.SYSTEM.datasetModule(module.getKey());
      typeManager.addModule(defaultModule, module.getValue().getClass().getName(), null, false);
    } catch (DatasetModuleConflictException e) {
      // perfectly fine: we need to add default modules only the very first time service is started
      LOG.debug("Not adding {} module: it already exists", module.getKey());
    } catch (Throwable th) {
      LOG.error("Failed to add {} module. Aborting.", module.getKey(), th);
      throw Throwables.propagate(th);
    }
  }
}
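For orientation, here is a minimal, self-contained sketch (not taken from the project) of the two ways a DatasetModuleId is constructed in the snippets on this page: via the NamespaceId helper, and via the two-argument constructor. The namespace and module names are hypothetical and used only for illustration.

import io.cdap.cdap.proto.id.DatasetModuleId;
import io.cdap.cdap.proto.id.NamespaceId;

public class DatasetModuleIdSketch {
  public static void main(String[] args) {
    // Built via the NamespaceId helper, as deployDefaultModules above does for the system namespace.
    DatasetModuleId systemModule = NamespaceId.SYSTEM.datasetModule("myModule");
    // Built via the two-argument constructor, as NamespacedEntityIdCodecTest below does.
    // "myNamespace" and "myModule" are hypothetical names.
    DatasetModuleId userModule = new DatasetModuleId("myNamespace", "myModule");
    System.out.println(systemModule);
    System.out.println(userModule);
  }
}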
Use of io.cdap.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
The class AuthorizationDatasetTypeService, method deleteAll.
@Override
public void deleteAll(NamespaceId namespaceId) throws Exception {
  Principal principal = authenticationContext.getPrincipal();
  for (DatasetModuleMeta meta : delegate.listModules(namespaceId)) {
    DatasetModuleId datasetModuleId = namespaceId.datasetModule(meta.getName());
    accessEnforcer.enforce(datasetModuleId, principal, StandardPermission.DELETE);
  }
  delegate.deleteAll(namespaceId);
}
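In this authorization wrapper, a DatasetModuleId is built for each listed module solely so that DELETE permission can be enforced per module before the underlying delegate performs the bulk delete.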
Use of io.cdap.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
The class DatasetClientTestRun, method testAll.
@Test
public void testAll() throws Exception {
  DatasetModuleId module = TEST_NAMESPACE.datasetModule(StandaloneDatasetModule.NAME);
  DatasetTypeId type = TEST_NAMESPACE.datasetType(StandaloneDataset.class.getName());
  DatasetModuleId moduleInOtherNamespace = OTHER_NAMESPACE.datasetModule(StandaloneDatasetModule.NAME);
  DatasetTypeId typeInOtherNamespace = OTHER_NAMESPACE.datasetType(StandaloneDataset.class.getName());
  int numBaseModules = moduleClient.list(TEST_NAMESPACE).size();
  int numBaseTypes = typeClient.list(TEST_NAMESPACE).size();
  LOG.info("Adding Dataset module");
  File moduleJarFile = createAppJarFile(StandaloneDatasetModule.class);
  moduleClient.add(TEST_NAMESPACE.datasetModule(StandaloneDatasetModule.NAME),
                   StandaloneDatasetModule.class.getName(), moduleJarFile);
  Assert.assertEquals(numBaseModules + 1, moduleClient.list(TEST_NAMESPACE).size());
  Assert.assertEquals(numBaseTypes + 2, typeClient.list(TEST_NAMESPACE).size());
  LOG.info("Checking that the new Dataset module exists");
  DatasetModuleMeta datasetModuleMeta = moduleClient.get(module);
  Assert.assertNotNull(datasetModuleMeta);
  Assert.assertEquals(StandaloneDatasetModule.NAME, datasetModuleMeta.getName());
  LOG.info("Checking that the new Dataset module does not exist in a different namespace");
  try {
    moduleClient.get(moduleInOtherNamespace);
    Assert.fail("datasetModule found in namespace other than one in which it was expected");
  } catch (DatasetModuleNotFoundException expected) {
    // expected
  }
  LOG.info("Checking that the new Dataset type exists");
  DatasetTypeMeta datasetTypeMeta = typeClient.get(type);
  Assert.assertNotNull(datasetTypeMeta);
  Assert.assertEquals(type.getType(), datasetTypeMeta.getName());
  datasetTypeMeta = typeClient.get(type);
  Assert.assertNotNull(datasetTypeMeta);
  Assert.assertEquals(StandaloneDataset.class.getName(), datasetTypeMeta.getName());
  LOG.info("Checking that the new Dataset type does not exist in a different namespace");
  try {
    typeClient.get(typeInOtherNamespace);
    Assert.fail("datasetType found in namespace other than one in which it was expected");
  } catch (DatasetTypeNotFoundException expected) {
    // expected
  }
  LOG.info("Creating, truncating, and deleting dataset of new Dataset type");
  // Before creating the dataset, some system datasets already exist
  int numBaseDataset = datasetClient.list(TEST_NAMESPACE).size();
  DatasetId instance = TEST_NAMESPACE.dataset("testDataset");
  String description = "test description";
  datasetClient.create(instance, new DatasetInstanceConfiguration(StandaloneDataset.TYPE_NAME,
                                                                  Collections.<String, String>emptyMap(),
                                                                  description, null));
  Assert.assertEquals(numBaseDataset + 1, datasetClient.list(TEST_NAMESPACE).size());
  // Assert dataset summary for the newly created dataset
  DatasetSpecificationSummary expectedSpec =
    new DatasetSpecificationSummary(instance.getDataset(), StandaloneDataset.TYPE_NAME, description,
                                    Collections.<String, String>emptyMap());
  Assert.assertEquals(expectedSpec, getSpecForDataset(instance, datasetClient.list(TEST_NAMESPACE)));
  datasetClient.truncate(instance);
  DatasetMeta metaBefore = datasetClient.get(instance);
  Assert.assertEquals(0, metaBefore.getSpec().getProperties().size());
  datasetClient.update(instance, ImmutableMap.of("sdf", "foo", "abc", "123"));
  DatasetMeta metaAfter = datasetClient.get(instance);
  Assert.assertEquals(2, metaAfter.getSpec().getProperties().size());
  Assert.assertTrue(metaAfter.getSpec().getProperties().containsKey("sdf"));
  Assert.assertTrue(metaAfter.getSpec().getProperties().containsKey("abc"));
  Assert.assertEquals("foo", metaAfter.getSpec().getProperties().get("sdf"));
  Assert.assertEquals("123", metaAfter.getSpec().getProperties().get("abc"));
  datasetClient.updateExisting(instance, ImmutableMap.of("sdf", "fzz"));
  metaAfter = datasetClient.get(instance);
  Assert.assertEquals(2, metaAfter.getSpec().getProperties().size());
  Assert.assertTrue(metaAfter.getSpec().getProperties().containsKey("sdf"));
  Assert.assertTrue(metaAfter.getSpec().getProperties().containsKey("abc"));
  Assert.assertEquals("fzz", metaAfter.getSpec().getProperties().get("sdf"));
  Assert.assertEquals("123", metaAfter.getSpec().getProperties().get("abc"));
  datasetClient.delete(instance);
  Assert.assertEquals(numBaseDataset, datasetClient.list(TEST_NAMESPACE).size());
  LOG.info("Creating and deleting multiple Datasets");
  for (int i = 1; i <= 3; i++) {
    datasetClient.create(TEST_NAMESPACE.dataset("testDataset" + i), StandaloneDataset.TYPE_NAME);
  }
  Assert.assertEquals(numBaseDataset + 3, datasetClient.list(TEST_NAMESPACE).size());
  for (int i = 1; i <= 3; i++) {
    datasetClient.delete(TEST_NAMESPACE.dataset("testDataset" + i));
  }
  Assert.assertEquals(numBaseDataset, datasetClient.list(TEST_NAMESPACE).size());
  LOG.info("Deleting Dataset module");
  moduleClient.delete(module);
  Assert.assertEquals(numBaseModules, moduleClient.list(TEST_NAMESPACE).size());
  Assert.assertEquals(numBaseTypes, typeClient.list(TEST_NAMESPACE).size());
  LOG.info("Adding Dataset module and then deleting all Dataset modules");
  moduleClient.add(TEST_NAMESPACE.datasetModule("testModule1"),
                   StandaloneDatasetModule.class.getName(), moduleJarFile);
  Assert.assertEquals(numBaseModules + 1, moduleClient.list(TEST_NAMESPACE).size());
  Assert.assertEquals(numBaseTypes + 2, typeClient.list(TEST_NAMESPACE).size());
  moduleClient.deleteAll(TEST_NAMESPACE);
  Assert.assertEquals(numBaseModules, moduleClient.list(TEST_NAMESPACE).size());
  Assert.assertEquals(numBaseTypes, typeClient.list(TEST_NAMESPACE).size());
}
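This integration test exercises the DatasetModuleId-based REST clients end to end: it adds a module, verifies that the module and its types are not visible from another namespace, fetches module and type metadata, creates, updates, truncates, and deletes dataset instances of the new type, and finally deletes a single module and then all modules in the namespace, checking the counts return to their baselines.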
Use of io.cdap.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
The class NamespacedEntityIdCodecTest, method testUnsupported.
@Test(expected = UnsupportedOperationException.class)
public void testUnsupported() {
  DatasetModuleId module = new DatasetModuleId("ns", "module");
  String moduleJson = GSON.toJson(module, NamespacedEntityId.class);
  GSON.fromJson(moduleJson, NamespacedEntityId.class);
}
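The expected UnsupportedOperationException indicates that round-tripping a DatasetModuleId through GSON as the generic NamespacedEntityId type is not supported by this codec; the test pins down that behavior rather than exercising a successful serialization path.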
Use of io.cdap.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
The class ExploreDisabledTest, method testDeployRecordScannable.
@Test
public void testDeployRecordScannable() throws Exception {
  // Try to deploy a dataset that is record scannable, when explore is disabled.
  // This should be processed with no exception being thrown
  DatasetModuleId module1 = new DatasetModuleId(namespaceId.getNamespace(), "module1");
  DatasetId instance1 = namespaceId.dataset("table1");
  datasetFramework.addModule(module1, new KeyStructValueTableDefinition.KeyStructValueTableModule());
  // Performing admin operations to create dataset instance
  datasetFramework.addInstance("keyStructValueTable", instance1, DatasetProperties.EMPTY);
  Transaction tx1 = transactionManager.startShort(100);
  // Accessing dataset instance to perform data operations
  KeyStructValueTableDefinition.KeyStructValueTable table =
    datasetFramework.getDataset(instance1, DatasetDefinition.NO_ARGUMENTS, null);
  Assert.assertNotNull(table);
  table.startTx(tx1);
  KeyStructValueTableDefinition.KeyValue.Value value1 =
    new KeyStructValueTableDefinition.KeyValue.Value("first", Lists.newArrayList(1, 2, 3, 4, 5));
  KeyStructValueTableDefinition.KeyValue.Value value2 =
    new KeyStructValueTableDefinition.KeyValue.Value("two", Lists.newArrayList(10, 11, 12, 13, 14));
  table.put("1", value1);
  table.put("2", value2);
  Assert.assertEquals(value1, table.get("1"));
  Assert.assertTrue(table.commitTx());
  transactionManager.canCommit(tx1.getTransactionId(), table.getTxChanges());
  transactionManager.commit(tx1.getTransactionId(), tx1.getWritePointer());
  table.postTxCommit();
  Transaction tx2 = transactionManager.startShort(100);
  table.startTx(tx2);
  Assert.assertEquals(value1, table.get("1"));
  datasetFramework.deleteInstance(instance1);
  datasetFramework.deleteModule(module1);
}
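Unlike the REST-client test above, this snippet registers and removes the module programmatically through the DatasetFramework (addModule and deleteModule), passing the DatasetModuleId built from the test's namespace, and then drives the dataset through explicit transactions via the TransactionManager.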