use of co.cask.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
the class DeleteDatasetModuleCommand method perform.
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  DatasetModuleId module = cliConfig.getCurrentNamespace()
    .datasetModule(arguments.get(ArgumentName.DATASET_MODULE.toString()));
  datasetClient.delete(module);
  output.printf("Successfully deleted dataset module '%s'\n", module.getEntityName());
}
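The command resolves the module id in the CLI's current namespace and delegates the deletion to the dataset module client. A minimal sketch, not part of the CDAP sources, of performing the same deletion programmatically, assuming an already-configured clientConfig like the one used in the client usage example further down:
// Illustrative fragment; clientConfig is assumed to point at a running CDAP instance.
DatasetModuleClient moduleClient = new DatasetModuleClient(clientConfig);
DatasetModuleId module = NamespaceId.DEFAULT.datasetModule("someDatasetModule");
// Delete the module; this may fail if other modules or dataset instances still depend on its types.
moduleClient.delete(module);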
use of co.cask.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
the class DatasetTypeManager method deleteModule.
/**
 * Deletes the specified dataset module.
 * @param datasetModuleId {@link DatasetModuleId} of the dataset module to delete
 * @return true if deleted successfully, false if the module didn't exist (nothing to delete)
 * @throws DatasetModuleConflictException when other modules depend on the specified one, in which case
 *         the deletion does NOT happen
 */
public boolean deleteModule(final DatasetModuleId datasetModuleId) throws DatasetModuleConflictException {
  LOG.info("Deleting module {}", datasetModuleId);
  try {
    final DatasetTypeMDS datasetTypeMDS = datasetCache.getDataset(DatasetMetaTableUtil.META_TABLE_NAME);
    final DatasetInstanceMDS datasetInstanceMDS = datasetCache.getDataset(DatasetMetaTableUtil.INSTANCE_TABLE_NAME);
    return txExecutorFactory.createExecutor(datasetCache).execute(new Callable<Boolean>() {
      @Override
      public Boolean call() throws DatasetModuleConflictException, IOException {
        final DatasetModuleMeta module = datasetTypeMDS.getModule(datasetModuleId);
        if (module == null) {
          return false;
        }
        // cannot delete when there's a module that uses it
        if (module.getUsedByModules().size() > 0) {
          String msg = String.format("Cannot delete module %s: other modules depend on it. Delete them first", module);
          throw new DatasetModuleConflictException(msg);
        }
        Collection<DatasetSpecification> instances =
          datasetInstanceMDS.getByTypes(datasetModuleId.getParent(), ImmutableSet.copyOf(module.getTypes()));
        // cannot delete when there's an instance that uses it
        if (!instances.isEmpty()) {
          String msg = String.format("Cannot delete module %s: other instances depend on it. Delete them first", module);
          throw new DatasetModuleConflictException(msg);
        }
        // remove it from the "usedBy" list of other modules
        for (String usedModuleName : module.getUsesModules()) {
          DatasetModuleId usedModuleId = new DatasetModuleId(datasetModuleId.getNamespace(), usedModuleName);
          // not using getModuleWithFallback here because we want to know the namespace in which usedModule was found,
          // so we can overwrite it in the MDS in the appropriate namespace
          DatasetModuleMeta usedModule = datasetTypeMDS.getModule(usedModuleId);
          // if the usedModule is not found in the current namespace, try finding it in the system namespace
          if (usedModule == null) {
            usedModuleId = NamespaceId.SYSTEM.datasetModule(usedModuleName);
            usedModule = datasetTypeMDS.getModule(usedModuleId);
            Preconditions.checkState(usedModule != null, "Could not find a module %s that the module %s uses.",
                                     usedModuleName, datasetModuleId.getEntityName());
          }
          usedModule.removeUsedByModule(datasetModuleId.getEntityName());
          datasetTypeMDS.writeModule(usedModuleId.getParent(), usedModule);
        }
        datasetTypeMDS.deleteModule(datasetModuleId);
        try {
          // Also delete the module jar
          Location moduleJarLocation = impersonator.doAs(datasetModuleId, new Callable<Location>() {
            @Override
            public Location call() throws Exception {
              return Locations.getLocationFromAbsolutePath(locationFactory, module.getJarLocationPath());
            }
          });
          if (!moduleJarLocation.delete()) {
            LOG.debug("Could not delete dataset module archive");
          }
        } catch (Exception e) {
          // the only checked exception the try-catch throws is IOException
          Throwables.propagateIfInstanceOf(e, IOException.class);
          Throwables.propagate(e);
        }
        return true;
      }
    });
  } catch (TransactionFailureException e) {
    if (e.getCause() != null && e.getCause() instanceof DatasetModuleConflictException) {
      throw (DatasetModuleConflictException) e.getCause();
    }
    throw Throwables.propagate(e);
  } catch (Exception e) {
    LOG.error("Operation failed", e);
    throw Throwables.propagate(e);
  }
}
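The method only reports success (true) or absence (false); dependency conflicts surface as a checked exception. A hedged sketch of how a caller might drive it, assuming typeManager is a DatasetTypeManager as in the service code below and the module name is illustrative:
DatasetModuleId moduleId = NamespaceId.DEFAULT.datasetModule("someDatasetModule");
try {
  if (!typeManager.deleteModule(moduleId)) {
    LOG.info("Module {} did not exist; nothing to delete", moduleId);
  }
} catch (DatasetModuleConflictException e) {
  // Other modules or dataset instances still use this module's types; they must be deleted first.
  LOG.warn("Cannot delete module {}: {}", moduleId, e.getMessage());
}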
use of co.cask.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
the class DatasetTypeService method deployExtensionModules.
private void deployExtensionModules() {
  // adding any defined extension modules to be available in the dataset manager service
  for (Map.Entry<String, DatasetModule> module : extensionModules.entrySet()) {
    try {
      // NOTE: we assume extension modules are always in the classpath, hence passing null for the jar location
      // NOTE: we add extension modules in the system namespace
      DatasetModuleId theModule = NamespaceId.SYSTEM.datasetModule(module.getKey());
      typeManager.addModule(theModule, module.getValue().getClass().getName(), null, false);
      grantAllPrivilegesOnModule(theModule, authenticationContext.getPrincipal());
    } catch (DatasetModuleConflictException e) {
      // perfectly fine: we only need to add the modules the very first time the service is started
      LOG.debug("Not adding {} extension module: it already exists", module.getKey());
    } catch (Throwable th) {
      LOG.error("Failed to add {} extension module. Aborting.", module.getKey(), th);
      throw Throwables.propagate(th);
    }
  }
}
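The extensionModules map is injected into the service; each entry pairs a module name with a DatasetModule implementation. A sketch of what such a map could look like, where MyExtensionModule is a hypothetical DatasetModule implementation and not a CDAP class:
Map<String, DatasetModule> extensionModules = ImmutableMap.<String, DatasetModule>of(
  "myExtension", new MyExtensionModule());  // hypothetical extension module registered under the name "myExtension"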
use of co.cask.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
the class GenerateClientUsageExample method datasetModuleClient.
public void datasetModuleClient() throws Exception {
  // Construct the client used to interact with CDAP
  DatasetModuleClient datasetModuleClient = new DatasetModuleClient(clientConfig);

  // Add a dataset module
  File moduleJarFile = createAppJarFile(SomeDatasetModule.class);
  DatasetModuleId datasetModuleId = NamespaceId.DEFAULT.datasetModule("someDatasetModule");
  datasetModuleClient.add(datasetModuleId, SomeDatasetModule.class.getName(), moduleJarFile);

  // Fetch the dataset module information
  DatasetModuleMeta datasetModuleMeta = datasetModuleClient.get(datasetModuleId);

  // Delete all dataset modules
  datasetModuleClient.deleteAll(NamespaceId.DEFAULT);
}
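Besides deleteAll, the same client also exposes a per-module delete (the delete-module CLI command at the top of this page takes that approach); a short sketch of removing just the module added above:
// Delete only this module, leaving any other modules in the namespace intact.
datasetModuleClient.delete(datasetModuleId);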
use of co.cask.cdap.proto.id.DatasetModuleId in project cdap by caskdata.
the class ExploreDisabledTest method testDeployNotRecordScannable.
@Test
public void testDeployNotRecordScannable() throws Exception {
  // Try to deploy a dataset that is not record scannable, when explore is disabled.
  // This should be processed with no exception being thrown
  DatasetModuleId module2 = namespaceId.datasetModule("module2");
  DatasetId instance2 = namespaceId.dataset("table1");
  datasetFramework.addModule(module2, new NotRecordScannableTableDefinition.NotRecordScannableTableModule());

  // Performing admin operations to create dataset instance
  datasetFramework.addInstance("NotRecordScannableTableDef", instance2, DatasetProperties.EMPTY);

  Transaction tx1 = transactionManager.startShort(100);

  // Accessing dataset instance to perform data operations
  NotRecordScannableTableDefinition.KeyValueTable table =
    datasetFramework.getDataset(instance2, DatasetDefinition.NO_ARGUMENTS, null);
  Assert.assertNotNull(table);
  table.startTx(tx1);

  table.write("key1", "value1");
  table.write("key2", "value2");
  byte[] value = table.read("key1");
  Assert.assertEquals("value1", Bytes.toString(value));

  Assert.assertTrue(table.commitTx());
  transactionManager.canCommit(tx1, table.getTxChanges());
  transactionManager.commit(tx1);
  table.postTxCommit();

  Transaction tx2 = transactionManager.startShort(100);
  table.startTx(tx2);
  value = table.read("key1");
  Assert.assertNotNull(value);
  Assert.assertEquals("value1", Bytes.toString(value));

  datasetFramework.deleteInstance(instance2);
  datasetFramework.deleteModule(module2);
}
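The data operations in the test follow the standard two-phase transaction protocol of the underlying transaction system. The same sequence, annotated, assuming the transactionManager and table objects from the test above (the key/value pair here is illustrative):
Transaction tx = transactionManager.startShort(100);    // start a new short transaction
table.startTx(tx);                                       // bind the dataset to that transaction
table.write("key3", "value3");                           // write performed within the transaction
Assert.assertTrue(table.commitTx());                     // flush the dataset's pending changes
transactionManager.canCommit(tx, table.getTxChanges());  // check for write conflicts with concurrent transactions
transactionManager.commit(tx);                           // make the changes visible to new transactions
table.postTxCommit();                                    // let the dataset clean up its per-transaction state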