Use of co.cask.cdap.data2.datafabric.dataset.service.mds.DatasetTypeMDS in project cdap by caskdata:
class DatasetTypeManager, method deleteModule.
/**
 * Deletes the specified dataset module.
 *
 * @param datasetModuleId {@link DatasetModuleId} of the dataset module to delete
 * @return true if deleted successfully, false if the module didn't exist (nothing to delete)
 * @throws DatasetModuleConflictException when other modules or dataset instances depend on the
 *         specified module, in which case the deletion does NOT happen
 */
public boolean deleteModule(final DatasetModuleId datasetModuleId) throws DatasetModuleConflictException {
  LOG.info("Deleting module {}", datasetModuleId);
  try {
    final DatasetTypeMDS datasetTypeMDS = datasetCache.getDataset(DatasetMetaTableUtil.META_TABLE_NAME);
    final DatasetInstanceMDS datasetInstanceMDS = datasetCache.getDataset(DatasetMetaTableUtil.INSTANCE_TABLE_NAME);
    // all metadata checks and mutations happen atomically inside one transaction
    return txExecutorFactory.createExecutor(datasetCache).execute(new Callable<Boolean>() {
      @Override
      public Boolean call() throws DatasetModuleConflictException, IOException {
        final DatasetModuleMeta module = datasetTypeMDS.getModule(datasetModuleId);
        if (module == null) {
          // module does not exist: nothing to delete
          return false;
        }
        // cannot delete while another module uses it
        if (!module.getUsedByModules().isEmpty()) {
          String msg = String.format("Cannot delete module %s: other modules depend on it. Delete them first", module);
          throw new DatasetModuleConflictException(msg);
        }
        Collection<DatasetSpecification> instances =
          datasetInstanceMDS.getByTypes(datasetModuleId.getParent(), ImmutableSet.copyOf(module.getTypes()));
        // cannot delete while a dataset instance uses one of the module's types
        if (!instances.isEmpty()) {
          String msg = String.format("Cannot delete module %s: other instances depend on it. Delete them first", module);
          throw new DatasetModuleConflictException(msg);
        }
        // remove this module from the "usedBy" list of every module it uses
        for (String usedModuleName : module.getUsesModules()) {
          DatasetModuleId usedModuleId = new DatasetModuleId(datasetModuleId.getNamespace(), usedModuleName);
          // not using getModuleWithFallback here because we want to know the namespace in which usedModule was found,
          // so we can overwrite it in the MDS in the appropriate namespace
          DatasetModuleMeta usedModule = datasetTypeMDS.getModule(usedModuleId);
          // if the usedModule is not found in the current namespace, try finding it in the system namespace
          if (usedModule == null) {
            usedModuleId = NamespaceId.SYSTEM.datasetModule(usedModuleName);
            usedModule = datasetTypeMDS.getModule(usedModuleId);
            Preconditions.checkState(usedModule != null, "Could not find a module %s that the module %s uses.",
                                     usedModuleName, datasetModuleId.getEntityName());
          }
          usedModule.removeUsedByModule(datasetModuleId.getEntityName());
          datasetTypeMDS.writeModule(usedModuleId.getParent(), usedModule);
        }
        datasetTypeMDS.deleteModule(datasetModuleId);
        try {
          // also delete the module jar; resolve its location as the impersonated user
          Location moduleJarLocation = impersonator.doAs(datasetModuleId, new Callable<Location>() {
            @Override
            public Location call() throws Exception {
              return Locations.getLocationFromAbsolutePath(locationFactory, module.getJarLocationPath());
            }
          });
          if (!moduleJarLocation.delete()) {
            // best-effort: metadata is already gone, only the archive lingers
            LOG.debug("Could not delete dataset module archive - {}", moduleJarLocation);
          }
        } catch (Exception e) {
          // the only checked exception the try-catch throws is IOException
          Throwables.propagateIfInstanceOf(e, IOException.class);
          // explicit 'throw' makes it clear this branch never falls through
          throw Throwables.propagate(e);
        }
        return true;
      }
    });
  } catch (TransactionFailureException e) {
    // instanceof is false for null, so no explicit null check is needed
    if (e.getCause() instanceof DatasetModuleConflictException) {
      throw (DatasetModuleConflictException) e.getCause();
    }
    throw Throwables.propagate(e);
  } catch (Exception e) {
    LOG.error("Operation failed", e);
    throw Throwables.propagate(e);
  }
}
Use of co.cask.cdap.data2.datafabric.dataset.service.mds.DatasetTypeMDS in project cdap by caskdata:
class DatasetTypeManager, method deleteModules.
/**
 * Deletes all modules in a namespace, other than system.
 * Presumes that the namespace has already been checked to be non-system.
 *
 * @param namespaceId the {@link NamespaceId} to delete modules from
 * @throws DatasetModuleConflictException if existing dataset instances depend on any of the
 *         modules' types, in which case nothing is deleted
 */
public void deleteModules(final NamespaceId namespaceId) throws DatasetModuleConflictException {
  Preconditions.checkArgument(namespaceId != null && !NamespaceId.SYSTEM.equals(namespaceId),
                              "Cannot delete modules from system namespace");
  LOG.info("Deleting all modules from namespace {}", namespaceId);
  try {
    final DatasetTypeMDS datasetTypeMDS = datasetCache.getDataset(DatasetMetaTableUtil.META_TABLE_NAME);
    final DatasetInstanceMDS datasetInstanceMDS = datasetCache.getDataset(DatasetMetaTableUtil.INSTANCE_TABLE_NAME);
    // collect, validate and delete atomically within one transaction
    txExecutorFactory.createExecutor(datasetCache).execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws DatasetModuleConflictException, IOException {
        final Set<String> typesToDelete = new HashSet<>();
        final List<Location> moduleLocations = new ArrayList<>();
        final Collection<DatasetModuleMeta> modules = datasetTypeMDS.getModules(namespaceId);
        try {
          // resolve jar locations as the impersonated namespace user
          impersonator.doAs(namespaceId, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
              for (DatasetModuleMeta module : modules) {
                typesToDelete.addAll(module.getTypes());
                moduleLocations.add(Locations.getLocationFromAbsolutePath(locationFactory, module.getJarLocationPath()));
              }
              return null;
            }
          });
        } catch (Exception e) {
          // the callable throws no checked exceptions
          // explicit 'throw' makes it clear this branch never falls through
          throw Throwables.propagate(e);
        }
        // check if there are any instances that use types of these modules
        Collection<DatasetSpecification> instances = datasetInstanceMDS.getByTypes(namespaceId, typesToDelete);
        // cannot delete while an instance uses any of the types
        if (!instances.isEmpty()) {
          throw new DatasetModuleConflictException(
            "Cannot delete all modules: existing dataset instances depend on it. Delete them first");
        }
        datasetTypeMDS.deleteModules(namespaceId);
        // delete module jar locations (best-effort: metadata is already gone)
        for (Location moduleLocation : moduleLocations) {
          if (!moduleLocation.delete()) {
            LOG.debug("Could not delete dataset module archive - {}", moduleLocation);
          }
        }
      }
    });
  } catch (TransactionFailureException e) {
    // instanceof is false for null, so no explicit null check is needed
    if (e.getCause() instanceof DatasetModuleConflictException) {
      throw (DatasetModuleConflictException) e.getCause();
    }
    // include the exception so the failure cause is logged with a stack trace
    LOG.error("Failed to delete all modules from namespace {}", namespaceId, e);
    throw Throwables.propagate(e);
  } catch (Exception e) {
    LOG.error("Operation failed", e);
    throw Throwables.propagate(e);
  }
}
Use of co.cask.cdap.data2.datafabric.dataset.service.mds.DatasetTypeMDS in project cdap by caskdata:
class DatasetTypeService, method deleteSystemModules.
/**
 * Deletes system dataset modules that have no jar location (i.e. classpath-only modules)
 * and revokes all privileges on them. Deletion happens in one transaction; privilege
 * revocation happens afterwards, outside the transaction.
 */
private void deleteSystemModules() throws Exception {
  final List<DatasetModuleMeta> toRevoke = new ArrayList<>();
  Transactions.createTransactional(datasetCache).execute(60, new TxRunnable() {
    @Override
    public void run(DatasetContext context) throws Exception {
      DatasetTypeMDS datasetTypeMDS = datasetCache.getDataset(DatasetMetaTableUtil.META_TABLE_NAME);
      Collection<DatasetModuleMeta> allDatasets = datasetTypeMDS.getModules(NamespaceId.SYSTEM);
      for (DatasetModuleMeta ds : allDatasets) {
        // a null jar location marks a system (classpath-only) module
        if (ds.getJarLocationPath() == null) {
          LOG.debug("Deleting system dataset module: {}", ds.toString());
          DatasetModuleId moduleId = NamespaceId.SYSTEM.datasetModule(ds.getName());
          datasetTypeMDS.deleteModule(moduleId);
          toRevoke.add(ds);
        }
      }
    }
  });
  long startTime = System.currentTimeMillis();
  LOG.trace("Revoking all privileges for {} system dataset modules. ", toRevoke.size());
  for (DatasetModuleMeta ds : toRevoke) {
    revokeAllPrivilegesOnModule(NamespaceId.SYSTEM.datasetModule(ds.getName()), ds);
  }
  long doneTime = System.currentTimeMillis();
  // subtract in long first: casting an epoch-millis long to float loses the low bits
  // (24-bit mantissa vs ~41 significant bits), which made the old elapsed time meaningless
  float elapsedSeconds = (doneTime - startTime) / 1000.0F;
  LOG.debug("Revoking all privileges for {} system dataset modules took {} seconds.", toRevoke.size(), elapsedSeconds);
}
Use of co.cask.cdap.data2.datafabric.dataset.service.mds.DatasetTypeMDS in project cdap by caskdata:
class DatasetTypeManager, method addModule.
/**
 * Adds a dataset module in a namespace.
 *
 * @param datasetModuleId the {@link DatasetModuleId} to add
 * @param className module class
 * @param jarLocation location of the module jar; null means a system module that is always
 *        present on the classpath
 * @param force if true, an update will be allowed even if there are conflicts with other modules, or if
 *        removal of a type would break other modules' dependencies
 * @throws DatasetModuleConflictException on type conflicts, or when a non-forced update would
 *         remove types that other modules or existing dataset instances depend on
 */
public void addModule(final DatasetModuleId datasetModuleId, final String className, final Location jarLocation,
                      final boolean force) throws DatasetModuleConflictException {
  LOG.debug("adding module: {}, className: {}, jarLocation: {}",
            datasetModuleId, className, jarLocation == null ? "[local]" : jarLocation);
  try {
    final DatasetTypeMDS datasetTypeMDS = datasetCache.getDataset(DatasetMetaTableUtil.META_TABLE_NAME);
    final DatasetInstanceMDS datasetInstanceMDS = datasetCache.getDataset(DatasetMetaTableUtil.INSTANCE_TABLE_NAME);
    txExecutorFactory.createExecutor(datasetCache).execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // 1. get existing module with all its types
        DatasetModuleMeta existing = datasetTypeMDS.getModule(datasetModuleId);
        DependencyTrackingRegistry reg;
        // 2. unpack jar and create class loader
        File unpackedLocation =
          Files.createTempDirectory(Files.createDirectories(systemTempPath), datasetModuleId.getEntityName()).toFile();
        DirectoryClassLoader cl = null;
        try {
          // NOTE: if jarLocation is null, we assume that this is a system module, ie. always present in classpath
          if (jarLocation != null) {
            BundleJarUtil.unJar(jarLocation, unpackedLocation);
            cl = new DirectoryClassLoader(unpackedLocation, cConf.get(Constants.AppFabric.PROGRAM_EXTRA_CLASSPATH),
                                          FilterClassLoader.create(getClass().getClassLoader()), "lib");
          }
          reg = new DependencyTrackingRegistry(datasetModuleId, datasetTypeMDS, cl, force);
          // 3. register the new module while tracking dependencies.
          // this will fail if a type exists in a different module
          DatasetDefinitionRegistries.register(className, cl, reg);
        } catch (TypeConflictException e) {
          // type conflict from the registry, we want to throw that as is
          throw e;
        } catch (Exception e) {
          // include the exception so the failure cause is logged with a stack trace
          LOG.error("Could not instantiate instance of dataset module class {} for module {} using jarLocation {}",
                    className, datasetModuleId, jarLocation, e);
          throw Throwables.propagate(e);
        } finally {
          if (cl != null) {
            // Close the ProgramClassLoader
            Closeables.closeQuietly(cl);
          }
          try {
            DirUtils.deleteDirectoryContents(unpackedLocation);
          } catch (IOException e) {
            LOG.warn("Failed to delete directory {}", unpackedLocation, e);
          }
        }
        // 4. determine whether any type were removed from the module, and whether any other modules depend on them
        if (existing != null) {
          Set<String> removedTypes = new HashSet<>(existing.getTypes());
          removedTypes.removeAll(reg.getTypes());
          // TODO (CDAP-6294): track dependencies at the type level
          if (!force && !removedTypes.isEmpty() && !existing.getUsedByModules().isEmpty()) {
            throw new DatasetModuleConflictException(String.format(
              "Cannot update module '%s' to remove types %s: Modules %s may depend on it. Delete them first",
              datasetModuleId, removedTypes, existing.getUsedByModules()));
          }
          Collection<DatasetSpecification> instances =
            datasetInstanceMDS.getByTypes(datasetModuleId.getParent(), removedTypes);
          if (!instances.isEmpty()) {
            throw new DatasetModuleConflictException(String.format(
              "Attempt to remove dataset types %s from module '%s' that have existing instances: %s. " +
              "Delete them first.", removedTypes, datasetModuleId,
              Iterables.toString(Iterables.transform(instances, new Function<DatasetSpecification, String>() {
                @Nullable
                @Override
                public String apply(@Nullable DatasetSpecification input) {
                  return input.getName() + ":" + input.getType();
                }
              }))));
          }
        }
        // NOTE: we use set to avoid duplicated dependencies
        // NOTE: we use LinkedHashSet to preserve order in which dependencies must be loaded
        Set<String> moduleDependencies = new LinkedHashSet<>();
        for (DatasetTypeId usedType : reg.getUsedTypes()) {
          DatasetModuleMeta usedModule = datasetTypeMDS.getModuleByType(usedType);
          // use checkState's lazy %s template instead of an eager String.format
          Preconditions.checkState(usedModule != null,
                                   "Found a null used module for type %s while adding module %s",
                                   usedType, datasetModuleId);
          // adding all used types and the module itself, in this very order to keep the order of loading modules
          // for instantiating a type
          moduleDependencies.addAll(usedModule.getUsesModules());
          boolean added = moduleDependencies.add(usedModule.getName());
          if (added) {
            // also adding this module as a dependent for all modules it uses
            usedModule.addUsedByModule(datasetModuleId.getEntityName());
            datasetTypeMDS.writeModule(usedType.getParent(), usedModule);
          }
        }
        URI jarURI = jarLocation == null ? null : jarLocation.toURI();
        // preserve the existing "usedBy" list when updating an existing module
        DatasetModuleMeta moduleMeta = existing == null
          ? new DatasetModuleMeta(datasetModuleId.getEntityName(), className, jarURI, reg.getTypes(),
                                  Lists.newArrayList(moduleDependencies))
          : new DatasetModuleMeta(datasetModuleId.getEntityName(), className, jarURI, reg.getTypes(),
                                  Lists.newArrayList(moduleDependencies),
                                  Lists.newArrayList(existing.getUsedByModules()));
        datasetTypeMDS.writeModule(datasetModuleId.getParent(), moduleMeta);
      }
    });
  } catch (TransactionFailureException e) {
    Throwable cause = e.getCause();
    if (cause instanceof DatasetModuleConflictException) {
      throw (DatasetModuleConflictException) cause;
    } else if (cause instanceof TypeConflictException) {
      // surface type conflicts as module conflicts, preserving the cause
      throw new DatasetModuleConflictException(cause.getMessage(), cause);
    }
    throw Throwables.propagate(e);
  } catch (Exception e) {
    LOG.error("Operation failed", e);
    throw Throwables.propagate(e);
  }
}
Aggregations