use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
the class InMemoryDatasetOpExecutor method update.
@Override
public DatasetSpecification update(DatasetId datasetInstanceId, DatasetTypeMeta typeMeta,
                                   DatasetProperties props, DatasetSpecification existing) throws Exception {
  DatasetType type = client.getDatasetType(typeMeta, null, new ConstantClassLoaderProvider());
  if (type == null) {
    throw new IllegalArgumentException("Dataset type cannot be instantiated for provided type meta: " + typeMeta);
  }
  try {
    DatasetSpecification spec = type.reconfigure(datasetInstanceId.getEntityName(), props, existing);
    DatasetAdmin admin = type.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), spec);
    if (admin instanceof Updatable) {
      ((Updatable) admin).update(existing);
    } else {
      admin.create();
    }
    if (spec.getDescription() == null && existing.getDescription() != null) {
      // DatasetSpecification is immutable: setDescription returns a new spec,
      // so the result must be reassigned or the description is silently lost
      spec = spec.setDescription(existing.getDescription());
    }
    return spec;
  } catch (IncompatibleUpdateException e) {
    throw new ConflictException(e.getMessage());
  }
}
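In the branch above, an admin that implements Updatable migrates existing storage in place (it receives the old spec so it can compute the difference), while any other admin is simply re-created via create(). Below is a minimal sketch of an admin opting into the Updatable path, assuming the update(DatasetSpecification) signature implied by the call site; the class name and storage comments are hypothetical:

import java.io.IOException;
import co.cask.cdap.api.dataset.DatasetAdmin;
import co.cask.cdap.api.dataset.DatasetSpecification;
import co.cask.cdap.api.dataset.Updatable;

// Hypothetical admin that supports in-place reconfiguration.
public class ExampleTableAdmin implements DatasetAdmin, Updatable {

  @Override
  public boolean exists() throws IOException {
    return true; // e.g. check that the backing table exists
  }

  @Override
  public void create() throws IOException {
    // allocate backing storage for a brand-new instance
  }

  @Override
  public void update(DatasetSpecification oldSpec) throws IOException {
    // compare oldSpec with the current configuration and migrate storage in place
  }

  @Override
  public void truncate() throws IOException {
    // remove all data but keep the instance
  }

  @Override
  public void drop() throws IOException {
    // delete the backing storage entirely
  }

  @Override
  public void upgrade() throws IOException {
    // nothing to do in this example
  }

  @Override
  public void close() throws IOException {
    // release any resources held by this admin
  }
}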
use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
the class DatasetTypeManager method addModule.
/**
 * Adds a dataset module in a namespace.
 *
 * @param datasetModuleId the {@link DatasetModuleId} to add
 * @param className the name of the module class
 * @param jarLocation the location of the module jar
 * @param force if true, an update is allowed even if it conflicts with other modules, or if
 *              removal of a type would break other modules' dependencies
 */
public void addModule(final DatasetModuleId datasetModuleId, final String className,
                      final Location jarLocation, final boolean force)
  throws DatasetModuleConflictException {

  LOG.debug("adding module: {}, className: {}, jarLocation: {}",
            datasetModuleId, className, jarLocation == null ? "[local]" : jarLocation);
  try {
    final DatasetTypeMDS datasetTypeMDS = datasetCache.getDataset(DatasetMetaTableUtil.META_TABLE_NAME);
    final DatasetInstanceMDS datasetInstanceMDS = datasetCache.getDataset(DatasetMetaTableUtil.INSTANCE_TABLE_NAME);
    txExecutorFactory.createExecutor(datasetCache).execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // 1. get the existing module with all its types
        DatasetModuleMeta existing = datasetTypeMDS.getModule(datasetModuleId);
        DependencyTrackingRegistry reg;
        // 2. unpack the jar and create a class loader
        File unpackedLocation = Files.createTempDirectory(Files.createDirectories(systemTempPath),
                                                          datasetModuleId.getEntityName()).toFile();
        DirectoryClassLoader cl = null;
        try {
          // NOTE: if jarLocation is null, we assume that this is a system module, i.e. always present in the classpath
          if (jarLocation != null) {
            BundleJarUtil.unJar(jarLocation, unpackedLocation);
            cl = new DirectoryClassLoader(unpackedLocation,
                                          cConf.get(Constants.AppFabric.PROGRAM_EXTRA_CLASSPATH),
                                          FilterClassLoader.create(getClass().getClassLoader()), "lib");
          }
          reg = new DependencyTrackingRegistry(datasetModuleId, datasetTypeMDS, cl, force);
          // 3. register the new module while tracking dependencies;
          // this will fail if a type exists in a different module
          DatasetDefinitionRegistries.register(className, cl, reg);
        } catch (TypeConflictException e) {
          // type conflict from the registry, we want to throw that as is
          throw e;
        } catch (Exception e) {
          LOG.error("Could not instantiate instance of dataset module class {} for module {} using jarLocation {}",
                    className, datasetModuleId, jarLocation);
          throw Throwables.propagate(e);
        } finally {
          if (cl != null) {
            // close the DirectoryClassLoader
            Closeables.closeQuietly(cl);
          }
          try {
            DirUtils.deleteDirectoryContents(unpackedLocation);
          } catch (IOException e) {
            LOG.warn("Failed to delete directory {}", unpackedLocation, e);
          }
        }
        // 4. determine whether any types were removed from the module, and whether any other modules depend on them
        if (existing != null) {
          Set<String> removedTypes = new HashSet<>(existing.getTypes());
          removedTypes.removeAll(reg.getTypes());
          // TODO (CDAP-6294): track dependencies at the type level
          if (!force && !removedTypes.isEmpty() && !existing.getUsedByModules().isEmpty()) {
            throw new DatasetModuleConflictException(String.format(
              "Cannot update module '%s' to remove types %s: modules %s may depend on them. " +
              "Delete those modules first.", datasetModuleId, removedTypes, existing.getUsedByModules()));
          }
          Collection<DatasetSpecification> instances =
            datasetInstanceMDS.getByTypes(datasetModuleId.getParent(), removedTypes);
          if (!instances.isEmpty()) {
            throw new DatasetModuleConflictException(String.format(
              "Attempt to remove dataset types %s from module '%s' that have existing instances: %s. " +
              "Delete them first.", removedTypes, datasetModuleId,
              Iterables.toString(Iterables.transform(instances, new Function<DatasetSpecification, String>() {
                @Nullable
                @Override
                public String apply(@Nullable DatasetSpecification input) {
                  return input.getName() + ":" + input.getType();
                }
              }))));
          }
        }
        // NOTE: we use a set to avoid duplicated dependencies,
        // and a LinkedHashSet to preserve the order in which dependencies must be loaded
        Set<String> moduleDependencies = new LinkedHashSet<>();
        for (DatasetTypeId usedType : reg.getUsedTypes()) {
          DatasetModuleMeta usedModule = datasetTypeMDS.getModuleByType(usedType);
          Preconditions.checkState(usedModule != null,
            String.format("Found a null used module for type %s while adding module %s",
                          usedType, datasetModuleId));
          // add the used module's own dependencies first, then the used module itself,
          // in this order to preserve the module load order required to instantiate a type
          moduleDependencies.addAll(usedModule.getUsesModules());
          boolean added = moduleDependencies.add(usedModule.getName());
          if (added) {
            // also record this module as a dependent of every module it uses
            usedModule.addUsedByModule(datasetModuleId.getEntityName());
            datasetTypeMDS.writeModule(usedType.getParent(), usedModule);
          }
        }
        URI jarURI = jarLocation == null ? null : jarLocation.toURI();
        DatasetModuleMeta moduleMeta = existing == null
          ? new DatasetModuleMeta(datasetModuleId.getEntityName(), className, jarURI, reg.getTypes(),
                                  Lists.newArrayList(moduleDependencies))
          : new DatasetModuleMeta(datasetModuleId.getEntityName(), className, jarURI, reg.getTypes(),
                                  Lists.newArrayList(moduleDependencies),
                                  Lists.newArrayList(existing.getUsedByModules()));
        datasetTypeMDS.writeModule(datasetModuleId.getParent(), moduleMeta);
      }
    });
  } catch (TransactionFailureException e) {
    Throwable cause = e.getCause();
    if (cause != null) {
      if (cause instanceof DatasetModuleConflictException) {
        throw (DatasetModuleConflictException) cause;
      } else if (cause instanceof TypeConflictException) {
        throw new DatasetModuleConflictException(cause.getMessage(), cause);
      }
    }
    throw Throwables.propagate(e);
  } catch (Exception e) {
    LOG.error("Operation failed", e);
    throw Throwables.propagate(e);
  }
}
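A hedged usage sketch of this method. It assumes a DatasetTypeManager and a Twill LocationFactory are in scope; the namespace, module name, class name, and jar path are all illustrative, not from the CDAP codebase:

// Illustrative identifiers; only the CDAP/Twill types are real.
DatasetModuleId moduleId = new NamespaceId("default").datasetModule("exampleModule");
Location jar = locationFactory.create("/tmp/example-module-1.0.jar");
try {
  // force = false: refuse updates that would break dependent modules or existing instances
  datasetTypeManager.addModule(moduleId, "com.example.dataset.ExampleModule", jar, false);
} catch (DatasetModuleConflictException e) {
  // a type in the jar clashes with another module, or removed types are still in use
  LOG.warn("Module conflict while adding {}: {}", moduleId, e.getMessage());
}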
use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
the class DatasetType method configure.
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  DatasetSpecification spec = delegate.configure(instanceName, properties);
  // DatasetSpecification is immutable, so each setter returns a new instance
  spec = spec.setOriginalProperties(properties);
  if (properties.getDescription() != null) {
    spec = spec.setDescription(properties.getDescription());
  }
  return spec;
}
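For context, a sketch of the caller's side: building properties whose description this method then carries onto the spec. It assumes DatasetProperties.Builder exposes a setDescription() counterpart to the getDescription() accessor used above; the property key and values are purely illustrative:

// Illustrative properties; the "ttl" key is an arbitrary example.
DatasetProperties props = DatasetProperties.builder()
  .add("ttl", "86400")
  .setDescription("example table with a one-day TTL") // assumed builder method
  .build();

DatasetSpecification spec = datasetType.configure("exampleTable", props);
// spec now records both the original properties and the description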
use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
the class InMemoryDatasetFramework method getAdmin.
@Nullable
@Override
public <T extends DatasetAdmin> T getAdmin(DatasetId datasetInstanceId, @Nullable ClassLoader classLoader,
                                           DatasetClassLoaderProvider classLoaderProvider) throws IOException {
  readLock.lock();
  try {
    DatasetSpecification spec = instances.get(datasetInstanceId.getParent(), datasetInstanceId);
    if (spec == null) {
      return null;
    }
    LinkedHashSet<String> availableModuleClasses = getAvailableModuleClasses(datasetInstanceId.getParent());
    DatasetDefinition impl = createRegistry(availableModuleClasses, classLoader).get(spec.getType());
    return (T) impl.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), spec, classLoader);
  } finally {
    readLock.unlock();
  }
}
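A sketch of how a caller might use this lookup; the method, framework variable, and dataset names are illustrative. Note that the method returns null for an unknown instance, so the result must be checked. Also note that this in-memory implementation resolves the definition from the modules registered in the namespace rather than from the classLoaderProvider argument:

// Illustrative usage: fetch the admin and make sure physical storage exists.
void ensureExists(InMemoryDatasetFramework framework) throws IOException {
  DatasetId id = new NamespaceId("default").dataset("exampleTable");
  DatasetAdmin admin = framework.getAdmin(id, null, new ConstantClassLoaderProvider());
  if (admin == null) {
    throw new IOException("Dataset instance not found: " + id);
  }
  if (!admin.exists()) {
    admin.create(); // recreate missing physical storage
  }
}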
use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
the class InMemoryDatasetFramework method deleteAllInstances.
@Override
public void deleteAllInstances(NamespaceId namespaceId) throws DatasetManagementException, IOException {
  writeLock.lock();
  try {
    for (DatasetSpecification spec : instances.row(namespaceId).values()) {
      DatasetDefinition def = getDefinitionForType(namespaceId, spec.getType());
      if (def == null) {
        throw new DatasetManagementException(String.format(
          "Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
          spec.getType(), namespaceId));
      }
      def.getAdmin(DatasetContext.from(namespaceId.getEntityName()), spec, null).drop();
      publishAudit(namespaceId.dataset(spec.getName()), AuditType.DELETE);
    }
    instances.row(namespaceId).clear();
  } finally {
    writeLock.unlock();
  }
}
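A usage sketch, assuming a framework instance is in scope; the namespace name is illustrative. The method holds the write lock for the whole sweep, drops each instance's physical storage, publishes a DELETE audit event per instance, and only then clears the in-memory registry:

// Illustrative: wipe every dataset instance in a namespace (e.g. on namespace deletion).
NamespaceId ns = new NamespaceId("default");
framework.deleteAllInstances(ns);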