Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class DatasetInstanceService, method drop.
/**
 * Drops the specified dataset instance.
 *
 * @param instance the {@link DatasetId} to drop
 * @throws NamespaceNotFoundException if the namespace was not found
 * @throws DatasetNotFoundException if the dataset instance was not found
 * @throws IOException if there was a problem in checking if the namespace exists over HTTP
 * @throws UnauthorizedException if perimeter security and authorization are enabled, and the current user does not
 *         have {@link Action#ADMIN} privileges on the #instance
 */
void drop(DatasetId instance) throws Exception {
  if (!DatasetsUtil.isSystemDatasetInUserNamespace(instance)) {
    authorizationEnforcer.enforce(instance, authenticationContext.getPrincipal(), Action.ADMIN);
  }
  ensureNamespaceExists(instance.getParent());
  DatasetSpecification spec = instanceManager.get(instance);
  if (spec == null) {
    throw new DatasetNotFoundException(instance);
  }
  dropDataset(instance, spec);
}
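For orientation, a minimal caller sketch, not taken from the CDAP codebase: it builds a DatasetId the same way the HTTP handlers below do and treats a missing dataset as a no-op. The class name, the injected instanceService field, and the import paths (which follow the CDAP 4.x package layout) are assumptions; only drop(DatasetId) and its documented exceptions come from the method above.

import co.cask.cdap.common.DatasetNotFoundException;          // assumed package, may vary by CDAP version
import co.cask.cdap.data2.datafabric.dataset.service.DatasetInstanceService; // assumed package
import co.cask.cdap.proto.id.DatasetId;

public class DropCallerSketch {
  private final DatasetInstanceService instanceService; // assumed to be injected elsewhere

  DropCallerSketch(DatasetInstanceService instanceService) {
    this.instanceService = instanceService;
  }

  /** Returns true if the dataset was dropped, false if it did not exist. */
  boolean dropIfExists(String namespace, String datasetName) throws Exception {
    DatasetId instance = new DatasetId(namespace, datasetName);
    try {
      // enforces ADMIN (unless it is a system dataset in a user namespace),
      // verifies the namespace, then drops the instance
      instanceService.drop(instance);
      return true;
    } catch (DatasetNotFoundException e) {
      return false;
    }
  }
}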
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class DatasetInstanceService, method update.
/**
 * Updates the properties of an existing Dataset specification.
 * A {@link DatasetInstanceConfiguration} is constructed based on the request and the Dataset instance is updated.
 *
 * @param instance the dataset instance
 * @param properties the dataset properties to be used
 * @throws NamespaceNotFoundException if the specified namespace was not found
 * @throws DatasetNotFoundException if the dataset was not found
 * @throws DatasetTypeNotFoundException if the type of the existing dataset was not found
 * @throws UnauthorizedException if perimeter security and authorization are enabled, and the current user does not
 *         have {@link Action#ADMIN} privilege on the #instance
 */
void update(DatasetId instance, Map<String, String> properties) throws Exception {
  ensureNamespaceExists(instance.getParent());
  if (!DatasetsUtil.isSystemDatasetInUserNamespace(instance)) {
    authorizationEnforcer.enforce(instance, authenticationContext.getPrincipal(), Action.ADMIN);
  }
  DatasetSpecification existing = instanceManager.get(instance);
  if (existing == null) {
    throw new DatasetNotFoundException(instance);
  }
  LOG.info("Update dataset {}, properties: {}", instance.getEntityName(), ConversionHelpers.toJson(properties));
  // bypass the auth check for the dataset type when the operation is not a creation
  DatasetTypeMeta typeMeta = getTypeInfo(instance.getParent(), existing.getType(), true);
  if (typeMeta == null) {
    // Type not found in the instance's namespace and the system namespace. Bail out.
    throw new DatasetTypeNotFoundException(ConversionHelpers.toDatasetTypeId(instance.getParent(), existing.getType()));
  }
  // Note how we execute configure() via opExecutorClient (outside of ds service) to isolate running user code
  DatasetProperties datasetProperties = DatasetProperties.of(properties);
  DatasetSpecification spec = opExecutorClient.update(instance, typeMeta, datasetProperties, existing);
  instanceManager.add(instance.getParent(), spec);
  metaCache.invalidate(instance);
  updateExplore(instance, datasetProperties, existing, spec);
  publishAudit(instance, AuditType.UPDATE);
}
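A similarly hedged sketch of driving update(): the caller supplies only the new property map, and the service resolves the existing spec and type meta itself before delegating to opExecutorClient. The property key and every name outside the method signature above are illustrative assumptions.

import java.util.Map;
import com.google.common.collect.ImmutableMap;
import co.cask.cdap.data2.datafabric.dataset.service.DatasetInstanceService; // assumed package
import co.cask.cdap.proto.id.DatasetId;

public class UpdateCallerSketch {
  private final DatasetInstanceService instanceService; // assumed to be injected elsewhere

  UpdateCallerSketch(DatasetInstanceService instanceService) {
    this.instanceService = instanceService;
  }

  void setTtl(String namespace, String datasetName, long ttlSeconds) throws Exception {
    DatasetId instance = new DatasetId(namespace, datasetName);
    // "dataset.table.ttl" is an illustrative key; valid keys depend on the dataset type
    Map<String, String> properties = ImmutableMap.of("dataset.table.ttl", Long.toString(ttlSeconds));
    // looks up the existing spec, reconfigures via opExecutorClient, updates Explore, and publishes an audit event
    instanceService.update(instance, properties);
  }
}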
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class DatasetAdminOpHTTPHandler, method update.
@POST
@Path("/data/datasets/{name}/admin/update")
public void update(FullHttpRequest request, HttpResponder responder,
                   @PathParam("namespace-id") String namespaceId, @PathParam("name") String name) {
  propagateUserId(request);
  InternalDatasetUpdateParams params =
    GSON.fromJson(request.content().toString(StandardCharsets.UTF_8), InternalDatasetUpdateParams.class);
  Preconditions.checkArgument(params.getProperties() != null, "Missing required 'instanceProps' parameter.");
  Preconditions.checkArgument(params.getTypeMeta() != null, "Missing required 'typeMeta' parameter.");
  Preconditions.checkArgument(params.getExistingSpec() != null, "Missing required 'existingSpec' parameter.");
  DatasetProperties props = params.getProperties();
  DatasetSpecification existing = params.getExistingSpec();
  DatasetTypeMeta typeMeta = params.getTypeMeta();
  try {
    DatasetId instanceId = new DatasetId(namespaceId, name);
    DatasetSpecification spec = datasetAdminService.createOrUpdate(instanceId, typeMeta, props, existing);
    responder.sendJson(HttpResponseStatus.OK, GSON.toJson(spec));
  } catch (NotFoundException e) {
    LOG.debug("Got handler exception", e);
    responder.sendString(HttpResponseStatus.NOT_FOUND, StringUtils.defaultIfEmpty(e.getMessage(), ""));
  } catch (BadRequestException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (IncompatibleUpdateException e) {
    responder.sendString(HttpResponseStatus.CONFLICT, e.getMessage());
  } catch (Exception e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
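Judging from the Preconditions messages, the request body is a JSON object with the fields instanceProps, typeMeta and existingSpec. Below is a hedged client-side sketch of that envelope only; in practice each field would hold the Gson-serialized DatasetProperties, DatasetTypeMeta and DatasetSpecification rather than empty placeholders, and the exact field names should be confirmed against InternalDatasetUpdateParams.

import com.google.gson.Gson;
import com.google.gson.JsonObject;

public class UpdateRequestBodySketch {
  public static void main(String[] args) {
    Gson gson = new Gson();
    JsonObject body = new JsonObject();
    // placeholders: each would be the serialized form of the corresponding CDAP object
    body.add("instanceProps", new JsonObject());
    body.add("typeMeta", new JsonObject());
    body.add("existingSpec", new JsonObject());
    // POST the resulting JSON to the .../data/datasets/{name}/admin/update path handled above
    System.out.println(gson.toJson(body));
  }
}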
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class DatasetAdminOpHTTPHandler, method drop.
@POST
@Path("/data/datasets/{name}/admin/drop")
public void drop(FullHttpRequest request, HttpResponder responder,
                 @PathParam("namespace-id") String namespaceId,
                 @PathParam("name") String instanceName) throws Exception {
  propagateUserId(request);
  InternalDatasetDropParams params =
    GSON.fromJson(request.content().toString(StandardCharsets.UTF_8), InternalDatasetDropParams.class);
  Preconditions.checkArgument(params.getInstanceSpec() != null, "Missing required 'instanceSpec' parameter.");
  Preconditions.checkArgument(params.getTypeMeta() != null, "Missing required 'typeMeta' parameter.");
  DatasetSpecification spec = params.getInstanceSpec();
  DatasetTypeMeta typeMeta = params.getTypeMeta();
  try {
    datasetAdminService.drop(new DatasetId(namespaceId, instanceName), typeMeta, spec);
    responder.sendJson(HttpResponseStatus.OK, GSON.toJson(spec));
  } catch (BadRequestException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  }
}
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class DatasetAdminService, method createOrUpdate.
/**
 * Configures and creates a Dataset
 *
 * @param datasetInstanceId dataset instance to be created
 * @param typeMeta type meta for the dataset
 * @param props dataset instance properties
 * @param existing if the dataset already exists (in case of update), the existing properties
 * @return dataset specification
 */
public DatasetSpecification createOrUpdate(final DatasetId datasetInstanceId, final DatasetTypeMeta typeMeta,
                                           final DatasetProperties props,
                                           @Nullable final DatasetSpecification existing) throws Exception {
  if (existing == null) {
    LOG.info("Creating dataset instance {}, type meta: {}", datasetInstanceId, typeMeta);
  } else {
    LOG.info("Updating dataset instance {}, type meta: {}, existing: {}", datasetInstanceId, typeMeta, existing);
  }
  try (DatasetClassLoaderProvider classLoaderProvider = new DirectoryClassLoaderProvider(cConf, locationFactory)) {
    final DatasetContext context = DatasetContext.from(datasetInstanceId.getNamespace());
    UserGroupInformation ugi = getUgiForDataset(impersonator, datasetInstanceId);
    final DatasetType type = ImpersonationUtils.doAs(ugi, () -> {
      DatasetType type1 = dsFramework.getDatasetType(typeMeta, null, classLoaderProvider);
      if (type1 == null) {
        throw new BadRequestException(
          String.format("Cannot instantiate dataset type using provided type meta: %s", typeMeta));
      }
      return type1;
    });
    DatasetSpecification spec = ImpersonationUtils.doAs(ugi, () -> {
      DatasetSpecification spec1 = existing == null
        ? type.configure(datasetInstanceId.getEntityName(), props)
        : type.reconfigure(datasetInstanceId.getEntityName(), props, existing);
      DatasetAdmin admin = type.getAdmin(context, spec1);
      try {
        if (existing != null) {
          if (admin instanceof Updatable) {
            ((Updatable) admin).update(existing);
          } else {
            admin.upgrade();
          }
        } else {
          admin.create();
        }
      } finally {
        Closeables.closeQuietly(admin);
      }
      return spec1;
    });
    // Writing system metadata should be done without impersonation since user may not have access to system tables.
    writeSystemMetadata(datasetInstanceId, spec, props, typeMeta, type, context, existing != null, ugi);
    return spec;
  } catch (Exception e) {
    if (e instanceof IncompatibleUpdateException) {
      // this is expected to happen if the user provides bad update properties, so we log this as debug
      LOG.debug("Incompatible update for dataset '{}'", datasetInstanceId, e);
    } else {
      LOG.error("Error {} dataset '{}': {}", existing == null ? "creating" : "updating",
                datasetInstanceId, e.getMessage(), e);
    }
    throw e;
  }
}
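The update branch above prefers Updatable over a plain upgrade(): when the admin implements it, the previous DatasetSpecification is handed over so storage can be reconciled against the old layout. Below is a sketch of a DatasetAdmin opting into that path, assuming the method set and packages of co.cask.cdap.api.dataset; treat the exact signatures as unverified.

import java.io.IOException;
import co.cask.cdap.api.dataset.DatasetAdmin;          // assumed package
import co.cask.cdap.api.dataset.DatasetSpecification;
import co.cask.cdap.api.dataset.Updatable;             // assumed package

public class ExampleDatasetAdmin implements DatasetAdmin, Updatable {
  @Override
  public boolean exists() throws IOException {
    return true; // stub
  }

  @Override
  public void create() throws IOException {
    // allocate storage for a brand-new instance (the existing == null path above)
  }

  @Override
  public void drop() throws IOException { }

  @Override
  public void truncate() throws IOException { }

  @Override
  public void upgrade() throws IOException {
    // fallback used by createOrUpdate() when the admin is not Updatable
  }

  @Override
  public void update(DatasetSpecification oldSpec) throws IOException {
    // preferred path: reconcile physical storage against the previous spec
  }

  @Override
  public void close() throws IOException { }
}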