use of co.cask.cdap.proto.DatasetTypeMeta in project cdap by caskdata.
the class DatasetTypeService method listTypes.
/**
* Lists all {@link DatasetType dataset types} in the specified {@link NamespaceId}.
*/
List<DatasetTypeMeta> listTypes(final NamespaceId namespaceId) throws Exception {
  ensureNamespaceExists(namespaceId);
  // Sorting by name for convenience
  List<DatasetTypeMeta> allTypes = Lists.newArrayList(typeManager.getTypes(namespaceId));
  Collections.sort(allTypes, new Comparator<DatasetTypeMeta>() {
    @Override
    public int compare(DatasetTypeMeta o1, DatasetTypeMeta o2) {
      return o1.getName().compareTo(o2.getName());
    }
  });
  Principal principal = authenticationContext.getPrincipal();
  final Predicate<EntityId> authFilter = authorizationEnforcer.createFilter(principal);
  Iterable<DatasetTypeMeta> authorizedDatasetTypes =
    Iterables.filter(allTypes, new com.google.common.base.Predicate<DatasetTypeMeta>() {
      @Override
      public boolean apply(DatasetTypeMeta datasetTypeMeta) {
        DatasetTypeId datasetTypeId = namespaceId.datasetType(datasetTypeMeta.getName());
        return authFilter.apply(datasetTypeId);
      }
    });
  return Lists.newArrayList(authorizedDatasetTypes);
}
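The method sorts the types by name and then keeps only the ones the caller is authorized to see. The same sort-then-filter flow can be written compactly with Java 8 streams; the sketch below is only an illustration of that shape, using simplified stand-in types (a TypeMeta holder and a Predicate keyed by type name) rather than the actual CDAP classes and authorization filter.

```java
import java.util.Comparator;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ListTypesSketch {
  // Stand-in for DatasetTypeMeta: only the name matters for sorting and filtering here.
  static final class TypeMeta {
    final String name;
    TypeMeta(String name) { this.name = name; }
    String getName() { return name; }
  }

  // Sort by name, then keep only the types the caller is authorized to see.
  static List<TypeMeta> listTypes(List<TypeMeta> allTypes, Predicate<String> authFilter) {
    return allTypes.stream()
        .sorted(Comparator.comparing(TypeMeta::getName))
        .filter(meta -> authFilter.test(meta.getName()))
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    List<TypeMeta> types = Stream.of("table", "fileSet", "cube")
        .map(TypeMeta::new)
        .collect(Collectors.toList());
    // Hypothetical authorization rule: the caller may only see "cube" and "table".
    Predicate<String> authFilter = name -> name.equals("cube") || name.equals("table");
    listTypes(types, authFilter).forEach(meta -> System.out.println(meta.getName()));
    // Prints "cube" then "table": sorted by name, "fileSet" filtered out.
  }
}
```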
use of co.cask.cdap.proto.DatasetTypeMeta in project cdap by caskdata.
the class DatasetAdminOpHTTPHandler method create.
@POST
@Path("/data/datasets/{name}/admin/create")
public void create(HttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespaceId, @PathParam("name") String name) {
  propagateUserId(request);
  // Deserialize and validate the creation parameters carried in the request body
  InternalDatasetCreationParams params = GSON.fromJson(request.getContent().toString(Charsets.UTF_8), InternalDatasetCreationParams.class);
  Preconditions.checkArgument(params.getProperties() != null, "Missing required 'instanceProps' parameter.");
  Preconditions.checkArgument(params.getTypeMeta() != null, "Missing required 'typeMeta' parameter.");
  DatasetProperties props = params.getProperties();
  DatasetTypeMeta typeMeta = params.getTypeMeta();
  try {
    DatasetId instanceId = new DatasetId(namespaceId, name);
    // Passing null for the existing spec means this is a create rather than an update
    DatasetSpecification spec = datasetAdminService.createOrUpdate(instanceId, typeMeta, props, null);
    responder.sendJson(HttpResponseStatus.OK, spec);
  } catch (BadRequestException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (Exception e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
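Before doing any work, the handler parses the body with Gson and rejects it via Guava's Preconditions if required fields are absent. Below is a minimal sketch of that parse-and-validate step; the CreationParams class and its field names are assumptions modeled on the error messages in the snippet, not the real InternalDatasetCreationParams (which carries DatasetProperties and a DatasetTypeMeta).

```java
import com.google.common.base.Preconditions;
import com.google.gson.Gson;

public class CreateParamsSketch {
  private static final Gson GSON = new Gson();

  // Hypothetical stand-in for InternalDatasetCreationParams; field names are assumed.
  static final class CreationParams {
    String instanceProps;
    String typeMeta;
  }

  static CreationParams parseAndValidate(String body) {
    CreationParams params = GSON.fromJson(body, CreationParams.class);
    // checkArgument throws IllegalArgumentException when a required field is missing.
    Preconditions.checkArgument(params.instanceProps != null, "Missing required 'instanceProps' parameter.");
    Preconditions.checkArgument(params.typeMeta != null, "Missing required 'typeMeta' parameter.");
    return params;
  }

  public static void main(String[] args) {
    CreationParams ok = parseAndValidate("{\"instanceProps\":\"{}\",\"typeMeta\":\"table\"}");
    System.out.println(ok.typeMeta);
    try {
      parseAndValidate("{}");
    } catch (IllegalArgumentException e) {
      System.out.println("Rejected: " + e.getMessage());
    }
  }
}
```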
use of co.cask.cdap.proto.DatasetTypeMeta in project cdap by caskdata.
the class DatasetAdminOpHTTPHandler method update.
@POST
@Path("/data/datasets/{name}/admin/update")
public void update(HttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespaceId, @PathParam("name") String name) {
  propagateUserId(request);
  // Deserialize and validate the update parameters, including the spec of the existing instance
  InternalDatasetUpdateParams params = GSON.fromJson(request.getContent().toString(Charsets.UTF_8), InternalDatasetUpdateParams.class);
  Preconditions.checkArgument(params.getProperties() != null, "Missing required 'instanceProps' parameter.");
  Preconditions.checkArgument(params.getTypeMeta() != null, "Missing required 'typeMeta' parameter.");
  Preconditions.checkArgument(params.getExistingSpec() != null, "Missing required 'existingSpec' parameter.");
  DatasetProperties props = params.getProperties();
  DatasetSpecification existing = params.getExistingSpec();
  DatasetTypeMeta typeMeta = params.getTypeMeta();
  try {
    DatasetId instanceId = new DatasetId(namespaceId, name);
    DatasetSpecification spec = datasetAdminService.createOrUpdate(instanceId, typeMeta, props, existing);
    responder.sendJson(HttpResponseStatus.OK, spec);
  } catch (NotFoundException e) {
    LOG.debug("Got handler exception", e);
    responder.sendString(HttpResponseStatus.NOT_FOUND, StringUtils.defaultIfEmpty(e.getMessage(), ""));
  } catch (BadRequestException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (IncompatibleUpdateException e) {
    responder.sendString(HttpResponseStatus.CONFLICT, e.getMessage());
  } catch (Exception e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
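Both admin endpoints translate exception types into HTTP status codes: bad requests become 400, missing entities 404, incompatible updates 409, and anything else 500. The standalone sketch below mirrors that mapping with plain stand-in exception classes rather than CDAP's own, purely to make the precedence of the catch blocks explicit.

```java
public class StatusMappingSketch {
  // Stand-ins for the CDAP exception types referenced by the handler.
  static class BadRequestException extends Exception {
    BadRequestException(String m) { super(m); }
  }
  static class NotFoundException extends Exception {
    NotFoundException(String m) { super(m); }
  }
  static class IncompatibleUpdateException extends Exception {
    IncompatibleUpdateException(String m) { super(m); }
  }

  // Mirrors the catch blocks in update(): specific exceptions first, generic fallback last.
  static int statusFor(Exception e) {
    if (e instanceof NotFoundException) {
      return 404;
    } else if (e instanceof BadRequestException) {
      return 400;
    } else if (e instanceof IncompatibleUpdateException) {
      return 409;
    }
    return 500;
  }

  public static void main(String[] args) {
    System.out.println(statusFor(new NotFoundException("no such dataset")));           // 404
    System.out.println(statusFor(new IncompatibleUpdateException("schema mismatch"))); // 409
    System.out.println(statusFor(new IllegalStateException("boom")));                  // 500
  }
}
```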
use of co.cask.cdap.proto.DatasetTypeMeta in project cdap by caskdata.
the class DatasetTypeService method getType.
/**
* Returns details of the specified {@link DatasetTypeId dataset type}.
*/
DatasetTypeMeta getType(DatasetTypeId datasetTypeId) throws Exception {
  ensureNamespaceExists(datasetTypeId.getParent());
  DatasetTypeMeta typeMeta = typeManager.getTypeInfo(datasetTypeId);
  if (typeMeta == null) {
    throw new DatasetTypeNotFoundException(datasetTypeId);
  }
  // TODO: Test if this can be removed
  if (NamespaceId.SYSTEM.equals(datasetTypeId.getParent())) {
    return typeMeta;
  }
  // only return the type if the user has some privileges on it
  Principal principal = authenticationContext.getPrincipal();
  Predicate<EntityId> authFilter = authorizationEnforcer.createFilter(principal);
  if (!authFilter.apply(datasetTypeId)) {
    throw new UnauthorizedException(principal, datasetTypeId);
  }
  return typeMeta;
}
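getType combines three checks: the type must exist, types in the system namespace bypass authorization, and otherwise the caller's filter must accept the entity. The following is a simplified sketch of that decision flow under stand-in types (a string-keyed registry and a plain Predicate); the real method works on DatasetTypeId and the CDAP authorization filter.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

public class GetTypeSketch {
  static final String SYSTEM_NAMESPACE = "system";

  // Stand-in registry: namespace-qualified type name -> some metadata string.
  static String getType(Map<String, String> registry, String namespace, String typeName,
                        Predicate<String> authFilter) throws Exception {
    String key = namespace + ":" + typeName;
    String meta = registry.get(key);
    if (meta == null) {
      throw new Exception("dataset type not found: " + key);   // analogous to DatasetTypeNotFoundException
    }
    if (SYSTEM_NAMESPACE.equals(namespace)) {
      return meta;                                             // system types are always visible
    }
    if (!authFilter.test(key)) {
      throw new Exception("unauthorized: " + key);             // analogous to UnauthorizedException
    }
    return meta;
  }

  public static void main(String[] args) throws Exception {
    Map<String, String> registry = new HashMap<>();
    registry.put("system:table", "built-in table type");
    registry.put("default:cube", "cube type");
    System.out.println(getType(registry, "system", "table", key -> false));  // visible despite the filter
    System.out.println(getType(registry, "default", "cube", key -> true));   // visible via the filter
  }
}
```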
use of co.cask.cdap.proto.DatasetTypeMeta in project cdap by caskdata.
the class InMemoryDatasetFramework method addModule.
@Override
public void addModule(DatasetModuleId moduleId, DatasetModule module) throws ModuleConflictException {
  // TODO (CDAP-6297): check if existing modules overlap, or if this removes a type other modules depend on
  writeLock.lock();
  try {
    DatasetDefinitionRegistry registry = registries.get(moduleId.getParent());
    if (registry == null) {
      registry = registryFactory.create();
      registries.put(moduleId.getParent(), registry);
    }
    TypesTrackingRegistry trackingRegistry = new TypesTrackingRegistry(registry);
    module.register(trackingRegistry);
    String moduleClassName = DatasetModules.getDatasetModuleClass(module).getName();
    moduleClasses.put(moduleId.getParent(), moduleId, moduleClassName);
    List<String> types = trackingRegistry.getTypes();
    nonDefaultTypes.putAll(moduleId.getParent(), types);
    for (String type : types) {
      this.types.put(moduleId.getParent().datasetType(type),
                     new DatasetTypeMeta(type, Collections.singletonList(
                       new DatasetModuleMeta(moduleId.getEntityName(), moduleClassName, null, types,
                                             Collections.<String>emptyList()))));
    }
  } finally {
    writeLock.unlock();
  }
}
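addModule runs under a write lock and lazily creates the per-namespace registry before registering the module's types. A compact sketch of that get-or-create-under-write-lock pattern, using only java.util.concurrent and simplified stand-ins (a plain map of type-name lists) for the registry and module types:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class AddModuleSketch {
  // Stand-in for a per-namespace DatasetDefinitionRegistry: just a list of registered type names.
  private final Map<String, List<String>> registries = new HashMap<>();
  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

  // Register the given types under a namespace, creating the registry on first use.
  void addModule(String namespace, List<String> moduleTypes) {
    Lock writeLock = lock.writeLock();
    writeLock.lock();
    try {
      List<String> registry = registries.get(namespace);
      if (registry == null) {
        registry = new ArrayList<>();
        registries.put(namespace, registry);
      }
      registry.addAll(moduleTypes);
    } finally {
      writeLock.unlock();   // always release, even if registration fails
    }
  }

  public static void main(String[] args) {
    AddModuleSketch framework = new AddModuleSketch();
    framework.addModule("default", Arrays.asList("table", "fileSet"));
    framework.addModule("default", Arrays.asList("cube"));
    System.out.println(framework.registries);   // {default=[table, fileSet, cube]}
  }
}
```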