Usage of io.hops.hopsworks.exceptions.DatasetException in project hopsworks (by logicalclocks):
class ProjectService, method getReadmeByInodeId.
@GET
@Path("/readme/byInodeId/{inodeId}")
@Produces(MediaType.APPLICATION_JSON)
/**
 * Returns the README.md preview for the dataset identified by the given inode id.
 *
 * @param inodeId id of the dataset's inode; must not be null
 * @param sc      injected security context (unused here; auth handled by container)
 * @return HTTP 200 with a {@code FilePreviewDTO}; if the README is missing or
 *         unreadable, the DTO carries a placeholder message instead of an error
 * @throws DatasetException if the dataset exists but is not searchable
 *         (its README must not be exposed)
 */
public Response getReadmeByInodeId(@PathParam("inodeId") Long inodeId, @Context SecurityContext sc) throws DatasetException {
  if (inodeId == null) {
    throw new IllegalArgumentException("No inodeId provided.");
  }
  Inode inode = inodes.findById(inodeId);
  Inode parent = inodes.findParent(inode);
  // The parent inode's name is the project name; resolve project, then dataset.
  Project proj = projectFacade.findByName(parent.getInodePK().getName());
  Dataset ds = datasetFacade.findByProjectAndInode(proj, inode);
  // A non-searchable dataset's README is private — refuse to serve it.
  if (ds != null && !ds.isSearchable()) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.README_NOT_ACCESSIBLE, Level.FINE);
  }
  DistributedFileSystemOps dfso = dfs.getDfsOps();
  String path = inodeController.getPath(inode);
  FilePreviewDTO filePreviewDTO;
  try {
    filePreviewDTO = datasetController.getReadme(path + "/README.md", dfso);
  } catch (IOException ex) {
    // Missing/unreadable README is not an error for this endpoint:
    // deliberately fall back to a placeholder preview instead of failing.
    filePreviewDTO = new FilePreviewDTO();
    filePreviewDTO.setContent("No README file found for this dataset.");
  }
  // Single exit point: success and fallback both return 200 with the preview DTO
  // (the original duplicated this builder call in the catch block).
  return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(filePreviewDTO).build();
}
Usage of io.hops.hopsworks.exceptions.DatasetException in project hopsworks (by logicalclocks):
class ProvUsageBuilder, method buildAccessible.
/**
 * Builds the provenance usage DTO for an artifact (reads/writes, current/last/history),
 * after verifying the caller's project has access to the target dataset.
 *
 * @param uriInfo        request URI context used to build resource links
 * @param user           requesting user
 * @param targetEndpoint dataset path the artifact lives under
 * @param artifactId     ml id of the artifact whose usage is queried
 * @param type           which usage facets to populate on the result
 * @return a usage DTO; facets are left unset when no matching provenance
 *         aggregation or artifact entry is found
 * @throws GenericException if the access project cannot access the dataset
 */
public ProvArtifactUsageParentDTO buildAccessible(UriInfo uriInfo, Users user, DatasetPath targetEndpoint, String artifactId, Set<ProvUsageType> type) throws ProvenanceException, GenericException, DatasetException, MetadataException, SchematizedTagException {
  if (!accessController.hasAccess(targetEndpoint.getAccessProject(), targetEndpoint.getDataset())) {
    throw new GenericException(RESTCodes.GenericErrorCode.NOT_AUTHORIZED_TO_ACCESS, Level.FINE);
  }
  ProvArtifactUsageParentDTO usage = new ProvArtifactUsageParentDTO();
  usage.setArtifactId(artifactId);
  DatasetDTO datasetDTO = datasetBuilder.build(uriInfo, new ResourceRequest(ResourceRequest.Name.DATASET), user, targetEndpoint);
  usage.setDataset(datasetDTO);
  usage.setProjectId(targetEndpoint.getDataset().getProject().getId());
  usage.setProjectName(targetEndpoint.getDataset().getProject().getName());
  ProvOpsParamBuilder params = getBasicUsageOpsParams(targetEndpoint.getDataset(), artifactId);
  ProvOpsDTO ops = opsBuilder.build(targetEndpoint.getDataset().getProject(), params, ProvOpsReturnType.AGGREGATIONS);
  // Locate the APP_USAGE aggregation among the returned items, if any.
  Optional<ProvOpsDTO> aggregation = ops.getItems().stream()
    .filter(agg -> agg.getAggregation() != null && agg.getAggregation().equals(ProvOpsAggregations.APP_USAGE.toString()))
    .findFirst();
  if (!aggregation.isPresent()) {
    return usage;
  }
  // Flipped equals so a null getMlId() on some entry cannot NPE
  // (artifactId is the id being queried and is expected non-null here).
  Optional<ProvOpsDTO> artifact = aggregation.get().getItems().stream()
    .filter(art -> artifactId.equals(art.getMlId()))
    .findFirst();
  if (!artifact.isPresent()) {
    return usage;
  }
  // Populate only the facets the caller asked for.
  for (ProvUsageType t : type) {
    switch(t) {
      case READ_CURRENT:
        usage.setReadCurrent(usage(uriInfo, artifact.get(), Provenance.FileOps.ACCESS_DATA, true));
        break;
      case WRITE_CURRENT:
        usage.setWriteCurrent(usage(uriInfo, artifact.get(), Provenance.FileOps.MODIFY_DATA, true));
        break;
      case READ_LAST:
        lastUsage(uriInfo, artifact.get(), Provenance.FileOps.ACCESS_DATA).ifPresent(usage::setReadLast);
        break;
      case WRITE_LAST:
        lastUsage(uriInfo, artifact.get(), Provenance.FileOps.MODIFY_DATA).ifPresent(usage::setWriteLast);
        break;
      case READ_HISTORY:
        usage.setReadHistory(usage(uriInfo, artifact.get(), Provenance.FileOps.ACCESS_DATA, false));
        break;
      case WRITE_HISTORY:
        usage.setWriteHistory(usage(uriInfo, artifact.get(), Provenance.FileOps.MODIFY_DATA, false));
        break;
    }
  }
  return usage;
}
Usage of io.hops.hopsworks.exceptions.DatasetException in project hopsworks (by logicalclocks):
class TrainingDatasetService, method provenance.
@Path("/{trainingDatasetId}/provenance")
/**
 * Sub-resource locator for the provenance of a training dataset.
 *
 * @param trainingDatasetId id of the training dataset within this featurestore
 * @return the provenance sub-resource, scoped to the TRAININGDATASETS dataset
 *         of the featurestore's project and the training dataset's name/version
 * @throws GenericException if the TRAININGDATASETS dataset cannot be resolved
 */
public ProvArtifactResource provenance(@PathParam("trainingDatasetId") Integer trainingDatasetId) throws FeaturestoreException, GenericException {
  // Convention: the training-datasets dataset is named "<project>_Training_Datasets".
  String tdName = featurestore.getProject().getName() + "_" + Settings.ServiceDataset.TRAININGDATASETS.getName();
  DatasetPath targetEndpointPath;
  try {
    Dataset targetEndpoint = datasetController.getByName(featurestore.getProject(), tdName);
    targetEndpointPath = datasetHelper.getTopLevelDatasetPath(project, targetEndpoint);
  } catch (DatasetException ex) {
    // Preserve the cause so the underlying DatasetException is not lost
    // (mirrors the 5-arg pattern used elsewhere, e.g. DatasetController).
    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "training dataset not found", ex.getMessage(), ex);
  }
  this.provenanceResource.setContext(project, targetEndpointPath);
  TrainingDataset td = trainingDatasetController.getTrainingDatasetById(featurestore, trainingDatasetId);
  this.provenanceResource.setArtifactId(td.getName(), td.getVersion());
  return provenanceResource;
}
Usage of io.hops.hopsworks.exceptions.DatasetException in project hopsworks (by logicalclocks):
class FeaturegroupService, method provenance.
@Path("/{featureGroupId}/provenance")
/**
 * Sub-resource locator for the provenance of a feature group.
 *
 * @param featureGroupId id of the feature group within this featurestore
 * @return the provenance sub-resource, scoped to the project's featurestore
 *         dataset and the feature group's name/version
 * @throws GenericException if the featurestore dataset cannot be resolved
 */
public ProvArtifactResource provenance(@PathParam("featureGroupId") Integer featureGroupId) throws FeaturestoreException, GenericException {
  DatasetPath targetEndpointPath;
  try {
    Dataset targetEndpoint = featurestoreController.getProjectFeaturestoreDataset(featurestore.getProject());
    targetEndpointPath = datasetHelper.getTopLevelDatasetPath(project, targetEndpoint);
  } catch (DatasetException ex) {
    // Fixed copy-paste message ("training dataset" -> "featurestore dataset")
    // and preserved the cause instead of discarding it.
    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "featurestore dataset not found", ex.getMessage(), ex);
  }
  this.provenanceResource.setContext(project, targetEndpointPath);
  Featuregroup fg = featuregroupController.getFeaturegroupById(featurestore, featureGroupId);
  this.provenanceResource.setArtifactId(fg.getName(), fg.getVersion());
  return provenanceResource;
}
Usage of io.hops.hopsworks.exceptions.DatasetException in project hopsworks (by logicalclocks):
class DatasetController, method changePermissions.
/**
 * Applies a permission transition to a dataset and persists the entity.
 * A no-op transition still triggers a facade update; a real transition also
 * propagates the new permission to HDFS before persisting.
 *
 * @param ds                   dataset whose permission is being changed
 * @param permissionTransition from/to permission pair (may be a no-op)
 * @param targetProject        project whose HDFS users are affected
 * @throws DatasetException wrapping any IOException from the HDFS permission change
 */
public void changePermissions(Dataset ds, PermissionTransition permissionTransition, Project targetProject) throws DatasetException {
  if (!permissionTransition.noop()) {
    // Real transition: record the new permission, then push it down to HDFS.
    ds.setPermission(permissionTransition.getTo());
    try {
      hdfsUsersController.changePermission(ds, targetProject, permissionTransition);
    } catch (IOException ioe) {
      throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_PERMISSION_ERROR, Level.WARNING, "dataset: " + ds.getName(), ioe.getMessage(), ioe);
    }
  }
  // Persist in every case (no-op transitions included), as before.
  datasetFacade.update(ds);
}
Aggregations