Use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.
The class ModelsController, method deleteInternal.
private void deleteInternal(Users user, Project project, String path) throws DatasetException {
  DatasetPath datasetPath = datasetHelper.getDatasetPath(project, path, DatasetType.DATASET);
  datasetController.delete(project, user, datasetPath.getFullPath(), datasetPath.getDataset(),
      datasetPath.isTopLevelDataset());
}
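This helper (and the identical one in ExperimentsController further down) does two things: resolve the project-relative path into a DatasetPath, then delegate to datasetController.delete with the full path, the owning dataset, and the top-level flag. The standalone sketch below is a simplified, hypothetical analogue of that resolution step; the class and field names are illustrative only, not the real DatasetPath API, and the /Projects/<project>/<dataset> layout is shown as an example.

// Simplified, self-contained sketch (hypothetical names, not the Hopsworks API).
import java.nio.file.Path;
import java.nio.file.Paths;

public class DatasetPathSketch {
  private final Path fullPath;      // e.g. /Projects/demo/Models/mnist/1
  private final String datasetName; // first path component, e.g. Models
  private final boolean topLevel;   // true when the path is the dataset root itself

  public DatasetPathSketch(String projectName, String relativePath) {
    Path relative = Paths.get(relativePath);            // e.g. Models/mnist/1
    this.datasetName = relative.getName(0).toString();  // dataset is the first component
    this.fullPath = Paths.get("/Projects", projectName).resolve(relative);
    this.topLevel = relative.getNameCount() == 1;       // only the dataset directory itself
  }

  public Path getFullPath() { return fullPath; }
  public String getDatasetName() { return datasetName; }
  public boolean isTopLevelDataset() { return topLevel; }

  public static void main(String[] args) {
    DatasetPathSketch p = new DatasetPathSketch("demo", "Models/mnist/1");
    // prints: /Projects/demo/Models/mnist/1 top-level=false
    System.out.println(p.getFullPath() + " top-level=" + p.isTopLevelDataset());
  }
}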
Use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.
The class DatasetBuilder, method datasetSharedWithItems.
// create dto from a list of DatasetSharedWith objects
private DatasetDTO datasetSharedWithItems(DatasetDTO dto, UriInfo uriInfo, ResourceRequest resourceRequest,
    Project accessProject, Users user, List<DatasetSharedWith> datasetSharedWithList, String parentPath,
    Users dirOwner) throws DatasetException, MetadataException, SchematizedTagException {
  if (datasetSharedWithList != null && !datasetSharedWithList.isEmpty()) {
    for (DatasetSharedWith datasetSharedWith : datasetSharedWithList) {
      DatasetPath datasetPath = datasetHelper.getTopLevelDatasetPath(accessProject, datasetSharedWith);
      dto.addItem(buildItems(uriInfo, resourceRequest, user, datasetPath, parentPath, dirOwner));
    }
  }
  return dto;
}
Use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.
The class DatasetBuilder, method datasetItems.
// create dto from a list of datasets
private DatasetDTO datasetItems(DatasetDTO dto, UriInfo uriInfo, ResourceRequest resourceRequest,
    List<Dataset> datasets, Project accessProject, Users user, String parentPath, Users dirOwner)
    throws DatasetException, MetadataException, SchematizedTagException {
  if (datasets != null && !datasets.isEmpty()) {
    for (Dataset dataset : datasets) {
      DatasetPath datasetPath = datasetHelper.getTopLevelDatasetPath(accessProject, dataset);
      dto.addItem(buildItems(uriInfo, resourceRequest, user, datasetPath, parentPath, dirOwner));
    }
  }
  return dto;
}
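Both builder methods above, datasetSharedWithItems and datasetItems, follow the same shape: guard against a null or empty list, resolve a top-level DatasetPath per entry, and append the built item to the DTO. As a rough illustration of that iterate-and-collect pattern (the generic helper and its names are hypothetical, not Hopsworks code), the common step could be factored like this:

import java.util.List;
import java.util.function.Function;

// Hypothetical helper: applies a per-element builder and collects the results,
// leaving the target untouched when the source list is null or empty.
final class ItemCollector {
  private ItemCollector() { }

  static <S, T> void addItems(List<T> target, List<S> source, Function<S, T> buildItem) {
    if (source != null && !source.isEmpty()) {
      for (S element : source) {
        target.add(buildItem.apply(element));
      }
    }
  }
}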
Use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.
The class DatasetResource, method postByPath.
@POST
@Path("{path: .+}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Post an action on a file, dir or dataset.")
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER", "HOPS_SERVICE_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.DATASET_CREATE }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER", "HOPS_SERVICE_USER" })
public Response postByPath(@Context UriInfo uriInfo, @Context SecurityContext sc, @Context HttpServletRequest req,
    @PathParam("path") String path, @QueryParam("type") DatasetType datasetType,
    @QueryParam("target_project") String targetProjectName, @QueryParam("action") DatasetActions.Post action,
    @QueryParam("description") String description, @QueryParam("searchable") Boolean searchable,
    @QueryParam("generate_readme") Boolean generateReadme, @QueryParam("destination_path") String destPath,
    @QueryParam("destination_type") DatasetType destDatasetType,
    @DefaultValue("READ_ONLY") @QueryParam("permission") DatasetAccessPermission permission)
    throws DatasetException, ProjectException, HopsSecurityException, ProvenanceException, MetadataException,
    SchematizedTagException, FeaturestoreException {
  Users user = jwtHelper.getUserPrincipal(sc);
  DatasetPath datasetPath;
  DatasetPath distDatasetPath;
  Project project = this.getProject();
  switch (action == null ? DatasetActions.Post.CREATE : action) {
    case CREATE:
      if (datasetType != null && !datasetType.equals(DatasetType.DATASET)) {
        // can only create dataset
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_INVALID, Level.FINE);
      }
      datasetPath = datasetHelper.getNewDatasetPath(project, path, DatasetType.DATASET);
      if (datasetPath.isTopLevelDataset()) {
        checkIfDataOwner(project, user);
      }
      if (datasetPath.isTopLevelDataset() && !datasetHelper.isBasicDatasetProjectParent(project, datasetPath)) {
        // fake shared dataset with :: in dataset name at dataset creation
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NAME_INVALID, Level.FINE);
      }
      ProvTypeDTO projectProvCore = fsProvenanceController.getMetaStatus(user, project, searchable);
      ResourceRequest resourceRequest;
      if (datasetPath.isTopLevelDataset()) {
        datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(),
            datasetPath.isTopLevelDataset(), description,
            Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
        resourceRequest = new ResourceRequest(ResourceRequest.Name.DATASET);
        Dataset ds = datasetController.getByProjectAndFullPath(project, datasetPath.getFullPath().toString());
        datasetHelper.updateDataset(project, datasetPath, ds);
        datasetPath.setInode(ds.getInode());
        DatasetDTO dto = datasetBuilder.build(uriInfo, resourceRequest, user, datasetPath, null, null, false);
        return Response.created(dto.getHref()).entity(dto).build();
      } else {
        datasetHelper.checkIfDatasetExists(project, datasetPath);
        datasetHelper.updateDataset(project, datasetPath);
        datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(),
            datasetPath.isTopLevelDataset(), description,
            Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
        resourceRequest = new ResourceRequest(ResourceRequest.Name.INODES);
        Inode inode = inodeController.getInodeAtPath(datasetPath.getFullPath().toString());
        datasetPath.setInode(inode);
        InodeDTO dto = inodeBuilder.buildStat(uriInfo, resourceRequest, user, datasetPath, inode);
        return Response.created(dto.getHref()).entity(dto).build();
      }
    case COPY:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
      datasetController.copy(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(),
          datasetPath.getDataset(), distDatasetPath.getDataset());
      break;
    case MOVE:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
      datasetController.move(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(),
          datasetPath.getDataset(), distDatasetPath.getDataset());
      break;
    case SHARE:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.share(targetProjectName, datasetPath.getFullPath().toString(), permission, project, user);
      break;
    case ACCEPT:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.acceptShared(project, user, datasetPath.getDatasetSharedWith());
      break;
    case ZIP:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      if (destPath != null) {
        distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
        datasetController.zip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
      } else {
        datasetController.zip(project, user, datasetPath.getFullPath(), null);
      }
      break;
    case UNZIP:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      if (destPath != null) {
        distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
        datasetController.unzip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
      } else {
        datasetController.unzip(project, user, datasetPath.getFullPath(), null);
      }
      break;
    case REJECT:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.rejectShared(datasetPath.getDatasetSharedWith());
      break;
    case PUBLISH:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.shareWithCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
      break;
    case UNPUBLISH:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.unshareFromCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
      break;
    case IMPORT:
      checkIfDataOwner(project, user);
      Project srcProject = projectController.findProjectByName(targetProjectName);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(srcProject, path, datasetType);
      datasetController.share(project.getName(), datasetPath.getFullPath().toString(),
          DatasetAccessPermission.READ_ONLY, srcProject, user);
      break;
    case UNSHARE_ALL:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.unshareAll(datasetPath.getDataset(), user);
      break;
    default:
      throw new WebApplicationException("Action not valid.", Response.Status.NOT_FOUND);
  }
  return Response.noContent().build();
}
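Because postByPath is driven entirely by the path segment and query parameters (action, type, destination_path, and so on) and returns 201 Created for CREATE or 204 No Content for the other actions, a caller only needs a plain POST with no body. The sketch below is a hypothetical client-side invocation of the ZIP action using the standard JAX-RS client API; the base URL, the project id prefix in the resource path, and the ApiKey authorization header format are assumptions, not taken from the resource above.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class PostByPathClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    Response response = client
        .target("https://hopsworks.example.com/hopsworks-api/api")  // assumed base URL
        .path("project/119/dataset/Resources/archive")              // assumed prefix + the {path: .+} segment
        .queryParam("action", "ZIP")
        .queryParam("type", "DATASET")
        .request(MediaType.APPLICATION_JSON)
        .header("Authorization", "ApiKey " + System.getenv("HOPSWORKS_API_KEY"))  // assumed key format; scope DATASET_CREATE
        .post(Entity.json(""));  // all inputs are query parameters; the body stays empty
    System.out.println("Status: " + response.getStatus());  // expect 204 No Content for ZIP
    client.close();
  }
}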
Use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.
The class ExperimentsController, method deleteInternal.
private void deleteInternal(Users user, Project project, String path) throws DatasetException {
  DatasetPath datasetPath = datasetHelper.getDatasetPath(project, path, DatasetType.DATASET);
  datasetController.delete(project, user, datasetPath.getFullPath(), datasetPath.getDataset(),
      datasetPath.isTopLevelDataset());
}