Search in sources :

Example 16 with DatasetPath

use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.

In the class ModelsController, the method deleteInternal:

/**
 * Removes the file or directory at {@code path} inside the given project's DATASET tree
 * on behalf of {@code user}.
 *
 * @throws DatasetException if the path cannot be resolved or the delete is rejected
 */
private void deleteInternal(Users user, Project project, String path) throws DatasetException {
    // Resolve the raw path into a typed dataset path before delegating the delete.
    DatasetPath resolved = datasetHelper.getDatasetPath(project, path, DatasetType.DATASET);
    datasetController.delete(
        project,
        user,
        resolved.getFullPath(),
        resolved.getDataset(),
        resolved.isTopLevelDataset());
}
Also used : DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath)

Example 17 with DatasetPath

use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.

In the class DatasetBuilder, the method datasetSharedWithItems:

/**
 * Appends one item DTO per shared dataset to {@code dto}.
 * A null or empty share list is a no-op; {@code dto} is returned unchanged.
 */
private DatasetDTO datasetSharedWithItems(DatasetDTO dto, UriInfo uriInfo, ResourceRequest resourceRequest, Project accessProject, Users user, List<DatasetSharedWith> datasetSharedWithList, String parentPath, Users dirOwner) throws DatasetException, MetadataException, SchematizedTagException {
    // Guard clause: nothing shared, nothing to build.
    if (datasetSharedWithList == null || datasetSharedWithList.isEmpty()) {
        return dto;
    }
    for (DatasetSharedWith shared : datasetSharedWithList) {
        DatasetPath topLevelPath = datasetHelper.getTopLevelDatasetPath(accessProject, shared);
        dto.addItem(buildItems(uriInfo, resourceRequest, user, topLevelPath, parentPath, dirOwner));
    }
    return dto;
}
Also used : DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath)

Example 18 with DatasetPath

use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.

In the class DatasetBuilder, the method datasetItems:

/**
 * Appends one item DTO per dataset to {@code dto}.
 * A null or empty dataset list is a no-op; {@code dto} is returned unchanged.
 */
private DatasetDTO datasetItems(DatasetDTO dto, UriInfo uriInfo, ResourceRequest resourceRequest, List<Dataset> datasets, Project accessProject, Users user, String parentPath, Users dirOwner) throws DatasetException, MetadataException, SchematizedTagException {
    // Guard clause: no datasets, nothing to build.
    if (datasets == null || datasets.isEmpty()) {
        return dto;
    }
    for (Dataset ds : datasets) {
        DatasetPath topLevelPath = datasetHelper.getTopLevelDatasetPath(accessProject, ds);
        dto.addItem(buildItems(uriInfo, resourceRequest, user, topLevelPath, parentPath, dirOwner));
    }
    return dto;
}
Also used : Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath)

Example 19 with DatasetPath

use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.

In the class DatasetResource, the method postByPath:

// Dispatches a POST action on a file, directory, or dataset identified by {path}.
// A missing "action" query parameter defaults to CREATE; an unknown action yields 404.
// CREATE returns 201 with a DTO; every other action returns 204 No Content.
@POST
@Path("{path: .+}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Post an action on a file, dir or dataset.")
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER", "HOPS_SERVICE_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.DATASET_CREATE }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER", "HOPS_SERVICE_USER" })
public Response postByPath(@Context UriInfo uriInfo, @Context SecurityContext sc, @Context HttpServletRequest req, @PathParam("path") String path, @QueryParam("type") DatasetType datasetType, @QueryParam("target_project") String targetProjectName, @QueryParam("action") DatasetActions.Post action, @QueryParam("description") String description, @QueryParam("searchable") Boolean searchable, @QueryParam("generate_readme") Boolean generateReadme, @QueryParam("destination_path") String destPath, @QueryParam("destination_type") DatasetType destDatasetType, @DefaultValue("READ_ONLY") @QueryParam("permission") DatasetAccessPermission permission) throws DatasetException, ProjectException, HopsSecurityException, ProvenanceException, MetadataException, SchematizedTagException, FeaturestoreException {
    Users user = jwtHelper.getUserPrincipal(sc);
    DatasetPath datasetPath;
    // NOTE(review): "distDatasetPath" holds the destination path for COPY/MOVE/ZIP/UNZIP;
    // the name looks like a typo for "destDatasetPath" (kept unchanged here).
    DatasetPath distDatasetPath;
    Project project = this.getProject();
    switch(action == null ? DatasetActions.Post.CREATE : action) {
        case CREATE:
            // Only top-level datasets or sub-directories of DATASET type may be created here.
            if (datasetType != null && !datasetType.equals(DatasetType.DATASET)) {
                // can only create dataset
                throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_INVALID, Level.FINE);
            }
            datasetPath = datasetHelper.getNewDatasetPath(project, path, DatasetType.DATASET);
            if (datasetPath.isTopLevelDataset()) {
                // Creating a whole dataset (as opposed to a sub-directory) requires DATA_OWNER.
                checkIfDataOwner(project, user);
            }
            if (datasetPath.isTopLevelDataset() && !datasetHelper.isBasicDatasetProjectParent(project, datasetPath)) {
                // fake shared dataset with :: in dataset name at dataset creation
                throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NAME_INVALID, Level.FINE);
            }
            ProvTypeDTO projectProvCore = fsProvenanceController.getMetaStatus(user, project, searchable);
            ResourceRequest resourceRequest;
            if (datasetPath.isTopLevelDataset()) {
                // Top-level dataset: create it, re-read the persisted entity, and return a dataset DTO.
                datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(), datasetPath.isTopLevelDataset(), description, Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
                resourceRequest = new ResourceRequest(ResourceRequest.Name.DATASET);
                Dataset ds = datasetController.getByProjectAndFullPath(project, datasetPath.getFullPath().toString());
                datasetHelper.updateDataset(project, datasetPath, ds);
                datasetPath.setInode(ds.getInode());
                DatasetDTO dto = datasetBuilder.build(uriInfo, resourceRequest, user, datasetPath, null, null, false);
                return Response.created(dto.getHref()).entity(dto).build();
            } else {
                // Sub-directory inside an existing dataset: create it and return an inode DTO.
                datasetHelper.checkIfDatasetExists(project, datasetPath);
                datasetHelper.updateDataset(project, datasetPath);
                datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(), datasetPath.isTopLevelDataset(), description, Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
                resourceRequest = new ResourceRequest(ResourceRequest.Name.INODES);
                Inode inode = inodeController.getInodeAtPath(datasetPath.getFullPath().toString());
                datasetPath.setInode(inode);
                InodeDTO dto = inodeBuilder.buildStat(uriInfo, resourceRequest, user, datasetPath, inode);
                return Response.created(dto.getHref()).entity(dto).build();
            }
        case COPY:
            // Copy source -> destination; both paths must resolve (source must exist).
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
            datasetController.copy(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(), datasetPath.getDataset(), distDatasetPath.getDataset());
            break;
        case MOVE:
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
            datasetController.move(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(), datasetPath.getDataset(), distDatasetPath.getDataset());
            break;
        case SHARE:
            // Share this project's dataset with target_project; DATA_OWNER only.
            checkIfDataOwner(project, user);
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            datasetController.share(targetProjectName, datasetPath.getFullPath().toString(), permission, project, user);
            break;
        case ACCEPT:
            // Accept a pending share into this project; DATA_OWNER only.
            checkIfDataOwner(project, user);
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            datasetController.acceptShared(project, user, datasetPath.getDatasetSharedWith());
            break;
        case ZIP:
            // destination_path is optional; null means the controller picks the output location.
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            if (destPath != null) {
                distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
                datasetController.zip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
            } else {
                datasetController.zip(project, user, datasetPath.getFullPath(), null);
            }
            break;
        case UNZIP:
            // Mirrors ZIP: optional destination, otherwise controller default.
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            if (destPath != null) {
                distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
                datasetController.unzip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
            } else {
                datasetController.unzip(project, user, datasetPath.getFullPath(), null);
            }
            break;
        case REJECT:
            // Reject a pending share; DATA_OWNER only.
            checkIfDataOwner(project, user);
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            datasetController.rejectShared(datasetPath.getDatasetSharedWith());
            break;
        case PUBLISH:
            // Make the dataset visible cluster-wide; DATA_OWNER only.
            checkIfDataOwner(project, user);
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            datasetController.shareWithCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
            break;
        case UNPUBLISH:
            checkIfDataOwner(project, user);
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            datasetController.unshareFromCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
            break;
        case IMPORT:
            // IMPORT inverts SHARE: the path is resolved in the source project (target_project)
            // and shared READ_ONLY into the current project.
            checkIfDataOwner(project, user);
            Project srcProject = projectController.findProjectByName(targetProjectName);
            datasetPath = datasetHelper.getDatasetPathIfFileExist(srcProject, path, datasetType);
            datasetController.share(project.getName(), datasetPath.getFullPath().toString(), DatasetAccessPermission.READ_ONLY, srcProject, user);
            break;
        case UNSHARE_ALL:
            // Revoke the dataset from every project it is shared with; DATA_OWNER only.
            checkIfDataOwner(project, user);
            datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
            datasetController.unshareAll(datasetPath.getDataset(), user);
            break;
        default:
            throw new WebApplicationException("Action not valid.", Response.Status.NOT_FOUND);
    }
    // All non-CREATE actions fall through to 204 No Content.
    return Response.noContent().build();
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) Inode(io.hops.hopsworks.persistence.entity.hdfs.inode.Inode) WebApplicationException(javax.ws.rs.WebApplicationException) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) InodeDTO(io.hops.hopsworks.api.dataset.inode.InodeDTO) Users(io.hops.hopsworks.persistence.entity.user.Users) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath) ResourceRequest(io.hops.hopsworks.common.api.ResourceRequest) ProvTypeDTO(io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Path(javax.ws.rs.Path) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces) JWTRequired(io.hops.hopsworks.jwt.annotation.JWTRequired) ApiOperation(io.swagger.annotations.ApiOperation) ApiKeyRequired(io.hops.hopsworks.api.filter.apiKey.ApiKeyRequired) AllowedProjectRoles(io.hops.hopsworks.api.filter.AllowedProjectRoles)

Example 20 with DatasetPath

use of io.hops.hopsworks.common.dataset.util.DatasetPath in project hopsworks by logicalclocks.

In the class ExperimentsController, the method deleteInternal:

/**
 * Deletes the entry at {@code path} under the project's DATASET tree as {@code user}.
 *
 * @throws DatasetException if path resolution or the delete operation fails
 */
private void deleteInternal(Users user, Project project, String path) throws DatasetException {
    // Turn the string path into a resolved DatasetPath, then hand off to the controller.
    DatasetPath dsPath = datasetHelper.getDatasetPath(project, path, DatasetType.DATASET);
    datasetController.delete(
        project,
        user,
        dsPath.getFullPath(),
        dsPath.getDataset(),
        dsPath.isTopLevelDataset());
}
Also used : DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath)

Aggregations

DatasetPath (io.hops.hopsworks.common.dataset.util.DatasetPath)38 Users (io.hops.hopsworks.persistence.entity.user.Users)25 Produces (javax.ws.rs.Produces)25 AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles)24 JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired)24 ApiOperation (io.swagger.annotations.ApiOperation)23 Path (javax.ws.rs.Path)23 ApiKeyRequired (io.hops.hopsworks.api.filter.apiKey.ApiKeyRequired)18 Project (io.hops.hopsworks.persistence.entity.project.Project)15 ResourceRequest (io.hops.hopsworks.common.api.ResourceRequest)13 GET (javax.ws.rs.GET)13 DatasetException (io.hops.hopsworks.exceptions.DatasetException)12 Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset)9 TagsDTO (io.hops.hopsworks.common.tags.TagsDTO)8 WebApplicationException (javax.ws.rs.WebApplicationException)7 DELETE (javax.ws.rs.DELETE)6 InodeDTO (io.hops.hopsworks.api.dataset.inode.InodeDTO)5 GenericException (io.hops.hopsworks.exceptions.GenericException)4 PUT (javax.ws.rs.PUT)4 Path (org.apache.hadoop.fs.Path)3