
Example 6 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class ProjectService method getDatasetInfo.

@GET
@Path("{projectId}/getInodeInfo/{inodeId}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.ANYONE })
public Response getDatasetInfo(@PathParam("projectId") Integer projectId, @PathParam("inodeId") Long inodeId, @Context SecurityContext sc) throws ProjectException, DatasetException {
    Inode inode = inodes.findById(inodeId);
    if (inode == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE, "inodeId: " + inodeId);
    }
    Project project = projectFacade.find(projectId);
    if (project == null) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "projectId: " + projectId);
    }
    DataSetDTO dataset = new DataSetDTO(inode.getInodePK().getName(), inodeId, project);
    return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(dataset).build();
}
Also used : ProjectException(io.hops.hopsworks.exceptions.ProjectException) Project(io.hops.hopsworks.persistence.entity.project.Project) Inode(io.hops.hopsworks.persistence.entity.hdfs.inode.Inode) DataSetDTO(io.hops.hopsworks.common.dao.dataset.DataSetDTO) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) AllowedProjectRoles(io.hops.hopsworks.api.filter.AllowedProjectRoles)
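As a rough client-side illustration (not taken from the Hopsworks sources), the endpoint above could be invoked with the standard JAX-RS client API. The base URL, the project resource prefix, and the numeric ids below are assumptions for the sketch; a missing inode or project comes back as the error response built from the DatasetException or ProjectException thrown in the method.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class GetInodeInfoClient {
    public static void main(String[] args) {
        // Assumed base URL and resource prefix; adjust to the actual deployment.
        Client client = ClientBuilder.newClient();
        try {
            Response response = client
                .target("https://hopsworks.example.com/hopsworks-api/api")
                .path("project/{projectId}/getInodeInfo/{inodeId}")
                .resolveTemplate("projectId", 119)    // hypothetical project id
                .resolveTemplate("inodeId", 100123L)  // hypothetical inode id
                .request(MediaType.APPLICATION_JSON)
                .get();
            if (response.getStatus() == Response.Status.OK.getStatusCode()) {
                System.out.println("Dataset info: " + response.readEntity(String.class));
            } else {
                // Server-side DatasetException / ProjectException surface here as an error status.
                System.out.println("Request failed with status " + response.getStatus());
            }
        } finally {
            client.close();
        }
    }
}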

Example 7 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class ProjectProvenanceResource method usage.

@GET
@Path("usage")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_SCIENTIST, AllowedProjectRoles.DATA_OWNER })
@JWTRequired(acceptedTokens = { Audience.API }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.PROJECT }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiOperation(value = "Artifact usage", response = ProvArtifactUsageParentDTO.class)
public Response usage(@QueryParam("artifact_id") String artifactId, @QueryParam("endpoint_id") Integer endpointId, @QueryParam("artifact_type") DatasetAccessType accessType, @BeanParam ProvUsageBeanParams params, @Context UriInfo uriInfo, @Context SecurityContext sc) throws ProvenanceException, GenericException, DatasetException, MetadataException, SchematizedTagException {
    Users user = jWTHelper.getUserPrincipal(sc);
    if (artifactId == null) {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "artifactId cannot be null");
    }
    Project targetProject = project;
    if (endpointId != null) {
        targetProject = projectFacade.findById(endpointId).orElseThrow(() -> new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "target project not found"));
    }
    Dataset targetEndpoint;
    if (accessType != null) {
        try {
            switch(accessType) {
                case FEATUREGROUPS:
                    targetEndpoint = fsCtrl.getProjectFeaturestoreDataset(targetProject);
                    break;
                case TRAININGDATASETS:
                    String tdName = project.getName() + "_" + Settings.ServiceDataset.TRAININGDATASETS.getName();
                    targetEndpoint = datasetCtrl.getByName(targetProject, tdName);
                    break;
                case MODELS:
                    targetEndpoint = datasetCtrl.getByName(targetProject, Settings.HOPS_MODELS_DATASET);
                    break;
                case EXPERIMENTS:
                    targetEndpoint = datasetCtrl.getByName(targetProject, Settings.HOPS_EXPERIMENTS_DATASET);
                    break;
                default:
                    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "access type not supported: " + accessType);
            }
        } catch (FeaturestoreException | DatasetException e) {
            throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.FINE, "cannot access the dataset of the artifact");
        }
    } else {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.FINE, "access type not defined");
    }
    DatasetPath targetEndpointPath = datasetHelper.getTopLevelDatasetPath(project, targetEndpoint);
    ProvArtifactUsageParentDTO status = usageBuilder.buildAccessible(uriInfo, user, targetEndpointPath, artifactId, params.getUsageType());
    return Response.ok().entity(status).build();
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) ProvArtifactUsageParentDTO(io.hops.hopsworks.api.provenance.ops.dto.ProvArtifactUsageParentDTO) Users(io.hops.hopsworks.persistence.entity.user.Users) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath) FeaturestoreException(io.hops.hopsworks.exceptions.FeaturestoreException) GenericException(io.hops.hopsworks.exceptions.GenericException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Path(javax.ws.rs.Path) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) JWTRequired(io.hops.hopsworks.jwt.annotation.JWTRequired) ApiOperation(io.swagger.annotations.ApiOperation) ApiKeyRequired(io.hops.hopsworks.api.filter.apiKey.ApiKeyRequired) AllowedProjectRoles(io.hops.hopsworks.api.filter.AllowedProjectRoles)
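Below is a hedged sketch of calling the usage endpoint with the query parameters declared above (artifact_id, endpoint_id, artifact_type). The host, resource prefix, credentials, and parameter values are illustrative assumptions, not taken from the Hopsworks sources.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class ArtifactUsageClient {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        try {
            Response response = client
                .target("https://hopsworks.example.com/hopsworks-api/api") // assumed base URL
                .path("project/119/provenance/usage")                      // assumed resource path
                .queryParam("artifact_id", "fg_example_1")                 // hypothetical artifact id
                .queryParam("endpoint_id", 119)                            // project that owns the artifact
                .queryParam("artifact_type", "FEATUREGROUPS")              // one of the DatasetAccessType values
                .request(MediaType.APPLICATION_JSON)
                .header("Authorization", "ApiKey <redacted>")              // endpoint requires a JWT or API key
                .get();
            System.out.println(response.getStatus() + ": " + response.readEntity(String.class));
        } finally {
            client.close();
        }
    }
}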

Example 8 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class PathValidator method buildProjectDsRelativePath.

private void buildProjectDsRelativePath(Project project, String[] pathComponents, DsPath dsPath) throws ProjectException, DatasetException {
    // pathComponents[0] is the empty string before the leading slash and [1] is "Projects";
    // the project name is at index 2 and the dataset name at index 3
    Project destProject = projectFacade.findByName(pathComponents[2]);
    if (project == null || destProject == null) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE);
    }
    Dataset ds = datasetController.getByProjectAndDsName(project, Utils.getProjectPath(pathComponents[2]), pathComponents[3]);
    if (ds == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE);
    }
    dsPath.setDs(ds);
    String dsRelativePathStr = buildRelativePath(pathComponents, 4, pathComponents.length);
    if (!dsRelativePathStr.isEmpty()) {
        dsPath.setDsRelativePath(new Path(dsRelativePathStr));
    }
}
Also used : ProjectException(io.hops.hopsworks.exceptions.ProjectException) Path(org.apache.hadoop.fs.Path) Project(io.hops.hopsworks.persistence.entity.project.Project) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
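A minimal sketch of how the pathComponents indices used above line up, assuming the usual /Projects/&lt;project&gt;/&lt;dataset&gt;/... layout (the example path is hypothetical):

import java.util.Arrays;

public class ProjectDsPathComponents {
    public static void main(String[] args) {
        String path = "/Projects/demo_project/MyDataset/raw/data.csv"; // hypothetical path
        String[] pathComponents = path.split("/");
        // pathComponents[0] = ""             (the path starts with a slash)
        // pathComponents[1] = "Projects"
        // pathComponents[2] = "demo_project" -> resolved via projectFacade.findByName
        // pathComponents[3] = "MyDataset"    -> resolved via datasetController.getByProjectAndDsName
        // pathComponents[4..] = "raw", "data.csv" -> joined into the dataset-relative path
        System.out.println(Arrays.toString(pathComponents));
    }
}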

Example 9 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class PathValidator method buildFullPath.

private void buildFullPath(Project project, String path, DsPath dsPath) throws DatasetException {
    // Strip leading slashes.
    while (path.startsWith("/")) {
        path = path.substring(1);
    }
    String[] pathComponents = path.split(File.separator);
    String dsName = pathComponents[0];
    boolean shared = false;
    String parentProjectPath = null;
    if (pathComponents[0].contains(Settings.SHARED_FILE_SEPARATOR)) {
        // we can split the string and get the project name
        String[] shardDS = pathComponents[0].split(Settings.SHARED_FILE_SEPARATOR);
        if (shardDS.length != 2) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE);
        }
        parentProjectPath = Utils.getProjectPath(shardDS[0]);
        dsName = shardDS[1];
        shared = true;
    }
    Dataset ds = datasetController.getByProjectAndDsName(project, parentProjectPath, dsName);
    if (ds == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE);
    }
    // If the dataset is shared, make sure that the user can access it
    if (shared) {
        DatasetSharedWith datasetSharedWith = datasetSharedWithFacade.findByProjectAndDataset(project, ds);
        if (datasetSharedWith != null && !datasetSharedWith.getAccepted()) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_PENDING, Level.FINE, "datasetId: " + ds.getId());
        }
    }
    dsPath.setDs(ds);
    String dsRelativePathStr = buildRelativePath(pathComponents, 1, pathComponents.length);
    if (dsRelativePathStr.isEmpty()) {
        dsPath.setFullPath(datasetController.getDatasetPath(ds));
    } else {
        Path dsRelativePath = new Path(dsRelativePathStr);
        dsPath.setDsRelativePath(dsRelativePath);
        Path fullPath = new Path(datasetController.getDatasetPath(ds), dsRelativePath);
        dsPath.setFullPath(fullPath);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
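The shared-dataset branch above splits the first path component on Settings.SHARED_FILE_SEPARATOR. The sketch below assumes that separator is "::" (an assumption; check Settings for the actual value) and uses a hypothetical path:

public class SharedDatasetPathComponents {
    public static void main(String[] args) {
        String sharedSeparator = "::"; // assumed value of Settings.SHARED_FILE_SEPARATOR
        String path = "parentProject::SharedDataset/images/cat.png"; // hypothetical project-relative path
        String[] pathComponents = path.split("/");
        String dsName = pathComponents[0]; // "parentProject::SharedDataset"
        if (dsName.contains(sharedSeparator)) {
            String[] sharedDs = dsName.split(sharedSeparator);
            String parentProject = sharedDs[0]; // owning project, fed to Utils.getProjectPath
            dsName = sharedDs[1];               // dataset name inside the owning project
            System.out.println("parent project: " + parentProject + ", dataset: " + dsName);
        }
        // The remaining components ("images", "cat.png") become the dataset-relative path.
    }
}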

Example 10 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class PathValidator method buildHiveDsRelativePath.

private void buildHiveDsRelativePath(Project project, String[] pathComponents, DsPath dsPath) throws DatasetException {
    String dsPathStr = File.separator + buildRelativePath(pathComponents, 1, 5);
    Inode dsInode = inodeController.getInodeAtPath(dsPathStr);
    if (dsInode == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE);
    }
    Dataset originalDataset = datasetFacade.findByInode(dsInode);
    if (originalDataset == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE);
    }
    dsPath.setDs(originalDataset);
    String dsRelativePathStr = buildRelativePath(pathComponents, 5, pathComponents.length);
    if (!dsRelativePathStr.isEmpty()) {
        dsPath.setDsRelativePath(new Path(dsRelativePathStr));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Inode(io.hops.hopsworks.persistence.entity.hdfs.inode.Inode) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
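For orientation, the sketch below shows how component indices 1 to 5 rebuild the dataset root for a Hive-backed dataset. The /apps/hive/warehouse/&lt;database&gt;.db layout and the example path are assumptions about the deployment, not something stated in the snippet above:

import java.util.Arrays;

public class HiveDsPathComponents {
    public static void main(String[] args) {
        String path = "/apps/hive/warehouse/demo_featurestore.db/fg_example_1"; // hypothetical path
        String[] pathComponents = path.split("/");
        // Components 1..4 ("apps", "hive", "warehouse", "demo_featurestore.db") rebuild the dataset
        // root passed to inodeController.getInodeAtPath, mirroring buildRelativePath(..., 1, 5).
        String dsPathStr = "/" + String.join("/", Arrays.copyOfRange(pathComponents, 1, 5));
        // Components from index 5 onward ("fg_example_1") form the dataset-relative path.
        System.out.println(dsPathStr);
    }
}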

Aggregations

DatasetException (io.hops.hopsworks.exceptions.DatasetException): 61
IOException (java.io.IOException): 25
Project (io.hops.hopsworks.persistence.entity.project.Project): 23
Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset): 21
DistributedFileSystemOps (io.hops.hopsworks.common.hdfs.DistributedFileSystemOps): 18
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 12
DatasetSharedWith (io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith): 12
Produces (javax.ws.rs.Produces): 12
Inode (io.hops.hopsworks.persistence.entity.hdfs.inode.Inode): 11
DatasetPath (io.hops.hopsworks.common.dataset.util.DatasetPath): 10
HopsSecurityException (io.hops.hopsworks.exceptions.HopsSecurityException): 10
Users (io.hops.hopsworks.persistence.entity.user.Users): 10
Path (javax.ws.rs.Path): 10
Path (org.apache.hadoop.fs.Path): 10
GenericException (io.hops.hopsworks.exceptions.GenericException): 9
AccessControlException (org.apache.hadoop.security.AccessControlException): 8
ProvenanceException (io.hops.hopsworks.exceptions.ProvenanceException): 7
AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles): 6
ServiceException (io.hops.hopsworks.exceptions.ServiceException): 6
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 6