
Example 11 with Dataset

use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.

the class FeaturestoreHopsfsConnectorController method updateFeaturestoreHopsfsConnector.

@TransactionAttribute(TransactionAttributeType.REQUIRED)
@Transactional(rollbackOn = FeaturestoreException.class)
public FeaturestoreHopsfsConnector updateFeaturestoreHopsfsConnector(Featurestore featurestore,
        FeaturestoreHopsfsConnectorDTO featurestoreHopsfsConnectorDTO,
        FeaturestoreHopsfsConnector featurestoreHopsfsConnector) throws FeaturestoreException {
    if (!Strings.isNullOrEmpty(featurestoreHopsfsConnectorDTO.getDatasetName())) {
        Dataset dataset = verifyHopsfsConnectorDatasetName(featurestoreHopsfsConnectorDTO.getDatasetName(), featurestore);
        featurestoreHopsfsConnector.setHopsfsDataset(dataset);
    }
    return featurestoreHopsfsConnector;
}
Also used : Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) TransactionAttribute(javax.ejb.TransactionAttribute) Transactional(javax.transaction.Transactional)
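
To make the update flow concrete, here is a minimal caller sketch. Only updateFeaturestoreHopsfsConnector itself comes from the example; the setDatasetName setter is assumed to mirror the getDatasetName accessor used above, and the injected controller field name is hypothetical.

// Hypothetical caller; field and setter names are illustrative, not from the hopsworks source.
FeaturestoreHopsfsConnectorDTO dto = new FeaturestoreHopsfsConnectorDTO();
// assumed setter matching the getDatasetName() accessor used in the example
dto.setDatasetName("Resources");
// a null or empty dataset name leaves the existing connector unchanged
FeaturestoreHopsfsConnector updated =
    hopsfsConnectorController.updateFeaturestoreHopsfsConnector(featurestore, dto, existingConnector);

Note that the method mutates and returns the connector it was handed, and @Transactional(rollbackOn = FeaturestoreException.class) rolls the transaction back if the dataset-name verification throws.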

Example 12 with Dataset

use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.

the class ExperimentsBuilder method verifyExperimentsEndpoint.

private ExperimentsEndpointDTO verifyExperimentsEndpoint(Project userProject, String sEndpointId) throws GenericException, DatasetException {
    Integer endpointId;
    try {
        endpointId = Integer.parseInt(sEndpointId);
    } catch (NumberFormatException e) {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
            "Provided Endpoint Id was malformed - expected an Integer", e.getMessage(), e);
    }
    Project sharingProject = projectFacade.findById(endpointId)
        .orElseThrow(() -> new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
            "Provided project cannot be accessed"));
    Dataset dataset = datasetCtrl.getByName(sharingProject, Settings.HOPS_EXPERIMENTS_DATASET);
    if (dataset != null && accessCtrl.hasAccess(userProject, dataset)) {
        return ExperimentsEndpointDTO.fromDataset(dataset);
    }
    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "Provided Endpoint cannot be accessed");
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) GenericException(io.hops.hopsworks.exceptions.GenericException)
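
The parse-then-authorize shape above is easy to lift out. The sketch below keeps only the id-parsing step and swaps the hopsworks GenericException for IllegalArgumentException so it compiles without the REST error codes; everything hopsworks-specific is left out.

// Standalone sketch of the endpoint-id parsing step; IllegalArgumentException
// stands in for GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, ...).
static int parseEndpointId(String sEndpointId) {
    try {
        return Integer.parseInt(sEndpointId);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException(
            "Provided Endpoint Id was malformed - expected an Integer", e);
    }
}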

Example 13 with Dataset

use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.

the class ExperimentsResource method getModelsProjectAndCheckAccess.

private Project getModelsProjectAndCheckAccess(ExperimentDTO experimentDTO) throws ProjectException, DatasetException, GenericException {
    Project modelProject;
    if (experimentDTO.getModelProjectName() == null) {
        modelProject = project;
    } else {
        modelProject = projectFacade.findByName(experimentDTO.getModelProjectName());
        if (modelProject == null) {
            throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.INFO, "model project not found for experiment");
        }
    }
    Dataset modelDataset = datasetCtrl.getByName(modelProject, Settings.HOPS_MODELS_DATASET);
    if (!accessCtrl.hasAccess(project, modelDataset)) {
        throw new GenericException(RESTCodes.GenericErrorCode.NOT_AUTHORIZED_TO_ACCESS, Level.INFO);
    }
    return modelProject;
}
Also used : ProjectException(io.hops.hopsworks.exceptions.ProjectException) Project(io.hops.hopsworks.persistence.entity.project.Project) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) GenericException(io.hops.hopsworks.exceptions.GenericException)
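
The null-name fallback in getModelsProjectAndCheckAccess is a recurring pattern: default to the caller's own project, otherwise look the target up by name and fail loudly on a null result. A generic sketch of that pattern, with a hypothetical helper name and simplified exception types (not part of hopsworks):

import java.util.function.Function;

// Hypothetical helper generalizing the fallback: projectFacade.findByName may
// return null, so an absent result becomes an exception instead of an NPE later.
static <T> T resolveOrCurrent(String name, T current, Function<String, T> findByName) {
    if (name == null) {
        return current; // no name given: stay in the caller's own project
    }
    T found = findByName.apply(name);
    if (found == null) {
        throw new IllegalArgumentException("project not found: " + name);
    }
    return found;
}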

Example 14 with Dataset

use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.

the class DsUpdateOperations method deleteDatasetFile.

/**
 * Deletes a file or folder inside a top-level dataset.
 *
 * @param project  the project of the user making the request
 * @param user     the user making the request
 * @param fileName the name of the folder or file to remove
 * @return the full path of the deleted file
 * @throws DatasetException if the deletion fails
 * @throws ProjectException
 * @throws HopsSecurityException if HDFS access control denies the operation
 * @throws UnsupportedEncodingException
 */
public org.apache.hadoop.fs.Path deleteDatasetFile(Project project, Users user, String fileName)
        throws DatasetException, ProjectException, HopsSecurityException, UnsupportedEncodingException {
    boolean success = false;
    DistributedFileSystemOps dfso = null;
    DsPath dsPath = pathValidator.validatePath(project, fileName);
    Dataset ds = dsPath.getDs();
    org.apache.hadoop.fs.Path fullPath = dsPath.getFullPath();
    org.apache.hadoop.fs.Path dsRelativePath = dsPath.getDsRelativePath();
    if (dsRelativePath.depth() == 0) {
        throw new IllegalArgumentException("Use endpoint DELETE /{datasetName} to delete a top-level dataset");
    }
    try {
        String username = hdfsUsersBean.getHdfsUserName(project, user);
        // If a Data Scientist requested it, do it as project user to avoid deleting Data Owner files
        // Find project of dataset as it might be shared
        Project owning = datasetController.getOwningProject(ds);
        boolean isMember = projectTeamFacade.isUserMemberOfProject(owning, user);
        if (isMember && projectTeamFacade.findCurrentRole(owning, user).equals(AllowedProjectRoles.DATA_OWNER) && owning.equals(project)) {
            // do it as super user
            dfso = dfs.getDfsOps();
        } else {
            // do it as project user
            dfso = dfs.getDfsOps(username);
        }
        success = dfso.rm(fullPath, true);
    } catch (AccessControlException ex) {
        throw new HopsSecurityException(RESTCodes.SecurityErrorCode.HDFS_ACCESS_CONTROL, Level.FINE, "Operation: delete, path: " + fullPath.toString(), ex.getMessage(), ex);
    } catch (IOException ex) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.SEVERE, "path: " + fullPath.toString(), ex.getMessage(), ex);
    } finally {
        if (dfso != null) {
            dfs.closeDfsClient(dfso);
        }
    }
    if (!success) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.FINE, "path: " + fullPath.toString());
    }
    return fullPath;
}
Also used : Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Project(io.hops.hopsworks.persistence.entity.project.Project)
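
The interesting part of deleteDatasetFile is the choice of HDFS identity: superuser ops only when the caller is a Data Owner of the owning project, project-user ops otherwise so HDFS permissions still apply. A reduced sketch of that acquire/act/close shape, with the role decision collapsed into a boolean; the method name is hypothetical, and DistributedFsService is an assumption about the declared type of the injected dfs field, while the dfs and dfso calls are the ones used above.

// Hedged sketch: same acquire/delete/close shape as the example, role check
// reduced to a boolean parameter. Imports are the ones listed under "Also used"
// plus io.hops.hopsworks.common.hdfs.DistributedFsService (assumed).
static boolean deleteAsAppropriateUser(DistributedFsService dfs, boolean actAsSuperUser,
        String hdfsUsername, org.apache.hadoop.fs.Path fullPath) throws IOException {
    DistributedFileSystemOps dfso = null;
    try {
        // superuser ops bypass HDFS permission checks; project-user ops enforce them
        dfso = actAsSuperUser ? dfs.getDfsOps() : dfs.getDfsOps(hdfsUsername);
        return dfso.rm(fullPath, true); // recursive delete
    } finally {
        if (dfso != null) {
            dfs.closeDfsClient(dfso); // always return the client, even on failure
        }
    }
}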

Example 15 with Dataset

use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.

the class ProjectProvenanceResource method usage.

@GET
@Path("usage")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_SCIENTIST, AllowedProjectRoles.DATA_OWNER })
@JWTRequired(acceptedTokens = { Audience.API }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.PROJECT }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiOperation(value = "Artifact usage", response = ProvArtifactUsageParentDTO.class)
public Response usage(@QueryParam("artifact_id") String artifactId, @QueryParam("endpoint_id") Integer endpointId,
        @QueryParam("artifact_type") DatasetAccessType accessType, @BeanParam ProvUsageBeanParams params,
        @Context UriInfo uriInfo, @Context SecurityContext sc)
        throws ProvenanceException, GenericException, DatasetException, MetadataException, SchematizedTagException {
    Users user = jWTHelper.getUserPrincipal(sc);
    if (artifactId == null) {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "artifact_id cannot be null");
    }
    Project targetProject = project;
    if (endpointId != null) {
        targetProject = projectFacade.findById(endpointId)
            .orElseThrow(() -> new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
                "target project not found"));
    }
    Dataset targetEndpoint;
    if (accessType != null) {
        try {
            switch(accessType) {
                case FEATUREGROUPS:
                    targetEndpoint = fsCtrl.getProjectFeaturestoreDataset(targetProject);
                    break;
                case TRAININGDATASETS:
                    String tdName = project.getName() + "_" + Settings.ServiceDataset.TRAININGDATASETS.getName();
                    targetEndpoint = datasetCtrl.getByName(targetProject, tdName);
                    break;
                case MODELS:
                    targetEndpoint = datasetCtrl.getByName(targetProject, Settings.HOPS_MODELS_DATASET);
                    break;
                case EXPERIMENTS:
                    targetEndpoint = datasetCtrl.getByName(targetProject, Settings.HOPS_EXPERIMENTS_DATASET);
                    break;
                default:
                    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE, "access type not supported: " + accessType);
            }
        } catch (FeaturestoreException | DatasetException e) {
            throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.FINE, "cannot access the dataset of the artifact");
        }
    } else {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.FINE, "access type not defined");
    }
    DatasetPath targetEndpointPath = datasetHelper.getTopLevelDatasetPath(project, targetEndpoint);
    ProvArtifactUsageParentDTO status = usageBuilder.buildAccessible(uriInfo, user, targetEndpointPath, artifactId, params.getUsageType());
    return Response.ok().entity(status).build();
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) ProvArtifactUsageParentDTO(io.hops.hopsworks.api.provenance.ops.dto.ProvArtifactUsageParentDTO) Users(io.hops.hopsworks.persistence.entity.user.Users) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath) FeaturestoreException(io.hops.hopsworks.exceptions.FeaturestoreException) GenericException(io.hops.hopsworks.exceptions.GenericException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Path(javax.ws.rs.Path) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) JWTRequired(io.hops.hopsworks.jwt.annotation.JWTRequired) ApiOperation(io.swagger.annotations.ApiOperation) ApiKeyRequired(io.hops.hopsworks.api.filter.apiKey.ApiKeyRequired) AllowedProjectRoles(io.hops.hopsworks.api.filter.AllowedProjectRoles)
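
Apart from FEATUREGROUPS, which resolves through the feature-store controller, every branch of the switch reduces to a dataset-name lookup passed to datasetCtrl.getByName. A hypothetical helper distilling that mapping; the constants are the ones referenced in the example.

// Hypothetical distillation of the switch above; FEATUREGROUPS is handled
// separately via fsCtrl.getProjectFeaturestoreDataset and is omitted here.
static String datasetNameFor(DatasetAccessType accessType, String projectName) {
    switch (accessType) {
        case TRAININGDATASETS:
            // training-dataset folders are prefixed with a project name
            // (the example builds this from project.getName())
            return projectName + "_" + Settings.ServiceDataset.TRAININGDATASETS.getName();
        case MODELS:
            return Settings.HOPS_MODELS_DATASET;
        case EXPERIMENTS:
            return Settings.HOPS_EXPERIMENTS_DATASET;
        default:
            throw new IllegalArgumentException("access type not supported: " + accessType);
    }
}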

Aggregations

Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset): 63
DatasetException (io.hops.hopsworks.exceptions.DatasetException): 20
Project (io.hops.hopsworks.persistence.entity.project.Project): 19
Inode (io.hops.hopsworks.persistence.entity.hdfs.inode.Inode): 18
Path (javax.ws.rs.Path): 14
Produces (javax.ws.rs.Produces): 13
DatasetSharedWith (io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith): 12
Users (io.hops.hopsworks.persistence.entity.user.Users): 12
ArrayList (java.util.ArrayList): 11
DistributedFileSystemOps (io.hops.hopsworks.common.hdfs.DistributedFileSystemOps): 10
Path (org.apache.hadoop.fs.Path): 10
IOException (java.io.IOException): 9
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 8
GET (javax.ws.rs.GET): 8
AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles): 7
DatasetPath (io.hops.hopsworks.common.dataset.util.DatasetPath): 7
GenericException (io.hops.hopsworks.exceptions.GenericException): 7
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 7
FeaturestoreException (io.hops.hopsworks.exceptions.FeaturestoreException): 6
POST (javax.ws.rs.POST): 6