Example 1 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

Class ModelUtils, method getModelsAccessor:

public ModelsController.Accessor getModelsAccessor(Users user, Project userProject, Project modelProject, Project experimentProject) throws DatasetException {
    DistributedFileSystemOps udfso = null;
    try {
        String hdfsUser = hdfsUsersController.getHdfsUserName(experimentProject, user);
        udfso = dfs.getDfsOps(hdfsUser);
        return new ModelsController.Accessor(user, userProject, modelProject, experimentProject, udfso, hdfsUser);
    } catch (Throwable t) {
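        // release the per-user DFS client so the handle does not leak on failure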
        if (udfso != null) {
            dfs.closeDfsClient(udfso);
        }
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.INFO, "Failed to create models accessor", t.getMessage(), t);
    }
}
Also used: DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
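
A caller of getModelsAccessor becomes responsible for the DistributedFileSystemOps held by the returned Accessor. A minimal caller sketch, assuming a hypothetical getUdfso() getter and injected modelUtils and dfs beans (illustrative names, not the Hopsworks API):

ModelsController.Accessor accessor = null;
try {
    accessor = modelUtils.getModelsAccessor(user, userProject, modelProject, experimentProject);
    // ... read or write model artifacts through the accessor ...
} finally {
    if (accessor != null) {
        // getUdfso() is a hypothetical accessor for the udfso passed to the constructor above
        dfs.closeDfsClient(accessor.getUdfso());
    }
}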

Example 2 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

Class RequestService, method requestAccess:

@POST
@Path("/access")
@Produces(MediaType.APPLICATION_JSON)
public Response requestAccess(RequestDTO requestDTO, @Context SecurityContext sc) throws DatasetException, ProjectException {
    RESTApiJsonResponse json = new RESTApiJsonResponse();
    if (requestDTO == null || requestDTO.getInodeId() == null || requestDTO.getProjectId() == null) {
        throw new IllegalArgumentException("requestDTO was not provided or was incomplete!");
    }
    Users user = jWTHelper.getUserPrincipal(sc);
    Inode inode = inodes.findById(requestDTO.getInodeId());
    // project that owns the requested dataset
    Project proj = datasetCtrl.getOwningProject(inode);
    Dataset ds = datasetFacade.findByProjectAndInode(proj, inode);
    // project requesting access to the dataset
    Project project = projectFacade.find(requestDTO.getProjectId());
    Dataset dsInRequesting = datasetFacade.findByProjectAndInode(project, inode);
    if (dsInRequesting != null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.INFO);
    }
    ProjectTeam projectTeam = projectTeamFacade.findByPrimaryKey(project, user); // user's membership in the requesting project
    ProjectTeam projTeam = projectTeamFacade.findByPrimaryKey(proj, user); // user's membership in the owning project
    if (projTeam != null && proj.equals(project)) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.TEAM_MEMBER_ALREADY_EXISTS, Level.FINE);
    }
    DatasetRequest dsRequest = datasetRequest.findByProjectAndDataset(project, ds);
    // email body
    String msg = "Hi " + proj.getOwner().getFname() + " " + proj.getOwner().getLname() + ", \n\n" + user.getFname() + " " + user.getLname() + " wants access to a dataset in a project you own. \n\n" + "Dataset name: " + ds.getInode().getInodePK().getName() + "\n" + "Project name: " + proj.getName() + "\n";
    if (!Strings.isNullOrEmpty(requestDTO.getMessageContent())) {
        msg += "Attached message: " + requestDTO.getMessageContent() + "\n";
    }
    msg += "After logging in to Hopsworks go to : /project/" + proj.getId() + "/datasets " + " if you want to share this dataset. \n";
    // If a request from a member with the same role already exists, or the prior request came from a Data Owner, reject the duplicate.
    if (dsRequest != null && (dsRequest.getProjectTeam().getTeamRole().equals(projectTeam.getTeamRole()) || dsRequest.getProjectTeam().getTeamRole().equals(AllowedProjectRoles.DATA_OWNER))) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_REQUEST_EXISTS, Level.FINE);
    } else if (dsRequest != null && projectTeam.getTeamRole().equals(AllowedProjectRoles.DATA_OWNER)) {
        dsRequest.setProjectTeam(projectTeam);
        dsRequest.setMessageContent(requestDTO.getMessageContent());
        datasetRequest.merge(dsRequest);
    } else {
        Users to = userFacade.findByEmail(proj.getOwner().getEmail());
        String message = "Hi " + to.getFname() + "<br>" + "I would like to request access to a dataset in a project you own. <br>" + "Project name: " + proj.getName() + "<br>" + "Dataset name: " + ds.getInode().getInodePK().getName() + "<br>" + "To be shared with my project: " + project.getName() + ".<br>" + "Thank you in advance.";
        String preview = user.getFname() + " would like to have access to a dataset in a project you own.";
        String subject = Settings.MESSAGE_DS_REQ_SUBJECT;
        String path = "project/" + proj.getId() + "/datasets";
        // to, from, msg, requested path
        Message newMsg = new Message(user, to, null, message, true, false);
        newMsg.setPath(path);
        newMsg.setSubject(subject);
        newMsg.setPreview(preview);
        messageBean.send(newMsg);
        dsRequest = new DatasetRequest(ds, projectTeam, requestDTO.getMessageContent(), newMsg);
        try {
            datasetRequest.persistDataset(dsRequest);
        } catch (Exception ex) {
            messageBean.remove(newMsg);
            throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_REQUEST_ERROR, Level.WARNING, ex.getMessage(), null, ex);
        }
    }
    try {
        emailBean.sendEmail(proj.getOwner().getEmail(), RecipientType.TO, "Access request for dataset " + ds.getInode().getInodePK().getName(), msg);
    } catch (MessagingException ex) {
        json.setErrorMsg("Could not send e-mail to " + proj.getOwner().getEmail());
        datasetRequest.remove(dsRequest);
        return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(json).build();
    }
    json.setSuccessMessage("Request sent successfully.");
    return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(json).build();
}
Also used: Message(io.hops.hopsworks.persistence.entity.message.Message) MessagingException(javax.mail.MessagingException) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) Users(io.hops.hopsworks.persistence.entity.user.Users) ProjectException(io.hops.hopsworks.exceptions.ProjectException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Project(io.hops.hopsworks.persistence.entity.project.Project) ProjectTeam(io.hops.hopsworks.persistence.entity.project.team.ProjectTeam) Inode(io.hops.hopsworks.persistence.entity.hdfs.inode.Inode) DatasetRequest(io.hops.hopsworks.persistence.entity.dataset.DatasetRequest) RESTApiJsonResponse(io.hops.hopsworks.api.util.RESTApiJsonResponse) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces)
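
From the client side, the endpoint expects a JSON RequestDTO naming the dataset inode and the requesting project. A hedged JAX-RS client sketch; the base URL and the resource prefix before /access are assumptions, and the JSON field names are inferred from the getters used above:

import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

static Response requestDatasetAccess(String jwt) {
    // inodeId and projectId values are placeholders
    String body = "{\"inodeId\": 123456, \"projectId\": 42, "
        + "\"messageContent\": \"Could you share this dataset with my project?\"}";
    return ClientBuilder.newClient()
        .target("https://hopsworks.example.com/hopsworks-api/api") // assumed base URL
        .path("request/access") // only /access is declared above; the prefix is an assumption
        .request(MediaType.APPLICATION_JSON)
        .header("Authorization", "Bearer " + jwt) // the service resolves the user from the security context
        .post(Entity.json(body));
}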

Example 3 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

Class HopsUtils, method removeFiles:

private static void removeFiles(String path, String hdfsUsername, DistributedFsService dfs) throws DatasetException {
    DistributedFileSystemOps udfso = null;
    try {
        udfso = dfs.getDfsOps(hdfsUsername);
        if (udfso == null) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.INFO, "Failed to remove files - try to manually remove:" + path, "Could not create udfso to perform operations on the file system");
        }
        udfso.rm(path, true);
    } catch (IOException e) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.INFO, "Failed to remove files - try to manually remove:" + path, "File system rm operation failure on:" + path, e);
    } finally {
        if (udfso != null) {
            dfs.closeDfsClient(udfso);
        }
    }
}
Also used: DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) IOException(java.io.IOException) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
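
For comparison, the same recursive delete-with-cleanup pattern against the stock Hadoop FileSystem API; a sketch only, since Hopsworks routes the operation through DistributedFileSystemOps so it runs as a specific HDFS user:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

static void removeFilesRaw(String path) throws IOException {
    // newInstance so close() does not affect the shared FileSystem cache
    FileSystem fs = FileSystem.newInstance(new Configuration());
    try {
        // recursive=true mirrors udfso.rm(path, true) above
        fs.delete(new Path(path), true);
    } finally {
        fs.close();
    }
}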

Example 4 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

Class ExperimentsResource, method post:

@ApiOperation(value = "Create or update an experiment", response = ExperimentDTO.class)
@PUT
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response post(@PathParam("id") String id, ExperimentDTO experimentDTO, @QueryParam("type") ExperimentOperationType type, @Context HttpServletRequest req, @Context UriInfo uriInfo, @Context SecurityContext sc) throws DatasetException, ProvenanceException, PythonException, MetadataException, ProjectException, GenericException, ExperimentsException {
    if (experimentDTO == null) {
        throw new IllegalArgumentException("No Experiment configuration was provided");
    }
    Users user = jwtHelper.getUserPrincipal(sc);
    Project experimentProject = project;
    switch(type) {
        case INIT:
            {
                String experimentPath = Utils.getProjectPath(project.getName()) + Settings.HOPS_EXPERIMENTS_DATASET + "/" + id + "/" + Settings.ENVIRONMENT_FILE;
                experimentDTO.setEnvironment(environmentController.exportEnv(experimentProject, user, experimentPath));
                try {
                    String program = experimentsController.versionProgram(experimentProject, user, experimentDTO.getJobName(), experimentDTO.getKernelId(), id);
                    experimentDTO.setProgram(program);
                } catch (Exception e) {
                    LOGGER.log(Level.SEVERE, "Could not version notebook: " + e.getMessage(), e);
                }
            }
            break;
        case MODEL_UPDATE:
            {
                Project modelProject = getModelsProjectAndCheckAccess(experimentDTO);
                experimentsController.attachModel(user, experimentProject, id, modelProject, experimentDTO.getModel());
            }
            break;
        case FULL_UPDATE:
            {
            // no need to update the summary in any way
            }
            break;
        default:
            {
                throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO, "unhandled experiment summary operation type:" + type);
            }
    }
    experimentsController.attachExperiment(user, experimentProject, id, experimentDTO);
    UriBuilder builder = uriInfo.getAbsolutePathBuilder().path(id);
    switch(type) {
        case INIT:
            return Response.created(builder.build()).entity(experimentDTO).build();
        case MODEL_UPDATE:
        case FULL_UPDATE:
            return Response.ok(builder.build()).entity(experimentDTO).build();
        default:
            {
                throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO, "unhandled experiment summary operation type:" + type);
            }
    }
}
Also used: Project(io.hops.hopsworks.persistence.entity.project.Project) Users(io.hops.hopsworks.persistence.entity.user.Users) UriBuilder(javax.ws.rs.core.UriBuilder) GenericException(io.hops.hopsworks.exceptions.GenericException) ProjectException(io.hops.hopsworks.exceptions.ProjectException) ExperimentsException(io.hops.hopsworks.exceptions.ExperimentsException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) PythonException(io.hops.hopsworks.exceptions.PythonException) MetadataException(io.hops.hopsworks.exceptions.MetadataException) ProvenanceException(io.hops.hopsworks.exceptions.ProvenanceException) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) Consumes(javax.ws.rs.Consumes) JWTRequired(io.hops.hopsworks.jwt.annotation.JWTRequired) ApiOperation(io.swagger.annotations.ApiOperation) AllowedProjectRoles(io.hops.hopsworks.api.filter.AllowedProjectRoles) PUT(javax.ws.rs.PUT)
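
A hedged client sketch for this endpoint; the base URL, the resource prefix before {id}, and the ids are assumptions, since only @Path("{id}") is visible above:

import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

static Response initExperiment(String experimentJson, String jwt) {
    return ClientBuilder.newClient()
        .target("https://hopsworks.example.com/hopsworks-api/api") // assumed base URL
        .path("project/42/experiments/application_1_1") // hypothetical project id and experiment id
        .queryParam("type", "INIT") // ExperimentOperationType: INIT, MODEL_UPDATE or FULL_UPDATE
        .request(MediaType.APPLICATION_JSON)
        .header("Authorization", "Bearer " + jwt) // endpoint requires a JWT (Audience.API or Audience.JOB)
        .put(Entity.json(experimentJson));
}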

Example 5 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

Class DsUpdateOperations, method deleteDatasetFile:

/**
 * Deletes a file or folder inside a top-level dataset.
 *
 * @param project  the project of the user making the request
 * @param user     the user making the request
 * @param fileName the name of the folder or file to remove
 * @return the full path of the deleted file
 * @throws DatasetException if the file system delete fails
 * @throws ProjectException
 * @throws HopsSecurityException if HDFS denies the delete
 * @throws UnsupportedEncodingException
 */
public org.apache.hadoop.fs.Path deleteDatasetFile(Project project, Users user, String fileName) throws DatasetException, ProjectException, HopsSecurityException, UnsupportedEncodingException {
    boolean success = false;
    DistributedFileSystemOps dfso = null;
    DsPath dsPath = pathValidator.validatePath(project, fileName);
    Dataset ds = dsPath.getDs();
    org.apache.hadoop.fs.Path fullPath = dsPath.getFullPath();
    org.apache.hadoop.fs.Path dsRelativePath = dsPath.getDsRelativePath();
    if (dsRelativePath.depth() == 0) {
        throw new IllegalArgumentException("Use endpoint DELETE /{datasetName} to delete top level dataset)");
    }
    try {
        String username = hdfsUsersBean.getHdfsUserName(project, user);
        // If a Data Scientist requested it, do it as project user to avoid deleting Data Owner files
        // Find project of dataset as it might be shared
        Project owning = datasetController.getOwningProject(ds);
        boolean isMember = projectTeamFacade.isUserMemberOfProject(owning, user);
        if (isMember && projectTeamFacade.findCurrentRole(owning, user).equals(AllowedProjectRoles.DATA_OWNER) && owning.equals(project)) {
            // do it as super user
            dfso = dfs.getDfsOps();
        } else {
            // do it as project user
            dfso = dfs.getDfsOps(username);
        }
        success = dfso.rm(fullPath, true);
    } catch (AccessControlException ex) {
        throw new HopsSecurityException(RESTCodes.SecurityErrorCode.HDFS_ACCESS_CONTROL, Level.FINE, "Operation: delete, path: " + fullPath.toString(), ex.getMessage(), ex);
    } catch (IOException ex) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.SEVERE, "path: " + fullPath.toString(), ex.getMessage(), ex);
    } finally {
        if (dfso != null) {
            dfs.closeDfsClient(dfso);
        }
    }
    if (!success) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.FINE, "path: " + fullPath.toString());
    }
    return fullPath;
}
Also used: Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Project(io.hops.hopsworks.persistence.entity.project.Project)
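
A hedged caller sketch; the injected dsUpdateOperations bean and the dataset-relative path are illustrative. Note that fileName must point below a top-level dataset, or the method throws IllegalArgumentException:

try {
    // "Resources/raw/part-0001.csv" is a hypothetical dataset-relative path
    org.apache.hadoop.fs.Path deleted =
        dsUpdateOperations.deleteDatasetFile(project, user, "Resources/raw/part-0001.csv");
    LOGGER.log(Level.INFO, "Removed {0}", deleted);
} catch (DatasetException | ProjectException | HopsSecurityException | UnsupportedEncodingException e) {
    LOGGER.log(Level.WARNING, "Delete failed for dataset file", e);
}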

Aggregations

DatasetException (io.hops.hopsworks.exceptions.DatasetException): 61 usages
IOException (java.io.IOException): 25 usages
Project (io.hops.hopsworks.persistence.entity.project.Project): 23 usages
Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset): 21 usages
DistributedFileSystemOps (io.hops.hopsworks.common.hdfs.DistributedFileSystemOps): 18 usages
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 12 usages
DatasetSharedWith (io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith): 12 usages
Produces (javax.ws.rs.Produces): 12 usages
Inode (io.hops.hopsworks.persistence.entity.hdfs.inode.Inode): 11 usages
DatasetPath (io.hops.hopsworks.common.dataset.util.DatasetPath): 10 usages
HopsSecurityException (io.hops.hopsworks.exceptions.HopsSecurityException): 10 usages
Users (io.hops.hopsworks.persistence.entity.user.Users): 10 usages
Path (javax.ws.rs.Path): 10 usages
Path (org.apache.hadoop.fs.Path): 10 usages
GenericException (io.hops.hopsworks.exceptions.GenericException): 9 usages
AccessControlException (org.apache.hadoop.security.AccessControlException): 8 usages
ProvenanceException (io.hops.hopsworks.exceptions.ProvenanceException): 7 usages
AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles): 6 usages
ServiceException (io.hops.hopsworks.exceptions.ServiceException): 6 usages
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 6 usages