Search in sources :

Example 16 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method delete:

/**
 * Deletes a dataset, or a file/directory inside a dataset, on behalf of the given user.
 * <p>
 * If {@code isDataset} is true and the dataset is merely shared with {@code project},
 * the share is dropped instead of removing any files (the user has no right to delete
 * the underlying data). Otherwise the path is removed from HDFS, either as the HDFS
 * super user (when the caller is a Data Owner of the owning project) or as the
 * project-specific HDFS user.
 *
 * @param project   project from which the delete was requested
 * @param user      user performing the delete
 * @param fullPath  absolute HDFS path to remove
 * @param dataset   dataset the path belongs to
 * @param isDataset true when deleting the dataset itself (top-level dir), false for a file/subdir
 * @throws DatasetException on permission denial, missing inode, or deletion failure
 */
public void delete(Project project, Users user, Path fullPath, Dataset dataset, boolean isDataset) throws DatasetException {
    boolean success;
    String username = hdfsUsersController.getHdfsUserName(project, user);
    Project owning = getOwningProject(dataset);
    DistributedFileSystemOps dfso = null;
    if (isDataset && dataset.isShared(project)) {
        // The user is trying to delete a dataset shared with this project. Drop it from the table
        // but leave it in hopsfs because the user doesn't have the right to delete it.
        unshare(project, user, dataset, project.getName());
    } else {
        try {
            // If a Data Scientist requested it, do it as project user to avoid deleting Data Owner files.
            // Find the project owning the dataset as it might be shared.
            boolean isMember = projectTeamFacade.isUserMemberOfProject(owning, user);
            // Constant-first equals() avoids an NPE if findCurrentRole returns null
            // for this user/project pair (e.g. membership changed concurrently).
            if (isMember && AllowedRoles.DATA_OWNER.equals(projectTeamFacade.findCurrentRole(owning, user)) && owning.equals(project)) {
                // do it as super user
                dfso = dfs.getDfsOps();
            } else {
                // do it as project user
                dfso = dfs.getDfsOps(username);
            }
            if (isDataset) {
                success = deleteDatasetDir(dataset, fullPath, dfso);
            } else {
                // recursive removal of a file or sub-directory
                success = dfso.rm(fullPath, true);
            }
        } catch (AccessControlException ae) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_ACCESS_PERMISSION_DENIED, Level.FINE, "path: " + fullPath.toString(), ae.getMessage(), ae);
        } catch (FileNotFoundException fnfe) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE, "path: " + fullPath.toString(), fnfe.getMessage(), fnfe);
        } catch (IOException ex) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.SEVERE, "path: " + fullPath.toString(), ex.getMessage(), ex);
        } finally {
            // closeDfsClient tolerates a null handle (dfso may be unset if getDfsOps threw)
            dfs.closeDfsClient(dfso);
        }
        if (!success) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.FINE, "path: " + fullPath.toString());
        }
        if (isDataset) {
            // remove the groups associated with this dataset as it is a toplevel ds
            try {
                hdfsUsersController.deleteDatasetGroups(project, dataset);
            } catch (IOException ex) {
                // FIXME: take an action?
                LOGGER.log(Level.WARNING, "Error while trying to delete the dataset groups", ex);
            }
        }
    }
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) DatasetException(io.hops.hopsworks.exceptions.DatasetException)

Example 17 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method share:

/**
 * Shares a dataset (identified by its full path within {@code project}) with another project.
 * For feature-store datasets that are immediately accepted, the associated training dataset
 * and the STATISTICS service dataset are shared as well on a best-effort basis.
 *
 * @param targetProjectName name of the project to share with
 * @param fullPath          full path of the dataset inside the owning project
 * @param permission        access permission to grant
 * @param project           project owning the dataset
 * @param user              user performing the share
 * @throws DatasetException on sharing failure
 * @throws ProjectException if the target project does not exist
 */
public void share(String targetProjectName, String fullPath, DatasetAccessPermission permission, Project project, Users user) throws DatasetException, ProjectException {
    Project target = projectFacade.findByName(targetProjectName);
    Dataset dataset = getByProjectAndFullPath(project, fullPath);
    if (target == null) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "Target project not found.");
    }
    DatasetSharedWith shared = shareInternal(target, dataset, user, permission);
    boolean acceptedFeatureStore = DatasetType.FEATURESTORE.equals(dataset.getDsType()) && shared.getAccepted();
    if (acceptedFeatureStore) {
        Dataset trainingDataset = getTrainingDataset(project);
        if (trainingDataset != null) {
            try {
                shareInternal(target, trainingDataset, user, permission);
            } catch (DatasetException ignored) {
                // Dataset already shared; nothing to do.
            }
        }
        // If we migrate Training Datasets to remove the project prefix, these methods can be reused
        shareFeatureStoreServiceDataset(user, project, target, permission, Settings.ServiceDataset.STATISTICS);
    }
}
Also used : ProjectException(io.hops.hopsworks.exceptions.ProjectException) Project(io.hops.hopsworks.persistence.entity.project.Project) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) DatasetException(io.hops.hopsworks.exceptions.DatasetException)

Example 18 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method unzip:

/**
 * Unzips an archive at {@code path} into {@code destPath} by launching the
 * {@code unzip-background.sh} helper as the project's HDFS user, using a local
 * staging directory for the intermediate files.
 *
 * @param project  project the archive belongs to
 * @param user     user requesting the extraction
 * @param path     HDFS path of the archive to extract
 * @param destPath optional HDFS destination path (may be null)
 * @throws DatasetException if the file does not exist, the staging directory cannot be
 *         created, the archive exceeds the size limit (exit code 2), or extraction fails
 */
public void unzip(Project project, Users user, Path path, Path destPath) throws DatasetException {
    String hdfsUser = hdfsUsersController.getHdfsUserName(project, user);
    checkFileExists(path, hdfsUser);
    CompressionInfo compressionInfo = new CompressionInfo(path, destPath);
    String stagingDir = settings.getStagingDir() + File.separator + compressionInfo.getStagingDirectory();
    File unzipDir = new File(stagingDir);
    // Fail fast if the staging directory cannot be created; previously the mkdirs()
    // result was silently ignored and the background script failed later with an
    // unhelpful generic error.
    if (!unzipDir.mkdirs() && !unzipDir.isDirectory()) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.COMPRESSION_ERROR, Level.SEVERE, "Failed to create staging dir: " + stagingDir);
    }
    settings.addUnzippingState(compressionInfo);
    ProcessDescriptor.Builder processDescriptorBuilder = new ProcessDescriptor.Builder().addCommand(settings.getHopsworksDomainDir() + "/bin/unzip-background.sh").addCommand(stagingDir).addCommand(path.toString()).addCommand(hdfsUser);
    if (destPath != null) {
        processDescriptorBuilder.addCommand(destPath.toString());
    }
    ProcessDescriptor processDescriptor = processDescriptorBuilder.ignoreOutErrStreams(true).build();
    try {
        ProcessResult processResult = osProcessExecutor.execute(processDescriptor);
        int result = processResult.getExitCode();
        // Exit code 2 is the helper's dedicated "archive too large" signal.
        if (result == 2) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.COMPRESSION_SIZE_ERROR, Level.WARNING);
        }
        if (result != 0) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.COMPRESSION_ERROR, Level.WARNING, "path: " + path.toString() + ", result: " + result);
        }
    } catch (IOException ex) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.COMPRESSION_ERROR, Level.SEVERE, "path: " + path.toString(), ex.getMessage(), ex);
    }
}
Also used : ProcessResult(io.hops.hopsworks.common.util.ProcessResult) ProcessDescriptor(io.hops.hopsworks.common.util.ProcessDescriptor) IOException(java.io.IOException) CompressionInfo(io.hops.hopsworks.common.dataset.util.CompressionInfo) File(java.io.File) DatasetException(io.hops.hopsworks.exceptions.DatasetException)

Example 19 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method deleteCorrupted:

/**
 * Removes a corrupted (zero-length, superuser-owned) inode at {@code fullPath}.
 * Only acts when the requesting user is a member of the dataset's owning project
 * and that project is the one the request came from; the removal itself is done
 * as the HDFS super user.
 *
 * @param project  project from which the delete was requested
 * @param user     user performing the delete
 * @param fullPath absolute HDFS path of the suspected corrupted inode
 * @param dataset  dataset the path belongs to
 * @throws DatasetException if an I/O error occurs while inspecting or deleting the inode
 */
public void deleteCorrupted(Project project, Users user, Path fullPath, Dataset dataset) throws DatasetException {
    DistributedFileSystemOps superFsOps = null;
    try {
        // Resolve the project that actually owns the dataset (it may be shared)
        // and only proceed for members acting within the owning project itself.
        Project owningProject = getOwningProject(dataset);
        boolean memberOfOwning = projectTeamFacade.isUserMemberOfProject(owningProject, user);
        if (memberOfOwning && owningProject.equals(project)) {
            // do it as super user
            superFsOps = dfs.getDfsOps();
            FileStatus status = superFsOps.getFileStatus(fullPath);
            // Only a zero-length inode owned by the hopsworks superuser is
            // considered corrupted and safe to remove.
            boolean ownedBySuperUser = status.getOwner().equals(settings.getHopsworksUser());
            if (ownedBySuperUser && status.getLen() == 0) {
                superFsOps.rm(fullPath, true);
            }
        }
    } catch (IOException ex) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.SEVERE, "path: " + fullPath.toString(), ex.getMessage(), ex);
    } finally {
        // closeDfsClient tolerates a null handle.
        dfs.closeDfsClient(superFsOps);
    }
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) FileStatus(org.apache.hadoop.fs.FileStatus) DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) IOException(java.io.IOException) DatasetException(io.hops.hopsworks.exceptions.DatasetException)

Example 20 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method updatePermission:

/**
 * Updates the access permission of a dataset and records the change as project activity.
 * Permissions can only be changed by the owning project, and never on public datasets.
 *
 * @param ds                 dataset whose permission is updated
 * @param datasetPermissions new permission to apply
 * @param project            project the request originated from (must own the dataset)
 * @param targetProject      project whose HDFS groups are updated
 * @param user               user performing the change
 * @throws DatasetException if the dataset is shared with {@code project} or is public
 */
public void updatePermission(Dataset ds, DatasetAccessPermission datasetPermissions, Project project, Project targetProject, Users user) throws DatasetException {
    if (ds.isShared(project)) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OWNER_ERROR, Level.FINE);
    }
    if (ds.isPublicDs()) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_PUBLIC_IMMUTABLE, Level.FINE);
    }
    PermissionTransition transition = PermissionTransition.valueOf(ds.getPermission(), datasetPermissions);
    changePermissions(ds, transition, targetProject);
    if (transition.noop()) {
        // No effective change: skip the activity entry.
        return;
    }
    String description = ActivityFacade.CHANGE_DATASET_PERMISSION + " of " + ds.getName() + " from " + transition.getFrom().getDescription() + " to " + transition.getTo().getDescription();
    activityFacade.persistActivity(description, project, user, ActivityFlag.DATASET);
}
Also used : PermissionTransition(io.hops.hopsworks.persistence.entity.dataset.PermissionTransition) DatasetException(io.hops.hopsworks.exceptions.DatasetException)

Aggregations

DatasetException (io.hops.hopsworks.exceptions.DatasetException)61 IOException (java.io.IOException)25 Project (io.hops.hopsworks.persistence.entity.project.Project)23 Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset)21 DistributedFileSystemOps (io.hops.hopsworks.common.hdfs.DistributedFileSystemOps)18 ProjectException (io.hops.hopsworks.exceptions.ProjectException)12 DatasetSharedWith (io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith)12 Produces (javax.ws.rs.Produces)12 Inode (io.hops.hopsworks.persistence.entity.hdfs.inode.Inode)11 DatasetPath (io.hops.hopsworks.common.dataset.util.DatasetPath)10 HopsSecurityException (io.hops.hopsworks.exceptions.HopsSecurityException)10 Users (io.hops.hopsworks.persistence.entity.user.Users)10 Path (javax.ws.rs.Path)10 Path (org.apache.hadoop.fs.Path)10 GenericException (io.hops.hopsworks.exceptions.GenericException)9 AccessControlException (org.apache.hadoop.security.AccessControlException)8 ProvenanceException (io.hops.hopsworks.exceptions.ProvenanceException)7 AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles)6 ServiceException (io.hops.hopsworks.exceptions.ServiceException)6 JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired)6