Example 21 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class DatasetController method copy.

public void copy(Project project, Users user, Path sourcePath, Path destPath, Dataset sourceDataset, Dataset destDataset) throws DatasetException {
    String username = hdfsUsersController.getHdfsUserName(project, user);
    if (!getOwningProject(sourceDataset).equals(destDataset.getProject())) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.COPY_FROM_PROJECT, Level.FINE);
    }
    if (destDataset.isPublicDs()) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.COPY_TO_PUBLIC_DS, Level.FINE);
    }
    DistributedFileSystemOps udfso = null;
    try {
        udfso = dfs.getDfsOps(username);
        if (udfso.exists(destPath.toString())) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE);
        }
        // Get destination folder permissions
        FsPermission permission = udfso.getFileStatus(destPath.getParent()).getPermission();
        udfso.copyInHdfs(sourcePath, destPath);
        // Set permissions
        recChangeOwnershipAndPermission(destPath, permission, null, null, null, udfso);
    } catch (IOException ex) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.SEVERE, "copy operation failed for: " + sourcePath.toString(), ex.getMessage(), ex);
    } finally {
        dfs.closeDfsClient(udfso);
    }
}
Also used : DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
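
For orientation, a minimal caller sketch (not from the Hopsworks sources; the injected datasetController, the LOGGER, the project/user/dataset variables and the example paths are assumptions):

// Hypothetical caller of DatasetController.copy; all variables are assumed to be
// resolved beforehand (e.g. by the REST resource handling the request).
Path sourcePath = new Path("/Projects/demo_project/Resources/raw.csv");
Path destPath = new Path("/Projects/demo_project/Experiments/raw.csv");
try {
    datasetController.copy(project, user, sourcePath, destPath, sourceDataset, destDataset);
} catch (DatasetException e) {
    // Possible error codes: COPY_FROM_PROJECT, COPY_TO_PUBLIC_DS, DESTINATION_EXISTS,
    // DATASET_OPERATION_ERROR (see the method body above).
    LOGGER.log(Level.WARNING, "Copy failed: " + e.getMessage(), e);
}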

Example 22 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class DatasetController method shareInternal.

private DatasetSharedWith shareInternal(Project targetProject, Dataset ds, Users user, DatasetAccessPermission permission) throws DatasetException {
    DatasetSharedWith datasetSharedWith = datasetSharedWithFacade.findByProjectAndDataset(targetProject, ds);
    if (datasetSharedWith != null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE, "Dataset already in " + targetProject.getName());
    }
    // Create the new DatasetSharedWith entry (not yet accepted)
    datasetSharedWith = new DatasetSharedWith(targetProject, ds, permission, false, user);
    // If the dataset was not requested, or was requested by a data scientist, the share stays pending.
    DatasetRequest dsReq = datasetRequest.findByProjectAndDataset(targetProject, ds);
    if (ds.isPublicDs()) {
        if (targetProject.equals(ds.getProject())) {
            throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_ALREADY_IN_PROJECT, Level.FINE, "Dataset already in project.");
        }
        datasetSharedWith.setAccepted(true);
        datasetSharedWith.setAcceptedBy(user);
        addMembersToGroup(datasetSharedWith);
    } else if (dsReq != null && dsReq.getProjectTeam().getTeamRole().equals(AllowedRoles.DATA_OWNER)) {
        // dataset is private and requested by a data owner
        datasetSharedWith.setAccepted(true);
        datasetSharedWith.setAcceptedBy(dsReq.getProjectTeam().getUser());
        addMembersToGroup(datasetSharedWith);
    }
    datasetSharedWithFacade.save(datasetSharedWith);
    if (dsReq != null) {
        // the dataset is shared so remove the request.
        datasetRequest.remove(dsReq);
    }
    activityFacade.persistActivity(ActivityFacade.SHARED_DATA + ds.getName() + " with project " + targetProject.getName(), ds.getProject(), user, ActivityFlag.DATASET);
    return datasetSharedWith;
}
Also used : DatasetRequest(io.hops.hopsworks.persistence.entity.dataset.DatasetRequest) DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
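
Since shareInternal is private, it is only reachable through a public entry point in the same class. A hedged sketch of such a wrapper is shown below; the method name and any extra steps the real entry point performs are assumptions:

// Hypothetical public wrapper inside DatasetController, shown only to illustrate
// how shareInternal could be reached. The real entry point may perform additional
// checks (e.g. caller permissions) before delegating.
public DatasetSharedWith share(Project targetProject, Dataset ds, Users user,
    DatasetAccessPermission permission) throws DatasetException {
    return shareInternal(targetProject, ds, user, permission);
}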

Example 23 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class DatasetController method unshare.

public void unshare(Project project, Users user, Dataset dataset, String targetProjectName, DistributedFileSystemOps dfso) throws DatasetException {
    Project targetProject = projectFacade.findByName(targetProjectName);
    if (targetProject == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.TARGET_PROJECT_NOT_FOUND, Level.FINE);
    }
    DatasetSharedWith datasetSharedWith = datasetSharedWithFacade.findByProjectAndDataset(targetProject, dataset);
    if (datasetSharedWith == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_SHARED_WITH_PROJECT, Level.FINE, "project: " + targetProject.getName());
    }
    if (DatasetType.FEATURESTORE.equals(datasetSharedWith.getDataset().getDsType())) {
        DatasetSharedWith trainingDataset = getSharedTrainingDataset(targetProject, datasetSharedWith.getDataset().getProject());
        if (trainingDataset != null) {
            unshareDs(project, user, trainingDataset, dfso);
        }
        unshareFeatureStoreServiceDataset(user, project, targetProject, datasetSharedWith, Settings.ServiceDataset.STATISTICS, dfso);
    }
    unshareDs(project, user, datasetSharedWith, dfso);
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
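
A minimal caller sketch (not from the Hopsworks sources; the no-argument dfs.getDfsOps() for superuser operations, the injected datasetController and the variable names are assumptions based on the patterns visible in the other examples):

// Hypothetical caller of DatasetController.unshare.
DistributedFileSystemOps dfso = null;
try {
    // Assumed superuser file system ops; other methods in this controller receive
    // a dfso obtained from the DistributedFsService.
    dfso = dfs.getDfsOps();
    datasetController.unshare(project, user, dataset, "target_project", dfso);
} catch (DatasetException e) {
    LOGGER.log(Level.WARNING, "Unshare failed: " + e.getMessage(), e);
} finally {
    dfs.closeDfsClient(dfso);
}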

Example 24 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class DatasetController method createDataset.

/**
 * Create a new DataSet, that is, a folder right under the project home
 * folder.
 * **The Dataset directory is created using the superuser dfso.**
 *
 * @param user The creating Users entity. Cannot be null.
 * @param project The project under which to create the DataSet. Cannot be
 * null.
 * @param dataSetName The name of the DataSet being created. Cannot be null
 * and must satisfy the validity criteria for a folder name.
 * @param datasetDescription The description of the DataSet being created. Can
 * be null.
 * @param metaStatus The provenance/metadata type to set on the new DataSet.
 * @param stickyBit Whether or not the dataset folder should have the sticky bit set.
 * @param permission The access permission of the new DataSet.
 * @param dfso The superuser DistributedFileSystemOps used to create the folder.
 * @return The newly created Dataset.
 * @throws DatasetException If the name does not satisfy the validity criteria for
 * folder names, or the folder already exists.
 */
public Dataset createDataset(Users user, Project project, String dataSetName, String datasetDescription, ProvTypeDTO metaStatus, boolean stickyBit, DatasetAccessPermission permission, DistributedFileSystemOps dfso) throws DatasetException, HopsSecurityException {
    // Parameter checking.
    if (user == null || project == null || dataSetName == null) {
        throw new IllegalArgumentException("User, project or dataset were not provided");
    }
    FolderNameValidator.isValidName(dataSetName);
    // Logic
    boolean success;
    String dsPath = Utils.getProjectPath(project.getName()) + dataSetName;
    Inode parent = inodeController.getProjectRoot(project.getName());
    Inode ds = inodes.findByInodePK(parent, dataSetName, HopsUtils.dataSetPartitionId(parent, dataSetName));
    if (ds != null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE, "Dataset name: " + dataSetName);
    }
    Dataset newDS = null;
    // Permission 770
    FsPermission fsPermission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE, stickyBit);
    success = createFolder(dsPath, fsPermission, dfso);
    if (success) {
        try {
            ds = inodes.findByInodePK(parent, dataSetName, HopsUtils.dataSetPartitionId(parent, dataSetName));
            newDS = new Dataset(ds, project, permission);
            newDS.setSearchable(isSearchable(metaStatus.getMetaStatus()));
            if (datasetDescription != null) {
                newDS.setDescription(datasetDescription);
            }
            datasetFacade.persistDataset(newDS);
            activityFacade.persistActivity(ActivityFacade.NEW_DATA + dataSetName, project, user, ActivityFlag.DATASET);
            // Create the HDFS groups for the dataset, set permissions, and add the user as owner.
            hdfsUsersController.createDatasetGroupsAndSetPermissions(user, project, newDS, new Path(dsPath), dfso);
            Dataset logDs = getByProjectAndDsName(project, null, dataSetName);
            // Set the dataset metadata/provenance type. Supports 3-level indexing.
            fsProvController.updateDatasetProvType(logDs, metaStatus, dfso);
            logDataset(project, logDs, OperationType.Add);
        } catch (Exception e) {
            try {
                // If persisting the dataset fails, remove the dataset folder.
                dfso.rm(new Path(dsPath), true);
            } catch (IOException ex) {
                // Dataset clean up failed. Log the exception for further debugging.
                LOGGER.log(Level.SEVERE, "Could not cleanup dataset dir after exception: " + dsPath, ex);
            }
            throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.SEVERE, "Could not create dataset: " + dataSetName, e.getMessage(), e);
        }
    } else {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.INFO, "Could not create dataset: " + dataSetName);
    }
    return newDS;
}
Also used : Path(org.apache.hadoop.fs.Path) Inode(io.hops.hopsworks.persistence.entity.hdfs.inode.Inode) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) ProjectException(io.hops.hopsworks.exceptions.ProjectException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) FileNotFoundException(java.io.FileNotFoundException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) IOException(java.io.IOException) ServiceException(io.hops.hopsworks.exceptions.ServiceException) AccessControlException(org.apache.hadoop.security.AccessControlException) DatasetException(io.hops.hopsworks.exceptions.DatasetException)

Example 25 with DatasetException

use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

the class DatasetHelper method getNewDatasetPath.

public DatasetPath getNewDatasetPath(Project project, String path, DatasetType datasetType) throws DatasetException {
    String rootDir;
    if (datasetType == null) {
        rootDir = path.startsWith(settings.getHiveWarehouse()) ? settings.getHiveWarehouse() : Settings.DIR_ROOT;
    } else {
        rootDir = datasetType.equals(DatasetType.DATASET) ? Settings.DIR_ROOT : settings.getHiveWarehouse();
    }
    DatasetPath datasetPath;
    try {
        datasetPath = new DatasetPath(project, path, rootDir);
    } catch (UnsupportedEncodingException e) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.PATH_ENCODING_NOT_SUPPORTED, Level.FINE);
    }
    return datasetPath;
}
Also used : UnsupportedEncodingException(java.io.UnsupportedEncodingException) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
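
A minimal caller sketch (not from the Hopsworks sources; the injected datasetHelper, the project variable and the example path are assumptions):

// Hypothetical caller of DatasetHelper.getNewDatasetPath; DatasetType.DATASET maps the
// path onto Settings.DIR_ROOT, as shown in the method body above.
DatasetPath datasetPath = datasetHelper.getNewDatasetPath(project,
    "/Projects/demo_project/Resources/raw.csv", DatasetType.DATASET);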

Aggregations

DatasetException (io.hops.hopsworks.exceptions.DatasetException): 61 usages
IOException (java.io.IOException): 25 usages
Project (io.hops.hopsworks.persistence.entity.project.Project): 23 usages
Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset): 21 usages
DistributedFileSystemOps (io.hops.hopsworks.common.hdfs.DistributedFileSystemOps): 18 usages
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 12 usages
DatasetSharedWith (io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith): 12 usages
Produces (javax.ws.rs.Produces): 12 usages
Inode (io.hops.hopsworks.persistence.entity.hdfs.inode.Inode): 11 usages
DatasetPath (io.hops.hopsworks.common.dataset.util.DatasetPath): 10 usages
HopsSecurityException (io.hops.hopsworks.exceptions.HopsSecurityException): 10 usages
Users (io.hops.hopsworks.persistence.entity.user.Users): 10 usages
Path (javax.ws.rs.Path): 10 usages
Path (org.apache.hadoop.fs.Path): 10 usages
GenericException (io.hops.hopsworks.exceptions.GenericException): 9 usages
AccessControlException (org.apache.hadoop.security.AccessControlException): 8 usages
ProvenanceException (io.hops.hopsworks.exceptions.ProvenanceException): 7 usages
AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles): 6 usages
ServiceException (io.hops.hopsworks.exceptions.ServiceException): 6 usages
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 6 usages