Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Example from class DatasetController, method copy.
/**
 * Copies a dataset file or directory within HDFS on behalf of the given user.
 * <p>
 * The copy is only permitted when the source dataset is owned by the same project
 * that owns the destination dataset, and the destination dataset is not public.
 *
 * @param project       project the operation is performed in; used to resolve the HDFS username
 * @param user          user performing the copy
 * @param sourcePath    HDFS path to copy from
 * @param destPath      HDFS path to copy to; must not already exist
 * @param sourceDataset dataset containing the source path
 * @param destDataset   dataset containing the destination path
 * @throws DatasetException if the datasets belong to different projects, the destination dataset
 *                          is public, the destination path already exists, or the HDFS operation fails
 */
public void copy(Project project, Users user, Path sourcePath, Path destPath, Dataset sourceDataset,
    Dataset destDataset) throws DatasetException {
  String username = hdfsUsersController.getHdfsUserName(project, user);
  // Cross-project copies are not allowed: the source must be owned by the destination's project.
  if (!getOwningProject(sourceDataset).equals(destDataset.getProject())) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.COPY_FROM_PROJECT, Level.FINE);
  }
  if (destDataset.isPublicDs()) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.COPY_TO_PUBLIC_DS, Level.FINE);
  }
  DistributedFileSystemOps udfso = null;
  try {
    udfso = dfs.getDfsOps(username);
    if (udfso.exists(destPath.toString())) {
      throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE);
    }
    // Get destination folder permissions so the copied tree inherits them.
    FsPermission permission = udfso.getFileStatus(destPath.getParent()).getPermission();
    udfso.copyInHdfs(sourcePath, destPath);
    // Set permissions recursively on the newly copied tree.
    recChangeOwnershipAndPermission(destPath, permission, null, null, null, udfso);
  } catch (IOException ex) {
    // Bug fix: the message previously said "move operation failed" although this is a copy.
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.SEVERE,
        "copy operation failed for: " + sourcePath.toString(), ex.getMessage(), ex);
  } finally {
    // Always release the per-user DFS client, even on failure.
    dfs.closeDfsClient(udfso);
  }
}
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Example from class DatasetController, method shareInternal.
// Shares a dataset with a target project, creating the DatasetSharedWith link.
// A public dataset is auto-accepted immediately; a private dataset is auto-accepted only if it
// was requested by a data owner of the target project, otherwise the share stays pending.
// Throws DatasetException if the dataset is already shared with (or owned by) the target project.
private DatasetSharedWith shareInternal(Project targetProject, Dataset ds, Users user, DatasetAccessPermission permission) throws DatasetException {
// Guard: refuse to create a duplicate share entry for the same project/dataset pair.
DatasetSharedWith datasetSharedWith = datasetSharedWithFacade.findByProjectAndDataset(targetProject, ds);
if (datasetSharedWith != null) {
throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE, "Dataset already in " + targetProject.getName());
}
// Create the new Dataset entry (not yet accepted).
datasetSharedWith = new DatasetSharedWith(targetProject, ds, permission, false, user);
// if the dataset is not requested or is requested by a data scientist set status to pending.
DatasetRequest dsReq = datasetRequest.findByProjectAndDataset(targetProject, ds);
if (ds.isPublicDs()) {
// Sharing a public dataset back into its owning project is meaningless — reject it.
if (targetProject.equals(ds.getProject())) {
throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_ALREADY_IN_PROJECT, Level.FINE, "Dataset already in project.");
}
// Public datasets are accepted immediately on behalf of the sharing user.
datasetSharedWith.setAccepted(true);
datasetSharedWith.setAcceptedBy(user);
// Grant the target project's members HDFS group access to the dataset.
addMembersToGroup(datasetSharedWith);
} else if (dsReq != null && dsReq.getProjectTeam().getTeamRole().equals(AllowedRoles.DATA_OWNER)) {
// dataset is private and requested by a data owner: auto-accept on the requester's behalf.
datasetSharedWith.setAccepted(true);
datasetSharedWith.setAcceptedBy(dsReq.getProjectTeam().getUser());
addMembersToGroup(datasetSharedWith);
}
// Persist the share link (accepted or pending) before cleaning up any request.
datasetSharedWithFacade.save(datasetSharedWith);
if (dsReq != null) {
// the dataset is shared so remove the request.
datasetRequest.remove(dsReq);
}
// Record the share in the owning project's activity log.
activityFacade.persistActivity(ActivityFacade.SHARED_DATA + ds.getName() + " with project " + targetProject.getName(), ds.getProject(), user, ActivityFlag.DATASET);
return datasetSharedWith;
}
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Example from class DatasetController, method unshare.
/**
 * Removes an existing share of {@code dataset} from the project named {@code targetProjectName}.
 * For feature-store datasets, any associated shared training dataset and the statistics
 * service dataset are unshared first.
 *
 * @throws DatasetException if the target project does not exist or the dataset is not
 *                          currently shared with it
 */
public void unshare(Project project, Users user, Dataset dataset, String targetProjectName,
    DistributedFileSystemOps dfso) throws DatasetException {
  Project target = projectFacade.findByName(targetProjectName);
  if (target == null) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.TARGET_PROJECT_NOT_FOUND, Level.FINE);
  }
  DatasetSharedWith sharedEntry = datasetSharedWithFacade.findByProjectAndDataset(target, dataset);
  if (sharedEntry == null) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_SHARED_WITH_PROJECT, Level.FINE,
        "project: " + target.getName());
  }
  // Feature-store shares carry companion datasets that must be unshared as well.
  if (DatasetType.FEATURESTORE.equals(sharedEntry.getDataset().getDsType())) {
    DatasetSharedWith sharedTraining =
        getSharedTrainingDataset(target, sharedEntry.getDataset().getProject());
    if (sharedTraining != null) {
      unshareDs(project, user, sharedTraining, dfso);
    }
    unshareFeatureStoreServiceDataset(user, project, target, sharedEntry,
        Settings.ServiceDataset.STATISTICS, dfso);
  }
  unshareDs(project, user, sharedEntry, dfso);
}
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Example from class DatasetController, method createDataset.
/**
 * Create a new DataSet. This is, a folder right under the project home
 * folder.
 * **The Dataset directory is created using the superuser dfso**
 *
 * @param user The creating Users. Cannot be null.
 * @param project The project under which to create the DataSet. Cannot be
 * null.
 * @param dataSetName The name of the DataSet being created. Cannot be null
 * and must satisfy the validity criteria for a folder name.
 * @param datasetDescription The description of the DataSet being created. Can
 * be null.
 * @param metaStatus Provenance/metadata status to apply to the new dataset.
 * @param stickyBit Whether or not the dataset should have the sticky bit set
 * @param permission Access permission recorded for the new dataset.
 * @param dfso Filesystem operations handle (superuser) used to create the folder.
 * @return the persisted Dataset entity for the newly created folder.
 * @throws DatasetException if the name collides with an existing dataset, the folder
 * cannot be created, or persisting the dataset fails (the folder is removed on rollback).
 * @throws HopsSecurityException if setting up dataset groups/permissions fails.
 * @throws IllegalArgumentException if user, project or dataSetName is null, or the
 * name does not satisfy the validity criteria for folder names.
 */
public Dataset createDataset(Users user, Project project, String dataSetName, String datasetDescription, ProvTypeDTO metaStatus, boolean stickyBit, DatasetAccessPermission permission, DistributedFileSystemOps dfso) throws DatasetException, HopsSecurityException {
// Parameter checking.
if (user == null || project == null || dataSetName == null) {
throw new IllegalArgumentException("User, project or dataset were not provided");
}
FolderNameValidator.isValidName(dataSetName);
// Logic
boolean success;
String dsPath = Utils.getProjectPath(project.getName()) + dataSetName;
Inode parent = inodeController.getProjectRoot(project.getName());
// Reject the creation if an inode with this name already exists under the project root.
Inode ds = inodes.findByInodePK(parent, dataSetName, HopsUtils.dataSetPartitionId(parent, dataSetName));
if (ds != null) {
throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE, "Dataset name: " + dataSetName);
}
Dataset newDS = null;
// Permission 770 (rwxrwx---), optionally with the sticky bit.
FsPermission fsPermission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE, stickyBit);
success = createFolder(dsPath, fsPermission, dfso);
if (success) {
try {
// Re-read the inode now that the folder exists, to link the Dataset entity to it.
ds = inodes.findByInodePK(parent, dataSetName, HopsUtils.dataSetPartitionId(parent, dataSetName));
newDS = new Dataset(ds, project, permission);
newDS.setSearchable(isSearchable(metaStatus.getMetaStatus()));
if (datasetDescription != null) {
newDS.setDescription(datasetDescription);
}
datasetFacade.persistDataset(newDS);
activityFacade.persistActivity(ActivityFacade.NEW_DATA + dataSetName, project, user, ActivityFlag.DATASET);
// creates a dataset and adds user as owner.
hdfsUsersController.createDatasetGroupsAndSetPermissions(user, project, newDS, new Path(dsPath), dfso);
Dataset logDs = getByProjectAndDsName(project, null, dataSetName);
// set the dataset meta enabled(or prov). Support 3 level indexing
fsProvController.updateDatasetProvType(logDs, metaStatus, dfso);
logDataset(project, logDs, OperationType.Add);
} catch (Exception e) {
// Rollback: the folder was created but persisting metadata failed.
try {
// if dataset persist fails rm ds folder.
dfso.rm(new Path(dsPath), true);
} catch (IOException ex) {
// Dataset clean up failed. Log the exception for further debugging.
LOGGER.log(Level.SEVERE, "Could not cleanup dataset dir after exception: " + dsPath, ex);
}
throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.SEVERE, "Could not create dataset: " + dataSetName, e.getMessage(), e);
}
} else {
// createFolder reported failure without throwing; surface it to the caller.
throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.INFO, "Could not create dataset: " + dataSetName);
}
return newDS;
}
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Example from class DatasetHelper, method getNewDatasetPath.
/**
 * Builds a {@link DatasetPath} for the given project-relative path, resolving the
 * filesystem root directory from the dataset type (or, when the type is unknown,
 * from whether the path lies under the Hive warehouse).
 *
 * @param project     owning project
 * @param path        project-relative path to resolve
 * @param datasetType dataset type; may be null, in which case the root is inferred from the path
 * @return the resolved dataset path
 * @throws DatasetException if the path cannot be URL-decoded
 */
public DatasetPath getNewDatasetPath(Project project, String path, DatasetType datasetType) throws DatasetException {
  String rootDir;
  if (datasetType == null) {
    // No explicit type: infer from the path prefix whether this lives in the Hive warehouse.
    rootDir = path.startsWith(settings.getHiveWarehouse()) ? settings.getHiveWarehouse() : Settings.DIR_ROOT;
  } else {
    rootDir = datasetType.equals(DatasetType.DATASET) ? Settings.DIR_ROOT : settings.getHiveWarehouse();
  }
  DatasetPath datasetPath;
  try {
    datasetPath = new DatasetPath(project, path, rootDir);
  } catch (UnsupportedEncodingException e) {
    // Bug fix: preserve the original exception as the cause instead of discarding it,
    // so the failing path and underlying encoding error survive in logs/stack traces.
    throw new DatasetException(RESTCodes.DatasetErrorCode.PATH_ENCODING_NOT_SUPPORTED, Level.FINE,
        "path: " + path, e.getMessage(), e);
  }
  return datasetPath;
}
Aggregations