Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class DatasetController, method getOrCreateSharedFeatureStoreServiceDataset:
private DatasetSharedWith getOrCreateSharedFeatureStoreServiceDataset(Project project, Project parentProject, DatasetAccessPermission permission, Users sharedBy, Settings.ServiceDataset serviceDataset) {
  // Locate the feature-store service dataset owned by the parent project.
  Dataset dataset = getFeatureStoreServiceDataset(parentProject, serviceDataset);
  DatasetSharedWith existingShare = datasetSharedWithFacade.findByProjectAndDataset(project, dataset);
  if (existingShare != null) {
    return existingShare;
  }
  // Not shared yet: persist a new, not-yet-accepted share and re-read it so the
  // returned entity carries any database-generated state.
  datasetSharedWithFacade.save(new DatasetSharedWith(project, dataset, permission, false, sharedBy));
  return datasetSharedWithFacade.findByProjectAndDataset(project, dataset);
}
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class DatasetController, method share:
public void share(String targetProjectName, String fullPath, DatasetAccessPermission permission, Project project, Users user) throws DatasetException, ProjectException {
  Project targetProject = projectFacade.findByName(targetProjectName);
  Dataset ds = getByProjectAndFullPath(project, fullPath);
  if (targetProject == null) {
    throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "Target project not found.");
  }
  DatasetSharedWith sharedWith = shareInternal(targetProject, ds, user, permission);
  // Companion datasets are only propagated for an accepted feature-store share.
  if (!DatasetType.FEATURESTORE.equals(ds.getDsType()) || !sharedWith.getAccepted()) {
    return;
  }
  Dataset trainingDataset = getTrainingDataset(project);
  if (trainingDataset != null) {
    try {
      shareInternal(targetProject, trainingDataset, user, permission);
    } catch (DatasetException ignored) {
      // Training dataset already shared with the target project; nothing to do.
    }
  }
  // If we migrate Training Datasets to remove the project prefix, these methods can be reused
  shareFeatureStoreServiceDataset(user, project, targetProject, permission, Settings.ServiceDataset.STATISTICS);
}
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class DatasetController, method createDataset:
/**
 * Create a new DataSet. This is, a folder right under the project home
 * folder.
 * **The Dataset directory is created using the superuser dfso**
 *
 * @param user The creating Users. Cannot be null.
 * @param project The project under which to create the DataSet. Cannot be
 * null.
 * @param dataSetName The name of the DataSet being created. Cannot be null
 * and must satisfy the validity criteria for a folder name.
 * @param datasetDescription The description of the DataSet being created. Can
 * be null.
 * @param metaStatus The provenance/metadata status applied to the new dataset.
 * @param stickyBit Whether or not the dataset should have the sticky bit set
 * @param permission The access permission recorded on the persisted Dataset entity.
 * @param dfso The file-system operations handle used to create the folder
 * (per the note above, expected to be the superuser handle).
 * @return the persisted Dataset entity.
 * @throws IllegalArgumentException if user, project or dataSetName is null.
 * @throws DatasetException if the dataset name violates the validity criteria for
 * folder names, the folder already exists, or folder/entity creation fails.
 * @throws HopsSecurityException presumably propagated from folder creation or
 * permission setup — TODO confirm (not thrown directly in this method body).
 */
public Dataset createDataset(Users user, Project project, String dataSetName, String datasetDescription, ProvTypeDTO metaStatus, boolean stickyBit, DatasetAccessPermission permission, DistributedFileSystemOps dfso) throws DatasetException, HopsSecurityException {
// Parameter checking.
if (user == null || project == null || dataSetName == null) {
throw new IllegalArgumentException("User, project or dataset were not provided");
}
FolderNameValidator.isValidName(dataSetName);
// Logic
boolean success;
String dsPath = Utils.getProjectPath(project.getName()) + dataSetName;
Inode parent = inodeController.getProjectRoot(project.getName());
// Reject the request early if an inode with this name already exists under the project root.
Inode ds = inodes.findByInodePK(parent, dataSetName, HopsUtils.dataSetPartitionId(parent, dataSetName));
if (ds != null) {
throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE, "Dataset name: " + dataSetName);
}
Dataset newDS = null;
// Permission 770
FsPermission fsPermission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE, stickyBit);
success = createFolder(dsPath, fsPermission, dfso);
if (success) {
try {
// Re-read the inode now that the folder exists so the entity references the real inode.
ds = inodes.findByInodePK(parent, dataSetName, HopsUtils.dataSetPartitionId(parent, dataSetName));
newDS = new Dataset(ds, project, permission);
newDS.setSearchable(isSearchable(metaStatus.getMetaStatus()));
if (datasetDescription != null) {
newDS.setDescription(datasetDescription);
}
datasetFacade.persistDataset(newDS);
activityFacade.persistActivity(ActivityFacade.NEW_DATA + dataSetName, project, user, ActivityFlag.DATASET);
// creates a dataset and adds user as owner.
hdfsUsersController.createDatasetGroupsAndSetPermissions(user, project, newDS, new Path(dsPath), dfso);
Dataset logDs = getByProjectAndDsName(project, null, dataSetName);
// set the dataset meta enabled(or prov). Support 3 level indexing
fsProvController.updateDatasetProvType(logDs, metaStatus, dfso);
logDataset(project, logDs, OperationType.Add);
} catch (Exception e) {
try {
// if dataset persist fails rm ds folder.
dfso.rm(new Path(dsPath), true);
} catch (IOException ex) {
// Dataset clean up failed. Log the exception for further debugging.
LOGGER.log(Level.SEVERE, "Could not cleanup dataset dir after exception: " + dsPath, ex);
}
throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.SEVERE, "Could not create dataset: " + dataSetName, e.getMessage(), e);
}
} else {
throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.INFO, "Could not create dataset: " + dataSetName);
}
return newDS;
}
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class DatasetHelper, method getDatasetPath:
/**
 * Resolves a project-relative path into a {@code DatasetPath}, attaching the backing
 * Dataset entity, its share record (when the dataset is shared with {@code project}),
 * and the inode of the path's target.
 *
 * @param project the project from whose perspective the path is resolved. 
 * @param path the (possibly dataset-relative) path to resolve.
 * @param datasetType the type of dataset the path is expected to live in.
 * @return the fully populated DatasetPath.
 * @throws DatasetException if no dataset exists at the resolved full path.
 */
public DatasetPath getDatasetPath(Project project, String path, DatasetType datasetType) throws DatasetException {
DatasetPath datasetPath = getNewDatasetPath(project, path, datasetType);
Dataset dataset = datasetController.getByProjectAndFullPath(project, datasetPath.getDatasetFullPath().toString());
// Fail fast with a proper REST error: the original code dereferenced
// datasetPath.getDataset() below and threw a NullPointerException when the
// dataset lookup returned null.
if (dataset == null) {
throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE, "path: " + datasetPath.getDatasetFullPath().toString());
}
datasetPath.setDataset(dataset);
if (dataset.isShared(project)) {
DatasetSharedWith datasetSharedWith = datasetSharedWithFacade.findByProjectAndDataset(project, dataset);
datasetPath.setDatasetSharedWith(datasetSharedWith);
}
if (datasetPath.isTopLevelDataset()) {
datasetPath.setInode(datasetPath.getDataset().getInode());
} else {
datasetPath.setInode(inodeController.getInodeAtPath(datasetPath.getDataset().getInode(), datasetPath.getDatasetFullPath().depth(), // expensive
datasetPath.getDatasetRelativePath()));
}
return datasetPath;
}
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class ElasticController, method datasetSearchHighLevel:
public SearchHit[] datasetSearchHighLevel(Integer projectId, String datasetName, String searchTerm) throws ServiceException, ElasticException {
  // The metadata index must exist before any search can run against it.
  if (!elasticClientCtrl.mngIndexExists(Settings.META_INDEX)) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.ELASTIC_INDEX_NOT_FOUND, Level.SEVERE, "index: " + Settings.META_INDEX);
  }
  // A shared dataset is addressed as "<owningProject><separator><datasetName>";
  // in that case resolve the owning project by name instead of by id.
  Project project;
  String dsName;
  if (datasetName.contains(Settings.SHARED_FILE_SEPARATOR)) {
    String[] parts = datasetName.split(Settings.SHARED_FILE_SEPARATOR);
    project = projectFacade.findByName(parts[0]);
    dsName = parts[1];
  } else {
    project = projectFacade.find(projectId);
    dsName = datasetName;
  }
  Dataset dataset = datasetController.getByProjectAndDsName(project, null, dsName);
  long datasetId = dataset.getInodeId();
  SearchResponse response = executeSearchQuery(datasetSearchQuery(datasetId, searchTerm.toLowerCase()));
  if (response.status().getStatus() != 200) {
    // A non-200 status indicates a problem on the Elasticsearch side rather than a bad query.
    throw new ElasticException(RESTCodes.ElasticErrorCode.ELASTIC_QUERY_ERROR, Level.INFO, "Error while executing query, code: " + response.status().getStatus());
  }
  SearchHit[] hits = response.getHits().getHits();
  return hits.length > 0 ? hits : new SearchHit[0];
}
Aggregations