Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class FeaturestoreHopsfsConnectorController, method updateFeaturestoreHopsfsConnector:
@TransactionAttribute(TransactionAttributeType.REQUIRED)
@Transactional(rollbackOn = FeaturestoreException.class)
public FeaturestoreHopsfsConnector updateFeaturestoreHopsfsConnector(Featurestore featurestore,
    FeaturestoreHopsfsConnectorDTO featurestoreHopsfsConnectorDTO,
    FeaturestoreHopsfsConnector featurestoreHopsfsConnector) throws FeaturestoreException {
  // Only swap the backing dataset when the DTO actually names one.
  if (!Strings.isNullOrEmpty(featurestoreHopsfsConnectorDTO.getDatasetName())) {
    Dataset dataset = verifyHopsfsConnectorDatasetName(featurestoreHopsfsConnectorDTO.getDatasetName(), featurestore);
    featurestoreHopsfsConnector.setHopsfsDataset(dataset);
  }
  return featurestoreHopsfsConnector;
}
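A caller-side sketch may make the contract clearer: the method mutates and returns the managed connector entity, and a null or empty dataset name in the DTO leaves the dataset untouched. The following is a minimal, hedged sketch, assuming the controller is EJB-injected as elsewhere in hopsworks; the DTO's no-arg constructor and setDatasetName setter are assumed to mirror the getter used above, and the dataset name "Resources" is purely illustrative.

@EJB
private FeaturestoreHopsfsConnectorController hopsfsConnectorController;

public void repointConnector(Featurestore featurestore, FeaturestoreHopsfsConnector connector)
    throws FeaturestoreException {
  FeaturestoreHopsfsConnectorDTO dto = new FeaturestoreHopsfsConnectorDTO();
  dto.setDatasetName("Resources"); // assumed setter; null/empty would leave the connector as-is
  hopsfsConnectorController.updateFeaturestoreHopsfsConnector(featurestore, dto, connector);
}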
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class ExperimentsBuilder, method verifyExperimentsEndpoint:
private ExperimentsEndpointDTO verifyExperimentsEndpoint(Project userProject, String sEndpointId)
    throws GenericException, DatasetException {
  Integer endpointId;
  try {
    endpointId = Integer.parseInt(sEndpointId);
  } catch (NumberFormatException e) {
    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
        "Provided Endpoint Id was malformed - expected an Integer", e.getMessage(), e);
  }
  Project sharingProject = projectFacade.findById(endpointId)
      .orElseThrow(() -> new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
          "Provided project cannot be accessed"));
  // The Experiments dataset lives in the sharing project; the caller's project
  // must still be granted access to it.
  Dataset dataset = datasetCtrl.getByName(sharingProject, Settings.HOPS_EXPERIMENTS_DATASET);
  if (dataset != null && accessCtrl.hasAccess(userProject, dataset)) {
    return ExperimentsEndpointDTO.fromDataset(dataset);
  }
  throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
      "Provided Endpoint cannot be accessed");
}
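The parse-then-resolve shape of this method is easy to isolate and test. Below is a self-contained sketch of the same validation logic in plain Java, with hopsworks' GenericException replaced by IllegalArgumentException; EndpointIdParser and its resolve method are hypothetical names, not hopsworks API.

import java.util.Optional;
import java.util.function.IntFunction;

final class EndpointIdParser {
  // Same shape as verifyExperimentsEndpoint: reject a non-numeric id early,
  // then let the lookup decide whether the id resolves to anything accessible.
  static <T> T resolve(String sEndpointId, IntFunction<Optional<T>> lookup) {
    final int endpointId;
    try {
      endpointId = Integer.parseInt(sEndpointId);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Provided Endpoint Id was malformed - expected an Integer", e);
    }
    return lookup.apply(endpointId)
        .orElseThrow(() -> new IllegalArgumentException("Provided project cannot be accessed"));
  }
}

For example, EndpointIdParser.resolve("42", id -> Optional.of("project-" + id)) returns "project-42", while resolve("abc", ...) fails fast on the malformed id.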
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class ExperimentsResource, method getModelsProjectAndCheckAccess:
private Project getModelsProjectAndCheckAccess(ExperimentDTO experimentDTO)
    throws ProjectException, DatasetException, GenericException {
  Project modelProject;
  if (experimentDTO.getModelProjectName() == null) {
    // No explicit model project: default to the project of this resource.
    modelProject = project;
  } else {
    modelProject = projectFacade.findByName(experimentDTO.getModelProjectName());
    if (modelProject == null) {
      throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.INFO,
          "model project not found for experiment");
    }
  }
  Dataset modelDataset = datasetCtrl.getByName(modelProject, Settings.HOPS_MODELS_DATASET);
  if (!accessCtrl.hasAccess(project, modelDataset)) {
    throw new GenericException(RESTCodes.GenericErrorCode.NOT_AUTHORIZED_TO_ACCESS, Level.INFO);
  }
  return modelProject;
}
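The method combines two independent decisions: which project owns the models, and whether the caller may read that project's Models dataset. Below is a self-contained sketch of the first decision; ProjectResolver is a hypothetical name, not hopsworks API, and an Optional-returning lookup stands in for the facade's nullable findByName.

import java.util.Optional;
import java.util.function.Function;

final class ProjectResolver {
  // Fall back to the caller's own project when no model project is named;
  // otherwise resolve by name or fail, as getModelsProjectAndCheckAccess does.
  static <P> P resolveOrDefault(String modelProjectName, P currentProject,
      Function<String, Optional<P>> findByName) {
    if (modelProjectName == null) {
      return currentProject;
    }
    return findByName.apply(modelProjectName)
        .orElseThrow(() -> new IllegalStateException("model project not found for experiment"));
  }
}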
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class DsUpdateOperations, method deleteDatasetFile:
/**
 * Deletes a file inside a top-level dataset.
 *
 * @param project the project of the user making the request
 * @param user the user making the request
 * @param fileName the name of the folder or file to remove
 * @return the full path of the deleted file
 * @throws DatasetException if the path does not resolve or the deletion fails
 * @throws ProjectException if the project cannot be resolved
 * @throws HopsSecurityException if HDFS denies access
 */
public org.apache.hadoop.fs.Path deleteDatasetFile(Project project, Users user, String fileName)
    throws DatasetException, ProjectException, HopsSecurityException, UnsupportedEncodingException {
  boolean success = false;
  DistributedFileSystemOps dfso = null;
  DsPath dsPath = pathValidator.validatePath(project, fileName);
  Dataset ds = dsPath.getDs();
  org.apache.hadoop.fs.Path fullPath = dsPath.getFullPath();
  org.apache.hadoop.fs.Path dsRelativePath = dsPath.getDsRelativePath();
  if (dsRelativePath.depth() == 0) {
    throw new IllegalArgumentException("Use endpoint DELETE /{datasetName} to delete a top-level dataset");
  }
  try {
    String username = hdfsUsersBean.getHdfsUserName(project, user);
    // If a Data Scientist requested it, run as the project user to avoid deleting Data Owner files.
    // Find the project owning the dataset, as the dataset might be shared.
    Project owning = datasetController.getOwningProject(ds);
    boolean isMember = projectTeamFacade.isUserMemberOfProject(owning, user);
    if (isMember && projectTeamFacade.findCurrentRole(owning, user).equals(AllowedProjectRoles.DATA_OWNER)
        && owning.equals(project)) {
      // Data Owner of the owning, non-shared project: delete as the super user.
      dfso = dfs.getDfsOps();
    } else {
      // Otherwise delete as the project user, so HDFS permissions are enforced.
      dfso = dfs.getDfsOps(username);
    }
    success = dfso.rm(fullPath, true);
  } catch (AccessControlException ex) {
    throw new HopsSecurityException(RESTCodes.SecurityErrorCode.HDFS_ACCESS_CONTROL, Level.FINE,
        "Operation: delete, path: " + fullPath.toString(), ex.getMessage(), ex);
  } catch (IOException ex) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.SEVERE,
        "path: " + fullPath.toString(), ex.getMessage(), ex);
  } finally {
    if (dfso != null) {
      dfs.closeDfsClient(dfso);
    }
  }
  if (!success) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_DELETION_ERROR, Level.FINE,
        "path: " + fullPath.toString());
  }
  return fullPath;
}
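The branch that selects the filesystem handle is the security-sensitive part: deletion runs as the HDFS superuser only when the caller is a Data Owner of the owning project and the dataset is not shared (owning.equals(project)). A self-contained sketch of just that rule, under hypothetical names:

final class DeletePrivilegeRule {
  // Mirrors the branch in deleteDatasetFile: only a Data Owner of the owning,
  // non-shared project deletes as the superuser; everyone else acts as their
  // own project user, so HDFS permissions still apply.
  static boolean deleteAsSuperUser(boolean isMemberOfOwningProject, boolean isDataOwner,
      boolean datasetIsShared) {
    return isMemberOfOwningProject && isDataOwner && !datasetIsShared;
  }
}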
Use of io.hops.hopsworks.persistence.entity.dataset.Dataset in project hopsworks by logicalclocks.
The class ProjectProvenanceResource, method usage:
@GET
@Path("usage")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_SCIENTIST, AllowedProjectRoles.DATA_OWNER })
@JWTRequired(acceptedTokens = { Audience.API }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.PROJECT }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiOperation(value = "Artifact usage", response = ProvArtifactUsageParentDTO.class)
public Response usage(@QueryParam("artifact_id") String artifactId,
    @QueryParam("endpoint_id") Integer endpointId,
    @QueryParam("artifact_type") DatasetAccessType accessType,
    @BeanParam ProvUsageBeanParams params,
    @Context UriInfo uriInfo,
    @Context SecurityContext sc)
    throws ProvenanceException, GenericException, DatasetException, MetadataException, SchematizedTagException {
  Users user = jWTHelper.getUserPrincipal(sc);
  if (artifactId == null) {
    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
        "artifactId cannot be null");
  }
  Project targetProject = project;
  if (endpointId != null) {
    targetProject = projectFacade.findById(endpointId)
        .orElseThrow(() -> new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
            "target project not found"));
  }
  Dataset targetEndpoint;
  if (accessType != null) {
    try {
      // Map the artifact type to the dataset that stores artifacts of that type.
      switch (accessType) {
        case FEATUREGROUPS:
          targetEndpoint = fsCtrl.getProjectFeaturestoreDataset(targetProject);
          break;
        case TRAININGDATASETS:
          String tdName = project.getName() + "_" + Settings.ServiceDataset.TRAININGDATASETS.getName();
          targetEndpoint = datasetCtrl.getByName(targetProject, tdName);
          break;
        case MODELS:
          targetEndpoint = datasetCtrl.getByName(targetProject, Settings.HOPS_MODELS_DATASET);
          break;
        case EXPERIMENTS:
          targetEndpoint = datasetCtrl.getByName(targetProject, Settings.HOPS_EXPERIMENTS_DATASET);
          break;
        default:
          throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.FINE,
              "access type not supported: " + accessType);
      }
    } catch (FeaturestoreException | DatasetException e) {
      throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.FINE,
          "cannot access the dataset of the artifact");
    }
  } else {
    throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.FINE,
        "access type not defined");
  }
  DatasetPath targetEndpointPath = datasetHelper.getTopLevelDatasetPath(project, targetEndpoint);
  ProvArtifactUsageParentDTO status = usageBuilder.buildAccessible(uriInfo, user, targetEndpointPath,
      artifactId, params.getUsageType());
  return Response.ok().entity(status).build();
}
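Client-side, the endpoint can be exercised with any JAX-RS client. The sketch below is hedged: the base URL, the project-scoped path prefix, the project id, the artifact id, and the API-key environment variable are all placeholders for whatever your deployment actually uses.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class UsageClientSketch {
  public static void main(String[] args) {
    // Placeholder host, project id (119), artifact id, and credentials.
    Client client = ClientBuilder.newClient();
    Response response = client
        .target("https://hopsworks.example.com/hopsworks-api/api")
        .path("project").path("119").path("provenance").path("usage")
        .queryParam("artifact_id", "mnist_model_1")
        .queryParam("artifact_type", "MODELS")
        .request(MediaType.APPLICATION_JSON)
        .header("Authorization", "ApiKey " + System.getenv("HOPSWORKS_API_KEY"))
        .get();
    System.out.println(response.getStatus());
    System.out.println(response.readEntity(String.class));
    client.close();
  }
}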