use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
the class ModelUtils method getModelsAccessor.
public ModelsController.Accessor getModelsAccessor(Users user, Project userProject, Project modelProject,
    Project experimentProject) throws DatasetException {
  DistributedFileSystemOps udfso = null;
  try {
    String hdfsUser = hdfsUsersController.getHdfsUserName(experimentProject, user);
    udfso = dfs.getDfsOps(hdfsUser);
    return new ModelsController.Accessor(user, userProject, modelProject, experimentProject, udfso, hdfsUser);
  } catch (Throwable t) {
    // Ownership of udfso passes to the Accessor on success; close it here only if construction failed.
    if (udfso != null) {
      dfs.closeDfsClient(udfso);
    }
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.INFO);
  }
}
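On success the Accessor wraps a live DistributedFileSystemOps client, so the caller must release it when done. A minimal caller sketch, assuming the Accessor exposes the wrapped client as a field named udfso (illustrative; check the actual ModelsController.Accessor definition) and that modelUtils and dfs are injected as in the class above:

ModelsController.Accessor accessor = null;
try {
  accessor = modelUtils.getModelsAccessor(user, userProject, modelProject, experimentProject);
  // ... read or write model artifacts through the accessor ...
} finally {
  // Release the DFS client obtained in getModelsAccessor; the field name is assumed.
  if (accessor != null) {
    dfs.closeDfsClient(accessor.udfso);
  }
}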
use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
the class X509Resource method getx509.
@GET
@TransactionAttribute(TransactionAttributeType.NEVER)
@Produces(MediaType.APPLICATION_JSON)
@JWTRequired(acceptedTokens = {Audience.SERVICES, Audience.API}, allowedUserRoles = {"AGENT", "HOPS_ADMIN"})
@ApiOperation(value = "Get keystore, truststore and password of a project user", response = AccessCredentialsDTO.class)
public Response getx509(@QueryParam("username") String projectUsername, @Context SecurityContext sc)
    throws ProjectException, UserException, HopsSecurityException {
  try {
    // A project username has the form <projectName>__<userName>; the helpers below split it.
    String projectName = hdfsUsersController.getProjectName(projectUsername);
    String username = hdfsUsersController.getUserName(projectUsername);
    Project project = projectController.findProjectByName(projectName);
    Users user = userFacade.findByUsername(username);
    if (user == null) {
      throw new UserException(RESTCodes.UserErrorCode.USER_DOES_NOT_EXIST, Level.FINE);
    }
    try {
      AccessCredentialsDTO credentialsDTO = projectController.credentials(project.getId(), user);
      return Response.ok(credentialsDTO).build();
    } catch (DatasetException ex) {
      throw new HopsSecurityException(RESTCodes.SecurityErrorCode.CERTIFICATE_NOT_FOUND, Level.FINE);
    }
  } catch (ArrayIndexOutOfBoundsException ex) {
    // Thrown by the project-username parsing above when the separator is missing.
    throw new UserException(RESTCodes.UserErrorCode.USER_WAS_NOT_FOUND, Level.FINE,
        "Invalid project user format for username: " + projectUsername);
  }
}
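The ArrayIndexOutOfBoundsException caught above comes from splitting the project username; Hopsworks project users follow the <projectName>__<userName> convention. An illustrative reimplementation of the split (not the actual HdfsUsersController code):

// Illustrative sketch of project-username parsing, assuming the
// "<project>__<user>" convention; not the actual HdfsUsersController code.
static String projectName(String projectUsername) {
  return projectUsername.split("__")[0];
}

static String userName(String projectUsername) {
  // Index 1 throws ArrayIndexOutOfBoundsException when the separator is missing.
  return projectUsername.split("__")[1];
}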
use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
the class DatasetResource method postByPath.
@POST
@Path("{path: .+}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Post an action on a file, dir or dataset.")
@AllowedProjectRoles({AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST})
@JWTRequired(acceptedTokens = {Audience.API, Audience.JOB}, allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER", "HOPS_SERVICE_USER"})
@ApiKeyRequired(acceptedScopes = {ApiScope.DATASET_CREATE}, allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER", "HOPS_SERVICE_USER"})
public Response postByPath(@Context UriInfo uriInfo, @Context SecurityContext sc, @Context HttpServletRequest req,
    @PathParam("path") String path, @QueryParam("type") DatasetType datasetType,
    @QueryParam("target_project") String targetProjectName, @QueryParam("action") DatasetActions.Post action,
    @QueryParam("description") String description, @QueryParam("searchable") Boolean searchable,
    @QueryParam("generate_readme") Boolean generateReadme, @QueryParam("destination_path") String destPath,
    @QueryParam("destination_type") DatasetType destDatasetType,
    @DefaultValue("READ_ONLY") @QueryParam("permission") DatasetAccessPermission permission)
    throws DatasetException, ProjectException, HopsSecurityException, ProvenanceException, MetadataException,
    SchematizedTagException, FeaturestoreException {
  Users user = jwtHelper.getUserPrincipal(sc);
  DatasetPath datasetPath;
  DatasetPath distDatasetPath;
  Project project = this.getProject();
  switch (action == null ? DatasetActions.Post.CREATE : action) {
    case CREATE:
      if (datasetType != null && !datasetType.equals(DatasetType.DATASET)) {
        // can only create dataset
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_INVALID, Level.FINE);
      }
      datasetPath = datasetHelper.getNewDatasetPath(project, path, DatasetType.DATASET);
      if (datasetPath.isTopLevelDataset()) {
        checkIfDataOwner(project, user);
      }
      if (datasetPath.isTopLevelDataset() && !datasetHelper.isBasicDatasetProjectParent(project, datasetPath)) {
        // fake shared dataset with :: in dataset name at dataset creation
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NAME_INVALID, Level.FINE);
      }
      ProvTypeDTO projectProvCore = fsProvenanceController.getMetaStatus(user, project, searchable);
      ResourceRequest resourceRequest;
      if (datasetPath.isTopLevelDataset()) {
        datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(),
            datasetPath.isTopLevelDataset(), description,
            Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
        resourceRequest = new ResourceRequest(ResourceRequest.Name.DATASET);
        Dataset ds = datasetController.getByProjectAndFullPath(project, datasetPath.getFullPath().toString());
        datasetHelper.updateDataset(project, datasetPath, ds);
        datasetPath.setInode(ds.getInode());
        DatasetDTO dto = datasetBuilder.build(uriInfo, resourceRequest, user, datasetPath, null, null, false);
        return Response.created(dto.getHref()).entity(dto).build();
      } else {
        datasetHelper.checkIfDatasetExists(project, datasetPath);
        datasetHelper.updateDataset(project, datasetPath);
        datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(),
            datasetPath.isTopLevelDataset(), description,
            Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
        resourceRequest = new ResourceRequest(ResourceRequest.Name.INODES);
        Inode inode = inodeController.getInodeAtPath(datasetPath.getFullPath().toString());
        datasetPath.setInode(inode);
        InodeDTO dto = inodeBuilder.buildStat(uriInfo, resourceRequest, user, datasetPath, inode);
        return Response.created(dto.getHref()).entity(dto).build();
      }
    case COPY:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
      datasetController.copy(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(),
          datasetPath.getDataset(), distDatasetPath.getDataset());
      break;
    case MOVE:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
      datasetController.move(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(),
          datasetPath.getDataset(), distDatasetPath.getDataset());
      break;
    case SHARE:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.share(targetProjectName, datasetPath.getFullPath().toString(), permission, project, user);
      break;
    case ACCEPT:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.acceptShared(project, user, datasetPath.getDatasetSharedWith());
      break;
    case ZIP:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      if (destPath != null) {
        distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
        datasetController.zip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
      } else {
        datasetController.zip(project, user, datasetPath.getFullPath(), null);
      }
      break;
    case UNZIP:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      if (destPath != null) {
        distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
        datasetController.unzip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
      } else {
        datasetController.unzip(project, user, datasetPath.getFullPath(), null);
      }
      break;
    case REJECT:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.rejectShared(datasetPath.getDatasetSharedWith());
      break;
    case PUBLISH:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.shareWithCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
      break;
    case UNPUBLISH:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.unshareFromCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
      break;
    case IMPORT:
      checkIfDataOwner(project, user);
      Project srcProject = projectController.findProjectByName(targetProjectName);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(srcProject, path, datasetType);
      datasetController.share(project.getName(), datasetPath.getFullPath().toString(),
          DatasetAccessPermission.READ_ONLY, srcProject, user);
      break;
    case UNSHARE_ALL:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.unshareAll(datasetPath.getDataset(), user);
      break;
    default:
      throw new WebApplicationException("Action not valid.", Response.Status.NOT_FOUND);
  }
  return Response.noContent().build();
}
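A caller exercises this endpoint by POSTing to the file or dataset path with the desired action as a query parameter; actions that fall through the switch return 204 No Content. A minimal JAX-RS client sketch for zipping a directory, where the host, project id, target path and JWT are all illustrative values and the resource path assumes the usual project/{id}/dataset/{path} layout:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class ZipDatasetExample {
  public static void main(String[] args) {
    // Illustrative values: adjust host, project id, path and token for a real instance.
    String base = "https://hopsworks.example.com/hopsworks-api/api";
    String token = System.getenv("HOPSWORKS_JWT");
    Client client = ClientBuilder.newClient();
    try {
      Response r = client.target(base)
          // Path of the directory to zip, relative to the project.
          .path("project/119/dataset/Resources/my_dir")
          // Value must match a DatasetActions.Post constant.
          .queryParam("action", "ZIP")
          .request()
          .header("Authorization", "Bearer " + token)
          .post(Entity.json(""));
      // ZIP does not build an entity: a 204 status signals success.
      System.out.println("status: " + r.getStatus());
    } finally {
      client.close();
    }
  }
}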
use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
the class ValidationReportController method registerValidationReportToDisk.
private Inode registerValidationReportToDisk(Users user, Featuregroup featuregroup, ValidationReportDTO reportDTO,
    Date validationTime) throws FeaturestoreException {
  DistributedFileSystemOps udfso = null;
  Project project = featuregroup.getFeaturestore().getProject();
  JSONObject reportJSON = convertValidationReportDTOToJson(reportDTO);
  try {
    udfso = dfs.getDfsOps(hdfsUsersController.getHdfsUserName(project, user));
    // "Dataset" is confusing terminology here: we only need the path to the on-disk dataValidationDir.
    Dataset dataValidationDir = getOrCreateDataValidationDataset(project, user);
    // All validation reports attached to a particular featuregroup version are stored in the same directory.
    Path reportDirPath = new Path(datasetController.getDatasetPath(dataValidationDir), featuregroup.getName());
    if (!udfso.isDir(reportDirPath.toString())) {
      udfso.mkdir(reportDirPath.toString());
    }
    reportDirPath = new Path(reportDirPath, featuregroup.getVersion().toString());
    if (!udfso.isDir(reportDirPath.toString())) {
      udfso.mkdir(reportDirPath.toString());
    }
    reportDirPath = new Path(reportDirPath, "ValidationReports");
    if (!udfso.isDir(reportDirPath.toString())) {
      udfso.mkdir(reportDirPath.toString());
    }
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HHmmss");
    String fileName = String.format("validation_report_%s.json", formatter.format(validationTime));
    Path reportPath = new Path(reportDirPath, fileName);
    if (udfso.exists(reportPath)) {
      throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.ERROR_SAVING_ON_DISK_VALIDATION_REPORT,
          Level.SEVERE, String.format("Validation report with file name %s already exists.", fileName));
    }
    udfso.create(reportPath, reportJSON.toString());
    return inodeController.getInodeAtPath(reportPath.toString());
  } catch (DatasetException | HopsSecurityException | IOException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.ERROR_SAVING_ON_DISK_VALIDATION_REPORT,
        Level.WARNING, e.getMessage());
  } finally {
    dfs.closeDfsClient(udfso);
  }
}
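The three mkdir-if-absent blocks build a nested layout of the form <data validation dataset>/<featuregroup name>/<version>/ValidationReports/validation_report_<timestamp>.json. They could be folded into one small helper; a hedged sketch, where ensureDir is a hypothetical name and only the isDir/mkdir calls already used above are relied on (Path is org.apache.hadoop.fs.Path):

// Hypothetical helper consolidating the repeated mkdir-if-absent pattern.
private Path ensureDir(DistributedFileSystemOps udfso, Path parent, String child) throws IOException {
  Path dir = new Path(parent, child);
  if (!udfso.isDir(dir.toString())) {
    udfso.mkdir(dir.toString());
  }
  return dir;
}

The method above would then chain three ensureDir calls instead of repeating the existence check for each path segment.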
use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
the class ExperimentsBuilder method build.
// Build collection
public ExperimentDTO build(UriInfo uriInfo, ResourceRequest resourceRequest, Project project, Users user)
    throws ExperimentsException {
  ExperimentDTO dto = new ExperimentDTO();
  uri(dto, uriInfo, project);
  expand(dto, resourceRequest);
  dto.setCount(0L);
  validatePagination(resourceRequest);
  if (dto.isExpand()) {
    try {
      Pair<ProvStateParamBuilder, Map<Long, ExperimentsEndpointDTO>> provFilesParamBuilder =
          buildExperimentProvenanceParams(project, resourceRequest);
      if (provFilesParamBuilder.getValue1().isEmpty()) {
        // no endpoint - no results
        return dto;
      }
      ProvStateDTO fileState = provenanceController.provFileStateList(project, provFilesParamBuilder.getValue0());
      if (fileState != null) {
        List<ProvStateDTO> experiments = fileState.getItems();
        dto.setCount(fileState.getCount());
        if (experiments != null && !experiments.isEmpty()) {
          for (ProvStateDTO fileProvStateHit : experiments) {
            ExperimentDTO experimentDTO =
                build(uriInfo, resourceRequest, project, user, provFilesParamBuilder.getValue1(), fileProvStateHit);
            if (experimentDTO != null) {
              dto.addItem(experimentDTO);
            }
          }
        }
      }
    } catch (ExperimentsException | DatasetException | ProvenanceException | MetadataException | GenericException e) {
      if (e instanceof ProvenanceException && ProvHelper.missingMappingForField((ProvenanceException) e)) {
        LOGGER.log(Level.WARNING, "Could not find opensearch mapping for experiments query", e);
        return dto;
      } else {
        throw new ExperimentsException(RESTCodes.ExperimentsErrorCode.EXPERIMENT_LIST_FAILED, Level.FINE,
            "Unable to list experiments for project " + project.getName(), e.getMessage(), e);
      }
    }
  }
  return dto;
}
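The getValue0()/getValue1() accessors suggest an org.javatuples.Pair bundling the provenance query parameters with the experiment endpoints they resolve against; an empty endpoint map short-circuits the build ("no endpoint - no results"). A minimal sketch of constructing and unpacking such a pair, with illustrative variable names and the types used above:

import org.javatuples.Pair;
import java.util.HashMap;
import java.util.Map;

// Illustrative only: pair the provenance query builder with endpoints keyed by id.
ProvStateParamBuilder params = new ProvStateParamBuilder();
Map<Long, ExperimentsEndpointDTO> endpoints = new HashMap<>();
Pair<ProvStateParamBuilder, Map<Long, ExperimentsEndpointDTO>> bundle = Pair.with(params, endpoints);

ProvStateParamBuilder queryParams = bundle.getValue0();
Map<Long, ExperimentsEndpointDTO> experimentEndpoints = bundle.getValue1();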