Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Class ProjectService, method getDatasetInfo.
@GET
@Path("getDatasetInfo/{inodeId}")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatasetInfo(@PathParam("inodeId") Long inodeId, @Context SecurityContext sc)
    throws DatasetException {
  Inode inode = inodes.findById(inodeId);
  Project proj = datasetController.getOwningProject(inode);
  Dataset ds = datasetFacade.findByProjectAndInode(proj, inode);
  if (ds == null) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE, "inodeId: " + inodeId);
  }
  Collection<DatasetSharedWith> projectsContainingInode = proj.getDatasetSharedWithCollection();
  List<String> sharedWith = new ArrayList<>();
  for (DatasetSharedWith d : projectsContainingInode) {
    if (!d.getProject().getId().equals(proj.getId())) {
      sharedWith.add(d.getProject().getName());
    }
  }
  DataSetDTO dataset = new DataSetDTO(ds, proj, sharedWith);
  return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(dataset).build();
}
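For orientation, the endpoint above can be exercised with a plain JAX-RS client. The sketch below is illustrative only: the base URL, the project resource prefix, and the inode id are assumptions, not values taken from the hopsworks source.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class GetDatasetInfoClient {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    // Assumed deployment URL and resource prefix; adjust to the actual installation.
    Response response = client.target("https://localhost:8181/hopsworks-api/api")
        .path("project/getDatasetInfo/12345")  // 12345 is a hypothetical inodeId
        .request(MediaType.APPLICATION_JSON)
        .get();
    // A server-side DatasetException (DATASET_NOT_FOUND) arrives here as a non-2xx status.
    System.out.println(response.getStatus() + " " + response.readEntity(String.class));
    client.close();
  }
}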
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Class ProjectService, method example.
@POST
@Path("starterProject/{type}")
@Produces(MediaType.APPLICATION_JSON)
public Response example(@PathParam("type") String type, @Context HttpServletRequest req,
    @Context SecurityContext sc)
    throws DatasetException, GenericException, KafkaException, ProjectException, UserException,
    ServiceException, HopsSecurityException, FeaturestoreException, JobException, IOException,
    ElasticException, SchemaException, ProvenanceException {
  TourProjectType demoType;
  try {
    demoType = TourProjectType.fromString(type);
  } catch (IllegalArgumentException e) {
    throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
  }
  ProjectDTO projectDTO = new ProjectDTO();
  Project project = null;
  projectDTO.setDescription("A demo project for getting started with " + demoType.getDescription());
  Users user = jWTHelper.getUserPrincipal(sc);
  String username = user.getUsername();
  List<String> projectServices = new ArrayList<>();
  // save the project
  String readMeMessage = null;
  switch (demoType) {
    case KAFKA:
      // It's a Kafka guide
      projectDTO.setProjectName("demo_" + TourProjectType.KAFKA.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.KAFKA);
      readMeMessage = "jar file to demonstrate Kafka streaming";
      break;
    case SPARK:
      // It's a Spark guide
      projectDTO.setProjectName("demo_" + TourProjectType.SPARK.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.SPARK);
      readMeMessage = "jar file to demonstrate the creation of a spark batch job";
      break;
    case FS:
      // It's a Featurestore guide
      projectDTO.setProjectName("demo_" + TourProjectType.FS.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.FS);
      readMeMessage = "Dataset containing a jar file and data that can be used to run a sample spark-job for "
          + "inserting data in the feature store.";
      break;
    case ML:
      // It's a TensorFlow guide
      projectDTO.setProjectName("demo_" + TourProjectType.ML.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.ML);
      readMeMessage = "Jupyter notebooks and training data for demonstrating how to run Deep Learning";
      break;
    default:
      throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
  }
  projectDTO.setServices(projectServices);
  DistributedFileSystemOps dfso = null;
  DistributedFileSystemOps udfso = null;
  try {
    project = projectController.createProject(projectDTO, user, req.getSession().getId());
    dfso = dfs.getDfsOps();
    username = hdfsUsersBean.getHdfsUserName(project, user);
    udfso = dfs.getDfsOps(username);
    ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
    String tourFilesDataset =
        projectController.addTourFilesToProject(user.getEmail(), project, dfso, dfso, demoType, projectMetaStatus);
    // TestJob dataset
    datasetController.generateReadme(udfso, tourFilesDataset, readMeMessage, project.getName());
  } catch (Exception ex) {
    projectController.cleanup(project, req.getSession().getId());
    throw ex;
  } finally {
    if (dfso != null) {
      dfso.close();
    }
    if (udfso != null) {
      dfs.closeDfsClient(udfso);
    }
  }
  return noCacheResponse.getNoCacheResponseBuilder(Response.Status.CREATED).entity(project).build();
}
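TourProjectType.fromString is what turns the raw path parameter into a typed constant, throwing IllegalArgumentException on unknown input. Below is a minimal sketch of that enum-lookup idiom using a hypothetical DemoType; the real TourProjectType in hopsworks may be defined differently.

public enum DemoType {
  KAFKA("kafka"),
  SPARK("spark"),
  FS("fs"),
  ML("ml");

  private final String tourName;

  DemoType(String tourName) {
    this.tourName = tourName;
  }

  public String getTourName() {
    return tourName;
  }

  // Case-insensitive lookup; unknown values raise IllegalArgumentException,
  // which the endpoint above rewraps with the list of valid types.
  public static DemoType fromString(String value) {
    for (DemoType t : values()) {
      if (t.tourName.equalsIgnoreCase(value)) {
        return t;
      }
    }
    throw new IllegalArgumentException("Unknown type: " + value);
  }
}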
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Class DatasetController, method checkFileExists.
/**
 * Checks whether a path exists. Requires read access to the path.
 * @param filePath the path to check
 * @param username the HDFS user to perform the check as
 * @throws DatasetException if the path is unreadable or does not exist
 */
public void checkFileExists(Path filePath, String username) throws DatasetException {
  DistributedFileSystemOps udfso = null;
  boolean exist;
  try {
    udfso = dfs.getDfsOps(username);
    exist = udfso.exists(filePath);
  } catch (AccessControlException ae) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_ACCESS_PERMISSION_DENIED, Level.FINE,
        "path: " + filePath.toString(), ae.getMessage(), ae);
  } catch (IOException ex) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE,
        "path: " + filePath.toString(), ex.getMessage(), ex);
  } finally {
    dfs.closeDfsClient(udfso);
  }
  if (!exist) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE,
        "path: " + filePath.toString());
  }
}
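The shape of checkFileExists (probe inside the try, translate each low-level failure into a typed domain exception, then test the flag after cleanup) is a reusable pattern. Here is a self-contained sketch of the same pattern against the local filesystem; PathCheckException and the java.nio calls are stand-ins, not hopsworks APIs.

import java.io.IOException;
import java.nio.file.AccessDeniedException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;

public class ExistenceCheck {

  // Illustrative stand-in for hopsworks' DatasetException.
  static class PathCheckException extends Exception {
    PathCheckException(String message) { super(message); }
    PathCheckException(String message, Throwable cause) { super(message, cause); }
  }

  public static void checkFileExists(Path filePath) throws PathCheckException {
    boolean exists;
    try {
      // Reading attributes (unlike Files.exists) lets permission errors surface.
      Files.readAttributes(filePath, BasicFileAttributes.class);
      exists = true;
    } catch (AccessDeniedException ae) {
      throw new PathCheckException("access denied, path: " + filePath, ae);
    } catch (IOException ex) {
      // Any other I/O failure is treated as "not found", mirroring the original.
      exists = false;
    }
    if (!exists) {
      throw new PathCheckException("not found, path: " + filePath);
    }
  }

  public static void main(String[] args) throws PathCheckException {
    checkFileExists(Path.of(args.length > 0 ? args[0] : "/tmp"));
    System.out.println("path exists and is readable");
  }
}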
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Class DatasetController, method updateSharePermission.
public void updateSharePermission(Dataset ds, DatasetAccessPermission datasetPermissions, Project project,
    String targetProjectName, Users user, DistributedFileSystemOps dfso) throws DatasetException, ProjectException {
  if (ds.isShared(project)) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OWNER_ERROR, Level.FINE);
  }
  if (ds.isPublicDs()) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_PUBLIC_IMMUTABLE, Level.FINE);
  }
  Project targetProject = projectFacade.findByName(targetProjectName);
  if (targetProject == null) {
    throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "Target project not found.");
  }
  DatasetSharedWith datasetSharedWith = datasetSharedWithFacade.findByProjectAndDataset(targetProject, ds);
  if (datasetSharedWith == null) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_SHARED_WITH_PROJECT, Level.FINE,
        "project: " + targetProject.getName());
  }
  PermissionTransition permissionTransition =
      PermissionTransition.valueOf(datasetSharedWith.getPermission(), datasetPermissions);
  updateSharePermission(datasetSharedWith, permissionTransition, project, user, dfso);
}
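PermissionTransition.valueOf(current, requested) packages the old and new permissions into one value before the overloaded updateSharePermission applies it. The sketch below is a hypothetical from/to pair illustrating that style, not the actual hopsworks class.

// Hypothetical stand-ins for DatasetAccessPermission and PermissionTransition.
enum AccessPermission { READ_ONLY, EDITABLE_BY_OWNERS, EDITABLE }

final class Transition {
  private final AccessPermission from;
  private final AccessPermission to;

  private Transition(AccessPermission from, AccessPermission to) {
    this.from = from;
    this.to = to;
  }

  // Two-argument factory mirroring the valueOf style used above.
  static Transition valueOf(AccessPermission from, AccessPermission to) {
    return new Transition(from, to);
  }

  // A no-op transition lets callers skip expensive permission updates.
  boolean noop() {
    return from == to;
  }

  AccessPermission getFrom() { return from; }

  AccessPermission getTo() { return to; }
}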
Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.
Class DatasetController, method acceptShared.
public void acceptShared(Project project, Users user, DatasetSharedWith datasetSharedWith) throws DatasetException {
  acceptSharedDs(user, datasetSharedWith);
  if (DatasetType.FEATURESTORE.equals(datasetSharedWith.getDataset().getDsType())) {
    DatasetSharedWith trainingDataset = getOrCreateSharedTrainingDataset(project,
        datasetSharedWith.getDataset().getProject(), datasetSharedWith.getPermission(),
        datasetSharedWith.getSharedBy());
    if (trainingDataset != null && !trainingDataset.getAccepted()) {
      try {
        acceptSharedDs(user, trainingDataset);
      } catch (DatasetException de) {
        // Dataset not shared or already accepted: nothing to do.
      }
    }
    // If we migrate Training Datasets to remove the project prefix, these methods can be reused
    acceptSharedFeatureStoreServiceDataset(project, datasetSharedWith, datasetSharedWith.getPermission(),
        datasetSharedWith.getSharedBy(), user, Settings.ServiceDataset.STATISTICS);
  }
}
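Note the deliberately empty catch in acceptShared: auto-accepting the companion training dataset must not fail the whole operation when it is no longer pending. The same "accept if pending, otherwise ignore" idiom in isolation, with illustrative names rather than hopsworks types:

interface SharedResource {
  boolean isAccepted();

  // May throw IllegalStateException if the share is gone or already accepted.
  void accept();
}

class AcceptHelper {
  static void acceptQuietly(SharedResource resource) {
    if (resource == null || resource.isAccepted()) {
      return;  // nothing pending
    }
    try {
      resource.accept();
    } catch (IllegalStateException ignored) {
      // Resource unshared or accepted concurrently: safe to ignore,
      // mirroring the empty DatasetException catch above.
    }
  }
}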