
Example 11 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class ProjectService, the method getDatasetInfo:

@GET
@Path("getDatasetInfo/{inodeId}")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatasetInfo(@PathParam("inodeId") Long inodeId, @Context SecurityContext sc) throws DatasetException {
    Inode inode = inodes.findById(inodeId);
    Project proj = datasetController.getOwningProject(inode);
    Dataset ds = datasetFacade.findByProjectAndInode(proj, inode);
    if (ds == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE, "inodeId: " + inodeId);
    }
    Collection<DatasetSharedWith> projectsContainingInode = proj.getDatasetSharedWithCollection();
    List<String> sharedWith = new ArrayList<>();
    for (DatasetSharedWith d : projectsContainingInode) {
        if (!d.getProject().getId().equals(proj.getId())) {
            sharedWith.add(d.getProject().getName());
        }
    }
    DataSetDTO dataset = new DataSetDTO(ds, proj, sharedWith);
    return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(dataset).build();
}
Also used : Project(io.hops.hopsworks.persistence.entity.project.Project) Inode(io.hops.hopsworks.persistence.entity.hdfs.inode.Inode) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) ArrayList(java.util.ArrayList) DataSetDTO(io.hops.hopsworks.common.dao.dataset.DataSetDTO) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
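As a hedged illustration of how this endpoint might be exercised from the client side, the sketch below uses the standard JAX-RS client API. The base URL, the project-scoped path prefix, and the placeholder inodeId are assumptions; the only thing taken from the example above is that a missing dataset raises a DatasetException, which the REST layer is assumed to translate into a non-2xx JSON error response.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class GetDatasetInfoClientSketch {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        // Base URL and path prefix are hypothetical; 4711 is a placeholder inodeId.
        Response resp = client.target("https://hopsworks.example.com/hopsworks-api/api")
            .path("project/getDatasetInfo/4711")
            .request(MediaType.APPLICATION_JSON)
            .get();
        if (resp.getStatus() == Response.Status.OK.getStatusCode()) {
            // DataSetDTO serialized as JSON
            System.out.println(resp.readEntity(String.class));
        } else {
            // A server-side DatasetException (e.g. DATASET_NOT_FOUND) is assumed to
            // surface here as an error status with a JSON error body.
            System.err.println("Error " + resp.getStatus() + ": " + resp.readEntity(String.class));
        }
        client.close();
    }
}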

Example 12 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class ProjectService, the method example:

@POST
@Path("starterProject/{type}")
@Produces(MediaType.APPLICATION_JSON)
public Response example(@PathParam("type") String type, @Context HttpServletRequest req, @Context SecurityContext sc) throws DatasetException, GenericException, KafkaException, ProjectException, UserException, ServiceException, HopsSecurityException, FeaturestoreException, JobException, IOException, ElasticException, SchemaException, ProvenanceException {
    TourProjectType demoType;
    try {
        demoType = TourProjectType.fromString(type);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
    }
    ProjectDTO projectDTO = new ProjectDTO();
    Project project = null;
    projectDTO.setDescription("A demo project for getting started with " + demoType.getDescription());
    Users user = jWTHelper.getUserPrincipal(sc);
    String username = user.getUsername();
    List<String> projectServices = new ArrayList<>();
    // save the project
    String readMeMessage = null;
    switch(demoType) {
        case KAFKA:
            // It's a Kafka guide
            projectDTO.setProjectName("demo_" + TourProjectType.KAFKA.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.KAFKA);
            readMeMessage = "jar file to demonstrate Kafka streaming";
            break;
        case SPARK:
            // It's a Spark guide
            projectDTO.setProjectName("demo_" + TourProjectType.SPARK.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.SPARK);
            readMeMessage = "jar file to demonstrate the creation of a spark batch job";
            break;
        case FS:
            // It's a Featurestore guide
            projectDTO.setProjectName("demo_" + TourProjectType.FS.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.FS);
            readMeMessage = "Dataset containing a jar file and data that can be used to run a sample spark-job for " + "inserting data in the feature store.";
            break;
        case ML:
            // It's a TensorFlow guide
            projectDTO.setProjectName("demo_" + TourProjectType.ML.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.ML);
            readMeMessage = "Jupyter notebooks and training data for demonstrating how to run Deep Learning";
            break;
        default:
            throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
    }
    projectDTO.setServices(projectServices);
    DistributedFileSystemOps dfso = null;
    DistributedFileSystemOps udfso = null;
    try {
        project = projectController.createProject(projectDTO, user, req.getSession().getId());
        dfso = dfs.getDfsOps();
        username = hdfsUsersBean.getHdfsUserName(project, user);
        udfso = dfs.getDfsOps(username);
        ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
        String tourFilesDataset = projectController.addTourFilesToProject(user.getEmail(), project, dfso, dfso, demoType, projectMetaStatus);
        // TestJob dataset
        datasetController.generateReadme(udfso, tourFilesDataset, readMeMessage, project.getName());
    } catch (Exception ex) {
        projectController.cleanup(project, req.getSession().getId());
        throw ex;
    } finally {
        if (dfso != null) {
            dfso.close();
        }
        if (udfso != null) {
            dfs.closeDfsClient(udfso);
        }
    }
    return noCacheResponse.getNoCacheResponseBuilder(Response.Status.CREATED).entity(project).build();
}
Also used : TourProjectType(io.hops.hopsworks.common.project.TourProjectType) ProjectDTO(io.hops.hopsworks.common.project.ProjectDTO) Project(io.hops.hopsworks.persistence.entity.project.Project) DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) ArrayList(java.util.ArrayList) Users(io.hops.hopsworks.persistence.entity.user.Users) ProvTypeDTO(io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO) DatasetException(io.hops.hopsworks.exceptions.DatasetException) FeaturestoreException(io.hops.hopsworks.exceptions.FeaturestoreException) ElasticException(io.hops.hopsworks.exceptions.ElasticException) IOException(java.io.IOException) ServiceException(io.hops.hopsworks.exceptions.ServiceException) UserException(io.hops.hopsworks.exceptions.UserException) ExecutionException(java.util.concurrent.ExecutionException) ProjectException(io.hops.hopsworks.exceptions.ProjectException) JobException(io.hops.hopsworks.exceptions.JobException) GenericException(io.hops.hopsworks.exceptions.GenericException) KafkaException(io.hops.hopsworks.exceptions.KafkaException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) ProvenanceException(io.hops.hopsworks.exceptions.ProvenanceException) SchemaException(io.hops.hopsworks.exceptions.SchemaException) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces)
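The interesting part of this example is its resource-handling shape: create the project, do the filesystem work, clean up the half-created project on any failure, and always close both DFS handles. Below is a minimal, self-contained distillation of that shape; ProjectOps, FsOps and FsOpsFactory are hypothetical stand-ins for ProjectController and DistributedFileSystemOps, used only for illustration.

public class ProvisionSketch {

    interface FsOps { void close(); }

    interface FsOpsFactory {
        FsOps superUserOps();          // stand-in for dfs.getDfsOps()
        FsOps userOps(String user);    // stand-in for dfs.getDfsOps(username)
    }

    interface ProjectOps {
        Object create() throws Exception;                 // may fail after partial work
        void cleanup(Object project, String sessionId);   // undoes partial state
    }

    void provision(ProjectOps projects, FsOpsFactory dfs, String sessionId) throws Exception {
        Object project = null;
        FsOps dfso = null;
        FsOps udfso = null;
        try {
            project = projects.create();
            dfso = dfs.superUserOps();
            udfso = dfs.userOps("demo_user");
            // ... add tour files, generate the README, etc. ...
        } catch (Exception ex) {
            // roll back whatever was created, then let the caller see the original error
            projects.cleanup(project, sessionId);
            throw ex;
        } finally {
            // release both handles regardless of success or failure
            if (dfso != null) { dfso.close(); }
            if (udfso != null) { udfso.close(); }
        }
    }
}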

Example 13 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method checkFileExists:

/**
 * Checks if a path exists. Requires read access to the path.
 * @param filePath the path to check
 * @param username the HDFS username used to access the path
 * @throws DatasetException if the path cannot be accessed or does not exist
 */
public void checkFileExists(Path filePath, String username) throws DatasetException {
    DistributedFileSystemOps udfso = null;
    boolean exist;
    try {
        udfso = dfs.getDfsOps(username);
        exist = udfso.exists(filePath);
    } catch (AccessControlException ae) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_ACCESS_PERMISSION_DENIED, Level.FINE, "path: " + filePath.toString(), ae.getMessage(), ae);
    } catch (IOException ex) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE, "path: " + filePath.toString(), ex.getMessage(), ex);
    } finally {
        dfs.closeDfsClient(udfso);
    }
    if (!exist) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE, "path: " + filePath.toString());
    }
}
Also used : DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
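A hedged sketch of how a caller might use checkFileExists to validate input before starting work; the path, logger and surrounding class are placeholders, and the only assumed behaviour is what the method above shows: it throws a DatasetException for a missing or unreadable path.

import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.hadoop.fs.Path;
// package of DatasetController assumed from the project layout
import io.hops.hopsworks.common.dataset.DatasetController;
import io.hops.hopsworks.exceptions.DatasetException;

public class InputValidationSketch {
    private static final Logger LOGGER = Logger.getLogger(InputValidationSketch.class.getName());

    // datasetController and hdfsUsername are assumed to be injected/resolved by the caller.
    boolean isReadable(DatasetController datasetController, String hdfsUsername) {
        Path inputPath = new Path("/Projects/demo/Resources/data.csv"); // placeholder path
        try {
            datasetController.checkFileExists(inputPath, hdfsUsername);
            return true; // path exists and is readable by hdfsUsername
        } catch (DatasetException e) {
            // INODE_NOT_FOUND or DATASET_ACCESS_PERMISSION_DENIED end up here; a REST
            // resource would normally let this propagate to the exception mapper instead.
            LOGGER.log(Level.FINE, "Input validation failed: " + e.getMessage(), e);
            return false;
        }
    }
}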

Example 14 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method updateSharePermission:

public void updateSharePermission(Dataset ds, DatasetAccessPermission datasetPermissions, Project project, String targetProjectName, Users user, DistributedFileSystemOps dfso) throws DatasetException, ProjectException {
    if (ds.isShared(project)) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OWNER_ERROR, Level.FINE);
    }
    if (ds.isPublicDs()) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_PUBLIC_IMMUTABLE, Level.FINE);
    }
    Project targetProject = projectFacade.findByName(targetProjectName);
    if (targetProject == null) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "Target project not " + "found.");
    }
    DatasetSharedWith datasetSharedWith = datasetSharedWithFacade.findByProjectAndDataset(targetProject, ds);
    if (datasetSharedWith == null) {
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_SHARED_WITH_PROJECT, Level.FINE, "project: " + targetProject.getName());
    }
    PermissionTransition permissionTransition = PermissionTransition.valueOf(datasetSharedWith.getPermission(), datasetPermissions);
    updateSharePermission(datasetSharedWith, permissionTransition, project, user, dfso);
}
Also used : ProjectException(io.hops.hopsworks.exceptions.ProjectException) Project(io.hops.hopsworks.persistence.entity.project.Project) DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) PermissionTransition(io.hops.hopsworks.persistence.entity.dataset.PermissionTransition) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
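Across Examples 11 to 15 the DatasetException constructor appears in three shapes: error code plus log level, the same plus a developer/debug message, and the full form with a user message and cause. The sketch below only collects those call shapes in one place; the RESTCodes import path is an assumption (it is not shown in the "Also used" lists above) and the method is purely illustrative.

import java.util.logging.Level;
import io.hops.hopsworks.exceptions.DatasetException;
// RESTCodes package assumed
import io.hops.hopsworks.restutils.RESTCodes;

public class DatasetExceptionShapesSketch {

    // Which shape to use depends on how much context should reach the logs
    // and the REST error body.
    DatasetException shape(int which, Throwable cause) {
        switch (which) {
            case 1: // error code + log level only (Example 14)
                return new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OWNER_ERROR, Level.FINE);
            case 2: // plus a debug message (Examples 11 and 14)
                return new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NOT_FOUND, Level.FINE,
                    "inodeId: 4711");
            default: // plus a user message and the underlying cause (Example 13)
                return new DatasetException(RESTCodes.DatasetErrorCode.INODE_NOT_FOUND, Level.FINE,
                    "path: /Projects/demo/Resources/data.csv", cause.getMessage(), cause);
        }
    }
}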

Example 15 with DatasetException

Use of io.hops.hopsworks.exceptions.DatasetException in project hopsworks by logicalclocks.

In class DatasetController, the method acceptShared:

public void acceptShared(Project project, Users user, DatasetSharedWith datasetSharedWith) throws DatasetException {
    acceptSharedDs(user, datasetSharedWith);
    if (DatasetType.FEATURESTORE.equals(datasetSharedWith.getDataset().getDsType())) {
        DatasetSharedWith trainingDataset = getOrCreateSharedTrainingDataset(project, datasetSharedWith.getDataset().getProject(), datasetSharedWith.getPermission(), datasetSharedWith.getSharedBy());
        if (trainingDataset != null && !trainingDataset.getAccepted()) {
            try {
                acceptSharedDs(user, trainingDataset);
            } catch (DatasetException de) {
            // Dataset not shared or already accepted nothing to do
            }
        }
        // If we migrate Training Datasets to remove the project prefix, these methods can be reused
        acceptSharedFeatureStoreServiceDataset(project, datasetSharedWith, datasetSharedWith.getPermission(), datasetSharedWith.getSharedBy(), user, Settings.ServiceDataset.STATISTICS);
    }
}
Also used : DatasetSharedWith(io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith) DatasetException(io.hops.hopsworks.exceptions.DatasetException)
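The nested try/catch here is a best-effort step: accepting the implicitly shared training dataset may legitimately fail if it was already accepted, so the exception is swallowed. A minimal standalone rendering of that idea is sketched below; AcceptAction and acceptQuietly are hypothetical and not part of the Hopsworks API.

import io.hops.hopsworks.exceptions.DatasetException;

public class BestEffortAcceptSketch {

    @FunctionalInterface
    interface AcceptAction {
        void run() throws DatasetException;
    }

    /** Runs the action, treating a DatasetException as "not shared or already accepted". */
    static void acceptQuietly(AcceptAction action) {
        try {
            action.run();
        } catch (DatasetException de) {
            // nothing to do: mirrors the empty catch block in acceptShared above
        }
    }
}

In the method above this would correspond to acceptQuietly(() -> acceptSharedDs(user, trainingDataset)).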

Aggregations

DatasetException (io.hops.hopsworks.exceptions.DatasetException): 61
IOException (java.io.IOException): 25
Project (io.hops.hopsworks.persistence.entity.project.Project): 23
Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset): 21
DistributedFileSystemOps (io.hops.hopsworks.common.hdfs.DistributedFileSystemOps): 18
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 12
DatasetSharedWith (io.hops.hopsworks.persistence.entity.dataset.DatasetSharedWith): 12
Produces (javax.ws.rs.Produces): 12
Inode (io.hops.hopsworks.persistence.entity.hdfs.inode.Inode): 11
DatasetPath (io.hops.hopsworks.common.dataset.util.DatasetPath): 10
HopsSecurityException (io.hops.hopsworks.exceptions.HopsSecurityException): 10
Users (io.hops.hopsworks.persistence.entity.user.Users): 10
Path (javax.ws.rs.Path): 10
Path (org.apache.hadoop.fs.Path): 10
GenericException (io.hops.hopsworks.exceptions.GenericException): 9
AccessControlException (org.apache.hadoop.security.AccessControlException): 8
ProvenanceException (io.hops.hopsworks.exceptions.ProvenanceException): 7
AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles): 6
ServiceException (io.hops.hopsworks.exceptions.ServiceException): 6
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 6