Example 31 with GenericException

Use of io.hops.hopsworks.exceptions.GenericException in project hopsworks by logicalclocks.

The class ProvUsageBuilder, method buildAccessible.

public ProvArtifactUsageParentDTO buildAccessible(UriInfo uriInfo, Users user, DatasetPath targetEndpoint,
    String artifactId, Set<ProvUsageType> type)
    throws ProvenanceException, GenericException, DatasetException, MetadataException, SchematizedTagException {
    if (!accessController.hasAccess(targetEndpoint.getAccessProject(), targetEndpoint.getDataset())) {
        throw new GenericException(RESTCodes.GenericErrorCode.NOT_AUTHORIZED_TO_ACCESS, Level.FINE);
    }
    ProvArtifactUsageParentDTO usage = new ProvArtifactUsageParentDTO();
    usage.setArtifactId(artifactId);
    DatasetDTO datasetDTO = datasetBuilder.build(uriInfo, new ResourceRequest(ResourceRequest.Name.DATASET), user, targetEndpoint);
    usage.setDataset(datasetDTO);
    usage.setProjectId(targetEndpoint.getDataset().getProject().getId());
    usage.setProjectName(targetEndpoint.getDataset().getProject().getName());
    ProvOpsParamBuilder params = getBasicUsageOpsParams(targetEndpoint.getDataset(), artifactId);
    ProvOpsDTO ops = opsBuilder.build(targetEndpoint.getDataset().getProject(), params, ProvOpsReturnType.AGGREGATIONS);
    Optional<ProvOpsDTO> aggregation = ops.getItems().stream()
        .filter(agg -> agg.getAggregation() != null
            && agg.getAggregation().equals(ProvOpsAggregations.APP_USAGE.toString()))
        .findFirst();
    if (!aggregation.isPresent()) {
        return usage;
    }
    Optional<ProvOpsDTO> artifact = aggregation.get().getItems().stream()
        .filter(art -> art.getMlId().equals(artifactId))
        .findFirst();
    if (!artifact.isPresent()) {
        return usage;
    }
    for (ProvUsageType t : type) {
        switch(t) {
            case READ_CURRENT:
                usage.setReadCurrent(usage(uriInfo, artifact.get(), Provenance.FileOps.ACCESS_DATA, true));
                break;
            case WRITE_CURRENT:
                usage.setWriteCurrent(usage(uriInfo, artifact.get(), Provenance.FileOps.MODIFY_DATA, true));
                break;
            case READ_LAST:
                lastUsage(uriInfo, artifact.get(), Provenance.FileOps.ACCESS_DATA).ifPresent(usage::setReadLast);
                break;
            case WRITE_LAST:
                lastUsage(uriInfo, artifact.get(), Provenance.FileOps.MODIFY_DATA).ifPresent(usage::setWriteLast);
                break;
            case READ_HISTORY:
                usage.setReadHistory(usage(uriInfo, artifact.get(), Provenance.FileOps.ACCESS_DATA, false));
                break;
            case WRITE_HISTORY:
                usage.setWriteHistory(usage(uriInfo, artifact.get(), Provenance.FileOps.MODIFY_DATA, false));
                break;
        }
    }
    return usage;
}
Also used : DatasetDTO(io.hops.hopsworks.api.dataset.DatasetDTO) UserFacade(io.hops.hopsworks.common.dao.user.UserFacade) ProvOps(io.hops.hopsworks.common.provenance.ops.ProvOps) AccessController(io.hops.hopsworks.common.util.AccessController) ExecutionFacade(io.hops.hopsworks.common.dao.jobhistory.ExecutionFacade) ExecutionsBuilder(io.hops.hopsworks.api.jobs.executions.ExecutionsBuilder) HdfsUsersFacade(io.hops.hopsworks.common.dao.hdfsUser.HdfsUsersFacade) Provenance(io.hops.hopsworks.common.provenance.core.Provenance) HdfsUsers(io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers) Execution(io.hops.hopsworks.persistence.entity.jobs.history.Execution) Level(java.util.logging.Level) UsersBuilder(io.hops.hopsworks.api.user.UsersBuilder) TransactionAttributeType(javax.ejb.TransactionAttributeType) MetadataException(io.hops.hopsworks.exceptions.MetadataException) TransactionAttribute(javax.ejb.TransactionAttribute) ProvOpsAggregations(io.hops.hopsworks.common.provenance.ops.ProvOpsAggregations) ResourceRequest(io.hops.hopsworks.common.api.ResourceRequest) UserDTO(io.hops.hopsworks.api.user.UserDTO) ProvenanceException(io.hops.hopsworks.exceptions.ProvenanceException) JobsBuilder(io.hops.hopsworks.api.jobs.JobsBuilder) EJB(javax.ejb.EJB) ExecutionDTO(io.hops.hopsworks.api.jobs.executions.ExecutionDTO) Stateless(javax.ejb.Stateless) Longs(com.google.common.primitives.Longs) ProvArtifactUsageParentDTO(io.hops.hopsworks.api.provenance.ops.dto.ProvArtifactUsageParentDTO) ProvOpsDTO(io.hops.hopsworks.common.provenance.ops.dto.ProvOpsDTO) DatasetException(io.hops.hopsworks.exceptions.DatasetException) Set(java.util.Set) ProvArtifactUsageDTO(io.hops.hopsworks.api.provenance.ops.dto.ProvArtifactUsageDTO) RESTCodes(io.hops.hopsworks.restutils.RESTCodes) Collectors(java.util.stream.Collectors) Pair(org.javatuples.Pair) ProvOpsParamBuilder(io.hops.hopsworks.common.provenance.ops.ProvOpsParamBuilder) List(java.util.List) ProvUsageType(io.hops.hopsworks.common.provenance.ops.ProvUsageType) Dataset(io.hops.hopsworks.persistence.entity.dataset.Dataset) GenericException(io.hops.hopsworks.exceptions.GenericException) ProvOpsReturnType(io.hops.hopsworks.common.provenance.ops.ProvOpsReturnType) DatasetPath(io.hops.hopsworks.common.dataset.util.DatasetPath) JobDTO(io.hops.hopsworks.api.jobs.JobDTO) Optional(java.util.Optional) UriInfo(javax.ws.rs.core.UriInfo) Users(io.hops.hopsworks.persistence.entity.user.Users) Comparator(java.util.Comparator) DatasetBuilder(io.hops.hopsworks.api.dataset.DatasetBuilder) SchematizedTagException(io.hops.hopsworks.exceptions.SchematizedTagException)
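
A minimal, hedged caller sketch for buildAccessible. The usageBuilder field, LOGGER, and the helper name usageOrNull are illustrative assumptions rather than hopsworks source; the ProvUsageType values are taken from the switch above.

// Hypothetical caller; usageBuilder is assumed to be an injected ProvUsageBuilder.
public ProvArtifactUsageParentDTO usageOrNull(UriInfo uriInfo, Users user, DatasetPath targetEndpoint, String artifactId) {
    try {
        return usageBuilder.buildAccessible(uriInfo, user, targetEndpoint, artifactId,
            EnumSet.of(ProvUsageType.READ_CURRENT, ProvUsageType.WRITE_LAST));
    } catch (GenericException e) {
        // NOT_AUTHORIZED_TO_ACCESS: the user cannot access the target dataset
        LOGGER.log(Level.FINE, "Not authorized to read usage of artifact {0}", artifactId);
        return null;
    } catch (ProvenanceException | DatasetException | MetadataException | SchematizedTagException e) {
        LOGGER.log(Level.WARNING, "Failed to build artifact usage", e);
        return null;
    }
}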

Example 32 with GenericException

Use of io.hops.hopsworks.exceptions.GenericException in project hopsworks by logicalclocks.

The class ProjectController, method forceCleanup.

public String[] forceCleanup(String projectName, String userEmail, String sessionId) {
    CleanupLogger cleanupLogger = new CleanupLogger(projectName);
    DistributedFileSystemOps dfso = null;
    YarnClientWrapper yarnClientWrapper = null;
    try {
        dfso = dfs.getDfsOps();
        yarnClientWrapper = ycs.getYarnClientSuper(settings.getConfiguration());
        Project project = projectFacade.findByName(projectName);
        if (project != null) {
            cleanupLogger.logSuccess("Project found in the database");
            // Run custom handlers for project deletion
            try {
                ProjectHandler.runProjectPreDeleteHandlers(projectHandlers, project);
                cleanupLogger.logSuccess("Handlers successfully run");
            } catch (ProjectException e) {
                cleanupLogger.logError("Error running handlers during project cleanup");
                cleanupLogger.logError(e.getMessage());
            }
            // Remove from Project team
            try {
                updateProjectTeamRole(project, ProjectRoleTypes.UNDER_REMOVAL);
                cleanupLogger.logSuccess("Updated team role");
            } catch (Exception ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Get Yarn applications
            List<ApplicationReport> projectApps = null;
            try {
                Collection<ProjectTeam> team = project.getProjectTeamCollection();
                Set<String> hdfsUsers = new HashSet<>();
                for (ProjectTeam pt : team) {
                    String hdfsUsername = hdfsUsersController.getHdfsUserName(project, pt.getUser());
                    hdfsUsers.add(hdfsUsername);
                }
                projectApps = getYarnApplications(hdfsUsers, yarnClientWrapper.getYarnClient());
                cleanupLogger.logSuccess("Gotten Yarn applications");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when reading YARN apps during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Kill Yarn Jobs
            try {
                killYarnJobs(project);
                cleanupLogger.logSuccess("Killed Yarn jobs");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when killing YARN jobs during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // jupyter notebook server and sessions
            try {
                removeJupyter(project);
                cleanupLogger.logSuccess("Removed Jupyter");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing Anaconda during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Wait for Yarn logs
            try {
                waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient());
                cleanupLogger.logSuccess("Gotten logs for jobs");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when getting Yarn logs during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Log removal
            try {
                logProject(project, OperationType.Delete);
                cleanupLogger.logSuccess("Logged project removal");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when logging project removal during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Change ownership of root dir
            try {
                Path path = new Path(Utils.getProjectPath(project.getName()));
                changeOwnershipToSuperuser(path, dfso);
                cleanupLogger.logSuccess("Changed ownership of root Project dir");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when changing ownership of root Project dir during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Change ownership of tmp file
            Path dummy = new Path("/tmp/" + project.getName());
            try {
                changeOwnershipToSuperuser(dummy, dfso);
                cleanupLogger.logSuccess("Changed ownership of dummy inode");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when changing ownership of dummy inode during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove Kafka
            try {
                removeKafkaTopics(project);
                cleanupLogger.logSuccess("Removed Kafka topics");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing kafka topics during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove certificates
            try {
                certificatesController.revokeProjectCertificates(project);
                cleanupLogger.logSuccess("Removed certificates");
            } catch (HopsSecurityException ex) {
                if (ex.getErrorCode() != RESTCodes.SecurityErrorCode.CERTIFICATE_NOT_FOUND) {
                    cleanupLogger.logError("Error when removing certificates during project cleanup");
                    cleanupLogger.logError(ex.getMessage());
                }
            } catch (IOException | GenericException ex) {
                cleanupLogger.logError("Error when removing certificates during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            List<HdfsUsers> usersToClean = getUsersToClean(project);
            List<HdfsGroups> groupsToClean = getGroupsToClean(project);
            // Remove project related files
            try {
                removeProjectRelatedFiles(usersToClean, dfso);
                cleanupLogger.logSuccess("Removed project related files");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing project-related files during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove quotas
            try {
                removeQuotas(project);
                cleanupLogger.logSuccess("Removed quotas");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing quota during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Change owner for files in shared datasets
            try {
                fixSharedDatasets(project, dfso);
                cleanupLogger.logSuccess("Fixed shared datasets");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when changing ownership during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Delete Hive database - this will automatically clean up all of Hive's metadata
            try {
                hiveController.dropDatabases(project, dfso, true);
                cleanupLogger.logSuccess("Removed Hive db");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing hive db during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Delete elasticsearch template for this project
            try {
                removeElasticsearch(project);
                cleanupLogger.logSuccess("Removed ElasticSearch");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing elastic during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // delete project group and users
            try {
                removeGroupAndUsers(groupsToClean, usersToClean);
                cleanupLogger.logSuccess("Removed HDFS Groups and Users");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing HDFS groups/users during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // remove running TensorBoard instances
            try {
                removeTensorBoard(project);
                cleanupLogger.logSuccess("Removed local TensorBoards");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing running TensorBoards during project cleanup");
            }
            try {
                servingController.deleteAll(project);
                cleanupLogger.logSuccess("Removed servings");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing serving instances");
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove project DAGs, JWT monitors and free X.509 certificates
            try {
                airflowManager.onProjectRemoval(project);
                cleanupLogger.logSuccess("Removed Airflow DAGs and security references");
            } catch (Exception ex) {
                cleanupLogger.logError("Error while cleaning Airflow DAGs and security references");
                cleanupLogger.logError(ex.getMessage());
            }
            try {
                removeCertificatesFromMaterializer(project);
                cleanupLogger.logSuccess("Removed all X.509 certificates related to the Project from " + "CertificateMaterializer");
            } catch (Exception ex) {
                cleanupLogger.logError("Error while force removing Project certificates from CertificateMaterializer");
                cleanupLogger.logError(ex.getMessage());
            }
            // remove conda envs
            try {
                removeAnacondaEnv(project);
                cleanupLogger.logSuccess("Removed conda envs");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing conda envs during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // remove dummy Inode
            try {
                dfso.rm(dummy, true);
                cleanupLogger.logSuccess("Removed dummy Inode");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing dummy Inode during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // remove folder
            try {
                removeProjectFolder(project.getName(), dfso);
                cleanupLogger.logSuccess("Removed root Project folder");
            } catch (Exception ex) {
                cleanupLogger.logError("Error when removing root Project dir during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            try {
                removeAlertConfigs(project);
                cleanupLogger.logSuccess("Cleaning alert manager config from project");
            } catch (Exception ex) {
                cleanupLogger.logError("Error cleaning alert manager config during project cleanup");
                cleanupLogger.logError(ex.getMessage());
            }
            // Run custom handlers for project deletion
            try {
                ProjectHandler.runProjectPostDeleteHandlers(projectHandlers, project);
                cleanupLogger.logSuccess("Handlers successfully run");
            } catch (ProjectException e) {
                cleanupLogger.logError("Error running handlers during project cleanup");
                cleanupLogger.logError(e.getMessage());
            }
        } else {
            // Create /tmp/Project and add to database so we lock in case someone tries to create a Project
            // with the same name at the same time
            cleanupLogger.logSuccess("Project is *NOT* in the database, going to remove as much as possible");
            Date now = DateUtils.localDateTime2Date(DateUtils.getNow());
            Users user = userFacade.findByEmail(userEmail);
            Project toDeleteProject = new Project(projectName, user, now, settings.getDefaultPaymentType());
            toDeleteProject.setKafkaMaxNumTopics(settings.getKafkaMaxNumTopics());
            Path tmpInodePath = new Path(File.separator + "tmp" + File.separator + projectName);
            try {
                if (!dfso.exists(tmpInodePath.toString())) {
                    dfso.touchz(tmpInodePath);
                }
                Inode tmpInode = inodeController.getInodeAtPath(tmpInodePath.toString());
                if (tmpInode != null) {
                    toDeleteProject.setInode(tmpInode);
                    projectFacade.persistProject(toDeleteProject);
                    projectFacade.flushEm();
                    cleanupLogger.logSuccess("Created dummy Inode");
                }
            } catch (IOException ex) {
                cleanupLogger.logError("Could not create dummy Inode, moving on unsafe");
            }
            // Kill jobs
            List<HdfsUsers> projectHdfsUsers = hdfsUsersController.getAllProjectHdfsUsers(projectName);
            try {
                Set<String> hdfsUsersStr = new HashSet<>();
                for (HdfsUsers hdfsUser : projectHdfsUsers) {
                    hdfsUsersStr.add(hdfsUser.getName());
                }
                List<ApplicationReport> projectApps = getYarnApplications(hdfsUsersStr, yarnClientWrapper.getYarnClient());
                waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient());
                cleanupLogger.logSuccess("Killed all Yarn Applications");
            } catch (Exception ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove project related files
            try {
                removeProjectRelatedFiles(projectHdfsUsers, dfso);
                cleanupLogger.logSuccess("Removed project related files from HDFS");
            } catch (IOException ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove Hive database
            try {
                hiveController.dropDatabases(toDeleteProject, dfso, true);
                cleanupLogger.logSuccess("Dropped Hive database");
            } catch (IOException ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove ElasticSearch index
            try {
                removeElasticsearch(toDeleteProject);
                cleanupLogger.logSuccess("Removed ElasticSearch");
            } catch (Exception ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove HDFS Groups and Users
            try {
                List<HdfsGroups> projectHdfsGroups = hdfsUsersController.getAllProjectHdfsGroups(projectName);
                removeGroupAndUsers(projectHdfsGroups, projectHdfsUsers);
                cleanupLogger.logSuccess("Removed HDFS Groups and Users");
            } catch (IOException ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove Yarn project quota
            try {
                removeQuotas(toDeleteProject);
                cleanupLogger.logSuccess("Removed project quota");
            } catch (Exception ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            List<ProjectTeam> reconstructedProjectTeam = new ArrayList<>();
            try {
                for (HdfsUsers hdfsUser : hdfsUsersController.getAllProjectHdfsUsers(projectName)) {
                    Users foundUser = userFacade.findByUsername(hdfsUser.getUsername());
                    if (foundUser != null) {
                        reconstructedProjectTeam.add(new ProjectTeam(toDeleteProject, foundUser));
                    }
                }
            } catch (Exception ex) {
            // NOOP
            }
            toDeleteProject.setProjectTeamCollection(reconstructedProjectTeam);
            try {
                airflowManager.onProjectRemoval(toDeleteProject);
                cleanupLogger.logSuccess("Removed Airflow DAGs and security references");
            } catch (Exception ex) {
                cleanupLogger.logError("Failed to remove Airflow DAGs and security references");
                cleanupLogger.logError(ex.getMessage());
            }
            try {
                removeCertificatesFromMaterializer(toDeleteProject);
                cleanupLogger.logSuccess("Freed all x.509 references from CertificateMaterializer");
            } catch (Exception ex) {
                cleanupLogger.logError("Failed to free all X.509 references from CertificateMaterializer");
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove Certificates
            try {
                certificatesController.revokeProjectCertificates(toDeleteProject);
                userCertsFacade.removeAllCertsOfAProject(projectName);
                cleanupLogger.logSuccess("Deleted certificates");
            } catch (HopsSecurityException | GenericException | IOException ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove root project directory
            try {
                removeProjectFolder(projectName, dfso);
                cleanupLogger.logSuccess("Removed root project directory");
            } catch (IOException ex) {
                cleanupLogger.logError(ex.getMessage());
            }
            // Remove /tmp/project
            try {
                dfso.rm(new Path(File.separator + "tmp" + File.separator + projectName), true);
                cleanupLogger.logSuccess("Removed /tmp");
            } catch (IOException ex) {
                cleanupLogger.logError(ex.getMessage());
            }
        }
    } finally {
        dfs.closeDfsClient(dfso);
        ycs.closeYarnClient(yarnClientWrapper);
        LOGGER.log(Level.INFO, cleanupLogger.getSuccessLog().toString());
        String errorLog = cleanupLogger.getErrorLog().toString();
        if (!errorLog.isEmpty()) {
            LOGGER.log(Level.SEVERE, errorLog);
        }
        sendInbox(cleanupLogger.getSuccessLog().append("\n").append(cleanupLogger.getErrorLog()).append("\n").toString(), userEmail);
    }
    String[] logs = new String[2];
    logs[0] = cleanupLogger.getSuccessLog().toString();
    logs[1] = cleanupLogger.getErrorLog().toString();
    return logs;
}
Also used : ArrayList(java.util.ArrayList) HdfsUsers(io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers) Users(io.hops.hopsworks.persistence.entity.user.Users) GenericException(io.hops.hopsworks.exceptions.GenericException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) ProjectException(io.hops.hopsworks.exceptions.ProjectException) ProjectTeam(io.hops.hopsworks.persistence.entity.project.team.ProjectTeam) YarnClientWrapper(io.hops.hopsworks.common.yarn.YarnClientWrapper) HashSet(java.util.HashSet) Path(org.apache.hadoop.fs.Path) DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) IOException(java.io.IOException) TensorBoardException(io.hops.hopsworks.exceptions.TensorBoardException) DatasetException(io.hops.hopsworks.exceptions.DatasetException) EJBException(javax.ejb.EJBException) AlertException(io.hops.hopsworks.exceptions.AlertException) PythonException(io.hops.hopsworks.exceptions.PythonException) FeaturestoreException(io.hops.hopsworks.exceptions.FeaturestoreException) RESTException(io.hops.hopsworks.restutils.RESTException) SQLException(java.sql.SQLException) ElasticException(io.hops.hopsworks.exceptions.ElasticException) AlertManagerConfigUpdateException(io.hops.hopsworks.alerting.exceptions.AlertManagerConfigUpdateException) ServiceException(io.hops.hopsworks.exceptions.ServiceException) UserException(io.hops.hopsworks.exceptions.UserException) ExecutionException(java.util.concurrent.ExecutionException) ServingException(io.hops.hopsworks.exceptions.ServingException) AlertManagerResponseException(io.hops.hopsworks.alerting.exceptions.AlertManagerResponseException) CryptoPasswordNotFoundException(io.hops.hopsworks.exceptions.CryptoPasswordNotFoundException) AlertManagerUnreachableException(io.hops.hopsworks.alert.exception.AlertManagerUnreachableException) AlertManagerConfigReadException(io.hops.hopsworks.alerting.exceptions.AlertManagerConfigReadException) ServiceDiscoveryException(com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException) JobException(io.hops.hopsworks.exceptions.JobException) AlertManagerConfigCtrlCreateException(io.hops.hopsworks.alerting.exceptions.AlertManagerConfigCtrlCreateException) KafkaException(io.hops.hopsworks.exceptions.KafkaException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) ProvenanceException(io.hops.hopsworks.exceptions.ProvenanceException) AlertManagerClientCreateException(io.hops.hopsworks.alerting.exceptions.AlertManagerClientCreateException) SchemaException(io.hops.hopsworks.exceptions.SchemaException) Date(java.util.Date) ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) JupyterProject(io.hops.hopsworks.persistence.entity.jupyter.JupyterProject) Project(io.hops.hopsworks.persistence.entity.project.Project) Inode(io.hops.hopsworks.persistence.entity.hdfs.inode.Inode) HdfsGroups(io.hops.hopsworks.persistence.entity.hdfs.user.HdfsGroups)
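
A hedged sketch of how an admin-side caller might consume forceCleanup's return value; the projectController handle and LOGGER are assumptions. Note that forceCleanup declares no checked exceptions: failures are accumulated in the error log instead.

// Hypothetical caller; logs[0] is the success log and logs[1] the error log,
// matching the method's return statement above.
String[] logs = projectController.forceCleanup(projectName, adminEmail, sessionId);
if (!logs[1].isEmpty()) {
    LOGGER.log(Level.WARNING, "Cleanup of project {0} finished with errors:\n{1}",
        new Object[] { projectName, logs[1] });
}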

Example 33 with GenericException

Use of io.hops.hopsworks.exceptions.GenericException in project hopsworks by logicalclocks.

The class ProjectController, method removeMemberFromTeam.

public void removeMemberFromTeam(Project project, Users userToBeRemoved)
    throws ProjectException, ServiceException, IOException, GenericException, JobException,
    HopsSecurityException, TensorBoardException, FeaturestoreException {
    ProjectTeam projectTeam = projectTeamFacade.findProjectTeam(project, userToBeRemoved);
    if (projectTeam == null) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.TEAM_MEMBER_NOT_FOUND, Level.FINE, "project: " + project + ", user: " + userToBeRemoved.getEmail());
    }
    // Not able to remove project owner regardless of who is trying to remove the member
    if (project.getOwner().equals(userToBeRemoved)) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_OWNER_NOT_ALLOWED, Level.FINE);
    }
    projectTeamFacade.removeProjectTeam(project, userToBeRemoved);
    String hdfsUser = hdfsUsersController.getHdfsUserName(project, userToBeRemoved);
    YarnClientWrapper yarnClientWrapper = ycs.getYarnClientSuper(settings.getConfiguration());
    YarnClient client = yarnClientWrapper.getYarnClient();
    try {
        Set<String> hdfsUsers = new HashSet<>();
        hdfsUsers.add(hdfsUser);
        List<ApplicationReport> projectsApps = client.getApplications(null, hdfsUsers, null,
            EnumSet.of(YarnApplicationState.ACCEPTED, YarnApplicationState.NEW, YarnApplicationState.NEW_SAVING,
                YarnApplicationState.RUNNING, YarnApplicationState.SUBMITTED));
        // kill jupyter for this user
        JupyterProject jupyterProject = jupyterFacade.findByUser(hdfsUser);
        if (jupyterProject != null) {
            jupyterController.shutdown(project, hdfsUser, userToBeRemoved, jupyterProject.getSecret(), jupyterProject.getCid(), jupyterProject.getPort());
        }
        // kill running TB if any
        tensorBoardController.cleanup(project, userToBeRemoved);
        // kill all jobs run by this user
        List<Jobs> running = jobFacade.getRunningJobs(project, hdfsUser);
        if (running != null && !running.isEmpty()) {
            for (Jobs job : running) {
                executionController.stop(job);
            }
        }
        // wait for log aggregation of the jobs to finish
        for (ApplicationReport appReport : projectsApps) {
            FinalApplicationStatus finalState = appReport.getFinalApplicationStatus();
            while (finalState.equals(FinalApplicationStatus.UNDEFINED)) {
                client.killApplication(appReport.getApplicationId());
                appReport = client.getApplicationReport(appReport.getApplicationId());
                finalState = appReport.getFinalApplicationStatus();
            }
            YarnLogUtil.waitForLogAggregation(client, appReport.getApplicationId());
        }
    } catch (YarnException | IOException | InterruptedException e) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.KILL_MEMBER_JOBS, Level.SEVERE, "project: " + project + ", user: " + userToBeRemoved, e.getMessage(), e);
    } finally {
        ycs.closeYarnClient(yarnClientWrapper);
    }
    // trigger project team role remove handlers
    ProjectTeamRoleHandler.runProjectTeamRoleRemoveMembersHandlers(projectTeamRoleHandlers, project, Collections.singletonList(userToBeRemoved));
    // Revoke privileges for online feature store
    if (projectServiceFacade.isServiceEnabledForProject(project, ProjectServiceEnum.FEATURESTORE)) {
        Featurestore featurestore = featurestoreController.getProjectFeaturestore(project);
        onlineFeaturestoreController.removeOnlineFeaturestoreUser(featurestore, userToBeRemoved);
    }
    kafkaController.removeProjectMemberFromTopics(project, userToBeRemoved);
    certificateMaterializer.forceRemoveLocalMaterial(userToBeRemoved.getUsername(), project.getName(), null, false);
    try {
        certificatesController.revokeUserSpecificCertificates(project, userToBeRemoved);
    } catch (HopsSecurityException ex) {
        if (ex.getErrorCode() != RESTCodes.SecurityErrorCode.CERTIFICATE_NOT_FOUND) {
            LOGGER.log(Level.SEVERE, "Could not delete certificates when removing member " + userToBeRemoved.getUsername() + " from project " + project.getName() + ". Manual cleanup is needed!!!", ex);
            throw ex;
        }
    } catch (IOException | GenericException ex) {
        LOGGER.log(Level.SEVERE, "Could not delete certificates when removing member " + userToBeRemoved.getUsername() + " from project " + project.getName() + ". Manual cleanup is needed!!!", ex);
        throw ex;
    }
    // TODO: projectTeam might be null?
    hdfsUsersController.removeMember(projectTeam);
}
Also used : FinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus) JupyterProject(io.hops.hopsworks.persistence.entity.jupyter.JupyterProject) IOException(java.io.IOException) GenericException(io.hops.hopsworks.exceptions.GenericException) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) ProjectException(io.hops.hopsworks.exceptions.ProjectException) ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) ProjectTeam(io.hops.hopsworks.persistence.entity.project.team.ProjectTeam) Featurestore(io.hops.hopsworks.persistence.entity.featurestore.Featurestore) Jobs(io.hops.hopsworks.persistence.entity.jobs.description.Jobs) YarnClientWrapper(io.hops.hopsworks.common.yarn.YarnClientWrapper) HashSet(java.util.HashSet)
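
A hedged caller sketch for removeMemberFromTeam; projectController, LOGGER, and the alertOperations helper are hypothetical, and the exception grouping mirrors the method's own certificate-handling catch blocks.

// Hypothetical caller; ProjectException (member not found, owner removal) is propagated.
public void tryRemoveMember(Project project, Users member) throws ProjectException {
    try {
        projectController.removeMemberFromTeam(project, member);
    } catch (HopsSecurityException | IOException | GenericException e) {
        // certificate revocation failed; the method has already logged that manual cleanup is needed
        alertOperations(project, member, e); // hypothetical notification helper
    } catch (ServiceException | JobException | TensorBoardException | FeaturestoreException e) {
        LOGGER.log(Level.WARNING, "Partial cleanup while removing member from " + project.getName(), e);
    }
}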

Example 34 with GenericException

Use of io.hops.hopsworks.exceptions.GenericException in project hopsworks by logicalclocks.

The class HopsworksJAXBContext, method marshal.

public <V> String marshal(V obj) throws GenericException {
    try {
        Marshaller marshaller = context.createMarshaller();
        StringWriter sw = new StringWriter();
        marshaller.marshal(obj, sw);
        return sw.toString();
    } catch (JAXBException e) {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.INFO, "jaxb marshal exception");
    }
}
Also used : Marshaller(javax.xml.bind.Marshaller) StringWriter(java.io.StringWriter) JAXBException(javax.xml.bind.JAXBException) GenericException(io.hops.hopsworks.exceptions.GenericException)
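
A minimal usage sketch for marshal; jaxbContext is assumed to be an injected HopsworksJAXBContext and MyDto a stand-in for any JAXB-annotated class.

// Hypothetical usage: serialize a DTO, falling back to an empty string on failure.
public String serializeOrEmpty(MyDto dto) {
    try {
        return jaxbContext.marshal(dto);
    } catch (GenericException e) {
        // GenericErrorCode.ILLEGAL_STATE: JAXB could not marshal the object graph
        return "";
    }
}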

Example 35 with GenericException

Use of io.hops.hopsworks.exceptions.GenericException in project hopsworks by logicalclocks.

The class HopsworksJAXBContext, method unmarshalList.

public <V> List<V> unmarshalList(String json, Class<V> type) throws GenericException {
    try {
        Unmarshaller unmarshaller = context.createUnmarshaller();
        StreamSource ss = new StreamSource(new StringReader(json));
        JAXBElement<V> e = unmarshaller.unmarshal(ss, type);
        // this cast is needed mainly because of JAXB's awkward interaction with Java generics
        return (List<V>) e.getValue();
    } catch (JAXBException e) {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_STATE, Level.INFO, "jaxb unmarshall exception");
    }
}
Also used : StreamSource(javax.xml.transform.stream.StreamSource) JAXBException(javax.xml.bind.JAXBException) StringReader(java.io.StringReader) List(java.util.List) Unmarshaller(javax.xml.bind.Unmarshaller) GenericException(io.hops.hopsworks.exceptions.GenericException)
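
A matching hedged sketch for unmarshalList; the payload parameter and MyDto are assumptions. The method's json parameter name suggests the underlying context is configured with a JSON-capable JAXB provider, but the sketch does not depend on that.

// Hypothetical usage: parse a serialized list, falling back to an empty list on failure.
public List<MyDto> parseItems(String payload) {
    try {
        return jaxbContext.unmarshalList(payload, MyDto.class);
    } catch (GenericException e) {
        // GenericErrorCode.ILLEGAL_STATE: the payload could not be unmarshalled into MyDto elements
        return Collections.emptyList();
    }
}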

Aggregations

GenericException (io.hops.hopsworks.exceptions.GenericException): 43
Users (io.hops.hopsworks.persistence.entity.user.Users): 17
Project (io.hops.hopsworks.persistence.entity.project.Project): 16
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 13
DatasetException (io.hops.hopsworks.exceptions.DatasetException): 12
ServiceException (io.hops.hopsworks.exceptions.ServiceException): 12
IOException (java.io.IOException): 12
Path (javax.ws.rs.Path): 11
ElasticException (io.hops.hopsworks.exceptions.ElasticException): 10
HopsSecurityException (io.hops.hopsworks.exceptions.HopsSecurityException): 10
JobException (io.hops.hopsworks.exceptions.JobException): 9
ProvenanceException (io.hops.hopsworks.exceptions.ProvenanceException): 9
Produces (javax.ws.rs.Produces): 9
Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset): 8
ArrayList (java.util.ArrayList): 8
TransactionAttribute (javax.ejb.TransactionAttribute): 8
ServiceDiscoveryException (com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException): 6
UserException (io.hops.hopsworks.exceptions.UserException): 6
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 6
HdfsUsers (io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers): 6