Use of io.hops.hopsworks.exceptions.HopsSecurityException in project hopsworks by logicalclocks.
The class DatasetController, method move.
public void move(Project project, Users user, Path sourcePath, Path destPath, Dataset sourceDataset, Dataset destDataset) throws DatasetException, HopsSecurityException {
  String username = hdfsUsersController.getHdfsUserName(project, user);
  if (!getOwningProject(sourceDataset).equals(destDataset.getProject())) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_FORBIDDEN, Level.FINE, "Cannot copy file/folder from another project.");
  }
  if (destDataset.isPublicDs()) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_FORBIDDEN, Level.FINE, "Can not move to a public dataset.");
  }
  DistributedFileSystemOps udfso = null;
  // We need super-user to change owner
  DistributedFileSystemOps dfso = null;
  try {
    // If a Data Scientist requested it, do it as project user to avoid deleting Data Owner files
    // Find project of dataset as it might be shared
    Project owning = getOwningProject(sourceDataset);
    boolean isMember = projectTeamFacade.isUserMemberOfProject(owning, user);
    if (isMember && projectTeamFacade.findCurrentRole(owning, user).equals(AllowedRoles.DATA_OWNER) && owning.equals(project)) {
      // do it as super user
      udfso = dfs.getDfsOps();
    } else {
      // do it as project user
      udfso = dfs.getDfsOps(username);
    }
    dfso = dfs.getDfsOps();
    if (udfso.exists(destPath.toString())) {
      throw new DatasetException(RESTCodes.DatasetErrorCode.DESTINATION_EXISTS, Level.FINE, "destination: " + destPath.toString());
    }
    // Get destination folder permissions
    FsPermission permission = udfso.getFileStatus(destPath.getParent()).getPermission();
    String group = udfso.getFileStatus(destPath.getParent()).getGroup();
    String owner = udfso.getFileStatus(sourcePath).getOwner();
    udfso.moveWithinHdfs(sourcePath, destPath);
    // Change permissions recursively
    recChangeOwnershipAndPermission(destPath, permission, owner, group, dfso, udfso);
  } catch (AccessControlException ex) {
    throw new HopsSecurityException(RESTCodes.SecurityErrorCode.HDFS_ACCESS_CONTROL, Level.FINE, "Operation: move, from: " + sourcePath.toString() + " to: " + destPath.toString());
  } catch (IOException ex) {
    throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_ERROR, Level.SEVERE, "move operation failed for: " + sourcePath.toString(), ex.getMessage(), ex);
  } finally {
    dfs.closeDfsClient(udfso);
    dfs.closeDfsClient(dfso);
  }
}
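A minimal caller sketch follows. It is not part of hopsworks: the class name DatasetMover and method tryMove are hypothetical, imports are omitted to match the excerpts on this page, and it uses only members visible above (DatasetController.move and the exceptions it declares). It shows one way a caller might treat the HDFS_ACCESS_CONTROL case as a recoverable refusal while letting DatasetException propagate.

// Hypothetical caller sketch; DatasetMover and tryMove are illustrative names, not hopsworks API.
public class DatasetMover {

  private final DatasetController datasetController;

  public DatasetMover(DatasetController datasetController) {
    this.datasetController = datasetController;
  }

  public boolean tryMove(Project project, Users user, Path src, Path dst, Dataset srcDataset, Dataset dstDataset) throws DatasetException {
    try {
      // Delegates to the controller method above; it validates ownership, performs the HDFS move
      // and fixes ownership/permissions recursively.
      datasetController.move(project, user, src, dst, srcDataset, dstDataset);
      return true;
    } catch (HopsSecurityException e) {
      // move maps HDFS AccessControlException to HDFS_ACCESS_CONTROL; treat it as "not permitted" here.
      return false;
    }
  }
}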
Use of io.hops.hopsworks.exceptions.HopsSecurityException in project hopsworks by logicalclocks.
The class StatisticsController, method deleteStatistics.
private void deleteStatistics(Project project, Users user, String entityName, String entitySubDir, Integer version) throws FeaturestoreException {
  DistributedFileSystemOps udfso = null;
  try {
    udfso = dfs.getDfsOps(hdfsUsersController.getHdfsUserName(project, user));
    String dirName = entityName + "_" + version;
    Dataset statistics = getOrCreateStatisticsDataset(project, user);
    // Construct the directory path
    Path subDir = new Path(datasetController.getDatasetPath(statistics), entitySubDir);
    Path dirPath = new Path(subDir, dirName);
    // delete json files
    udfso.rm(dirPath, true);
  } catch (DatasetException | HopsSecurityException | IOException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.ERROR_DELETING_STATISTICS, Level.WARNING, "", e.getMessage(), e);
  } finally {
    dfs.closeDfsClient(udfso);
  }
}
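The same acquire/use/close shape around DistributedFileSystemOps recurs in most of the methods on this page. The sketch below factors it into a hypothetical helper (withDfsOps and DfsAction are invented names, not hopsworks API); it assumes only the dfs.getDfsOps(String) and dfs.closeDfsClient(...) calls that appear in the excerpt above.

// Hypothetical helper sketch; withDfsOps and DfsAction are illustrative, not hopsworks API.
@FunctionalInterface
interface DfsAction<T> {
  T apply(DistributedFileSystemOps ops) throws Exception;
}

private <T> T withDfsOps(String hdfsUsername, DfsAction<T> action) throws Exception {
  DistributedFileSystemOps udfso = null;
  try {
    // Acquire per-user filesystem operations, run the action, and always release the client.
    udfso = dfs.getDfsOps(hdfsUsername);
    return action.apply(udfso);
  } finally {
    dfs.closeDfsClient(udfso);
  }
}

With such a helper, the try/finally boilerplate in deleteStatistics would collapse to a single call that supplies the deletion as the action, with the FeaturestoreException wrapping kept at the call site.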
Use of io.hops.hopsworks.exceptions.HopsSecurityException in project hopsworks by logicalclocks.
The class GitExecutionController, method createExecution.
/**
 * Initializes the execution of all git commands.
 *
 * @param gitCommandConfiguration configuration of the git command to execute
 * @param project the project owning the repository
 * @param hopsworksUser the user triggering the execution
 * @param repository the git repository the command operates on
 * @return the newly created GitOpExecution
 * @throws HopsSecurityException if the user certificates cannot be materialized
 * @throws GitOpException if the git operation cannot be started
 */
public GitOpExecution createExecution(GitCommandConfiguration gitCommandConfiguration, Project project, Users hopsworksUser, GitRepository repository) throws HopsSecurityException, GitOpException {
  String hdfsUsername = hdfsUsersController.getHdfsUserName(project, hopsworksUser);
  BasicAuthSecrets authSecrets = gitCommandOperationUtil.getAuthenticationSecrets(hopsworksUser, repository.getGitProvider());
  commandConfigurationValidator.validateProviderConfiguration(authSecrets, gitCommandConfiguration);
  String configSecret = DigestUtils.sha256Hex(Integer.toString(ThreadLocalRandom.current().nextInt()));
  lockRepository(repository.getId());
  GitOpExecution gitOpExecution = null;
  DistributedFileSystemOps udfso = null;
  try {
    udfso = dfsService.getDfsOps(hdfsUsername);
    GitPaths gitPaths = prepareCommandExecution(project, hopsworksUser, udfso, configSecret);
    gitOpExecution = gitOpExecutionFacade.create(gitCommandConfiguration, hopsworksUser, repository, configSecret);
    argumentsWriter.createArgumentFile(gitOpExecution, gitPaths, authSecrets);
    gitCommandExecutor.execute(gitOpExecution, gitPaths);
    return gitOpExecution;
  } catch (Exception ex) {
    gitRepositoryFacade.updateRepositoryCid(repository, null);
    gitCommandOperationUtil.cleanUp(project, hopsworksUser, configSecret);
    if (ex instanceof IOException) {
      throw new HopsSecurityException(RESTCodes.SecurityErrorCode.CERT_MATERIALIZATION_ERROR, Level.SEVERE, ex.getMessage(), null, ex);
    }
    throw new GitOpException(RESTCodes.GitOpErrorCode.GIT_OPERATION_ERROR, Level.SEVERE, ex.getMessage());
  } finally {
    if (udfso != null) {
      dfsService.closeDfsClient(udfso);
    }
  }
}
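A hedged caller sketch follows; the method name startExecutionOrFail is hypothetical and imports are omitted. It relies only on what the excerpt shows: createExecution reports certificate/IO problems as a HopsSecurityException with CERT_MATERIALIZATION_ERROR and other failures as a GitOpException, and the GitOpException constructor takes an error code, a level, and a message.

// Hypothetical caller sketch; startExecutionOrFail is an illustrative name, not hopsworks API.
public GitOpExecution startExecutionOrFail(GitCommandConfiguration config, Project project, Users user, GitRepository repository) throws GitOpException {
  try {
    return gitExecutionController.createExecution(config, project, user, repository);
  } catch (HopsSecurityException e) {
    // createExecution signals certificate materialization / IO failures this way (CERT_MATERIALIZATION_ERROR).
    throw new GitOpException(RESTCodes.GitOpErrorCode.GIT_OPERATION_ERROR, Level.SEVERE, "Could not prepare git execution: " + e.getMessage());
  }
}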
Use of io.hops.hopsworks.exceptions.HopsSecurityException in project hopsworks by logicalclocks.
The class ProjectController, method forceCleanup.
public String[] forceCleanup(String projectName, String userEmail, String sessionId) {
  CleanupLogger cleanupLogger = new CleanupLogger(projectName);
  DistributedFileSystemOps dfso = null;
  YarnClientWrapper yarnClientWrapper = null;
  try {
    dfso = dfs.getDfsOps();
    yarnClientWrapper = ycs.getYarnClientSuper(settings.getConfiguration());
    Project project = projectFacade.findByName(projectName);
    if (project != null) {
      cleanupLogger.logSuccess("Project found in the database");
      // Run custom handlers for project deletion
      try {
        ProjectHandler.runProjectPreDeleteHandlers(projectHandlers, project);
        cleanupLogger.logSuccess("Handlers successfully run");
      } catch (ProjectException e) {
        cleanupLogger.logError("Error running handlers during project cleanup");
        cleanupLogger.logError(e.getMessage());
      }
      // Remove from Project team
      try {
        updateProjectTeamRole(project, ProjectRoleTypes.UNDER_REMOVAL);
        cleanupLogger.logSuccess("Updated team role");
      } catch (Exception ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Get Yarn applications
      List<ApplicationReport> projectApps = null;
      try {
        Collection<ProjectTeam> team = project.getProjectTeamCollection();
        Set<String> hdfsUsers = new HashSet<>();
        for (ProjectTeam pt : team) {
          String hdfsUsername = hdfsUsersController.getHdfsUserName(project, pt.getUser());
          hdfsUsers.add(hdfsUsername);
        }
        projectApps = getYarnApplications(hdfsUsers, yarnClientWrapper.getYarnClient());
        cleanupLogger.logSuccess("Gotten Yarn applications");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when reading YARN apps during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Kill Yarn Jobs
      try {
        killYarnJobs(project);
        cleanupLogger.logSuccess("Killed Yarn jobs");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when killing YARN jobs during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // jupyter notebook server and sessions
      try {
        removeJupyter(project);
        cleanupLogger.logSuccess("Removed Jupyter");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing Jupyter during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Wait for Yarn logs
      try {
        waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient());
        cleanupLogger.logSuccess("Gotten logs for jobs");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when getting Yarn logs during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Log removal
      try {
        logProject(project, OperationType.Delete);
        cleanupLogger.logSuccess("Logged project removal");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when logging project removal during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Change ownership of root dir
      try {
        Path path = new Path(Utils.getProjectPath(project.getName()));
        changeOwnershipToSuperuser(path, dfso);
        cleanupLogger.logSuccess("Changed ownership of root Project dir");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when changing ownership of root Project dir during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Change ownership of tmp file
      Path dummy = new Path("/tmp/" + project.getName());
      try {
        changeOwnershipToSuperuser(dummy, dfso);
        cleanupLogger.logSuccess("Changed ownership of dummy inode");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when changing ownership of dummy inode during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove Kafka
      try {
        removeKafkaTopics(project);
        cleanupLogger.logSuccess("Removed Kafka topics");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing kafka topics during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove certificates
      try {
        certificatesController.revokeProjectCertificates(project);
        cleanupLogger.logSuccess("Removed certificates");
      } catch (HopsSecurityException ex) {
        if (ex.getErrorCode() != RESTCodes.SecurityErrorCode.CERTIFICATE_NOT_FOUND) {
          cleanupLogger.logError("Error when removing certificates during project cleanup");
          cleanupLogger.logError(ex.getMessage());
        }
      } catch (IOException | GenericException ex) {
        cleanupLogger.logError("Error when removing certificates during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      List<HdfsUsers> usersToClean = getUsersToClean(project);
      List<HdfsGroups> groupsToClean = getGroupsToClean(project);
      // Remove project related files
      try {
        removeProjectRelatedFiles(usersToClean, dfso);
        cleanupLogger.logSuccess("Removed project related files");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing project-related files during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove quotas
      try {
        removeQuotas(project);
        cleanupLogger.logSuccess("Removed quotas");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing quota during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Change owner for files in shared datasets
      try {
        fixSharedDatasets(project, dfso);
        cleanupLogger.logSuccess("Fixed shared datasets");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when changing ownership during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Delete Hive database - this automatically cleans up all of Hive's metadata
      try {
        hiveController.dropDatabases(project, dfso, true);
        cleanupLogger.logSuccess("Removed Hive db");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing hive db during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Delete elasticsearch template for this project
      try {
        removeElasticsearch(project);
        cleanupLogger.logSuccess("Removed ElasticSearch");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing elastic during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // delete project group and users
      try {
        removeGroupAndUsers(groupsToClean, usersToClean);
        cleanupLogger.logSuccess("Removed HDFS Groups and Users");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing HDFS groups/users during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // remove running tensorboards repos
      try {
        removeTensorBoard(project);
        cleanupLogger.logSuccess("Removed local TensorBoards");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing running TensorBoards during project cleanup");
      }
      try {
        servingController.deleteAll(project);
        cleanupLogger.logSuccess("Removed servings");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing serving instances");
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove project DAGs, JWT monitors and free X.509 certificates
      try {
        airflowManager.onProjectRemoval(project);
        cleanupLogger.logSuccess("Removed Airflow DAGs and security references");
      } catch (Exception ex) {
        cleanupLogger.logError("Error while cleaning Airflow DAGs and security references");
        cleanupLogger.logError(ex.getMessage());
      }
      try {
        removeCertificatesFromMaterializer(project);
        cleanupLogger.logSuccess("Removed all X.509 certificates related to the Project from CertificateMaterializer");
      } catch (Exception ex) {
        cleanupLogger.logError("Error while force removing Project certificates from CertificateMaterializer");
        cleanupLogger.logError(ex.getMessage());
      }
      // remove conda envs
      try {
        removeAnacondaEnv(project);
        cleanupLogger.logSuccess("Removed conda envs");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing conda envs during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // remove dummy Inode
      try {
        dfso.rm(dummy, true);
        cleanupLogger.logSuccess("Removed dummy Inode");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing dummy Inode during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // remove folder
      try {
        removeProjectFolder(project.getName(), dfso);
        cleanupLogger.logSuccess("Removed root Project folder");
      } catch (Exception ex) {
        cleanupLogger.logError("Error when removing root Project dir during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      try {
        removeAlertConfigs(project);
        cleanupLogger.logSuccess("Cleaning alert manager config from project");
      } catch (Exception ex) {
        cleanupLogger.logError("Error cleaning alert manager config during project cleanup");
        cleanupLogger.logError(ex.getMessage());
      }
      // Run custom handlers for project deletion
      try {
        ProjectHandler.runProjectPostDeleteHandlers(projectHandlers, project);
        cleanupLogger.logSuccess("Handlers successfully run");
      } catch (ProjectException e) {
        cleanupLogger.logError("Error running handlers during project cleanup");
        cleanupLogger.logError(e.getMessage());
      }
    } else {
      // Create /tmp/Project and add to database so we lock in case someone tries to create a Project
      // with the same name at the same time
      cleanupLogger.logSuccess("Project is *NOT* in the database, going to remove as much as possible");
      Date now = DateUtils.localDateTime2Date(DateUtils.getNow());
      Users user = userFacade.findByEmail(userEmail);
      Project toDeleteProject = new Project(projectName, user, now, settings.getDefaultPaymentType());
      toDeleteProject.setKafkaMaxNumTopics(settings.getKafkaMaxNumTopics());
      Path tmpInodePath = new Path(File.separator + "tmp" + File.separator + projectName);
      try {
        if (!dfso.exists(tmpInodePath.toString())) {
          dfso.touchz(tmpInodePath);
        }
        Inode tmpInode = inodeController.getInodeAtPath(tmpInodePath.toString());
        if (tmpInode != null) {
          toDeleteProject.setInode(tmpInode);
          projectFacade.persistProject(toDeleteProject);
          projectFacade.flushEm();
          cleanupLogger.logSuccess("Created dummy Inode");
        }
      } catch (IOException ex) {
        cleanupLogger.logError("Could not create dummy Inode, moving on unsafe");
      }
      // Kill jobs
      List<HdfsUsers> projectHdfsUsers = hdfsUsersController.getAllProjectHdfsUsers(projectName);
      try {
        Set<String> hdfsUsersStr = new HashSet<>();
        for (HdfsUsers hdfsUser : projectHdfsUsers) {
          hdfsUsersStr.add(hdfsUser.getName());
        }
        List<ApplicationReport> projectApps = getYarnApplications(hdfsUsersStr, yarnClientWrapper.getYarnClient());
        waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient());
        cleanupLogger.logSuccess("Killed all Yarn Applications");
      } catch (Exception ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove project related files
      try {
        removeProjectRelatedFiles(projectHdfsUsers, dfso);
        cleanupLogger.logSuccess("Removed project related files from HDFS");
      } catch (IOException ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove Hive database
      try {
        hiveController.dropDatabases(toDeleteProject, dfso, true);
        cleanupLogger.logSuccess("Dropped Hive database");
      } catch (IOException ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove ElasticSearch index
      try {
        removeElasticsearch(toDeleteProject);
        cleanupLogger.logSuccess("Removed ElasticSearch");
      } catch (Exception ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove HDFS Groups and Users
      try {
        List<HdfsGroups> projectHdfsGroups = hdfsUsersController.getAllProjectHdfsGroups(projectName);
        removeGroupAndUsers(projectHdfsGroups, projectHdfsUsers);
        cleanupLogger.logSuccess("Removed HDFS Groups and Users");
      } catch (IOException ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove Yarn project quota
      try {
        removeQuotas(toDeleteProject);
        cleanupLogger.logSuccess("Removed project quota");
      } catch (Exception ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      List<ProjectTeam> reconstructedProjectTeam = new ArrayList<>();
      try {
        for (HdfsUsers hdfsUser : hdfsUsersController.getAllProjectHdfsUsers(projectName)) {
          Users foundUser = userFacade.findByUsername(hdfsUser.getUsername());
          if (foundUser != null) {
            reconstructedProjectTeam.add(new ProjectTeam(toDeleteProject, foundUser));
          }
        }
      } catch (Exception ex) {
        // NOOP
      }
      toDeleteProject.setProjectTeamCollection(reconstructedProjectTeam);
      try {
        airflowManager.onProjectRemoval(toDeleteProject);
        cleanupLogger.logSuccess("Removed Airflow DAGs and security references");
      } catch (Exception ex) {
        cleanupLogger.logError("Failed to remove Airflow DAGs and security references");
        cleanupLogger.logError(ex.getMessage());
      }
      try {
        removeCertificatesFromMaterializer(toDeleteProject);
        cleanupLogger.logSuccess("Freed all x.509 references from CertificateMaterializer");
      } catch (Exception ex) {
        cleanupLogger.logError("Failed to free all X.509 references from CertificateMaterializer");
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove Certificates
      try {
        certificatesController.revokeProjectCertificates(toDeleteProject);
        userCertsFacade.removeAllCertsOfAProject(projectName);
        cleanupLogger.logSuccess("Deleted certificates");
      } catch (HopsSecurityException | GenericException | IOException ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove root project directory
      try {
        removeProjectFolder(projectName, dfso);
        cleanupLogger.logSuccess("Removed root project directory");
      } catch (IOException ex) {
        cleanupLogger.logError(ex.getMessage());
      }
      // Remove /tmp/project
      try {
        dfso.rm(new Path(File.separator + "tmp" + File.separator + projectName), true);
        cleanupLogger.logSuccess("Removed /tmp");
      } catch (IOException ex) {
        cleanupLogger.logError(ex.getMessage());
      }
    }
  } finally {
    dfs.closeDfsClient(dfso);
    ycs.closeYarnClient(yarnClientWrapper);
    LOGGER.log(Level.INFO, cleanupLogger.getSuccessLog().toString());
    String errorLog = cleanupLogger.getErrorLog().toString();
    if (!errorLog.isEmpty()) {
      LOGGER.log(Level.SEVERE, errorLog);
    }
    sendInbox(cleanupLogger.getSuccessLog().append("\n").append(cleanupLogger.getErrorLog()).append("\n").toString(), userEmail);
  }
  String[] logs = new String[2];
  logs[0] = cleanupLogger.getSuccessLog().toString();
  logs[1] = cleanupLogger.getErrorLog().toString();
  return logs;
}
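forceCleanup leans entirely on CleanupLogger to keep going after individual failures and to report everything at the end. The sketch below is a reconstruction of the surface implied by its usage above (a constructor taking the project name, logSuccess/logError, and StringBuilder-returning getters); the actual hopsworks class may differ in details such as timestamps or formatting.

// Reconstruction sketch of the CleanupLogger surface implied by its usage in forceCleanup;
// the real hopsworks class may differ (e.g. it may add timestamps or counters).
public class CleanupLogger {

  private final String projectName;
  private final StringBuilder successLog = new StringBuilder();
  private final StringBuilder errorLog = new StringBuilder();

  public CleanupLogger(String projectName) {
    this.projectName = projectName;
  }

  public void logSuccess(String message) {
    // Accumulate successes; forceCleanup prints this log at INFO level in its finally block.
    successLog.append(projectName).append(": ").append(message).append('\n');
  }

  public void logError(String message) {
    // Accumulate errors; forceCleanup prints this log at SEVERE level if it is non-empty.
    errorLog.append(projectName).append(": ").append(message).append('\n');
  }

  public StringBuilder getSuccessLog() {
    return successLog;
  }

  public StringBuilder getErrorLog() {
    return errorLog;
  }
}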
Use of io.hops.hopsworks.exceptions.HopsSecurityException in project hopsworks by logicalclocks.
The class ProjectController, method removeMemberFromTeam.
public void removeMemberFromTeam(Project project, Users userToBeRemoved) throws ProjectException, ServiceException, IOException, GenericException, JobException, HopsSecurityException, TensorBoardException, FeaturestoreException {
  ProjectTeam projectTeam = projectTeamFacade.findProjectTeam(project, userToBeRemoved);
  if (projectTeam == null) {
    throw new ProjectException(RESTCodes.ProjectErrorCode.TEAM_MEMBER_NOT_FOUND, Level.FINE, "project: " + project + ", user: " + userToBeRemoved.getEmail());
  }
  // Not able to remove project owner regardless of who is trying to remove the member
  if (project.getOwner().equals(userToBeRemoved)) {
    throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_OWNER_NOT_ALLOWED, Level.FINE);
  }
  projectTeamFacade.removeProjectTeam(project, userToBeRemoved);
  String hdfsUser = hdfsUsersController.getHdfsUserName(project, userToBeRemoved);
  YarnClientWrapper yarnClientWrapper = ycs.getYarnClientSuper(settings.getConfiguration());
  YarnClient client = yarnClientWrapper.getYarnClient();
  try {
    Set<String> hdfsUsers = new HashSet<>();
    hdfsUsers.add(hdfsUser);
    List<ApplicationReport> projectsApps = client.getApplications(null, hdfsUsers, null, EnumSet.of(YarnApplicationState.ACCEPTED, YarnApplicationState.NEW, YarnApplicationState.NEW_SAVING, YarnApplicationState.RUNNING, YarnApplicationState.SUBMITTED));
    // kill jupyter for this user
    JupyterProject jupyterProject = jupyterFacade.findByUser(hdfsUser);
    if (jupyterProject != null) {
      jupyterController.shutdown(project, hdfsUser, userToBeRemoved, jupyterProject.getSecret(), jupyterProject.getCid(), jupyterProject.getPort());
    }
    // kill running TB if any
    tensorBoardController.cleanup(project, userToBeRemoved);
    // kill all jobs run by this user
    List<Jobs> running = jobFacade.getRunningJobs(project, hdfsUser);
    if (running != null && !running.isEmpty()) {
      for (Jobs job : running) {
        executionController.stop(job);
      }
    }
    // wait for log aggregation of the jobs to finish
    for (ApplicationReport appReport : projectsApps) {
      FinalApplicationStatus finalState = appReport.getFinalApplicationStatus();
      while (finalState.equals(FinalApplicationStatus.UNDEFINED)) {
        client.killApplication(appReport.getApplicationId());
        appReport = client.getApplicationReport(appReport.getApplicationId());
        finalState = appReport.getFinalApplicationStatus();
      }
      YarnLogUtil.waitForLogAggregation(client, appReport.getApplicationId());
    }
  } catch (YarnException | IOException | InterruptedException e) {
    throw new ProjectException(RESTCodes.ProjectErrorCode.KILL_MEMBER_JOBS, Level.SEVERE, "project: " + project + ", user: " + userToBeRemoved, e.getMessage(), e);
  } finally {
    ycs.closeYarnClient(yarnClientWrapper);
  }
  // trigger project team role remove handlers
  ProjectTeamRoleHandler.runProjectTeamRoleRemoveMembersHandlers(projectTeamRoleHandlers, project, Collections.singletonList(userToBeRemoved));
  // Revoke privileges for online feature store
  if (projectServiceFacade.isServiceEnabledForProject(project, ProjectServiceEnum.FEATURESTORE)) {
    Featurestore featurestore = featurestoreController.getProjectFeaturestore(project);
    onlineFeaturestoreController.removeOnlineFeaturestoreUser(featurestore, userToBeRemoved);
  }
  kafkaController.removeProjectMemberFromTopics(project, userToBeRemoved);
  certificateMaterializer.forceRemoveLocalMaterial(userToBeRemoved.getUsername(), project.getName(), null, false);
  try {
    certificatesController.revokeUserSpecificCertificates(project, userToBeRemoved);
  } catch (HopsSecurityException ex) {
    if (ex.getErrorCode() != RESTCodes.SecurityErrorCode.CERTIFICATE_NOT_FOUND) {
      LOGGER.log(Level.SEVERE, "Could not delete certificates when removing member " + userToBeRemoved.getUsername() + " from project " + project.getName() + ". Manual cleanup is needed!!!", ex);
      throw ex;
    }
  } catch (IOException | GenericException ex) {
    LOGGER.log(Level.SEVERE, "Could not delete certificates when removing member " + userToBeRemoved.getUsername() + " from project " + project.getName() + ". Manual cleanup is needed!!!", ex);
    throw ex;
  }
  // TODO: projectTeam might be null?
  hdfsUsersController.removeMember(projectTeam);
}
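Both removeMemberFromTeam and forceCleanup tolerate a missing certificate while treating every other certificate error as fatal. A hypothetical helper capturing that pattern is sketched below (the name revokeCertificatesIgnoringMissing is invented); it uses only calls that appear in the excerpts above.

// Hypothetical helper sketch; revokeCertificatesIgnoringMissing is an illustrative name, not hopsworks API.
private void revokeCertificatesIgnoringMissing(Project project, Users user) throws HopsSecurityException, GenericException, IOException {
  try {
    certificatesController.revokeUserSpecificCertificates(project, user);
  } catch (HopsSecurityException ex) {
    if (ex.getErrorCode() != RESTCodes.SecurityErrorCode.CERTIFICATE_NOT_FOUND) {
      // Only a missing certificate is acceptable; any other security error still propagates to the caller.
      throw ex;
    }
  }
}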