Use of io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers in project hopsworks by logicalclocks.
The class ModelsBuilder, method buildFilter:
/**
 * Translates the REST filter set into a provenance-state query for listing models.
 * <p>
 * Supported filters: NAME_EQ, NAME_LIKE, VERSION, ID_EQ (xattr filters on the model
 * summary), plus USER / USER_PROJECT which narrow the results to files created by a
 * specific member of a project (resolved to an HDFS user id).
 *
 * @param project              project issuing the request; default scope for the USER filter
 * @param modelRegistryProject project whose model registry is being queried
 * @param filters              nullable set of REST filters to apply
 * @return the populated provenance query builder paired with the resolved registry DTO
 * @throws GenericException when a filter value is malformed or references an unknown
 *                          user/project/HDFS-user
 */
private Pair<ProvStateParamBuilder, ModelRegistryDTO> buildFilter(Project project, Project modelRegistryProject,
    Set<? extends AbstractFacade.FilterBy> filters)
    throws GenericException, ProvenanceException, DatasetException {
  ProvStateParamBuilder provFilesParamBuilder = new ProvStateParamBuilder();
  if (filters != null) {
    Users filterUser = null;
    Project filterUserProject = project;
    for (AbstractFacade.FilterBy filterBy : filters) {
      // equalsIgnoreCase is the idiomatic form of compareToIgnoreCase(...) == 0
      if (filterBy.getParam().equalsIgnoreCase(Filters.NAME_EQ.name())) {
        provFilesParamBuilder.filterByXAttr(MODEL_SUMMARY_XATTR_NAME + ".name", filterBy.getValue());
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.NAME_LIKE.name())) {
        provFilesParamBuilder.filterLikeXAttr(MODEL_SUMMARY_XATTR_NAME + ".name", filterBy.getValue());
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.VERSION.name())) {
        provFilesParamBuilder.filterByXAttr(MODEL_SUMMARY_XATTR_NAME + ".version", filterBy.getValue());
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.ID_EQ.name())) {
        provFilesParamBuilder.filterByXAttr(MODEL_SUMMARY_XATTR_NAME + ".id", filterBy.getValue());
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.USER.name())) {
        try {
          filterUser = userFacade.find(Integer.parseInt(filterBy.getValue()));
        } catch (NumberFormatException e) {
          throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
              "expected int user id, found: " + filterBy.getValue());
        }
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.USER_PROJECT.name())) {
        try {
          filterUserProject = projectFacade.find(Integer.parseInt(filterBy.getValue()));
        } catch (NumberFormatException e) {
          throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
              "expected int user project id, found: " + filterBy.getValue());
        }
        if (filterUserProject == null) {
          // fail fast: a null project previously surfaced later as an NPE
          throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
              "user project not found: " + filterBy.getValue());
        }
      } else {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
            "Filter by - found: " + filterBy.getParam() + " expected:" + EnumSet.allOf(Filters.class));
      }
    }
    if (filterUser != null) {
      // the USER filter is only valid for members of the (possibly overridden) project
      ProjectTeam member = projectTeamFacade.findByPrimaryKey(filterUserProject, filterUser);
      if (member == null) {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
            "Selected user: " + filterUser.getUid() + " is not part of project:" + filterUserProject.getId());
      }
      String hdfsUserStr = hdfsUsersController.getHdfsUserName(filterUserProject, filterUser);
      HdfsUsers hdfsUsers = hdfsUsersFacade.findByName(hdfsUserStr);
      if (hdfsUsers == null) {
        // guard: a missing HDFS user mapping would otherwise NPE on getId()
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
            "hdfs user not found for: " + hdfsUserStr);
      }
      provFilesParamBuilder.filterByField(ProvStateParser.FieldsP.USER_ID, hdfsUsers.getId().toString());
    }
  }
  ModelRegistryDTO modelRegistryDTO = modelsController.getModelRegistry(modelRegistryProject);
  // always scope the query to the registry's parent project and dataset inodes
  provFilesParamBuilder
      .filterByField(ProvStateParser.FieldsP.PROJECT_I_ID, modelRegistryDTO.getParentProject().getInode().getId())
      .filterByField(ProvStateParser.FieldsP.DATASET_I_ID, modelRegistryDTO.getDatasetInodeId());
  return Pair.with(provFilesParamBuilder, modelRegistryDTO);
}
Use of io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers in project hopsworks by logicalclocks.
The class JupyterService, method startNotebookServer:
@POST
@Path("/start")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
// Starts a Jupyter notebook server for the authenticated user in this project.
// Flow: validate quota / python env / git settings -> start the server process ->
// materialize JWT and certificates -> persist the server record with an expiration
// date -> optionally init/pull the git repo. On any failure after startup the
// server is shut down again via shutdownQuietly. Returns 200 with the persisted
// JupyterProject entity; throws JUPYTER_SERVER_ALREADY_RUNNING if one exists.
public Response startNotebookServer(JupyterSettings jupyterSettings, @Context HttpServletRequest req, @Context SecurityContext sc, @Context UriInfo uriInfo) throws ProjectException, HopsSecurityException, ServiceException, GenericException, JobException {
Users hopsworksUser = jWTHelper.getUserPrincipal(sc);
String hdfsUser = hdfsUsersController.getHdfsUserName(project, hopsworksUser);
// from in the front-end
if (jupyterSettings.getUsers() == null) {
jupyterSettings.setUsers(hopsworksUser);
}
// PREPAID projects must have remaining YARN quota before a server may start
if (project.getPaymentType().equals(PaymentType.PREPAID)) {
YarnProjectsQuota projectQuota = yarnProjectsQuotaFacade.findByProjectName(project.getName());
if (projectQuota == null || projectQuota.getQuotaRemaining() <= 0) {
throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_QUOTA_ERROR, Level.FINE);
}
}
// Jupyter requires a provisioned python environment in the project
if (project.getPythonEnvironment() == null) {
throw new ProjectException(RESTCodes.ProjectErrorCode.ANACONDA_NOT_ENABLED, Level.FINE);
}
if (jupyterSettings.getMode() == null) {
// set default mode for jupyter if mode is null
jupyterSettings.setMode(JupyterMode.JUPYTER_LAB);
}
// Jupyter Git works only for JupyterLab
if (jupyterSettings.isGitBackend() && jupyterSettings.getMode().equals(JupyterMode.JUPYTER_CLASSIC)) {
throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.FINE, "Git support available only in JupyterLab");
}
// Do not allow auto push on shutdown if api key is missing
GitConfig gitConfig = jupyterSettings.getGitConfig();
if (jupyterSettings.isGitBackend() && gitConfig.getShutdownAutoPush() && Strings.isNullOrEmpty(gitConfig.getApiKeyName())) {
throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.FINE, "Auto push not supported if api key is not configured.");
}
// Verify that API token has got write access on the repo if ShutdownAutoPush is enabled
if (jupyterSettings.isGitBackend() && gitConfig.getShutdownAutoPush() && !jupyterNbVCSController.hasWriteAccess(hopsworksUser, gitConfig.getApiKeyName(), gitConfig.getRemoteGitURL(), gitConfig.getGitBackend())) {
throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.FINE, "API token " + gitConfig.getApiKeyName() + " does not have write access on " + gitConfig.getRemoteGitURL());
}
// Only one server per hdfs user: an existing record means "already running"
JupyterProject jp = jupyterFacade.findByUser(hdfsUser);
if (jp == null) {
// NOTE(review): findByName may return null; user is dereferenced below at
// materializeCertificatesForUserCustomDir and saveServer — confirm the mapping
// is guaranteed to exist for project members at this point.
HdfsUsers user = hdfsUsersFacade.findByName(hdfsUser);
// random secret used to scope this server's configuration directory
String configSecret = DigestUtils.sha256Hex(Integer.toString(ThreadLocalRandom.current().nextInt()));
JupyterDTO dto = null;
DistributedFileSystemOps dfso = dfsService.getDfsOps();
// build the allowed CORS origin from the request's base URI (port only if explicit)
String allowOriginHost = uriInfo.getBaseUri().getHost();
int allowOriginPort = uriInfo.getBaseUri().getPort();
String allowOriginPortStr = allowOriginPort != -1 ? ":" + allowOriginPort : "";
String allowOrigin = settings.getJupyterOriginScheme() + "://" + allowOriginHost + allowOriginPortStr;
try {
jupyterSettingsFacade.update(jupyterSettings);
// Inspect dependencies
sparkController.inspectDependencies(project, hopsworksUser, (SparkJobConfiguration) jupyterSettings.getJobConfig());
dto = jupyterManager.startJupyterServer(project, configSecret, hdfsUser, hopsworksUser, jupyterSettings, allowOrigin);
jupyterJWTManager.materializeJWT(hopsworksUser, project, jupyterSettings, dto.getCid(), dto.getPort(), JUPYTER_JWT_AUD);
HopsUtils.materializeCertificatesForUserCustomDir(project.getName(), user.getUsername(), settings.getHdfsTmpCertDir(), dfso, certificateMaterializer, settings, dto.getCertificatesDir());
jupyterManager.waitForStartup(project, hopsworksUser);
} catch (ServiceException | TimeoutException ex) {
// roll back the started server (dto != null means startJupyterServer succeeded)
if (dto != null) {
jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
}
throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.SEVERE, ex.getMessage(), null, ex);
} catch (IOException ex) {
if (dto != null) {
jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
}
throw new HopsSecurityException(RESTCodes.SecurityErrorCode.CERT_MATERIALIZATION_ERROR, Level.SEVERE, ex.getMessage(), null, ex);
} finally {
if (dfso != null) {
dfsService.closeDfsClient(dfso);
}
}
String externalIp = Ip.getHost(req.getRequestURL().toString());
try {
// expiration = now + shutdownLevel hours (ignored by the reaper if isNoLimit)
Date expirationDate = new Date();
Calendar cal = Calendar.getInstance();
cal.setTime(expirationDate);
cal.add(Calendar.HOUR_OF_DAY, jupyterSettings.getShutdownLevel());
expirationDate = cal.getTime();
// dto and user are non-null here: the try block above completed without throwing
jp = jupyterFacade.saveServer(externalIp, project, configSecret, dto.getPort(), user.getId(), dto.getToken(), dto.getCid(), expirationDate, jupyterSettings.isNoLimit());
// set minutes left until notebook server is killed
Duration durationLeft = Duration.between(new Date().toInstant(), jp.getExpires().toInstant());
jp.setMinutesUntilExpiration(durationLeft.toMinutes());
} catch (Exception e) {
// persisting failed: log and stop the already-started server; jp stays null
LOGGER.log(Level.SEVERE, "Failed to save Jupyter notebook settings", e);
jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
}
if (jp == null) {
throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_SAVE_SETTINGS_ERROR, Level.SEVERE);
}
if (jupyterSettings.isGitBackend()) {
try {
// Init is idempotent, calling it on an already initialized repo won't affect it
jupyterNbVCSController.init(jp, jupyterSettings);
if (jupyterSettings.getGitConfig().getStartupAutoPull()) {
jupyterNbVCSController.pull(jp, jupyterSettings);
}
} catch (ServiceException ex) {
// NOTE(review): the server record saved above is not deleted here — presumably
// shutdownQuietly takes care of cleanup; verify no stale record remains.
jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
throw ex;
}
}
} else {
throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_SERVER_ALREADY_RUNNING, Level.FINE);
}
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(jp).build();
}
Use of io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers in project hopsworks by logicalclocks.
The class JupyterNotebooksBean, method getHdfsUser:
/**
 * Resolves the display name of the HDFS user that owns the given notebook server.
 *
 * @param notebook the persisted Jupyter server record
 * @return the HDFS user name, or {@code "Orphaned"} when the record has no owner
 *         (hdfsUserId of -1) or the referenced HDFS user no longer exists
 */
public String getHdfsUser(JupyterProject notebook) {
  int hdfsId = notebook.getHdfsUserId();
  if (hdfsId == -1) {
    return "Orphaned";
  }
  HdfsUsers hdfsUser = hdfsUsersFacade.find(hdfsId);
  if (hdfsUser == null) {
    // the referenced hdfs user was deleted; treat the notebook as orphaned
    // instead of throwing an NPE on getName()
    return "Orphaned";
  }
  return hdfsUser.getName();
}
Use of io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers in project hopsworks by logicalclocks.
The class ExperimentsBuilder, method buildFilter:
/**
 * Translates the REST filter set into a provenance-state query for listing experiments.
 * <p>
 * Supported filters: ENDPOINT_ID (selects experiment endpoints), NAME_EQ / NAME_LIKE /
 * STATE / ID_EQ (xattr filters on the experiment summary), DATE_START_LT / DATE_START_GT
 * (creation-timestamp bounds), plus USER / USER_PROJECT which narrow results to files
 * created by a specific project member (resolved to an HDFS user id).
 *
 * @param project project issuing the request; default scope for the USER filter
 * @param filters nullable set of REST filters to apply
 * @return the populated provenance query builder paired with the selected endpoints,
 *         keyed by the endpoint's parent project inode id
 * @throws GenericException when a filter value is malformed or references an unknown
 *                          user/project/HDFS-user
 */
private Pair<ProvStateParamBuilder, Map<Long, ExperimentsEndpointDTO>> buildFilter(Project project,
    Set<? extends AbstractFacade.FilterBy> filters)
    throws ProvenanceException, GenericException, DatasetException {
  ProvStateParamBuilder provFilesParamBuilder = new ProvStateParamBuilder();
  Map<Long, ExperimentsEndpointDTO> selectedEndpoints = new HashMap<>();
  if (filters != null) {
    Users filterUser = null;
    Project filterUserProject = project;
    for (AbstractFacade.FilterBy filterBy : filters) {
      // equalsIgnoreCase is the idiomatic form of compareToIgnoreCase(...) == 0
      if (filterBy.getParam().equalsIgnoreCase(Filters.ENDPOINT_ID.name())) {
        ExperimentsEndpointDTO endpoint = verifyExperimentsEndpoint(project, filterBy.getValue());
        selectedEndpoints.put(endpoint.getParentProject().getInode().getId(), endpoint);
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.NAME_LIKE.name())) {
        provFilesParamBuilder.filterLikeXAttr(EXPERIMENT_SUMMARY_XATTR_NAME + ".name", filterBy.getValue());
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.NAME_EQ.name())) {
        provFilesParamBuilder.filterByXAttr(EXPERIMENT_SUMMARY_XATTR_NAME + ".name", filterBy.getValue());
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.DATE_START_LT.name())) {
        Long timestamp = getDate(filterBy.getField(), filterBy.getValue()).getTime();
        provFilesParamBuilder.filterByField(ProvStateParser.FieldsPF.CREATE_TIMESTAMP_LT, timestamp);
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.DATE_START_GT.name())) {
        Long timestamp = getDate(filterBy.getField(), filterBy.getValue()).getTime();
        provFilesParamBuilder.filterByField(ProvStateParser.FieldsPF.CREATE_TIMESTAMP_GT, timestamp);
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.USER.name())) {
        try {
          filterUser = userFacade.find(Integer.parseInt(filterBy.getValue()));
        } catch (NumberFormatException e) {
          throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
              "expected int user id, found: " + filterBy.getValue());
        }
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.USER_PROJECT.name())) {
        try {
          filterUserProject = projectFacade.find(Integer.parseInt(filterBy.getValue()));
        } catch (NumberFormatException e) {
          throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
              "expected int user project id, found: " + filterBy.getValue());
        }
        if (filterUserProject == null) {
          // fail fast: a null project previously surfaced later as an NPE
          throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
              "user project not found: " + filterBy.getValue());
        }
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.STATE.name())) {
        provFilesParamBuilder.filterLikeXAttr(EXPERIMENT_SUMMARY_XATTR_NAME + ".state", filterBy.getValue());
      } else if (filterBy.getParam().equalsIgnoreCase(Filters.ID_EQ.name())) {
        provFilesParamBuilder.filterByXAttr(EXPERIMENT_SUMMARY_XATTR_NAME + ".id", filterBy.getValue());
      } else {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
            "Filter by - found: " + filterBy.getParam() + " expected:" + EnumSet.allOf(Filters.class));
      }
    }
    if (filterUser != null) {
      // the USER filter is only valid for members of the (possibly overridden) project
      ProjectTeam member = projectTeamFacade.findByPrimaryKey(filterUserProject, filterUser);
      if (member == null) {
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
            "Selected user: " + filterUser.getUid() + " is not part of project:" + filterUserProject.getId());
      }
      String hdfsUserStr = hdfsUsersController.getHdfsUserName(filterUserProject, filterUser);
      HdfsUsers hdfsUsers = hdfsUsersFacade.findByName(hdfsUserStr);
      if (hdfsUsers == null) {
        // guard: a missing HDFS user mapping would otherwise NPE on getId()
        throw new GenericException(RESTCodes.GenericErrorCode.ILLEGAL_ARGUMENT, Level.INFO,
            "hdfs user not found for: " + hdfsUserStr);
      }
      provFilesParamBuilder.filterByField(ProvStateParser.FieldsP.USER_ID, hdfsUsers.getId().toString());
    }
  }
  // an endpoint always has to be selected, if none provided, then all accessible endpoints are used
  if (selectedEndpoints.isEmpty()) {
    for (ExperimentsEndpointDTO endpoint : experimentsController.getExperimentsEndpoints(project)) {
      selectedEndpoints.put(endpoint.getParentProject().getInode().getId(), endpoint);
    }
  }
  for (ExperimentsEndpointDTO endpoint : selectedEndpoints.values()) {
    provFilesParamBuilder
        .filterByField(ProvStateParser.FieldsP.PROJECT_I_ID, endpoint.getParentProject().getInode().getId())
        .filterByField(ProvStateParser.FieldsP.DATASET_I_ID, endpoint.getDatasetInodeId());
  }
  return Pair.with(provFilesParamBuilder, selectedEndpoints);
}
Use of io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers in project hopsworks by logicalclocks.
The class PermissionsCleaner, method testAndFixPermission:
/**
 * Verifies (and repairs) a project member's HDFS group membership for a dataset so it
 * matches the dataset's access permission: editable members belong to the dataset (rw)
 * group, read-only members to the dataset ACL (ro) group, and never both.
 *
 * @param projectTeam         the member whose membership is checked
 * @param dfso                superuser filesystem handle used for group operations
 * @param hdfsDatasetGroup    read-write dataset group
 * @param hdfsDatasetAclGroup read-only dataset ACL group
 * @param owner               dataset owner, exempt from fixing (may be null)
 * @param permission          the dataset's configured access permission
 * @throws IOException if a group add/remove operation fails
 */
private void testAndFixPermission(ProjectTeam projectTeam, DistributedFileSystemOps dfso,
    HdfsGroups hdfsDatasetGroup, HdfsGroups hdfsDatasetAclGroup, HdfsUsers owner,
    DatasetAccessPermission permission) throws IOException {
  if (projectTeam.getUser().getUsername().equals("srvmanager")) {
    // Does this user need to be in groups?
    return;
  }
  String hdfsUsername = hdfsUsersController.getHdfsUserName(projectTeam.getProject(), projectTeam.getUser());
  HdfsUsers hdfsUser = hdfsUsersController.getOrCreateUser(hdfsUsername, dfso);
  // the dataset owner's membership is managed elsewhere; never touch it here
  if (owner != null && owner.equals(hdfsUser)) {
    return;
  }
  switch (permission) {
    case EDITABLE:
      fixMembership(hdfsUser, hdfsDatasetGroup, hdfsDatasetAclGroup, true, dfso);
      break;
    case READ_ONLY:
      fixMembership(hdfsUser, hdfsDatasetGroup, hdfsDatasetAclGroup, false, dfso);
      break;
    case EDITABLE_BY_OWNERS:
      // only DATA_OWNERs get write access; everyone else is read-only
      fixMembership(hdfsUser, hdfsDatasetGroup, hdfsDatasetAclGroup,
          AllowedRoles.DATA_OWNER.equals(projectTeam.getTeamRole()), dfso);
      break;
    default:
      LOGGER.log(Level.WARNING, "Found a dataset with an unknown permission: group={0}, project={1}", new Object[] { hdfsDatasetGroup, projectTeam.getProject().getName() });
  }
}

/**
 * Makes the user a member of exactly one of the two dataset groups: the rw group when
 * {@code editable} is true, otherwise the ro ACL group. Operations always touch the
 * rw group first and the ACL group second, preserving the original fix order.
 */
private void fixMembership(HdfsUsers hdfsUser, HdfsGroups hdfsDatasetGroup, HdfsGroups hdfsDatasetAclGroup,
    boolean editable, DistributedFileSystemOps dfso) throws IOException {
  if (editable) {
    if (!hdfsDatasetGroup.hasUser(hdfsUser)) {
      addToGroup(hdfsUser, hdfsDatasetGroup, dfso);
    }
    if (hdfsDatasetAclGroup.hasUser(hdfsUser)) {
      removeFromGroup(hdfsUser, hdfsDatasetAclGroup, dfso);
    }
  } else {
    if (hdfsDatasetGroup.hasUser(hdfsUser)) {
      removeFromGroup(hdfsUser, hdfsDatasetGroup, dfso);
    }
    if (!hdfsDatasetAclGroup.hasUser(hdfsUser)) {
      addToGroup(hdfsUser, hdfsDatasetAclGroup, dfso);
    }
  }
}
Aggregations