
Example 6 with ApiKeyException

use of io.hops.hopsworks.exceptions.ApiKeyException in project hopsworks by logicalclocks.

the class ApiKeyResource method validateScopes.

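/**
 * Checks every requested scope against the scopes {@code getScopesForUser(user)} returns
 * and hands back the accepted ones as real {@link ApiScope} values.
 *
 * <p>For a strange reason the Set of user supplied ApiScope(s) is marshalled to String even
 * though it is declared as a Set of ApiScope, so each element is converted explicitly.
 * Elements that already are {@code ApiScope} are accepted as they are, so a corrected
 * marshaller does not turn into a {@code ClassCastException}.
 *
 * @param user   the user requesting the key
 * @param scopes the scopes requested for the new key
 * @return the requested scopes, converted to {@code ApiScope}
 * @throws ApiKeyException if {@code scopes} is null, or a requested scope is not among the
 *                         scopes allowed for the user
 * @throws WebApplicationException (404) if an element is not a valid scope name
 */
private Set<ApiScope> validateScopes(Users user, Set<ApiScope> scopes) throws ApiKeyException {
    if (scopes == null) {
        // Reject explicitly instead of failing with a NullPointerException on scopes.size().
        throw new ApiKeyException(RESTCodes.ApiKeyErrorCode.KEY_SCOPE_NOT_SPECIFIED, Level.FINE);
    }
    Set<ApiScope> validScopes = getScopesForUser(user);
    Set<ApiScope> validatedScopes = new HashSet<>(scopes.size());
    // Iterate as Object: a typed loop variable would cast each element to ApiScope and fail
    // on the String elements described above.
    for (Object scope : scopes) {
        ApiScope apiScope;
        try {
            apiScope = scope instanceof ApiScope ? (ApiScope) scope : ApiScope.fromString((String) scope);
        } catch (IllegalArgumentException iae) {
            throw new WebApplicationException("Scope need to set a valid scope, but found: " + scope, Response.Status.NOT_FOUND);
        }
        // Authorization check: a user may only issue keys for scopes their role allows.
        if (!validScopes.contains(apiScope)) {
            throw new ApiKeyException(RESTCodes.ApiKeyErrorCode.KEY_SCOPE_CONTROL_EXCEPTION, Level.FINE, "User is not allowed to issue token " + apiScope.name(), "User " + user.getUsername() + " tried to generate API key with scope " + apiScope + " but it's role is not allowed to");
        }
        validatedScopes.add(apiScope);
    }
    return validatedScopes;
}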
Also used : ApiKeyException(io.hops.hopsworks.exceptions.ApiKeyException) WebApplicationException(javax.ws.rs.WebApplicationException) ApiScope(io.hops.hopsworks.persistence.entity.user.security.apiKey.ApiScope) HashSet(java.util.HashSet)

Example 7 with ApiKeyException

use of io.hops.hopsworks.exceptions.ApiKeyException in project hopsworks by logicalclocks.

the class JupyterConfigFilesGenerator method generateConfiguration.

/**
 * Computes the Jupyter paths for the project user, creates the directories and writes the
 * configuration files into them.
 *
 * <p>If directory creation or config generation fails, the directory is removed again, but
 * only when this call created it. The failure is logged at SEVERE and rethrown as a
 * {@code ServiceException} with code {@code JUPYTER_ADD_FAILURE}; the original exception is
 * kept as the cause and its message is passed along as the developer message.
 *
 * @return the paths that were generated for this Jupyter configuration
 * @throws ServiceException if the directories or config files could not be created
 * @throws JobException declared by the signature; not thrown directly in this method
 */
public JupyterPaths generateConfiguration(Project project, String secretConfig, String hdfsUser, Users hopsworksUser, JupyterSettings js, Integer port, String allowOrigin) throws ServiceException, JobException {
    final JupyterPaths paths = generateJupyterPaths(project, hdfsUser, secretConfig);
    // Stays false when createJupyterDirs itself throws, so a pre-existing directory is never deleted.
    boolean createdByThisCall = false;
    try {
        createdByThisCall = createJupyterDirs(paths);
        createConfigFiles(paths, hdfsUser, hopsworksUser, project, port, js, allowOrigin);
        return paths;
    } catch (IOException | ServiceException | ServiceDiscoveryException | ApiKeyException e) {
        if (createdByThisCall) {
            // Do not leave a half-written config directory behind.
            removeProjectUserDirRecursive(paths);
        }
        LOGGER.log(Level.SEVERE, "Error in initializing JupyterConfig for project: {0}. {1}", new Object[] { project.getName(), e });
        throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_ADD_FAILURE, Level.SEVERE, null, e.getMessage(), e);
    }
}
Also used : ApiKeyException(io.hops.hopsworks.exceptions.ApiKeyException) ServiceException(io.hops.hopsworks.exceptions.ServiceException) ServiceDiscoveryException(com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException) IOException(java.io.IOException)

Example 8 with ApiKeyException

use of io.hops.hopsworks.exceptions.ApiKeyException in project hopsworks by logicalclocks.

the class ApiKeyController method createNewKey.

/**
 * Creates and persists a new API key for the given user with the given key name and scopes.
 *
 * <p>Only the generated secret's prefix, SHA-256 hex digest and salt are passed to the
 * {@code ApiKey} entity; the value of {@code getPrefixPlusSecret()} is handed back to the
 * caller and is not given to the entity or to the notification e-mail in this method.
 *
 * <p>NOTE(review): the scopes are only checked for null/empty here. Whether the user is
 * allowed to hold them is presumably verified by the caller; confirm.
 * NOTE(review): the name uniqueness check runs before the save; whether a database
 * constraint backs it under concurrent requests is not visible here.
 *
 * @param user     owner of the new key; must not be null
 * @param keyName  name of the key; required, at most 45 characters, unique per user
 * @param scopes   scopes to attach to the key; must not be null or empty
 * @param reserved passed through to the {@code ApiKey} constructor
 * @return the prefix plus secret of the newly generated key
 * @throws UserException   if {@code user} is null
 * @throws ApiKeyException if the key name is missing, too long or already used by this
 *                         user, or if no scope is given
 */
public String createNewKey(Users user, String keyName, Set<ApiScope> scopes, Boolean reserved) throws UserException, ApiKeyException {
    if (user == null) {
        throw new UserException(RESTCodes.UserErrorCode.USER_WAS_NOT_FOUND, Level.FINE);
    }
    final boolean nameMissing = keyName == null || keyName.isEmpty();
    if (nameMissing) {
        throw new ApiKeyException(RESTCodes.ApiKeyErrorCode.KEY_NAME_NOT_SPECIFIED, Level.FINE);
    }
    if (keyName.length() > 45) {
        throw new ApiKeyException(RESTCodes.ApiKeyErrorCode.KEY_NAME_NOT_VALID, Level.FINE);
    }
    final boolean scopesMissing = scopes == null || scopes.isEmpty();
    if (scopesMissing) {
        throw new ApiKeyException(RESTCodes.ApiKeyErrorCode.KEY_SCOPE_NOT_SPECIFIED, Level.FINE);
    }
    if (apiKeyFacade.findByUserAndName(user, keyName) != null) {
        throw new ApiKeyException(RESTCodes.ApiKeyErrorCode.KEY_NAME_EXIST, Level.FINE);
    }

    final Secret generated = generateApiKey();
    // The same timestamp is used for both date arguments of the entity and for the e-mail.
    final Date now = new Date();
    final ApiKey created = new ApiKey(user, generated.getPrefix(), generated.getSha256HexDigest(), generated.getSalt(), now, now, keyName, reserved);
    final List<ApiKeyScope> scopeRows = getKeyScopes(scopes, created);
    created.setApiKeyScopeCollection(scopeRows);
    apiKeyFacade.save(created);

    // Create handlers run after the key has been saved.
    ApiKeyHandler.runApiKeyCreateHandlers(apiKeyHandlers, created);
    sendCreatedEmail(user, keyName, now, scopes);
    return generated.getPrefixPlusSecret();
}
Also used : ApiKeyException(io.hops.hopsworks.exceptions.ApiKeyException) Secret(io.hops.hopsworks.common.security.utils.Secret) ApiKey(io.hops.hopsworks.persistence.entity.user.security.apiKey.ApiKey) ApiKeyScope(io.hops.hopsworks.persistence.entity.user.security.apiKey.ApiKeyScope) UserException(io.hops.hopsworks.exceptions.UserException) Date(java.util.Date)

Example 9 with ApiKeyException

use of io.hops.hopsworks.exceptions.ApiKeyException in project hopsworks by logicalclocks.

the class FlinkProxyServlet method service.

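/**
 * Authenticates the caller, checks that the caller is a member of the project that owns the
 * YARN application named in the request URI, and forwards the request to that application's
 * Flink master.
 *
 * <p>The caller is taken from the request's user principal when present, otherwise from an
 * API key in the {@code Authorization} header. Every rejection path sends an error status
 * and returns, so a rejected request is never forwarded.
 *
 * <p>NOTE(review): no API key scope check is visible in this method; confirm it is enforced
 * by {@code getApiKey} or elsewhere.
 *
 * @throws ServletException propagated from the superclass {@code service} call
 * @throws IOException if sending an error or forwarding the request fails
 */
@Override
protected void service(HttpServletRequest servletRequest, HttpServletResponse servletResponse) throws ServletException, IOException {
    final String notRunningMessage = "This Flink cluster is not running. You can navigate to YARN and Logs for historical information on this Flink cluster.";
    final String notAuthorizedMessage = "You are not authorized to access this Flink cluster";

    Users user;
    if (servletRequest.getUserPrincipal() == null) {
        // Check if API key is provided
        String authorizationHeader = servletRequest.getHeader("Authorization");
        if (Strings.isNullOrEmpty(authorizationHeader)) {
            servletResponse.sendError(401, "API key was not provided");
            return;
        }
        // A header shorter than the scheme prefix would make substring() throw and surface as a 500.
        // NOTE(review): the prefix is only skipped, not compared with the header; confirm that is intended.
        if (authorizationHeader.length() < ApiKeyFilter.API_KEY.length()) {
            servletResponse.sendError(401, "Could not validate API key");
            return;
        }
        try {
            String key = authorizationHeader.substring(ApiKeyFilter.API_KEY.length()).trim();
            ApiKey apiKey = apiKeyController.getApiKey(key);
            user = apiKey.getUser();
        } catch (ApiKeyException e) {
            servletResponse.sendError(401, "Could not validate API key");
            return;
        }
    } else {
        user = userFacade.findByEmail(servletRequest.getUserPrincipal().getName());
    }
    if (user == null) {
        // Fail closed when the caller cannot be resolved to a user.
        servletResponse.sendError(403, notAuthorizedMessage);
        return;
    }

    String uri = servletRequest.getRequestURI();
    Pattern appPattern = Pattern.compile("(application_.*?_\\d*)");
    Matcher appMatcher = appPattern.matcher(uri);
    if (!appMatcher.find()) {
        servletResponse.sendError(404, notRunningMessage);
        return;
    }
    String appId = appMatcher.group(1);

    YarnApplicationstate appState = yarnApplicationstateFacade.findByAppId(appId);
    if (appState == null) {
        // Unknown application id: answer like a cluster that is not running instead of failing with an NPE.
        servletResponse.sendError(404, notRunningMessage);
        return;
    }
    // If job is not running, show relevant message
    String appsmState = appState.getAppsmstate();
    if (!Strings.isNullOrEmpty(appsmState)) {
        YarnApplicationState yarnState = YarnApplicationState.valueOf(appsmState);
        if (yarnState == YarnApplicationState.FAILED || yarnState == YarnApplicationState.FINISHED || yarnState == YarnApplicationState.KILLED) {
            servletResponse.sendError(404, notRunningMessage);
            return;
        }
    }

    // Validate user is authorized to access this yarn app: the user must be a member of the
    // project that owns it.
    HdfsUsers hdfsUser = hdfsUsersFacade.findByName(appState.getAppuser());
    if (hdfsUser == null) {
        // Owner cannot be resolved, so membership cannot be established: deny.
        servletResponse.sendError(403, notAuthorizedMessage);
        return;
    }
    if (!projectTeamFacade.isUserMemberOfProject(projectFacade.findByName(hdfsUser.getProject()), user)) {
        servletResponse.sendError(403, notAuthorizedMessage);
        // sendError() does not stop execution; without this return the request would still be
        // forwarded to the Flink master after the 403.
        return;
    }

    String flinkMasterURL = flinkMasterAddrCache.get(appId);
    if (Strings.isNullOrEmpty(flinkMasterURL)) {
        servletResponse.sendError(404, notRunningMessage);
        return;
    }
    String theHost = "http://" + flinkMasterURL;
    URI targetUriHost;
    // NOTE(review): targetUri, targetUriObj and targetHost are not locals of this method, so they
    // are fields written on every request. If this servlet instance serves requests concurrently,
    // one request could observe another request's target; the per-request attributes set below
    // are the safer carrier. Confirm how the superclass reads them.
    targetUri = theHost;
    try {
        targetUriObj = new URI(targetUri);
        targetUriHost = new URI(theHost);
    } catch (Exception e) {
        LOGGER.log(Level.INFO, "An error occurred serving the request", e);
        // Report the failure instead of returning an empty response with no error status.
        servletResponse.sendError(500, "An error occurred serving the request");
        return;
    }
    targetHost = URIUtils.extractHost(targetUriHost);
    servletRequest.setAttribute(ATTR_TARGET_URI, targetUri);
    servletRequest.setAttribute(ATTR_TARGET_HOST, targetHost);
    servletRequest.setAttribute(ATTR_HOST_PORT, flinkMasterURL);
    super.service(servletRequest, servletResponse);
}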
Also used : ApiKeyException(io.hops.hopsworks.exceptions.ApiKeyException) Pattern(java.util.regex.Pattern) ApiKey(io.hops.hopsworks.persistence.entity.user.security.apiKey.ApiKey) Matcher(java.util.regex.Matcher) YarnApplicationstate(io.hops.hopsworks.persistence.entity.jobs.history.YarnApplicationstate) HdfsUsers(io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers) Users(io.hops.hopsworks.persistence.entity.user.Users) URI(java.net.URI) HdfsUsers(io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers) ServletException(javax.servlet.ServletException) ApiKeyException(io.hops.hopsworks.exceptions.ApiKeyException) IOException(java.io.IOException)
