Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project dr-elephant by linkedin.
The class AnalyticJobGeneratorHadoop2, method updateResourceManagerAddresses:
public void updateResourceManagerAddresses() {
  if (Boolean.valueOf(configuration.get(IS_RM_HA_ENABLED))) {
    String resourceManagers = configuration.get(RESOURCE_MANAGER_IDS);
    if (resourceManagers != null) {
      logger.info("The list of RM IDs are " + resourceManagers);
      List<String> ids = Arrays.asList(resourceManagers.split(","));
      _currentTime = System.currentTimeMillis();
      updateAuthToken();
      for (String id : ids) {
        try {
          String resourceManager = configuration.get(RESOURCE_MANAGER_ADDRESS + "." + id);
          String resourceManagerURL = String.format(RM_NODE_STATE_URL, resourceManager);
          logger.info("Checking RM URL: " + resourceManagerURL);
          JsonNode rootNode = readJsonNode(new URL(resourceManagerURL));
          String status = rootNode.path("clusterInfo").path("haState").getValueAsText();
          if (status.equals("ACTIVE")) {
            logger.info(resourceManager + " is ACTIVE");
            _resourceManagerAddress = resourceManager;
            break;
          } else {
            logger.info(resourceManager + " is STANDBY");
          }
        } catch (AuthenticationException e) {
          logger.info("Error fetching resource manager " + id + " state " + e.getMessage());
        } catch (IOException e) {
          logger.info("Error fetching Json for resource manager " + id + " status " + e.getMessage());
        }
      }
    }
  } else {
    _resourceManagerAddress = configuration.get(RESOURCE_MANAGER_ADDRESS);
  }
  if (_resourceManagerAddress == null) {
    throw new RuntimeException("Cannot get YARN resource manager address from Hadoop Configuration property: [" + RESOURCE_MANAGER_ADDRESS + "].");
  }
}
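The helpers readJsonNode and updateAuthToken are not part of the listing above. A minimal sketch of what they could look like, assuming Hadoop's AuthenticatedURL for the SPNEGO handshake and Jackson's ObjectMapper for parsing; the field names other than _currentTime and the 14-minute refresh window are assumptions, not taken from dr-elephant:

private AuthenticatedURL.Token _token = new AuthenticatedURL.Token();
private final AuthenticatedURL _authenticatedUrl = new AuthenticatedURL();
private final ObjectMapper _objectMapper = new ObjectMapper();
private long _tokenUpdatedTime;

// Fetches and parses JSON from a Kerberos-protected YARN REST endpoint.
private JsonNode readJsonNode(URL url) throws IOException, AuthenticationException {
  HttpURLConnection conn = _authenticatedUrl.openConnection(url, _token);
  return _objectMapper.readTree(conn.getInputStream());
}

// Replaces the SPNEGO token once it is older than the assumed refresh
// window, forcing a fresh negotiation on the next openConnection call.
private void updateAuthToken() {
  if (_currentTime - _tokenUpdatedTime > 14 * 60 * 1000) {
    _token = new AuthenticatedURL.Token();
    _tokenUpdatedTime = _currentTime;
  }
}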
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project dr-elephant by linkedin.
The class MapReduceFetcherHadoop2, method fetchData:
@Override
public MapReduceApplicationData fetchData(AnalyticJob analyticJob) throws IOException, AuthenticationException {
  String appId = analyticJob.getAppId();
  MapReduceApplicationData jobData = new MapReduceApplicationData();
  String jobId = Utils.getJobIdFromApplicationId(appId);
  jobData.setAppId(appId).setJobId(jobId);
  // Change job tracking url to job history page
  analyticJob.setTrackingUrl(_jhistoryWebAddr + jobId);
  try {
    // Fetch job config
    Properties jobConf = _jsonFactory.getProperties(_urlFactory.getJobConfigURL(jobId));
    jobData.setJobConf(jobConf);
    URL jobURL = _urlFactory.getJobURL(jobId);
    String state = _jsonFactory.getState(jobURL);
    jobData.setSubmitTime(_jsonFactory.getSubmitTime(jobURL));
    jobData.setStartTime(_jsonFactory.getStartTime(jobURL));
    jobData.setFinishTime(_jsonFactory.getFinishTime(jobURL));
    if (state.equals("SUCCEEDED")) {
      jobData.setSucceeded(true);
      // Fetch job counter
      MapReduceCounterData jobCounter = _jsonFactory.getJobCounter(_urlFactory.getJobCounterURL(jobId));
      // Fetch task data
      URL taskListURL = _urlFactory.getTaskListURL(jobId);
      List<MapReduceTaskData> mapperList = new ArrayList<MapReduceTaskData>();
      List<MapReduceTaskData> reducerList = new ArrayList<MapReduceTaskData>();
      _jsonFactory.getTaskDataAll(taskListURL, jobId, mapperList, reducerList);
      MapReduceTaskData[] mapperData = mapperList.toArray(new MapReduceTaskData[mapperList.size()]);
      MapReduceTaskData[] reducerData = reducerList.toArray(new MapReduceTaskData[reducerList.size()]);
      jobData.setCounters(jobCounter).setMapperData(mapperData).setReducerData(reducerData);
    } else if (state.equals("FAILED")) {
      jobData.setSucceeded(false);
      // Fetch job counter
      MapReduceCounterData jobCounter = _jsonFactory.getJobCounter(_urlFactory.getJobCounterURL(jobId));
      // Fetch task data
      URL taskListURL = _urlFactory.getTaskListURL(jobId);
      List<MapReduceTaskData> mapperList = new ArrayList<MapReduceTaskData>();
      List<MapReduceTaskData> reducerList = new ArrayList<MapReduceTaskData>();
      _jsonFactory.getTaskDataAll(taskListURL, jobId, mapperList, reducerList);
      MapReduceTaskData[] mapperData = mapperList.toArray(new MapReduceTaskData[mapperList.size()]);
      MapReduceTaskData[] reducerData = reducerList.toArray(new MapReduceTaskData[reducerList.size()]);
      jobData.setCounters(jobCounter).setMapperData(mapperData).setReducerData(reducerData);
      String diagnosticInfo;
      try {
        diagnosticInfo = parseException(jobData.getJobId(), _jsonFactory.getDiagnosticInfo(jobURL));
      } catch (Exception e) {
        diagnosticInfo = null;
        logger.warn("Failed getting diagnostic info for failed job " + jobData.getJobId());
      }
      jobData.setDiagnosticInfo(diagnosticInfo);
    } else {
      // Should not reach here
      throw new RuntimeException("Job state not supported. Should be either SUCCEEDED or FAILED");
    }
  } finally {
    ThreadContextMR2.updateAuthToken();
  }
  return jobData;
}
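A hedged sketch of how a caller might drive this fetcher; the constructor argument, the analyticJob variable, and the requeue helper are illustrative assumptions, not dr-elephant API:

ElephantFetcher<MapReduceApplicationData> fetcher = new MapReduceFetcherHadoop2(fetcherConfData); // assumed constructor
try {
  MapReduceApplicationData data = fetcher.fetchData(analyticJob);
  // ... run heuristics over data ...
} catch (AuthenticationException e) {
  // The SPNEGO token was rejected by the job history server; requeue the
  // job so it is retried after the token has been refreshed.
  requeue(analyticJob); // hypothetical helper
}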
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project zeppelin by apache.
The class KerberosRealm, method doKerberosAuth:
/**
* If the request has a valid authentication token it allows the request to continue to
* the target resource,
* otherwise it triggers a GSS-API sequence for authentication
*
* @param request the request object.
* @param response the response object.
* @param filterChain the filter chain object.
* @throws IOException thrown if an IO error occurred.
* @throws ServletException thrown if a processing error occurred.
*/
public void doKerberosAuth(ServletRequest request, ServletResponse response, FilterChain filterChain) throws IOException, ServletException {
  boolean unauthorizedResponse = true;
  int errCode = HttpServletResponse.SC_UNAUTHORIZED;
  AuthenticationException authenticationEx = null;
  HttpServletRequest httpRequest = (HttpServletRequest) request;
  HttpServletResponse httpResponse = (HttpServletResponse) response;
  boolean isHttps = "https".equals(httpRequest.getScheme());
  try {
    boolean newToken = false;
    AuthenticationToken token;
    try {
      token = getToken(httpRequest);
      if (LOG.isDebugEnabled()) {
        LOG.debug("Got token {} from httpRequest {}", token, getRequestURL(httpRequest));
        if (null != token) {
          LOG.debug("token.isExpired() = " + token.isExpired());
        }
      }
    } catch (AuthenticationException ex) {
      LOG.warn("AuthenticationToken ignored: " + ex.getMessage());
      if (!ex.getMessage().equals("Empty token")) {
        // will be sent back in a 401 unless filter authenticates
        authenticationEx = ex;
      }
      token = null;
    }
    if (managementOperation(token, httpRequest, httpResponse)) {
      if (token == null || token.isExpired()) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Request [{}] triggering authentication. handler: {}", getRequestURL(httpRequest), this.getClass());
        }
        token = authenticate(httpRequest, httpResponse);
        if (token != null && token != AuthenticationToken.ANONYMOUS) {
          if (token.getExpires() != 0) {
            token.setExpires(System.currentTimeMillis() + getTokenValidity() * 1000);
          }
        }
        newToken = true;
      }
      if (token != null) {
        unauthorizedResponse = false;
        if (LOG.isDebugEnabled()) {
          LOG.debug("Request [{}] user [{}] authenticated", getRequestURL(httpRequest), token.getUserName());
        }
        final AuthenticationToken authToken = token;
        httpRequest = new HttpServletRequestWrapper(httpRequest) {
          @Override
          public String getAuthType() {
            return authToken.getType();
          }

          @Override
          public String getRemoteUser() {
            return authToken.getUserName();
          }

          @Override
          public Principal getUserPrincipal() {
            return (authToken != AuthenticationToken.ANONYMOUS) ? authToken : null;
          }
        };
        // If the token is an old one, renew its tokenMaxInactiveInterval.
        if (!newToken && !isCookiePersistent() && getTokenMaxInactiveInterval() > 0) {
          // TODO(vr): uncomment when we move to Hadoop 2.8+
          // token.setMaxInactives(System.currentTimeMillis()
          //     + getTokenMaxInactiveInterval() * 1000);
          token.setExpires(token.getExpires());
          newToken = true;
        }
        if (newToken && !token.isExpired() && token != AuthenticationToken.ANONYMOUS) {
          String signedToken = signer.sign(token.toString());
          createAuthCookie(httpResponse, signedToken, getCookieDomain(), getCookiePath(), token.getExpires(), isCookiePersistent(), isHttps);
        }
        KerberosToken kerberosToken = new KerberosToken(token.getUserName(), token.toString());
        SecurityUtils.getSubject().login(kerberosToken);
        doFilter(filterChain, httpRequest, httpResponse);
      }
    } else {
      if (LOG.isDebugEnabled()) {
        LOG.debug("managementOperation returned false for request {}." + " token: {}", getRequestURL(httpRequest), token);
      }
      unauthorizedResponse = false;
    }
  } catch (AuthenticationException ex) {
    // exception from the filter itself is fatal
    errCode = HttpServletResponse.SC_FORBIDDEN;
    authenticationEx = ex;
    if (LOG.isDebugEnabled()) {
      LOG.debug("Authentication exception: " + ex.getMessage(), ex);
    } else {
      LOG.warn("Authentication exception: " + ex.getMessage());
    }
  }
  if (unauthorizedResponse) {
    if (!httpResponse.isCommitted()) {
      createAuthCookie(httpResponse, "", getCookieDomain(), getCookiePath(), 0, isCookiePersistent(), isHttps);
      // If the response code is 401, the WWW-Authenticate header should be
      // present.. reset to 403 if not found..
      if ((errCode == HttpServletResponse.SC_UNAUTHORIZED) && (!httpResponse.containsHeader(KerberosAuthenticator.WWW_AUTHENTICATE))) {
        errCode = HttpServletResponse.SC_FORBIDDEN;
      }
      if (authenticationEx == null) {
        httpResponse.sendError(errCode, "Authentication required");
      } else {
        httpResponse.sendError(errCode, authenticationEx.getMessage());
      }
    }
  }
}
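doKerberosAuth has a servlet-filter signature even though it lives in a Shiro realm. A minimal sketch of how a javax.servlet.Filter could delegate to it; the filter class and constructor injection of the realm are assumptions, not Zeppelin code:

public class KerberosAuthFilter implements Filter {
  private final KerberosRealm kerberosRealm;

  public KerberosAuthFilter(KerberosRealm kerberosRealm) {
    this.kerberosRealm = kerberosRealm;
  }

  @Override
  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
      throws IOException, ServletException {
    // Delegates the whole SPNEGO exchange (401/Negotiate round trips,
    // cookie issuance, Shiro login) to the realm.
    kerberosRealm.doKerberosAuth(request, response, chain);
  }

  @Override
  public void init(FilterConfig filterConfig) {}

  @Override
  public void destroy() {}
}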
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project zeppelin by apache.
The class KerberosRealm, method getKerberosTokenFromCookies:
/**
* A parallel implementation to getTokenFromCookies; this variant handles
* cookies of the javax.ws.rs.core.Cookie kind, as read from
* javax.ws.rs.core.HttpHeaders.
*
* Used in {@link org.apache.zeppelin.rest.LoginRestApi}::getLogin()
*
* @param cookies - Cookie(s) map read from HttpHeaders
* @return {@link KerberosToken} if available in AUTHORIZATION cookie
*
* @throws org.apache.shiro.authc.AuthenticationException
*/
public static KerberosToken getKerberosTokenFromCookies(Map<String, javax.ws.rs.core.Cookie> cookies) throws org.apache.shiro.authc.AuthenticationException {
  KerberosToken kerberosToken = null;
  String tokenStr = null;
  if (cookies != null) {
    for (javax.ws.rs.core.Cookie cookie : cookies.values()) {
      if (cookie.getName().equals(KerberosAuthenticator.AUTHORIZATION)) {
        tokenStr = cookie.getValue();
        if (tokenStr.isEmpty()) {
          throw new org.apache.shiro.authc.AuthenticationException("Empty token");
        }
        try {
          tokenStr = tokenStr.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
        } catch (Exception ex) {
          throw new org.apache.shiro.authc.AuthenticationException(ex);
        }
        break;
      }
    }
  }
  if (tokenStr != null) {
    try {
      AuthenticationToken authToken = AuthenticationToken.parse(tokenStr);
      boolean match = verifyTokenType(authToken);
      if (!match) {
        throw new org.apache.shiro.authc.AuthenticationException("Invalid AuthenticationToken type");
      }
      if (authToken.isExpired()) {
        throw new org.apache.shiro.authc.AuthenticationException("AuthenticationToken expired");
      }
      kerberosToken = new KerberosToken(authToken.getUserName(), tokenStr);
    } catch (AuthenticationException ex) {
      throw new org.apache.shiro.authc.AuthenticationException(ex);
    }
  }
  return kerberosToken;
}
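A hedged sketch of the LoginRestApi-style usage the javadoc refers to; the endpoint shape and response handling are assumptions for illustration:

@GET
@Path("/login")
public Response getLogin(@Context HttpHeaders headers) {
  try {
    KerberosToken token = KerberosRealm.getKerberosTokenFromCookies(headers.getCookies());
    if (token != null) {
      // Hand the cookie-derived token to Shiro for the actual login.
      SecurityUtils.getSubject().login(token);
      return Response.ok().build();
    }
  } catch (org.apache.shiro.authc.AuthenticationException e) {
    // Empty, malformed, wrong-type, or expired token: fall through to 401.
  }
  return Response.status(Response.Status.UNAUTHORIZED).build();
}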
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project hbase by apache.
The class Client, method negotiate:
/**
* Initiate client side Kerberos negotiation with the server.
* @param method method to inject the authentication token into.
* @param uri the String to parse as a URL.
* @throws IOException if unknown protocol is found.
*/
private void negotiate(HttpUriRequest method, String uri) throws IOException {
  try {
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    KerberosAuthenticator authenticator = new KerberosAuthenticator();
    authenticator.authenticate(new URL(uri), token);
    // Inject the obtained negotiated token in the method cookie
    injectToken(method, token);
  } catch (AuthenticationException e) {
    LOG.error("Failed to negotiate with the server.", e);
    throw new IOException(e);
  }
}
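The injectToken helper is not shown in the listing. A minimal sketch of what it could look like, placing the negotiated token in the request's Cookie header under Hadoop's hadoop.auth cookie name; the quoting rule here is an assumption:

private void injectToken(HttpUriRequest method, AuthenticatedURL.Token token) {
  String t = token.toString();
  if (t != null) {
    if (!t.startsWith("\"")) {
      // Quote the value so embedded '=' and '&' survive cookie parsing.
      t = "\"" + t + "\"";
    }
    method.addHeader("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + t);
  }
}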