Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project hadoop by apache.
The class AuthToken, method split().
/**
 * Splits the string representation of a token into attribute pairs.
 *
 * @param tokenStr string representation of a token.
 *
 * @return a map with the attribute pairs of the token.
 *
 * @throws AuthenticationException thrown if the string representation of the token could not be
 * broken into attribute pairs.
 */
private static Map<String, String> split(String tokenStr) throws AuthenticationException {
  Map<String, String> map = new HashMap<String, String>();
  StringTokenizer st = new StringTokenizer(tokenStr, ATTR_SEPARATOR);
  while (st.hasMoreTokens()) {
    String part = st.nextToken();
    int separator = part.indexOf('=');
    if (separator == -1) {
      throw new AuthenticationException("Invalid authentication token");
    }
    String key = part.substring(0, separator);
    String value = part.substring(separator + 1);
    map.put(key, value);
  }
  return map;
}
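For reference, a minimal self-contained sketch of what split produces. It assumes "&" as the value of ATTR_SEPARATOR and uses illustrative attribute names and values; the checked AuthenticationException is replaced by an unchecked exception so the demo has no hadoop-auth dependency.

import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;

public class TokenSplitDemo {

  // Stand-alone copy of the split logic above, with ATTR_SEPARATOR assumed to be "&".
  static Map<String, String> split(String tokenStr) {
    Map<String, String> map = new HashMap<>();
    StringTokenizer st = new StringTokenizer(tokenStr, "&");
    while (st.hasMoreTokens()) {
      String part = st.nextToken();
      int separator = part.indexOf('=');
      if (separator == -1) {
        throw new IllegalArgumentException("Invalid authentication token");
      }
      map.put(part.substring(0, separator), part.substring(separator + 1));
    }
    return map;
  }

  public static void main(String[] args) {
    // Illustrative token with user, principal, type, and expiry attributes.
    Map<String, String> attrs = split("u=alice&p=alice@EXAMPLE.COM&t=kerberos&e=1700000000000");
    System.out.println(attrs.get("u")); // alice
    System.out.println(attrs.get("e")); // 1700000000000
  }
}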
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project hadoop by apache.
The class TransferFsImage, method uploadImage().
/**
 * Uploads the image file using the HTTP PUT method.
 */
private static void uploadImage(URL url, Configuration conf, NNStorage storage,
    NameNodeFile nnf, long txId, Canceler canceler) throws IOException {
  File imageFile = storage.findImageFile(nnf, txId);
  if (imageFile == null) {
    throw new IOException("Could not find image with txid " + txId);
  }
  HttpURLConnection connection = null;
  try {
    URIBuilder uriBuilder = new URIBuilder(url.toURI());
    // Write all params for the image upload request as the query string itself;
    // the request body contains the image to be uploaded.
    Map<String, String> params = ImageServlet.getParamsForPutImage(storage, txId,
        imageFile.length(), nnf);
    for (Entry<String, String> entry : params.entrySet()) {
      uriBuilder.addParameter(entry.getKey(), entry.getValue());
    }
    URL urlWithParams = uriBuilder.build().toURL();
    connection = (HttpURLConnection) connectionFactory.openConnection(urlWithParams,
        UserGroupInformation.isSecurityEnabled());
    // Set the request method to PUT.
    connection.setRequestMethod("PUT");
    connection.setDoOutput(true);
    int chunkSize = conf.getInt(DFSConfigKeys.DFS_IMAGE_TRANSFER_CHUNKSIZE_KEY,
        DFSConfigKeys.DFS_IMAGE_TRANSFER_CHUNKSIZE_DEFAULT);
    if (imageFile.length() > chunkSize) {
      // Use chunked streaming mode to support uploads of 2 GB+ files and to
      // avoid internal buffering. This mode should be used only when more
      // than chunkSize data is to be uploaded; otherwise the upload may not
      // happen in some cases.
      connection.setChunkedStreamingMode(chunkSize);
    }
    setTimeout(connection);
    // Set headers for verification.
    ImageServlet.setVerificationHeadersForPut(connection, imageFile);
    // Write the file to the output stream.
    writeFileToPutRequest(conf, connection, imageFile, canceler);
    int responseCode = connection.getResponseCode();
    if (responseCode != HttpURLConnection.HTTP_OK) {
      throw new HttpPutFailedException(String.format(
          "Image uploading failed, status: %d, url: %s, message: %s",
          responseCode, urlWithParams, connection.getResponseMessage()), responseCode);
    }
  } catch (AuthenticationException | URISyntaxException e) {
    throw new IOException(e);
  } finally {
    if (connection != null) {
      connection.disconnect();
    }
  }
}
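The chunked-streaming guard is the interesting part: without a streaming mode, HttpURLConnection buffers the whole request body in memory before sending it. Below is a minimal sketch of the same PUT pattern outside Hadoop; the class name is hypothetical, and the URL and chunk size are left to the caller.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class ChunkedPutSketch {

  // Streams a file as the body of an HTTP PUT. Chunked streaming mode is
  // enabled only when the payload is larger than one chunk, matching the
  // guard in uploadImage above; smaller bodies are sent without it.
  public static void put(URL url, File payload, int chunkSize) throws IOException {
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    try {
      conn.setRequestMethod("PUT");
      conn.setDoOutput(true);
      if (payload.length() > chunkSize) {
        conn.setChunkedStreamingMode(chunkSize);
      }
      try (InputStream in = new FileInputStream(payload);
           OutputStream out = conn.getOutputStream()) {
        byte[] buf = new byte[8192];
        for (int n; (n = in.read(buf)) != -1; ) {
          out.write(buf, 0, n);
        }
      }
      int responseCode = conn.getResponseCode();
      if (responseCode != HttpURLConnection.HTTP_OK) {
        throw new IOException("PUT failed, status: " + responseCode);
      }
    } finally {
      conn.disconnect();
    }
  }
}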
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project hadoop by apache.
The class DFSck, method doWork().
private int doWork(final String[] args) throws IOException {
  final StringBuilder url = new StringBuilder();
  url.append("/fsck?ugi=").append(ugi.getShortUserName());
  String dir = null;
  boolean doListCorruptFileBlocks = false;
  for (int idx = 0; idx < args.length; idx++) {
    if (args[idx].equals("-move")) {
      url.append("&move=1");
    } else if (args[idx].equals("-delete")) {
      url.append("&delete=1");
    } else if (args[idx].equals("-files")) {
      url.append("&files=1");
    } else if (args[idx].equals("-openforwrite")) {
      url.append("&openforwrite=1");
    } else if (args[idx].equals("-blocks")) {
      url.append("&blocks=1");
    } else if (args[idx].equals("-locations")) {
      url.append("&locations=1");
    } else if (args[idx].equals("-racks")) {
      url.append("&racks=1");
    } else if (args[idx].equals("-replicaDetails")) {
      url.append("&replicadetails=1");
    } else if (args[idx].equals("-upgradedomains")) {
      url.append("&upgradedomains=1");
    } else if (args[idx].equals("-storagepolicies")) {
      url.append("&storagepolicies=1");
    } else if (args[idx].equals("-showprogress")) {
      url.append("&showprogress=1");
    } else if (args[idx].equals("-list-corruptfileblocks")) {
      url.append("&listcorruptfileblocks=1");
      doListCorruptFileBlocks = true;
    } else if (args[idx].equals("-includeSnapshots")) {
      url.append("&includeSnapshots=1");
    } else if (args[idx].equals("-maintenance")) {
      url.append("&maintenance=1");
    } else if (args[idx].equals("-blockId")) {
      StringBuilder sb = new StringBuilder();
      idx++;
      while (idx < args.length && !args[idx].startsWith("-")) {
        sb.append(args[idx]);
        sb.append(" ");
        idx++;
      }
      url.append("&blockId=").append(URLEncoder.encode(sb.toString(), "UTF-8"));
    } else if (!args[idx].startsWith("-")) {
      if (null == dir) {
        dir = args[idx];
      } else {
        System.err.println("fsck: can only operate on one path at a time '" + args[idx] + "'");
        printUsage(System.err);
        return -1;
      }
    } else {
      System.err.println("fsck: Illegal option '" + args[idx] + "'");
      printUsage(System.err);
      return -1;
    }
  }
  if (null == dir) {
    dir = "/";
  }
  Path dirpath = null;
  URI namenodeAddress = null;
  try {
    dirpath = getResolvedPath(dir);
    namenodeAddress = getCurrentNamenodeAddress(dirpath);
  } catch (IOException ioe) {
    System.err.println("FileSystem is inaccessible due to:\n" + ioe.toString());
  }
  if (namenodeAddress == null) {
    // Error message already output in {@link #getCurrentNamenodeAddress()}
    System.err.println("DFSck exiting.");
    return 0;
  }
  url.insert(0, namenodeAddress.toString());
  url.append("&path=").append(URLEncoder.encode(
      Path.getPathWithoutSchemeAndAuthority(dirpath).toString(), "UTF-8"));
  System.err.println("Connecting to namenode via " + url.toString());
  if (doListCorruptFileBlocks) {
    return listCorruptFileBlocks(dir, url.toString());
  }
  URL path = new URL(url.toString());
  URLConnection connection;
  try {
    connection = connectionFactory.openConnection(path, isSpnegoEnabled);
  } catch (AuthenticationException e) {
    throw new IOException(e);
  }
  InputStream stream = connection.getInputStream();
  BufferedReader input = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
  String line = null;
  String lastLine = null;
  int errCode = -1;
  try {
    while ((line = input.readLine()) != null) {
      out.println(line);
      lastLine = line;
    }
  } finally {
    input.close();
  }
  if (lastLine.endsWith(NamenodeFsck.HEALTHY_STATUS)) {
    errCode = 0;
  } else if (lastLine.endsWith(NamenodeFsck.CORRUPT_STATUS)) {
    errCode = 1;
  } else if (lastLine.endsWith(NamenodeFsck.NONEXISTENT_STATUS)) {
    errCode = 0;
  } else if (lastLine.contains("Incorrect blockId format:")) {
    errCode = 0;
  } else if (lastLine.endsWith(NamenodeFsck.DECOMMISSIONED_STATUS)) {
    errCode = 2;
  } else if (lastLine.endsWith(NamenodeFsck.DECOMMISSIONING_STATUS)) {
    errCode = 3;
  } else if (lastLine.endsWith(NamenodeFsck.IN_MAINTENANCE_STATUS)) {
    errCode = 4;
  } else if (lastLine.endsWith(NamenodeFsck.ENTERING_MAINTENANCE_STATUS)) {
    errCode = 5;
  }
  return errCode;
}
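To make the flag-to-query mapping concrete, here is a small self-contained sketch of how a typical invocation is translated into the fsck query string. The user name, flags, and path are illustrative, and the real method also prepends the resolved NameNode address to the URL.

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class FsckUrlDemo {

  public static void main(String[] args) throws UnsupportedEncodingException {
    // Simulates: hdfs fsck /user/alice -files -blocks, run as user "alice".
    StringBuilder url = new StringBuilder("/fsck?ugi=alice");
    String[] cli = {"-files", "-blocks", "/user/alice"};
    String dir = null;
    for (String arg : cli) {
      if (arg.equals("-files")) {
        url.append("&files=1");
      } else if (arg.equals("-blocks")) {
        url.append("&blocks=1");
      } else if (!arg.startsWith("-")) {
        dir = arg;
      }
    }
    url.append("&path=").append(URLEncoder.encode(dir == null ? "/" : dir, "UTF-8"));
    // Prints: /fsck?ugi=alice&files=1&blocks=1&path=%2Fuser%2Falice
    System.out.println(url);
  }
}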
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project atlas by apache.
The class AtlasAuthenticationFilter, method doKerberosAuth().
/**
 * This method is copied from the Hadoop auth library; code has been added for error handling
 * and for falling back to other authentication methods.
 *
 * If the request has a valid authentication token it allows the request to continue to the target resource,
 * otherwise it triggers an authentication sequence using the configured
 * {@link org.apache.hadoop.security.authentication.server.AuthenticationHandler}.
 *
 * @param request the request object.
 * @param response the response object.
 * @param filterChainWrapper the filter chain invoked when authentication succeeds.
 * @param filterChain the original filter chain, used for proxy users and fallback handling.
 *
 * @throws IOException thrown if an IO error occurred.
 * @throws ServletException thrown if a processing error occurred.
 */
public void doKerberosAuth(ServletRequest request, ServletResponse response,
    FilterChain filterChainWrapper, FilterChain filterChain) throws IOException, ServletException {
  boolean unauthorizedResponse = true;
  int errCode = HttpServletResponse.SC_UNAUTHORIZED;
  AuthenticationException authenticationEx = null;
  HttpServletRequest httpRequest = (HttpServletRequest) request;
  HttpServletResponse httpResponse = (HttpServletResponse) response;
  boolean isHttps = "https".equals(httpRequest.getScheme());
  AuthenticationHandler authHandler = getAuthenticationHandler();
  try {
    boolean newToken = false;
    AuthenticationToken token;
    try {
      token = getToken(httpRequest);
    } catch (AuthenticationException ex) {
      LOG.warn("AuthenticationToken ignored: {}", ex.getMessage());
      // will be sent back in a 401 unless the filter authenticates
      authenticationEx = ex;
      token = null;
    }
    if (authHandler.managementOperation(token, httpRequest, httpResponse)) {
      if (token == null) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Request [{}] triggering authentication", getRequestURL(httpRequest));
        }
        token = authHandler.authenticate(httpRequest, httpResponse);
        if (token != null && token.getExpires() != 0 && token != AuthenticationToken.ANONYMOUS) {
          token.setExpires(System.currentTimeMillis() + getValidity() * 1000);
        }
        newToken = true;
      }
      if (token != null) {
        unauthorizedResponse = false;
        if (LOG.isDebugEnabled()) {
          LOG.debug("Request [{}] user [{}] authenticated", getRequestURL(httpRequest),
              token.getUserName());
        }
        final AuthenticationToken authToken = token;
        httpRequest = new HttpServletRequestWrapper(httpRequest) {

          @Override
          public String getAuthType() {
            return authToken.getType();
          }

          @Override
          public String getRemoteUser() {
            return authToken.getUserName();
          }

          @Override
          public Principal getUserPrincipal() {
            return (authToken != AuthenticationToken.ANONYMOUS) ? authToken : null;
          }
        };
        if (StringUtils.isNotBlank(httpRequest.getRemoteUser())
            && atlasProxyUsers.contains(httpRequest.getRemoteUser())) {
          LOG.info("Ignoring kerberos login from proxy user " + httpRequest.getRemoteUser());
          httpResponse.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, "");
          httpResponse.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
          filterChain.doFilter(request, response);
          return;
        }
        if (newToken && !token.isExpired() && token != AuthenticationToken.ANONYMOUS) {
          String signedToken = signer.sign(token.toString());
          createAuthCookie(httpResponse, signedToken, getCookieDomain(), getCookiePath(),
              token.getExpires(), isHttps);
        }
        filterChainWrapper.doFilter(httpRequest, httpResponse);
      }
    } else {
      unauthorizedResponse = false;
    }
  } catch (AuthenticationException ex) {
    // an exception from the filter itself is fatal
    errCode = HttpServletResponse.SC_FORBIDDEN;
    authenticationEx = ex;
    LOG.warn("Authentication exception: {}", ex.getMessage(), ex);
  }
  if (unauthorizedResponse) {
    if (!httpResponse.isCommitted()) {
      createAuthCookie(httpResponse, "", getCookieDomain(), getCookiePath(), 0, isHttps);
      // If the response code is 401, the WWW-Authenticate header should be
      // present; reset to 403 if it is not found.
      if ((errCode == HttpServletResponse.SC_UNAUTHORIZED)
          && (!httpResponse.containsHeader(KerberosAuthenticator.WWW_AUTHENTICATE))) {
        errCode = HttpServletResponse.SC_FORBIDDEN;
      }
      if (authenticationEx == null) {
        // code added for Atlas error handling and fallback to other auth methods
        if (!supportKeyTabBrowserLogin && isBrowser(httpRequest.getHeader("User-Agent"))) {
          filterChain.doFilter(request, response);
        } else {
          boolean chk = true;
          Collection<String> headerNames = httpResponse.getHeaderNames();
          for (String headerName : headerNames) {
            String value = httpResponse.getHeader(headerName);
            if (headerName.equalsIgnoreCase("Set-Cookie") && value.startsWith("ATLASSESSIONID")) {
              chk = false;
              break;
            }
          }
          String authHeader = httpRequest.getHeader("Authorization");
          if (authHeader == null && chk) {
            filterChain.doFilter(request, response);
          } else if (authHeader != null && authHeader.startsWith("Basic")) {
            filterChain.doFilter(request, response);
          }
        }
      } else {
        httpResponse.sendError(errCode, authenticationEx.getMessage());
      }
    }
  }
}
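The HttpServletRequestWrapper idiom in the middle of this method is worth isolating: once the token is validated, downstream filters and servlets see the authenticated identity through the standard servlet API (AuthenticationToken itself implements Principal, which is why it can be returned from getUserPrincipal). A minimal sketch of the same pattern follows; this class is hypothetical, not part of Atlas.

import java.security.Principal;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;

public class AuthenticatedRequest extends HttpServletRequestWrapper {

  private final Principal principal;
  private final String authType;

  public AuthenticatedRequest(HttpServletRequest request, Principal principal, String authType) {
    super(request);
    this.principal = principal;
    this.authType = authType;
  }

  @Override
  public String getAuthType() {
    return authType;
  }

  @Override
  public String getRemoteUser() {
    return principal != null ? principal.getName() : null;
  }

  @Override
  public Principal getUserPrincipal() {
    return principal;
  }
}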
Use of org.apache.hadoop.security.authentication.client.AuthenticationException in project oozie by apache.
The class AuthOozieClient, method createConnection().
/**
 * Create an authenticated connection to the Oozie server.
 * <p>
 * It uses Hadoop-auth client authentication, which by default supports
 * Kerberos HTTP SPNEGO, Pseudo/Simple and anonymous.
 * <p>
 * If the Java system property {@link #USE_AUTH_TOKEN_CACHE_SYS_PROP} is set to true, the Hadoop-auth
 * authentication token will be cached in, and read from, the '.oozie-auth-token' file in the user's
 * home directory.
 *
 * @param url the URL to open an HTTP connection to.
 * @param method the HTTP method for the HTTP connection.
 * @return an authenticated connection to the Oozie server.
 * @throws IOException if an IO error occurred.
 * @throws OozieClientException if an Oozie client error occurred.
 */
@Override
protected HttpURLConnection createConnection(URL url, String method) throws IOException, OozieClientException {
  boolean useAuthFile = System.getProperty(USE_AUTH_TOKEN_CACHE_SYS_PROP, "false").equalsIgnoreCase("true");
  AuthenticatedURL.Token readToken = null;
  AuthenticatedURL.Token currentToken = null;
  // Read the token in from the cache file
  if (useAuthFile) {
    readToken = readAuthToken();
  }
  if (readToken == null) {
    currentToken = new AuthenticatedURL.Token();
  } else {
    currentToken = new AuthenticatedURL.Token(readToken.toString());
  }
  // If the token expires within the next five minutes, discard it (and the
  // cache file, if we are using it)
  if (currentToken.isSet()) {
    long expires = getExpirationTime(currentToken);
    if (expires < System.currentTimeMillis() + 300000) {
      if (useAuthFile) {
        AUTH_TOKEN_CACHE_FILE.delete();
      }
      currentToken = new AuthenticatedURL.Token();
    }
  }
  // If we have a token, double check with the Server to make sure it hasn't expired yet
  if (currentToken.isSet()) {
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("OPTIONS");
    AuthenticatedURL.injectToken(conn, currentToken);
    if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED
        || conn.getResponseCode() == HttpURLConnection.HTTP_FORBIDDEN) {
      if (useAuthFile) {
        AUTH_TOKEN_CACHE_FILE.delete();
      }
      currentToken = new AuthenticatedURL.Token();
    } else {
      // The token is still valid; pick up any renewed token the Server sent
      // back so we can use that one later.
      try {
        AuthenticatedURL.extractToken(conn, currentToken);
      } catch (AuthenticationException ex) {
        if (useAuthFile) {
          AUTH_TOKEN_CACHE_FILE.delete();
        }
        currentToken = new AuthenticatedURL.Token();
      }
    }
  }
  // If we didn't have a token, or it had expired, let's get a new one from the Server using the
  // configured Authenticator
  if (!currentToken.isSet()) {
    Authenticator authenticator = getAuthenticator();
    try {
      authenticator.authenticate(url, currentToken);
    } catch (AuthenticationException ex) {
      if (useAuthFile) {
        AUTH_TOKEN_CACHE_FILE.delete();
      }
      throw new OozieClientException(OozieClientException.AUTHENTICATION,
          "Could not authenticate, " + ex.getMessage(), ex);
    }
  }
  // If we got a new token, save it to the cache file
  if (useAuthFile && currentToken.isSet() && !currentToken.equals(readToken)) {
    writeAuthToken(currentToken);
  }
  // Now create a connection using the token and return it to the caller
  HttpURLConnection conn = super.createConnection(url, method);
  AuthenticatedURL.injectToken(conn, currentToken);
  return conn;
}
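The reuse-or-refresh flow can be sketched compactly. The helper below mirrors the OPTIONS probe in createConnection using only the hadoop-auth AuthenticatedURL API; the class and method names, and the omission of the cache-file handling, are this sketch's simplifications.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.AuthenticationException;

public class TokenProbeSketch {

  // Returns true if the server still accepts the token; on the way out it
  // also picks up any renewed token the server sent back. A false return
  // means the caller should authenticate from scratch.
  static boolean probeTokenStillValid(URL url, AuthenticatedURL.Token token) throws IOException {
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("OPTIONS");
    AuthenticatedURL.injectToken(conn, token);
    int rc = conn.getResponseCode();
    if (rc == HttpURLConnection.HTTP_UNAUTHORIZED || rc == HttpURLConnection.HTTP_FORBIDDEN) {
      return false; // the server rejected the token outright
    }
    try {
      AuthenticatedURL.extractToken(conn, token);
    } catch (AuthenticationException ex) {
      return false; // no usable token came back
    }
    return true;
  }
}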