Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.QueryStringDecoder in project hadoop by apache.
From the class WebHdfsHandler, the method channelRead0:
@Override
public void channelRead0(final ChannelHandlerContext ctx, final HttpRequest req)
    throws Exception {
  Preconditions.checkArgument(req.getUri().startsWith(WEBHDFS_PREFIX));
  QueryStringDecoder queryString = new QueryStringDecoder(req.getUri());
  params = new ParameterParser(queryString, conf);
  DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
  ugi = ugiProvider.ugi();
  path = params.path();

  injectToken();
  ugi.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
      try {
        handle(ctx, req);
      } finally {
        String host = null;
        try {
          host = ((InetSocketAddress) ctx.channel().remoteAddress())
              .getAddress().getHostAddress();
        } catch (Exception e) {
          LOG.warn("Error retrieving hostname: ", e);
          host = "unknown";
        }
        REQLOG.info(host + " " + req.getMethod() + " " + req.getUri()
            + " " + getResponseCode());
      }
      return null;
    }
  });
}
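For context, QueryStringDecoder splits a request URI into a percent-decoded path and a multimap of query parameters, which is what ParameterParser builds on. A minimal standalone sketch (written against the unshaded io.netty artifact; the WebHDFS-style URI below is just an illustrative input):

import io.netty.handler.codec.http.QueryStringDecoder;

import java.util.List;
import java.util.Map;

public class QueryStringDecoderDemo {
  public static void main(String[] args) {
    // Decode a WebHDFS-style URI into its path and query parameters.
    QueryStringDecoder d =
        new QueryStringDecoder("/webhdfs/v1/foo/bar?op=OPEN&offset=0&length=1024");

    System.out.println(d.path());  // /webhdfs/v1/foo/bar

    // parameters() returns a Map<String, List<String>>; repeated keys accumulate.
    for (Map.Entry<String, List<String>> e : d.parameters().entrySet()) {
      System.out.println(e.getKey() + " -> " + e.getValue());
    }
  }
}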
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.QueryStringDecoder in project hadoop by apache.
From the class TestDataNodeUGIProvider, the method testUGICacheSecure:
@Test
public void testUGICacheSecure() throws Exception {
  // Fake turning on security so the API thinks it should use tokens.
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
  UserGroupInformation.setLoginUser(ugi);

  List<Token<DelegationTokenIdentifier>> tokens = Lists.newArrayList();
  getWebHdfsFileSystem(ugi, conf, tokens);

  String uri1 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&",
          new NamenodeAddressParam("127.0.0.1:1010"),
          new OffsetParam((long) OFFSET),
          new LengthParam((long) LENGTH),
          new DelegationParam(tokens.get(0).encodeToUrlString()));
  String uri2 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&",
          new NamenodeAddressParam("127.0.0.1:1010"),
          new OffsetParam((long) OFFSET),
          new LengthParam((long) LENGTH),
          new DelegationParam(tokens.get(1).encodeToUrlString()));

  DataNodeUGIProvider ugiProvider1 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri1)), conf));
  UserGroupInformation ugi11 = ugiProvider1.ugi();
  UserGroupInformation ugi12 = ugiProvider1.ugi();
  Assert.assertEquals(
      "With the UGI cache, two UGIs returned for the same token should be the same",
      ugi11, ugi12);

  DataNodeUGIProvider ugiProvider2 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri2)), conf));
  UserGroupInformation ugi21 = ugiProvider2.ugi();
  UserGroupInformation ugi22 = ugiProvider2.ugi();
  Assert.assertEquals(
      "With the UGI cache, two UGIs returned for the same token should be the same",
      ugi21, ugi22);
  Assert.assertNotEquals(
      "With the UGI cache, UGIs for different tokens should not be the same",
      ugi11, ugi22);

  ugiProvider2.clearCache();
  awaitCacheEmptyDueToExpiration();
  ugi12 = ugiProvider1.ugi();
  ugi22 = ugiProvider2.ugi();
  String msg =
      "After cache eviction, two UGIs returned for the same token should not be the same";
  Assert.assertNotEquals(msg, ugi11, ugi12);
  Assert.assertNotEquals(msg, ugi21, ugi22);
  Assert.assertNotEquals(
      "With the UGI cache, UGIs for different tokens should not be the same",
      ugi11, ugi22);
}
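The eviction the test waits for matches expire-after-access cache semantics: an entry that is not touched within its expiry window is dropped, so the next lookup creates a fresh value. A hypothetical sketch of that pattern with a Guava cache (the key, timeout, and loader here are illustrative, not Hadoop's actual code):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import java.util.concurrent.TimeUnit;

public class ExpiringCacheSketch {
  public static void main(String[] args) throws Exception {
    // Entries disappear if not read within the expiry window, so a later
    // lookup for the same key produces a freshly created value.
    Cache<String, Object> cache = CacheBuilder.newBuilder()
        .expireAfterAccess(2, TimeUnit.SECONDS)
        .build();

    Object first = cache.get("token-1", Object::new);
    Object again = cache.get("token-1", Object::new);
    System.out.println(first == again);  // true: served from the cache

    Thread.sleep(2500);                  // let the entry expire
    cache.cleanUp();
    Object later = cache.get("token-1", Object::new);
    System.out.println(first == later);  // false: rebuilt after eviction
  }
}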
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.QueryStringDecoder in project hadoop by apache.
From the class TestDataNodeUGIProvider, the method testUGICacheInSecure:
@Test
public void testUGICacheInSecure() throws Exception {
  String uri1 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&",
          new OffsetParam((long) OFFSET),
          new LengthParam((long) LENGTH),
          new UserParam("root"));
  String uri2 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&",
          new OffsetParam((long) OFFSET),
          new LengthParam((long) LENGTH),
          new UserParam("hdfs"));

  DataNodeUGIProvider ugiProvider1 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri1)), conf));
  UserGroupInformation ugi11 = ugiProvider1.ugi();
  UserGroupInformation ugi12 = ugiProvider1.ugi();
  Assert.assertEquals(
      "With the UGI cache, two UGIs returned for the same user should be the same",
      ugi11, ugi12);

  DataNodeUGIProvider ugiProvider2 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri2)), conf));
  UserGroupInformation ugi21 = ugiProvider2.ugi();
  UserGroupInformation ugi22 = ugiProvider2.ugi();
  Assert.assertEquals(
      "With the UGI cache, two UGIs returned for the same user should be the same",
      ugi21, ugi22);
  Assert.assertNotEquals(
      "With the UGI cache, UGIs for different users should not be the same",
      ugi11, ugi22);

  awaitCacheEmptyDueToExpiration();
  ugi12 = ugiProvider1.ugi();
  ugi22 = ugiProvider2.ugi();
  String msg =
      "After cache eviction, two UGIs returned for the same user should not be the same";
  Assert.assertNotEquals(msg, ugi11, ugi12);
  Assert.assertNotEquals(msg, ugi21, ugi22);
  Assert.assertNotEquals(
      "With the UGI cache, UGIs for different users should not be the same",
      ugi11, ugi22);
}
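These assertions work because UserGroupInformation equality is identity-based on the underlying JAAS Subject rather than on the user name, so only a cache hit can hand back an equal UGI. A small sketch of that behavior:

import org.apache.hadoop.security.UserGroupInformation;

public class UgiEqualityDemo {
  public static void main(String[] args) {
    // UGI.equals compares the underlying Subject by identity, so two
    // independently created UGIs for the same user name are not equal.
    UserGroupInformation a = UserGroupInformation.createRemoteUser("root");
    UserGroupInformation b = UserGroupInformation.createRemoteUser("root");
    System.out.println(a.equals(b));  // false: distinct Subjects
    System.out.println(a.equals(a));  // true
  }
}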
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.QueryStringDecoder in project hadoop by apache.
From the class TestParameterParser, the method testDeserializeHAToken:
@Test
public void testDeserializeHAToken() throws IOException {
  Configuration conf = DFSTestUtil.newHAConfiguration(LOGICAL_NAME);
  final Token<DelegationTokenIdentifier> token =
      new Token<DelegationTokenIdentifier>();
  QueryStringDecoder decoder = new QueryStringDecoder(
      WebHdfsHandler.WEBHDFS_PREFIX + "/?"
          + NamenodeAddressParam.NAME + "=" + LOGICAL_NAME + "&"
          + DelegationParam.NAME + "=" + token.encodeToUrlString());
  ParameterParser testParser = new ParameterParser(decoder, conf);
  final Token<DelegationTokenIdentifier> tok2 = testParser.delegationToken();
  Assert.assertTrue(HAUtilClient.isTokenForLogicalUri(tok2));
}
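The DelegationParam carries the token in the compact URL-safe form produced by Token.encodeToUrlString(), and ParameterParser decodes it back on the other side. A minimal round-trip sketch of that serialization:

import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.security.token.Token;

public class TokenRoundTrip {
  public static void main(String[] args) throws Exception {
    Token<DelegationTokenIdentifier> token = new Token<>();

    // Serialize to a URL-safe string suitable for a WebHDFS query parameter.
    String encoded = token.encodeToUrlString();

    // Rebuild an equivalent token from the string.
    Token<DelegationTokenIdentifier> copy = new Token<>();
    copy.decodeFromUrlString(encoded);
    System.out.println(token.equals(copy));  // true: same identifier bytes
  }
}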
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.QueryStringDecoder in project hadoop by apache.
From the class TestParameterParser, the method testDecodePath:
@Test
public void testDecodePath() {
  final String ESCAPED_PATH = "/test%25+1%26%3Dtest?op=OPEN&foo=bar";
  final String EXPECTED_PATH = "/test%+1&=test";
  Configuration conf = new Configuration();
  QueryStringDecoder decoder = new QueryStringDecoder(
      WebHdfsHandler.WEBHDFS_PREFIX + ESCAPED_PATH);
  ParameterParser testParser = new ParameterParser(decoder, conf);
  Assert.assertEquals(EXPECTED_PATH, testParser.path());
}
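The expected value follows from how the path component is decoded: percent-escapes such as %25, %26, and %3D are resolved, while '+' is left intact, since '+' only means a space inside the query string. A standalone check of that behavior, assuming a recent Netty 4.1 QueryStringDecoder:

import io.netty.handler.codec.http.QueryStringDecoder;

public class DecodePathDemo {
  public static void main(String[] args) {
    QueryStringDecoder d =
        new QueryStringDecoder("/webhdfs/v1/test%25+1%26%3Dtest?op=OPEN&foo=bar");

    // %25 -> '%', %26 -> '&', %3D -> '='; '+' stays '+' in the path.
    System.out.println(d.path());        // /webhdfs/v1/test%+1&=test
    System.out.println(d.parameters());  // {op=[OPEN], foo=[bar]}
  }
}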