Use of org.apache.hadoop.hdfs.web.resources.OffsetParam in project hadoop by apache.
From the class TestWebHDFS, the method testWebHdfsOffsetAndLength:
@Test
public void testWebHdfsOffsetAndLength() throws Exception {
  MiniDFSCluster cluster = null;
  final Configuration conf = WebHdfsTestUtil.createConf();
  final int OFFSET = 42;
  final int LENGTH = 512;
  final String PATH = "/foo";
  byte[] CONTENTS = new byte[1024];
  RANDOM.nextBytes(CONTENTS);
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    final WebHdfsFileSystem fs =
        WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
    try (OutputStream os = fs.create(new Path(PATH))) {
      os.write(CONTENTS);
    }
    // Build an OPEN URL carrying offset/length query parameters and read it
    // directly over HTTP, following the redirect to the datanode.
    InetSocketAddress addr = cluster.getNameNode().getHttpAddress();
    URL url = new URL("http", addr.getHostString(), addr.getPort(),
        WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
            + Param.toSortedString("&", new OffsetParam((long) OFFSET),
                new LengthParam((long) LENGTH)));
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setInstanceFollowRedirects(true);
    Assert.assertEquals(LENGTH, conn.getContentLength());
    // The response body must be exactly CONTENTS[OFFSET .. OFFSET+LENGTH).
    byte[] subContents = new byte[LENGTH];
    byte[] realContents = new byte[LENGTH];
    System.arraycopy(CONTENTS, OFFSET, subContents, 0, LENGTH);
    IOUtils.readFully(conn.getInputStream(), realContents);
    Assert.assertArrayEquals(subContents, realContents);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
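For context, a minimal standalone sketch (not from the Hadoop test suite) of how the OffsetParam and LengthParam above serialize into the OPEN query string. The expected output assumes Param.toSortedString emits each parameter as name=value, sorted by parameter name and prefixed with the separator, which is what lets the test append the result directly after "?op=OPEN".

import org.apache.hadoop.hdfs.web.resources.LengthParam;
import org.apache.hadoop.hdfs.web.resources.OffsetParam;
import org.apache.hadoop.hdfs.web.resources.Param;

public class OffsetParamQueryDemo {
  public static void main(String[] args) {
    // Each Param contributes "name=value"; toSortedString sorts the entries
    // by parameter name and prefixes every one with the given separator.
    String query = "?op=OPEN"
        + Param.toSortedString("&", new OffsetParam(42L), new LengthParam(512L));
    // Expected (names sorted alphabetically): ?op=OPEN&length=512&offset=42
    System.out.println(query);
  }
}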
Use of org.apache.hadoop.hdfs.web.resources.OffsetParam in project hadoop by apache.
From the class TestDataNodeUGIProvider, the method testUGICacheSecure:
@Test
public void testUGICacheSecure() throws Exception {
  // Fake turning on security so the API thinks it should use tokens.
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
  UserGroupInformation.setLoginUser(ugi);
  List<Token<DelegationTokenIdentifier>> tokens = Lists.newArrayList();
  getWebHdfsFileSystem(ugi, conf, tokens);
  // Two OPEN URIs that differ only in the delegation token they carry.
  String uri1 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&", new NamenodeAddressParam("127.0.0.1:1010"),
          new OffsetParam((long) OFFSET), new LengthParam((long) LENGTH),
          new DelegationParam(tokens.get(0).encodeToUrlString()));
  String uri2 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&", new NamenodeAddressParam("127.0.0.1:1010"),
          new OffsetParam((long) OFFSET), new LengthParam((long) LENGTH),
          new DelegationParam(tokens.get(1).encodeToUrlString()));
  DataNodeUGIProvider ugiProvider1 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri1)), conf));
  UserGroupInformation ugi11 = ugiProvider1.ugi();
  UserGroupInformation ugi12 = ugiProvider1.ugi();
  Assert.assertEquals(
      "With UGI cache, two UGIs returned by the same token should be same",
      ugi11, ugi12);
  DataNodeUGIProvider ugiProvider2 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri2)), conf));
  UserGroupInformation ugi21 = ugiProvider2.ugi();
  UserGroupInformation ugi22 = ugiProvider2.ugi();
  Assert.assertEquals(
      "With UGI cache, two UGIs returned by the same token should be same",
      ugi21, ugi22);
  Assert.assertNotEquals(
      "With UGI cache, two UGIs for the different token should not be same",
      ugi11, ugi22);
  // After eviction, fresh UGI instances must be created.
  ugiProvider2.clearCache();
  awaitCacheEmptyDueToExpiration();
  ugi12 = ugiProvider1.ugi();
  ugi22 = ugiProvider2.ugi();
  String msg =
      "With cache eviction, two UGIs returned by the same token should not be same";
  Assert.assertNotEquals(msg, ugi11, ugi12);
  Assert.assertNotEquals(msg, ugi21, ugi22);
  Assert.assertNotEquals(
      "With UGI cache, two UGIs for the different token should not be same",
      ugi11, ugi22);
}
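DataNodeUGIProvider never sees the Param objects themselves; it reads the already-serialized query string through Netty's QueryStringDecoder. Below is a small sketch (assuming only Netty on the classpath, with "TOKEN" as a placeholder for a real encoded delegation token) of what that decoder exposes for a URI of the shape built above; /webhdfs/v1 is the value of WebHdfsFileSystem.PATH_PREFIX.

import java.net.URI;
import io.netty.handler.codec.http.QueryStringDecoder;

public class QueryStringDecoderDemo {
  public static void main(String[] args) {
    // Same shape as uri1/uri2 above, with a placeholder delegation token.
    QueryStringDecoder d = new QueryStringDecoder(URI.create(
        "/webhdfs/v1/foo?op=OPEN&delegation=TOKEN&length=512&offset=42"));
    System.out.println(d.path());        // /webhdfs/v1/foo
    System.out.println(d.parameters());  // {op=[OPEN], delegation=[TOKEN], length=[512], offset=[42]}
  }
}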
Use of org.apache.hadoop.hdfs.web.resources.OffsetParam in project hadoop by apache.
From the class TestDataNodeUGIProvider, the method testUGICacheInSecure:
@Test
public void testUGICacheInSecure() throws Exception {
  // Two OPEN URIs that differ only in the requesting user.
  String uri1 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&", new OffsetParam((long) OFFSET),
          new LengthParam((long) LENGTH), new UserParam("root"));
  String uri2 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN"
      + Param.toSortedString("&", new OffsetParam((long) OFFSET),
          new LengthParam((long) LENGTH), new UserParam("hdfs"));
  DataNodeUGIProvider ugiProvider1 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri1)), conf));
  UserGroupInformation ugi11 = ugiProvider1.ugi();
  UserGroupInformation ugi12 = ugiProvider1.ugi();
  Assert.assertEquals(
      "With UGI cache, two UGIs for the same user should be same", ugi11, ugi12);
  DataNodeUGIProvider ugiProvider2 = new DataNodeUGIProvider(
      new ParameterParser(new QueryStringDecoder(URI.create(uri2)), conf));
  UserGroupInformation ugi21 = ugiProvider2.ugi();
  UserGroupInformation ugi22 = ugiProvider2.ugi();
  Assert.assertEquals(
      "With UGI cache, two UGIs for the same user should be same", ugi21, ugi22);
  Assert.assertNotEquals(
      "With UGI cache, two UGIs for the different user should not be same",
      ugi11, ugi22);
  // After the cache entries expire, fresh UGI instances must be created.
  awaitCacheEmptyDueToExpiration();
  ugi12 = ugiProvider1.ugi();
  ugi22 = ugiProvider2.ugi();
  String msg =
      "With cache eviction, two UGIs returned by the same user should not be same";
  Assert.assertNotEquals(msg, ugi11, ugi12);
  Assert.assertNotEquals(msg, ugi21, ugi22);
  Assert.assertNotEquals(
      "With UGI cache, two UGIs for the different user should not be same",
      ugi11, ugi22);
}
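Both tests pin down the same contract: DataNodeUGIProvider caches the UGI it hands out, keyed by delegation token in the secure case and by user name in the insecure case, and entries expire over time. The sketch below illustrates that caching pattern with Guava's CacheBuilder; it is not the actual DataNodeUGIProvider implementation, and the 20-second expiry is an arbitrary placeholder.

import java.util.concurrent.TimeUnit;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiCacheSketch {
  // Key: the delegation token string (secure) or the user name (insecure).
  private final Cache<String, UserGroupInformation> cache = CacheBuilder.newBuilder()
      .expireAfterAccess(20, TimeUnit.SECONDS)  // placeholder expiry
      .build();

  UserGroupInformation ugi(String key, String user) throws Exception {
    // Same key within the expiry window -> same cached UGI instance; after
    // expiry a fresh instance is created. This is the behavior the
    // assertEquals/assertNotEquals pairs in the tests above verify.
    return cache.get(key, () -> UserGroupInformation.createRemoteUser(user));
  }
}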
Use of org.apache.hadoop.hdfs.web.resources.OffsetParam in project hadoop by apache.
From the class TestParameterParser, the method testOffset:
@Test
public void testOffset() throws IOException {
  final long X = 42;
  // A numeric string parses to its long value.
  long offset = new OffsetParam(Long.toString(X)).getOffset();
  Assert.assertEquals("OffsetParam: ", X, offset);
  // A null value falls back to the default of 0.
  offset = new OffsetParam((String) null).getOffset();
  Assert.assertEquals("OffsetParam with null should have defaulted to 0", 0, offset);
  // A non-numeric value is rejected.
  try {
    offset = new OffsetParam("abc").getValue();
    Assert.fail("OffsetParam with non-digit value should have thrown IllegalArgumentException");
  } catch (IllegalArgumentException iae) {
    // Expected.
  }
}
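The three parsing rules exercised above, condensed into a standalone runnable sketch:

import org.apache.hadoop.hdfs.web.resources.OffsetParam;

public class OffsetParamParsingDemo {
  public static void main(String[] args) {
    System.out.println(new OffsetParam("42").getOffset());          // 42
    System.out.println(new OffsetParam((String) null).getOffset()); // 0 (the default)
    try {
      new OffsetParam("abc").getValue();
    } catch (IllegalArgumentException expected) {
      System.out.println("non-numeric offset rejected");
    }
  }
}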