Use of org.apache.hadoop.test.TestDir in project hadoop by apache.
The class TestHttpFSWithKerberos, method testDelegationTokenHttpFSAccess.
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenHttpFSAccess() throws Exception {
  createHttpFSServer();
  KerberosTestUtils.doAsClient(new Callable<Void>() {

    @Override
    public Void call() throws Exception {
      // get delegation token doing SPNEGO authentication
      URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETDELEGATIONTOKEN");
      AuthenticatedURL aUrl = new AuthenticatedURL();
      AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
      HttpURLConnection conn = aUrl.openConnection(url, aToken);
      Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
      JSONObject json = (JSONObject) new JSONParser().parse(new InputStreamReader(conn.getInputStream()));
      json = (JSONObject) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_JSON);
      String tokenStr = (String) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
      // access httpfs using the delegation token
      url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
      conn = (HttpURLConnection) url.openConnection();
      Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
      // try to renew the delegation token without SPNEGO credentials
      url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
      conn = (HttpURLConnection) url.openConnection();
      conn.setRequestMethod("PUT");
      Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
      // renew the delegation token with SPNEGO credentials
      url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
      conn = aUrl.openConnection(url, aToken);
      conn.setRequestMethod("PUT");
      Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
      // cancel delegation token, no need for SPNEGO credentials
      url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" + tokenStr);
      conn = (HttpURLConnection) url.openConnection();
      conn.setRequestMethod("PUT");
      Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
      // try to access httpfs with the canceled delegation token
      url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
      conn = (HttpURLConnection) url.openConnection();
      Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
      return null;
    }
  });
}
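For reuse in other tests, the SPNEGO token fetch at the top of the call() body can be factored into a small helper. The following sketch is hypothetical (it is not part of TestHttpFSWithKerberos) and assumes the same imports the test above already uses:
private String getDelegationToken(AuthenticatedURL aUrl, AuthenticatedURL.Token aToken) throws Exception {
  // GETDELEGATIONTOKEN over SPNEGO, then extract the token string from the JSON response
  URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETDELEGATIONTOKEN");
  HttpURLConnection conn = aUrl.openConnection(url, aToken);
  Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
  JSONObject json = (JSONObject) new JSONParser().parse(new InputStreamReader(conn.getInputStream()));
  json = (JSONObject) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_JSON);
  return (String) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
}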
Use of org.apache.hadoop.test.TestDir in project hadoop by apache.
The class TestServer, method illegalState3.
@Test(expected = IllegalStateException.class)
@TestDir
public void illegalState3() throws Exception {
  Server server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false));
  server.setService(null);
}
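The test above expects an IllegalStateException because setService is called on a server that has not been initialized. A minimal sketch of the normal lifecycle, using only calls shown elsewhere on this page (the method name is hypothetical):
@Test
@TestDir
public void normalLifecycle() throws Exception {
  // construct against the per-test directory, then init() before any other interaction
  Server server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false));
  server.init();
  // ... look up services via server.get(...) here ...
  server.destroy();
}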
Use of org.apache.hadoop.test.TestDir in project hadoop by apache.
The class TestFileSystemAccessService, method kerberosInitializationFailure.
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H02.*")
@TestDir
public void kerberosInitializationFailure() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo");
  conf.set("server.hadoop.authentication.kerberos.principal", "foo@FOO");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
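The H02 failure above is driven by a keytab path (/tmp/foo) that cannot be used for a Kerberos login. A hypothetical pre-check, using only java.io, that can surface such a misconfiguration before Server.init() is attempted:
// returns true only when the configured keytab path points at a readable file
private static boolean keytabUsable(String keytabPath) {
  java.io.File keytab = new java.io.File(keytabPath);
  return keytab.isFile() && keytab.canRead();
}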
Use of org.apache.hadoop.test.TestDir in project hadoop by apache.
The class TestFileSystemAccessService, method invalidSecurity.
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H09.*")
@TestDir
public void invalidSecurity() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "foo");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
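The test above expects error H09 because "foo" is not a recognized authentication type. A sketch of the same wiring with the type set to "simple" (assumed here to be an accepted value, as in stock Hadoop authentication), so that init() succeeds; the method name is hypothetical:
@Test
@TestDir
public void simpleSecurity() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  // "simple" instead of the unrecognized "foo"; no keytab or principal required
  conf.set("server.hadoop.authentication.type", "simple");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  server.destroy();
}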
Use of org.apache.hadoop.test.TestDir in project hadoop by apache.
The class TestFileSystemAccessService, method serviceHadoopConfCustomDir.
@Test
@TestDir
public void serviceHadoopConfCustomDir() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String hadoopConfDir = new File(dir, "confx").getAbsolutePath();
  new File(hadoopConfDir).mkdirs();
  String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.config.dir", hadoopConfDir);
  File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
  OutputStream os = new FileOutputStream(hdfsSite);
  Configuration hadoopConf = new Configuration(false);
  hadoopConf.set("foo", "BAR");
  hadoopConf.writeXml(os);
  os.close();
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
  Assert.assertEquals(fsAccess.serviceHadoopConf.get("foo"), "BAR");
  server.destroy();
}
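The test above relies on Configuration.writeXml() producing an hdfs-site.xml that the service later picks up from the custom config directory. A self-contained sketch of that write-and-read-back round trip using the public Configuration API (the class and file names are illustrative):
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class ConfRoundTrip {
  public static void main(String[] args) throws Exception {
    File xml = new File("conf-sample.xml");
    Configuration written = new Configuration(false);
    written.set("foo", "BAR");
    OutputStream os = new FileOutputStream(xml);
    written.writeXml(os);  // serialize the properties as Hadoop configuration XML
    os.close();
    Configuration read = new Configuration(false);
    read.addResource(new Path(xml.getAbsolutePath()));  // load the file back as a config resource
    System.out.println(read.get("foo"));  // prints BAR
  }
}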