Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache: class TestHttpFSServer, method testDelegationTokenOperations.
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenOperations() throws Exception {
  createHttpFSServer(true);
  // Unauthenticated access is rejected.
  URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
  // Build a signed authentication cookie for user "u".
  AuthenticationToken token = new AuthenticationToken("u", "p",
      new KerberosDelegationTokenAuthenticationHandler().getType());
  token.setExpires(System.currentTimeMillis() + 100000000);
  SignerSecretProvider secretProvider =
      StringSignerSecretProviderCreator.newStringSignerSecretProvider();
  Properties secretProviderProps = new Properties();
  secretProviderProps.setProperty(AuthenticationFilter.SIGNATURE_SECRET, "secret");
  secretProvider.init(secretProviderProps, null, -1);
  Signer signer = new Signer(secretProvider);
  String tokenSigned = signer.sign(token.toString());
  // Authenticated access with the signed cookie succeeds.
  url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
  conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  // Obtain a delegation token.
  url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETDELEGATIONTOKEN");
  conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  JSONObject json = (JSONObject) new JSONParser()
      .parse(new InputStreamReader(conn.getInputStream()));
  json = (JSONObject) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_JSON);
  String tokenStr = (String)
      json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
  // The delegation token authenticates requests on its own.
  url = new URL(TestJettyHelper.getJettyURL(),
      "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
  conn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  // Renewing the token requires an authenticated caller.
  url = new URL(TestJettyHelper.getJettyURL(),
      "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
  conn = (HttpURLConnection) url.openConnection();
  conn.setRequestMethod("PUT");
  Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
  url = new URL(TestJettyHelper.getJettyURL(),
      "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
  conn = (HttpURLConnection) url.openConnection();
  conn.setRequestMethod("PUT");
  conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  // Cancel the token; afterwards it is rejected.
  url = new URL(TestJettyHelper.getJettyURL(),
      "/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" + tokenStr);
  conn = (HttpURLConnection) url.openConnection();
  conn.setRequestMethod("PUT");
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  url = new URL(TestJettyHelper.getJettyURL(),
      "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
  conn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
  // GETTRASHROOT with the cancelled delegation token fails, but still works with the cookie.
  url = new URL(TestJettyHelper.getJettyURL(),
      "/webhdfs/v1/?op=GETTRASHROOT&delegation=" + tokenStr);
  conn = (HttpURLConnection) url.openConnection();
  Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
  url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETTRASHROOT");
  conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
}
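The test above drives the full delegation-token lifecycle through the REST parameters op=GETDELEGATIONTOKEN, delegation=, op=RENEWDELEGATIONTOKEN and op=CANCELDELEGATIONTOKEN. As a rough orientation, the sketch below replays the same sequence as a standalone client; the base URL, the user.name pseudo-auth parameter and the crude string-based JSON handling are assumptions for a local HttpFS instance, not part of the test.

  import java.io.BufferedReader;
  import java.io.InputStreamReader;
  import java.net.HttpURLConnection;
  import java.net.URL;

  // Illustrative walk-through of the delegation-token lifecycle exercised above,
  // written as a plain-HTTP client. Endpoint and auth parameter are assumed.
  public class DelegationTokenLifecycleSketch {

    private static final String BASE = "http://localhost:14000/webhdfs/v1/"; // assumed endpoint

    static String call(String query, String method) throws Exception {
      HttpURLConnection conn =
          (HttpURLConnection) new URL(BASE + "?" + query).openConnection();
      conn.setRequestMethod(method);
      StringBuilder body = new StringBuilder();
      try (BufferedReader in =
               new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
        for (String line; (line = in.readLine()) != null; ) {
          body.append(line);
        }
      }
      return body.toString();
    }

    public static void main(String[] args) throws Exception {
      // 1. Obtain a delegation token (pseudo-authenticated via user.name here).
      String json = call("op=GETDELEGATIONTOKEN&user.name=u", "GET");
      // The test reads Token.urlString with a JSON parser; extracted crudely here.
      String token = json.replaceAll(".*\"urlString\"\\s*:\\s*\"([^\"]+)\".*", "$1");

      // 2. Use the token instead of interactive authentication.
      call("op=GETHOMEDIRECTORY&delegation=" + token, "GET");

      // 3. Renew it (requires an authenticated caller, as the test asserts).
      call("op=RENEWDELEGATIONTOKEN&token=" + token + "&user.name=u", "PUT");

      // 4. Cancel it; subsequent use of the token is rejected.
      call("op=CANCELDELEGATIONTOKEN&token=" + token, "PUT");
    }
  }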
Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache: class TestHttpFSServerNoXAttrs, method testWithXAttrs.
/**
 * Ensure that GETXATTRS, SETXATTR, REMOVEXATTR fail when the underlying HDFS
 * has XAttr support disabled.
 */
@Test
@TestDir
@TestJetty
@TestHdfs
public void testWithXAttrs() throws Exception {
  final String name1 = "user.a1";
  final byte[] value1 = new byte[] { 0x31, 0x32, 0x33 };
  final String dir = "/noXAttr";
  final String path = dir + "/file";
  startMiniDFS();
  createHttpFSServer();
  // Create a file to run the XAttr operations against.
  FileSystem fs = FileSystem.get(nnConf);
  fs.mkdirs(new Path(dir));
  OutputStream os = fs.create(new Path(path));
  os.write(1);
  os.close();
  /* GETXATTRS, SETXATTR, REMOVEXATTR fail */
  getStatus(path, "GETXATTRS");
  putCmd(path, "SETXATTR", TestHttpFSServer.setXAttrParam(name1, value1));
  putCmd(path, "REMOVEXATTR", "xattr.name=" + name1);
}
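For contrast with the REST helpers used above, the same three operations map onto the Hadoop FileSystem API. A minimal sketch, assuming an already-configured FileSystem handle; the path and attribute name are illustrative. On an HDFS that has XAttrs disabled, as in this test class, these calls would fail just as the HTTP operations do.

  import java.util.Map;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  // Minimal sketch of the FileSystem-API equivalents of GETXATTRS, SETXATTR
  // and REMOVEXATTR. The FileSystem handle, path and attribute name are assumed.
  public class XAttrSketch {
    static void roundTrip(FileSystem fs) throws Exception {
      Path file = new Path("/noXAttr/file");            // illustrative path
      byte[] value = new byte[] { 0x31, 0x32, 0x33 };

      fs.setXAttr(file, "user.a1", value);               // SETXATTR
      Map<String, byte[]> xattrs = fs.getXAttrs(file);   // GETXATTRS
      System.out.println("xattrs: " + xattrs.keySet());
      fs.removeXAttr(file, "user.a1");                   // REMOVEXATTR
    }
  }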
Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache: class TestHttpFSWithKerberos, method testValidHttpFSAccess.
@Test
@TestDir
@TestJetty
@TestHdfs
public void testValidHttpFSAccess() throws Exception {
  createHttpFSServer();
  KerberosTestUtils.doAsClient(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
      AuthenticatedURL aUrl = new AuthenticatedURL();
      AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
      HttpURLConnection conn = aUrl.openConnection(url, aToken);
      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
      return null;
    }
  });
}
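AuthenticatedURL handles the SPNEGO negotiation that the test runs inside KerberosTestUtils.doAsClient. A minimal sketch of the same pattern outside the test harness; the base URL is an assumption, and the caller is expected to already hold Kerberos credentials.

  import java.net.HttpURLConnection;
  import java.net.URL;
  import org.apache.hadoop.security.authentication.client.AuthenticatedURL;

  // Sketch of AuthenticatedURL usage. The endpoint is an assumed local HttpFS URL.
  public class AuthenticatedURLSketch {
    public static void main(String[] args) throws Exception {
      AuthenticatedURL.Token token = new AuthenticatedURL.Token();
      AuthenticatedURL aUrl = new AuthenticatedURL();

      // The first call performs the authentication handshake and fills in the token.
      URL url = new URL("http://localhost:14000/webhdfs/v1/?op=GETHOMEDIRECTORY");
      HttpURLConnection conn = aUrl.openConnection(url, token);
      System.out.println("first call: " + conn.getResponseCode());

      // Subsequent calls reuse the same token, avoiding a new handshake.
      conn = aUrl.openConnection(url, token);
      System.out.println("second call: " + conn.getResponseCode());
    }
  }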
Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache: class TestFileSystemAccessService, method fileSystemExecutorException.
@Test
@TestDir
@TestHdfs
public void fileSystemExecutorException() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
      Arrays.asList(InstrumentationService.class.getName(),
          SchedulerService.class.getName(),
          FileSystemAccessService.class.getName()));
  Configuration hadoopConf = new Configuration(false);
  hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY,
      TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
  createHadoopConf(hadoopConf);
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.filesystem.cache.purge.timeout", "0");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  FileSystemAccess hadoop = server.get(FileSystemAccess.class);
  final FileSystem[] fsa = new FileSystem[1];
  try {
    hadoop.execute("u", hadoop.getFileSystemConfiguration(),
        new FileSystemAccess.FileSystemExecutor<Void>() {
          @Override
          public Void execute(FileSystem fs) throws IOException {
            fsa[0] = fs;
            throw new IOException();
          }
        });
    Assert.fail();
  } catch (FileSystemAccessException ex) {
    // The IOException thrown by the executor is wrapped as error H03.
    Assert.assertEquals(ex.getError(), FileSystemAccessException.ERROR.H03);
  } catch (Exception ex) {
    Assert.fail();
  }
  try {
    // The FileSystem handed to the executor is closed once execute() finishes.
    fsa[0].mkdirs(new Path("/tmp/foo"));
    Assert.fail();
  } catch (IOException ex) {
    // expected: the filesystem has been released
  } catch (Exception ex) {
    Assert.fail();
  }
  server.destroy();
}
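The pattern this test exercises, running work inside FileSystemAccess.execute() and letting the service open and close the FileSystem, looks roughly like the sketch below in application code. The FileSystemAccess instance, user name and path are assumptions; only the H03 error shown in the test is referenced.

  import java.io.IOException;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.lib.service.FileSystemAccess;
  import org.apache.hadoop.lib.service.FileSystemAccessException;

  // Sketch of the scoped pattern: execute() supplies a FileSystem, closes it when
  // the executor finishes, and wraps executor IOExceptions (error H03 above).
  public class ScopedFileSystemAccessSketch {
    static boolean exists(FileSystemAccess access, final String path) throws IOException {
      try {
        return access.execute("u", access.getFileSystemConfiguration(),
            new FileSystemAccess.FileSystemExecutor<Boolean>() {
              @Override
              public Boolean execute(FileSystem fs) throws IOException {
                // The fs handle is only valid inside this callback.
                return fs.exists(new Path(path));
              }
            });
      } catch (FileSystemAccessException ex) {
        // Translate the service-level error (e.g. H03 for an executor IOException).
        throw new IOException("FileSystemAccess failed: " + ex.getError(), ex);
      }
    }
  }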
Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache: class TestFileSystemAccessService, method createFileSystem.
@Test
@TestDir
@TestHdfs
public void createFileSystem() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
      Arrays.asList(InstrumentationService.class.getName(),
          SchedulerService.class.getName(),
          FileSystemAccessService.class.getName()));
  Configuration hadoopConf = new Configuration(false);
  hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY,
      TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
  createHadoopConf(hadoopConf);
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.filesystem.cache.purge.timeout", "0");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  FileSystemAccess hadoop = server.get(FileSystemAccess.class);
  FileSystem fs = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
  Assert.assertNotNull(fs);
  fs.mkdirs(new Path("/tmp/foo"));
  hadoop.releaseFileSystem(fs);
  try {
    // After releaseFileSystem() the handle is closed and further use fails.
    fs.mkdirs(new Path("/tmp/foo"));
    Assert.fail();
  } catch (IOException ex) {
    // expected
  } catch (Exception ex) {
    Assert.fail();
  }
  server.destroy();
}
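createFileSystem()/releaseFileSystem() is the manual counterpart to execute(): the caller owns the handle until it releases it, and, as the test asserts, the handle is unusable afterwards. A minimal sketch, assuming a FileSystemAccess instance and an illustrative user and path:

  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.lib.service.FileSystemAccess;

  // Sketch of the manual pattern: obtain the FileSystem, use it for as long as
  // needed, and release it in a finally block. The access instance, user and
  // path are illustrative assumptions.
  public class ManualFileSystemAccessSketch {
    static void mkdirs(FileSystemAccess access, String path) throws Exception {
      FileSystem fs = access.createFileSystem("u", access.getFileSystemConfiguration());
      try {
        fs.mkdirs(new Path(path));
      } finally {
        // After this call the handle is closed; reusing it fails with IOException.
        access.releaseFileSystem(fs);
      }
    }
  }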