Example usage of org.apache.hadoop.hdfs.web.WebHdfsFileSystem from the Apache Hadoop project.
Taken from the class WebHdfs, method createWebHdfsFileSystem.
/**
 * Creates a fresh {@link WebHdfsFileSystem} configured with {@code conf}.
 *
 * @param conf the configuration to apply to the new file system instance
 * @return a newly constructed, configured {@link WebHdfsFileSystem}
 */
private static WebHdfsFileSystem createWebHdfsFileSystem(Configuration conf) {
  final WebHdfsFileSystem webHdfs = new WebHdfsFileSystem();
  webHdfs.setConf(conf);
  return webHdfs;
}
Example usage of org.apache.hadoop.hdfs.web.WebHdfsFileSystem from the Apache Hadoop project.
Taken from the class TestDataNodeUGIProvider, method getWebHdfsFileSystem.
/**
 * Obtains a {@link WebHdfsFileSystem} for the given user. When security is
 * enabled, two WebHDFS delegation tokens are minted against a mocked
 * namesystem, registered with the UGI, and appended to {@code tokens} so the
 * caller can inspect them.
 *
 * @param ugi    the user the tokens are issued for
 * @param conf   configuration used to resolve the file system
 * @param tokens output list; receives the minted tokens (security mode only)
 * @return the {@link WebHdfsFileSystem} bound to {@code uri}
 * @throws IOException if the file system cannot be obtained
 */
private WebHdfsFileSystem getWebHdfsFileSystem(UserGroupInformation ugi, Configuration conf, List<Token<DelegationTokenIdentifier>> tokens) throws IOException {
  if (UserGroupInformation.isSecurityEnabled()) {
    // The identifier only needs the owner; renewer/real-user are irrelevant here.
    DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(new Text(ugi.getUserName()), null, null);
    FSNamesystem namesystem = mock(FSNamesystem.class);
    // All four lifetimes set to 24h (ms); long enough to outlive any test run.
    DelegationTokenSecretManager dtSecretManager = new DelegationTokenSecretManager(86400000, 86400000, 86400000, 86400000, namesystem);
    dtSecretManager.startThreads();
    // Mint two identically-configured tokens so multi-token handling is exercised.
    for (int i = 0; i < 2; i++) {
      Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(dtId, dtSecretManager);
      SecurityUtil.setTokenService(token, NetUtils.createSocketAddr(uri.getAuthority()));
      token.setKind(WebHdfsConstants.WEBHDFS_TOKEN_KIND);
      tokens.add(token);
      ugi.addToken(token);
    }
  }
  return (WebHdfsFileSystem) FileSystem.get(uri, conf);
}
Example usage of org.apache.hadoop.hdfs.web.WebHdfsFileSystem from the Apache Hadoop project.
Taken from the class TestOfflineImageViewer, method testWebImageViewer.
@Test
public void testWebImageViewer() throws Exception {
  // try-with-resources guarantees the viewer is shut down, matching the
  // style used by the other WebImageViewer tests in this project.
  try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"))) {
    viewer.initServer(originalFsimage.getAbsolutePath());
    int port = viewer.getPort();
    // create a WebHdfsFileSystem instance pointed at the viewer's port
    URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
    Configuration conf = new Configuration();
    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(uri, conf);
    // verify the number of directories
    FileStatus[] statuses = webhdfs.listStatus(new Path("/"));
    assertEquals(dirCount, statuses.length);
    // verify the number of files in the directory
    statuses = webhdfs.listStatus(new Path("/dir0"));
    assertEquals(FILES_PER_DIR, statuses.length);
    // compare a file
    FileStatus status = webhdfs.listStatus(new Path("/dir0/file0"))[0];
    FileStatus expected = writtenFiles.get("/dir0/file0");
    compareFile(expected, status);
    // LISTSTATUS operation on an empty directory
    statuses = webhdfs.listStatus(new Path("/emptydir"));
    assertEquals(0, statuses.length);
    // LISTSTATUS operation on an invalid path
    URL url = new URL("http://localhost:" + port + "/webhdfs/v1/invalid/?op=LISTSTATUS");
    verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
    // LISTSTATUS operation on an invalid prefix
    url = new URL("http://localhost:" + port + "/foo");
    verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
    // Verify the Erasure Coded empty file status.
    // (Path is used directly; there is no need to round-trip through toString.)
    Path emptyECFilePath = new Path("/ec/EmptyECFile.txt");
    FileStatus actualEmptyECFileStatus = webhdfs.getFileStatus(emptyECFilePath);
    FileStatus expectedEmptyECFileStatus = writtenFiles.get(emptyECFilePath.toString());
    // Print the status already fetched above instead of issuing a second,
    // redundant GETFILESTATUS round trip just for logging.
    System.out.println(actualEmptyECFileStatus);
    compareFile(expectedEmptyECFileStatus, actualEmptyECFileStatus);
    // Verify the Erasure Coded small file status
    Path smallECFilePath = new Path("/ec/SmallECFile.txt");
    FileStatus actualSmallECFileStatus = webhdfs.getFileStatus(smallECFilePath);
    FileStatus expectedSmallECFileStatus = writtenFiles.get(smallECFilePath.toString());
    compareFile(expectedSmallECFileStatus, actualSmallECFileStatus);
    // GETFILESTATUS operation
    status = webhdfs.getFileStatus(new Path("/dir0/file0"));
    compareFile(expected, status);
    // GETFILESTATUS operation on an invalid path
    url = new URL("http://localhost:" + port + "/webhdfs/v1/invalid/?op=GETFILESTATUS");
    verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
    // invalid operation
    url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=INVALID");
    verifyHttpResponseCode(HttpURLConnection.HTTP_BAD_REQUEST, url);
    // invalid method: LISTSTATUS only supports GET, so POST must be rejected
    url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=LISTSTATUS");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("POST");
    connection.connect();
    assertEquals(HttpURLConnection.HTTP_BAD_METHOD, connection.getResponseCode());
  }
}
Example usage of org.apache.hadoop.hdfs.web.WebHdfsFileSystem from the Apache Hadoop project.
Taken from the class TestOfflineImageViewerForAcl, method testWebImageViewerForAcl.
@Test
public void testWebImageViewerForAcl() throws Exception {
  // try-with-resources guarantees the viewer is shut down, matching the
  // style used by the other WebImageViewer tests in this project.
  try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"))) {
    viewer.initServer(originalFsimage.getAbsolutePath());
    int port = viewer.getPort();
    // create a WebHdfsFileSystem instance pointed at the viewer's port
    URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
    Configuration conf = new Configuration();
    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(uri, conf);
    // GETACLSTATUS operation on a directory without ACL
    AclStatus acl = webhdfs.getAclStatus(new Path("/dirWithNoAcl"));
    assertEquals(writtenAcls.get("/dirWithNoAcl"), acl);
    // GETACLSTATUS operation on a directory with a default ACL
    acl = webhdfs.getAclStatus(new Path("/dirWithDefaultAcl"));
    assertEquals(writtenAcls.get("/dirWithDefaultAcl"), acl);
    // GETACLSTATUS operation on a file without ACL
    acl = webhdfs.getAclStatus(new Path("/noAcl"));
    assertEquals(writtenAcls.get("/noAcl"), acl);
    // GETACLSTATUS operation on a file with an ACL
    acl = webhdfs.getAclStatus(new Path("/withAcl"));
    assertEquals(writtenAcls.get("/withAcl"), acl);
    // GETACLSTATUS operation on a file with several ACL entries
    acl = webhdfs.getAclStatus(new Path("/withSeveralAcls"));
    assertEquals(writtenAcls.get("/withSeveralAcls"), acl);
    // GETACLSTATUS operation on an invalid path must yield 404
    URL url = new URL("http://localhost:" + port + "/webhdfs/v1/invalid/?op=GETACLSTATUS");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("GET");
    connection.connect();
    assertEquals(HttpURLConnection.HTTP_NOT_FOUND, connection.getResponseCode());
  }
}
Example usage of org.apache.hadoop.hdfs.web.WebHdfsFileSystem from the Apache Hadoop project.
Taken from the class TestOfflineImageViewerForContentSummary, method testGetContentSummaryForEmptyDirectory.
@Test
public void testGetContentSummaryForEmptyDirectory() throws Exception {
  // The viewer is closed automatically when the try block exits.
  try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"))) {
    viewer.initServer(originalFsimage.getAbsolutePath());
    final int port = viewer.getPort();
    // Raw HTTP check: GETCONTENTSUMMARY on the empty directory must return 200.
    final URL summaryUrl = new URL("http://localhost:" + port + "/webhdfs/v1/parentDir/childDir2?op=GETCONTENTSUMMARY");
    final HttpURLConnection conn = (HttpURLConnection) summaryUrl.openConnection();
    conn.setRequestMethod("GET");
    conn.connect();
    assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    // Same query through a WebHdfsFileSystem client; compare against DFS.
    final URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
    final Configuration conf = new Configuration();
    final WebHdfsFileSystem webfs = (WebHdfsFileSystem) FileSystem.get(uri, conf);
    final ContentSummary summary = webfs.getContentSummary(new Path("/parentDir/childDir2"));
    verifyContentSummary(emptyDirSummaryFromDFS, summary);
  }
}
Aggregations