Search in sources :

Example 6 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in the Apache Hadoop project.

The class WebHdfs, method createWebHdfsFileSystem.

/**
 * Builds a new {@link WebHdfsFileSystem} configured with the supplied settings.
 *
 * @param conf the configuration to apply to the freshly created file system
 * @return a new, configured {@link WebHdfsFileSystem} instance
 */
private static WebHdfsFileSystem createWebHdfsFileSystem(Configuration conf) {
    final WebHdfsFileSystem webHdfsFs = new WebHdfsFileSystem();
    webHdfsFs.setConf(conf);
    return webHdfsFs;
}
Also used : WebHdfsFileSystem(org.apache.hadoop.hdfs.web.WebHdfsFileSystem)

Example 7 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in the Apache Hadoop project.

The class TestDataNodeUGIProvider, method getWebHdfsFileSystem.

/**
 * Obtains a {@link WebHdfsFileSystem} for the test cluster's {@code uri}. When Hadoop
 * security is enabled, two webhdfs delegation tokens are minted for {@code ugi}, attached
 * to its credentials, and also appended to {@code tokens} so the caller can inspect them.
 *
 * @param ugi    the user the delegation tokens are issued for and attached to
 * @param conf   configuration used to look up the file system
 * @param tokens out-parameter; receives the two tokens created in the secure case
 *               (left untouched when security is disabled)
 * @return the WebHdfsFileSystem bound to {@code uri}
 * @throws IOException if the file system cannot be obtained
 */
private WebHdfsFileSystem getWebHdfsFileSystem(UserGroupInformation ugi, Configuration conf, List<Token<DelegationTokenIdentifier>> tokens) throws IOException {
    if (UserGroupInformation.isSecurityEnabled()) {
        // Identifier carries only the owner; renewer/realUser are irrelevant to this test.
        DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(new Text(ugi.getUserName()), null, null);
        // Mocked namesystem — only needed to satisfy the secret manager's constructor.
        FSNamesystem namesystem = mock(FSNamesystem.class);
        // 86400000 ms = 24 h for each interval — presumably chosen so nothing expires
        // mid-test; TODO confirm parameter meanings against the constructor signature.
        DelegationTokenSecretManager dtSecretManager = new DelegationTokenSecretManager(86400000, 86400000, 86400000, 86400000, namesystem);
        dtSecretManager.startThreads();
        Token<DelegationTokenIdentifier> token1 = new Token<DelegationTokenIdentifier>(dtId, dtSecretManager);
        Token<DelegationTokenIdentifier> token2 = new Token<DelegationTokenIdentifier>(dtId, dtSecretManager);
        // Point both tokens at the test cluster's address and mark them as webhdfs tokens.
        SecurityUtil.setTokenService(token1, NetUtils.createSocketAddr(uri.getAuthority()));
        SecurityUtil.setTokenService(token2, NetUtils.createSocketAddr(uri.getAuthority()));
        token1.setKind(WebHdfsConstants.WEBHDFS_TOKEN_KIND);
        token2.setKind(WebHdfsConstants.WEBHDFS_TOKEN_KIND);
        // Hand the tokens back via the out-list and attach them to the UGI's credentials.
        tokens.add(token1);
        tokens.add(token2);
        ugi.addToken(token1);
        ugi.addToken(token2);
    }
    return (WebHdfsFileSystem) FileSystem.get(uri, conf);
}
Also used : DelegationTokenSecretManager(org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager) DelegationTokenIdentifier(org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier) Text(org.apache.hadoop.io.Text) Token(org.apache.hadoop.security.token.Token) WebHdfsFileSystem(org.apache.hadoop.hdfs.web.WebHdfsFileSystem) FSNamesystem(org.apache.hadoop.hdfs.server.namenode.FSNamesystem)

Example 8 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in the Apache Hadoop project.

The class TestOfflineImageViewer, method testWebImageViewer.

/**
 * End-to-end check of the WebImageViewer's webhdfs endpoint: directory listing, file
 * status (including erasure-coded files), and the HTTP error codes returned for invalid
 * paths, invalid operations, and unsupported request methods.
 */
@Test
public void testWebImageViewer() throws Exception {
    WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"));
    try {
        viewer.initServer(originalFsimage.getAbsolutePath());
        int port = viewer.getPort();
        // create a WebHdfsFileSystem instance pointed at the viewer's ephemeral port
        URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
        Configuration conf = new Configuration();
        WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(uri, conf);
        // verify the number of directories at the root
        FileStatus[] statuses = webhdfs.listStatus(new Path("/"));
        assertEquals(dirCount, statuses.length);
        // verify the number of files in the directory
        statuses = webhdfs.listStatus(new Path("/dir0"));
        assertEquals(FILES_PER_DIR, statuses.length);
        // compare one file's status against the status recorded when it was written
        FileStatus status = webhdfs.listStatus(new Path("/dir0/file0"))[0];
        FileStatus expected = writtenFiles.get("/dir0/file0");
        compareFile(expected, status);
        // LISTSTATUS operation on an empty directory
        statuses = webhdfs.listStatus(new Path("/emptydir"));
        assertEquals(0, statuses.length);
        // LISTSTATUS operation on an invalid path -> 404
        URL url = new URL("http://localhost:" + port + "/webhdfs/v1/invalid/?op=LISTSTATUS");
        verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
        // request with an invalid URL prefix -> 404
        url = new URL("http://localhost:" + port + "/foo");
        verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
        // verify the erasure-coded empty file status
        Path emptyECFilePath = new Path("/ec/EmptyECFile.txt");
        FileStatus actualEmptyECFileStatus = webhdfs.getFileStatus(new Path(emptyECFilePath.toString()));
        FileStatus expectedEmptyECFileStatus = writtenFiles.get(emptyECFilePath.toString());
        System.out.println(webhdfs.getFileStatus(new Path(emptyECFilePath.toString())));
        compareFile(expectedEmptyECFileStatus, actualEmptyECFileStatus);
        // verify the erasure-coded small file status
        Path smallECFilePath = new Path("/ec/SmallECFile.txt");
        FileStatus actualSmallECFileStatus = webhdfs.getFileStatus(new Path(smallECFilePath.toString()));
        FileStatus expectedSmallECFileStatus = writtenFiles.get(smallECFilePath.toString());
        compareFile(expectedSmallECFileStatus, actualSmallECFileStatus);
        // GETFILESTATUS operation
        status = webhdfs.getFileStatus(new Path("/dir0/file0"));
        compareFile(expected, status);
        // GETFILESTATUS operation on an invalid path -> 404
        url = new URL("http://localhost:" + port + "/webhdfs/v1/invalid/?op=GETFILESTATUS");
        verifyHttpResponseCode(HttpURLConnection.HTTP_NOT_FOUND, url);
        // invalid operation name -> 400 Bad Request
        url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=INVALID");
        verifyHttpResponseCode(HttpURLConnection.HTTP_BAD_REQUEST, url);
        // valid operation with the wrong HTTP method (POST) -> 405 Bad Method
        url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=LISTSTATUS");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("POST");
        connection.connect();
        assertEquals(HttpURLConnection.HTTP_BAD_METHOD, connection.getResponseCode());
    } finally {
        // shutdown the viewer
        viewer.close();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) HttpURLConnection(java.net.HttpURLConnection) Configuration(org.apache.hadoop.conf.Configuration) URI(java.net.URI) WebHdfsFileSystem(org.apache.hadoop.hdfs.web.WebHdfsFileSystem) URL(java.net.URL) Test(org.junit.Test)

Example 9 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in the Apache Hadoop project.

The class TestOfflineImageViewerForAcl, method testWebImageViewerForAcl.

/**
 * Verifies GETACLSTATUS through the WebImageViewer's webhdfs endpoint: ACL status of
 * files and directories with and without ACLs must match the ACLs recorded when the
 * image was written, and an invalid path must return 404.
 */
@Test
public void testWebImageViewerForAcl() throws Exception {
    WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"));
    try {
        viewer.initServer(originalFsimage.getAbsolutePath());
        int port = viewer.getPort();
        // create a WebHdfsFileSystem instance pointed at the viewer's ephemeral port
        URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
        Configuration conf = new Configuration();
        WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(uri, conf);
        // GETACLSTATUS operation on a directory without an ACL
        AclStatus acl = webhdfs.getAclStatus(new Path("/dirWithNoAcl"));
        assertEquals(writtenAcls.get("/dirWithNoAcl"), acl);
        // GETACLSTATUS operation on a directory with a default ACL
        acl = webhdfs.getAclStatus(new Path("/dirWithDefaultAcl"));
        assertEquals(writtenAcls.get("/dirWithDefaultAcl"), acl);
        // GETACLSTATUS operation on a file without an ACL
        acl = webhdfs.getAclStatus(new Path("/noAcl"));
        assertEquals(writtenAcls.get("/noAcl"), acl);
        // GETACLSTATUS operation on a file with an ACL
        acl = webhdfs.getAclStatus(new Path("/withAcl"));
        assertEquals(writtenAcls.get("/withAcl"), acl);
        // GETACLSTATUS operation on a file with several ACL entries
        acl = webhdfs.getAclStatus(new Path("/withSeveralAcls"));
        assertEquals(writtenAcls.get("/withSeveralAcls"), acl);
        // GETACLSTATUS operation on an invalid path -> 404
        URL url = new URL("http://localhost:" + port + "/webhdfs/v1/invalid/?op=GETACLSTATUS");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");
        connection.connect();
        assertEquals(HttpURLConnection.HTTP_NOT_FOUND, connection.getResponseCode());
    } finally {
        // shutdown the viewer
        viewer.close();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HttpURLConnection(java.net.HttpURLConnection) Configuration(org.apache.hadoop.conf.Configuration) AclStatus(org.apache.hadoop.fs.permission.AclStatus) URI(java.net.URI) WebHdfsFileSystem(org.apache.hadoop.hdfs.web.WebHdfsFileSystem) URL(java.net.URL) Test(org.junit.Test)

Example 10 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in the Apache Hadoop project.

The class TestOfflineImageViewerForContentSummary, method testGetContentSummaryForEmptyDirectory.

/**
 * GETCONTENTSUMMARY against the offline image viewer must succeed (HTTP 200) for an
 * empty directory, and the summary fetched through a {@link WebHdfsFileSystem} client
 * must match the summary previously captured from DFS.
 */
@Test
public void testGetContentSummaryForEmptyDirectory() throws Exception {
    try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"))) {
        viewer.initServer(originalFsimage.getAbsolutePath());
        final int serverPort = viewer.getPort();
        // Raw HTTP GET first: the operation itself must answer 200 OK.
        final URL summaryUrl = new URL("http://localhost:" + serverPort + "/webhdfs/v1/parentDir/childDir2?op=GETCONTENTSUMMARY");
        final HttpURLConnection conn = (HttpURLConnection) summaryUrl.openConnection();
        conn.setRequestMethod("GET");
        conn.connect();
        assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
        // Then fetch through a WebHdfsFileSystem client and compare with the DFS baseline.
        final URI fsUri = new URI("webhdfs://localhost:" + serverPort);
        final Configuration conf = new Configuration();
        final WebHdfsFileSystem webfs = (WebHdfsFileSystem) FileSystem.get(fsUri, conf);
        final ContentSummary summary = webfs.getContentSummary(new Path("/parentDir/childDir2"));
        verifyContentSummary(emptyDirSummaryFromDFS, summary);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HttpURLConnection(java.net.HttpURLConnection) Configuration(org.apache.hadoop.conf.Configuration) ContentSummary(org.apache.hadoop.fs.ContentSummary) URI(java.net.URI) WebHdfsFileSystem(org.apache.hadoop.hdfs.web.WebHdfsFileSystem) URL(java.net.URL) Test(org.junit.Test)

Aggregations

WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem)21 Test (org.junit.Test)19 Path (org.apache.hadoop.fs.Path)18 Configuration (org.apache.hadoop.conf.Configuration)10 URI (java.net.URI)9 HttpURLConnection (java.net.HttpURLConnection)5 URL (java.net.URL)5 ContentSummary (org.apache.hadoop.fs.ContentSummary)5 FsPermission (org.apache.hadoop.fs.permission.FsPermission)5 FileStatus (org.apache.hadoop.fs.FileStatus)3 IOException (java.io.IOException)2 InputStream (java.io.InputStream)2 FsShell (org.apache.hadoop.fs.FsShell)2 HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin)2 DelegationTokenIdentifier (org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier)2 Text (org.apache.hadoop.io.Text)2 AccessControlException (org.apache.hadoop.security.AccessControlException)2 Credentials (org.apache.hadoop.security.Credentials)2 Token (org.apache.hadoop.security.token.Token)2 Mockito.anyString (org.mockito.Mockito.anyString)2