
Example 51 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class TestQuotaByStorageType, method testQuotaByStorageTypeWithFileCreateAppend.

@Test(timeout = 60000)
public void testQuotaByStorageTypeWithFileCreateAppend() throws Exception {
    final Path foo = new Path(dir, "foo");
    Path createdFile1 = new Path(foo, "created_file1.data");
    dfs.mkdirs(foo);
    // set storage policy on directory "foo" to ONESSD
    dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
    // set quota by storage type on directory "foo"
    dfs.setQuotaByStorageType(foo, StorageType.SSD, BLOCKSIZE * 4);
    INode fnode = fsdir.getINode4Write(foo.toString());
    assertTrue(fnode.isDirectory());
    assertTrue(fnode.isQuotaSet());
    // Create file of size 2 * BLOCKSIZE under directory "foo"
    long file1Len = BLOCKSIZE * 2;
    int bufLen = BLOCKSIZE / 16;
    DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
    // Verify space consumed and remaining quota
    long ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature().getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
    assertEquals(file1Len, ssdConsumed);
    // append several blocks
    int appendLen = BLOCKSIZE * 2;
    DFSTestUtil.appendFile(dfs, createdFile1, appendLen);
    file1Len += appendLen;
    ssdConsumed = fnode.asDirectory().getDirectoryWithQuotaFeature().getSpaceConsumed().getTypeSpaces().get(StorageType.SSD);
    assertEquals(file1Len, ssdConsumed);
    ContentSummary cs = dfs.getContentSummary(foo);
    assertEquals(file1Len * REPLICATION, cs.getSpaceConsumed());
    assertEquals(file1Len, cs.getTypeConsumed(StorageType.SSD));
    assertEquals(file1Len * 2, cs.getTypeConsumed(StorageType.DISK));
}
Also used: Path (org.apache.hadoop.fs.Path), ContentSummary (org.apache.hadoop.fs.ContentSummary), Test (org.junit.Test)
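
A natural follow-up, not shown in the example: once SSD consumption reaches the quota (BLOCKSIZE * 4 here), the next append should be rejected. A minimal sketch reusing the fixtures above and the usual static JUnit imports; QuotaByStorageTypeExceededException is the per-type relative of the QuotaExceededException listed under Aggregations below:

// Hypothetical continuation, not part of the original test: the SSD quota
// (BLOCKSIZE * 4) is now fully consumed, so one more appended block should
// fail the per-type quota check.
try {
    DFSTestUtil.appendFile(dfs, createdFile1, BLOCKSIZE);
    fail("append should have exceeded the SSD quota on " + foo);
} catch (QuotaByStorageTypeExceededException e) {
    // expected: SSD usage would exceed the quota set on "foo"
}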

Example 52 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class TestQuotaByStorageType, method testContentSummaryWithoutQuotaByStorageType.

@Test(timeout = 60000)
public void testContentSummaryWithoutQuotaByStorageType() throws Exception {
    final Path foo = new Path(dir, "foo");
    Path createdFile1 = new Path(foo, "created_file1.data");
    dfs.mkdirs(foo);
    // set storage policy on directory "foo" to ONESSD
    dfs.setStoragePolicy(foo, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
    INode fnode = fsdir.getINode4Write(foo.toString());
    assertTrue(fnode.isDirectory());
    assertFalse(fnode.isQuotaSet());
    // Create file of size 2 * BLOCKSIZE under directory "foo"
    long file1Len = BLOCKSIZE * 2;
    int bufLen = BLOCKSIZE / 16;
    DFSTestUtil.createFile(dfs, createdFile1, bufLen, file1Len, BLOCKSIZE, REPLICATION, seed);
    // Verify getContentSummary without any quota set
    ContentSummary cs = dfs.getContentSummary(foo);
    assertEquals(file1Len * REPLICATION, cs.getSpaceConsumed());
    assertEquals(file1Len, cs.getTypeConsumed(StorageType.SSD));
    assertEquals(file1Len * 2, cs.getTypeConsumed(StorageType.DISK));
}
Also used: Path (org.apache.hadoop.fs.Path), ContentSummary (org.apache.hadoop.fs.ContentSummary), Test (org.junit.Test)
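
The expected numbers follow from the ONESSD policy: one replica of each block is placed on SSD and the remaining REPLICATION - 1 replicas on DISK, so with REPLICATION = 3 the SSD share equals file1Len and the DISK share equals file1Len * 2. A small sketch (not in the original test) that double-checks the effective policy; FileSystem#getStoragePolicy is available from Hadoop 2.8 on:

// Sketch: confirm the policy that produced the SSD/DISK split asserted above.
BlockStoragePolicySpi policy = dfs.getStoragePolicy(foo);
assertEquals(HdfsConstants.ONESSD_STORAGE_POLICY_NAME, policy.getName());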

Example 53 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class TestOfflineImageViewerForContentSummary, method testGetContentSummaryForDirContainsSymlink.

@Test
public void testGetContentSummaryForDirContainsSymlink() throws Exception {
    try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"))) {
        viewer.initServer(originalFsimage.getAbsolutePath());
        int port = viewer.getPort();
        // create a WebHdfsFileSystem instance
        URI uri = new URI("webhdfs://localhost:" + port);
        Configuration conf = new Configuration();
        WebHdfsFileSystem webfs = (WebHdfsFileSystem) FileSystem.get(uri, conf);
        ContentSummary summary = webfs.getContentSummary(new Path("/dirForLinks/"));
        verifyContentSummary(symLinkSummaryForDirContainsFromDFS, summary);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), ContentSummary (org.apache.hadoop.fs.ContentSummary), URI (java.net.URI), WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), Test (org.junit.Test)
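
WebImageViewer serves a read-only WebHDFS endpoint backed by the offline fsimage, so the same summary is reachable without a FileSystem instance. A minimal sketch of the equivalent raw HTTP call for this directory, following the URL pattern of Example 54 below:

// Sketch: fetch the same content summary over the plain WebHDFS REST API.
URL url = new URL("http://localhost:" + port
    + "/webhdfs/v1/dirForLinks/?op=GETCONTENTSUMMARY");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
conn.connect();
assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());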

Example 54 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class TestOfflineImageViewerForContentSummary, method testGetContentSummaryForFile.

@Test
public void testGetContentSummaryForFile() throws Exception {
    try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr("localhost:0"))) {
        viewer.initServer(originalFsimage.getAbsolutePath());
        int port = viewer.getPort();
        URL url = new URL("http://localhost:" + port + "/webhdfs/v1/parentDir/file1?op=GETCONTENTSUMMARY");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");
        connection.connect();
        assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
        // create a WebHdfsFileSystem instance
        URI uri = new URI("webhdfs://localhost:" + port);
        Configuration conf = new Configuration();
        WebHdfsFileSystem webfs = (WebHdfsFileSystem) FileSystem.get(uri, conf);
        ContentSummary summary = webfs.getContentSummary(new Path("/parentDir/file1"));
        verifyContentSummary(fileSummaryFromDFS, summary);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HttpURLConnection (java.net.HttpURLConnection), Configuration (org.apache.hadoop.conf.Configuration), ContentSummary (org.apache.hadoop.fs.ContentSummary), URI (java.net.URI), WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), URL (java.net.URL), Test (org.junit.Test)
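
The body of verifyContentSummary is not included in this listing; a plausible sketch of the field-by-field comparison such a helper performs, using only public ContentSummary getters, looks like this:

// Hypothetical helper (the test's actual verifyContentSummary is not shown):
// compare the summary computed from the live DFS against the one served by
// the offline image viewer.
private static void verifyContentSummary(ContentSummary expected,
        ContentSummary actual) {
    assertEquals(expected.getLength(), actual.getLength());
    assertEquals(expected.getFileCount(), actual.getFileCount());
    assertEquals(expected.getDirectoryCount(), actual.getDirectoryCount());
    assertEquals(expected.getSpaceConsumed(), actual.getSpaceConsumed());
}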

Example 55 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class TestFSMainOperationsWebHdfs, method testTruncate.

@Test
public void testTruncate() throws Exception {
    final short repl = 3;
    final int blockSize = 1024;
    final int numOfBlocks = 2;
    Path dir = getTestRootPath(fSys, "test/hadoop");
    Path file = getTestRootPath(fSys, "test/hadoop/file");
    final byte[] data = getFileData(numOfBlocks, blockSize);
    createFile(fSys, file, data, blockSize, repl);
    final int newLength = blockSize;
    boolean isReady = fSys.truncate(file, newLength);
    Assert.assertTrue("Recovery is not expected.", isReady);
    FileStatus fileStatus = fSys.getFileStatus(file);
    Assert.assertEquals(newLength, fileStatus.getLen());
    AppendTestUtil.checkFullFile(fSys, file, newLength, data, file.toString());
    ContentSummary cs = fSys.getContentSummary(dir);
    Assert.assertEquals("Bad disk space usage", newLength * repl, cs.getSpaceConsumed());
    Assert.assertTrue("Deleted", fSys.delete(dir, true));
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), ContentSummary (org.apache.hadoop.fs.ContentSummary), FSMainOperationsBaseTest (org.apache.hadoop.fs.FSMainOperationsBaseTest), Test (org.junit.Test)
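
Here newLength equals blockSize, so the truncation falls on a block boundary, no block recovery is needed, and isReady is asserted true. Truncating to a mid-block offset instead makes truncate return false, and the caller must wait for asynchronous block recovery. A minimal client-side wait loop, assuming the reported file length converges to newLength once recovery completes (a sketch, not part of the test; production code should bound the wait):

// Sketch for the isReady == false case: block recovery runs in the
// background, so poll until the NameNode reports the truncated length.
boolean done = fSys.truncate(file, newLength);
while (!done && fSys.getFileStatus(file).getLen() != newLength) {
    Thread.sleep(100);
}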

Aggregations

ContentSummary (org.apache.hadoop.fs.ContentSummary): 61
Path (org.apache.hadoop.fs.Path): 42
Test (org.junit.Test): 38
FileSystem (org.apache.hadoop.fs.FileSystem): 10
IOException (java.io.IOException): 9
Configuration (org.apache.hadoop.conf.Configuration): 8
ArrayList (java.util.ArrayList): 6
OutputStream (java.io.OutputStream): 5
URI (java.net.URI): 5
DSQuotaExceededException (org.apache.hadoop.hdfs.protocol.DSQuotaExceededException): 5
QuotaExceededException (org.apache.hadoop.hdfs.protocol.QuotaExceededException): 5
WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem): 5
JobConf (org.apache.hadoop.mapred.JobConf): 5
HttpURLConnection (java.net.HttpURLConnection): 4
HashMap (java.util.HashMap): 4
Properties (java.util.Properties): 4
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 4
NSQuotaExceededException (org.apache.hadoop.hdfs.protocol.NSQuotaExceededException): 4
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 4
FileNotFoundException (java.io.FileNotFoundException): 3