Search in sources :

Example 56 with ContentSummary

use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

In class TestWebHDFS, method testContentSummary.

/**
 * Verifies that a storage-type quota set through the {@code DistributedFileSystem}
 * API is reported back by {@code getContentSummary()} over WebHDFS.
 *
 * @throws Exception if mini-cluster setup or any filesystem call fails.
 */
@Test
public void testContentSummary() throws Exception {
    MiniDFSCluster cluster = null;
    final Configuration conf = WebHdfsTestUtil.createConf();
    final Path path = new Path("/QuotaDir");
    try {
        // No datanodes needed: quota metadata lives entirely on the namenode.
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
        final WebHdfsFileSystem webHdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
        final DistributedFileSystem dfs = cluster.getFileSystem();
        dfs.mkdirs(path);
        // Set the DISK quota via HDFS, then read it back through the WebHDFS client.
        dfs.setQuotaByStorageType(path, StorageType.DISK, 100000);
        ContentSummary contentSummary = webHdfs.getContentSummary(path);
        // assertEquals reports expected-vs-actual on failure, unlike
        // the original assertTrue(x == y) which only says "false".
        Assert.assertEquals(100000, contentSummary.getTypeQuota(StorageType.DISK));
    } finally {
        // Always tear the cluster down, even when an assertion above fails.
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) MiniDFSCluster(org.apache.hadoop.hdfs.MiniDFSCluster) Configuration(org.apache.hadoop.conf.Configuration) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) ContentSummary(org.apache.hadoop.fs.ContentSummary) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) Test(org.junit.Test) HttpServerFunctionalTest(org.apache.hadoop.http.HttpServerFunctionalTest)

Example 57 with ContentSummary

use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

In class TestMetadata, method testContentSummaryOnFile.

/**
 * Checks that {@code getContentSummary()} on a single 1024-byte file reports
 * one file, zero directories, and a matching length / space-consumed value.
 *
 * @throws IOException if any store operation fails.
 */
@Test
public void testContentSummaryOnFile() throws IOException {
    Path child = new Path(UUID.randomUUID().toString());
    Path testFile = new Path(parent, child);
    // try-with-resources closes the stream even if a write throws;
    // the original leaked the stream on a write failure.
    try (OutputStream out = adlStore.create(testFile)) {
        // Write exactly 1024 bytes (each byte is 97, ASCII 'a').
        for (int i = 0; i < 1024; ++i) {
            out.write(97);
        }
    }
    Assert.assertTrue(adlStore.isFile(testFile));
    ContentSummary summary = adlStore.getContentSummary(testFile);
    Assert.assertEquals(1024, summary.getSpaceConsumed());
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(0, summary.getDirectoryCount());
    Assert.assertEquals(1024, summary.getLength());
}
Also used : Path(org.apache.hadoop.fs.Path) OutputStream(java.io.OutputStream) ContentSummary(org.apache.hadoop.fs.ContentSummary) Test(org.junit.Test)

Example 58 with ContentSummary

use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

In class TestMetadata, method testContentSummaryOnFolder.

/**
 * Checks that {@code getContentSummary()} on the parent folder of a single
 * 1024-byte file counts one file, one directory, and the file's bytes.
 *
 * @throws IOException if any store operation fails.
 */
@Test
public void testContentSummaryOnFolder() throws IOException {
    Path child = new Path(UUID.randomUUID().toString());
    Path testFile = new Path(parent, child);
    // try-with-resources closes the stream even if a write throws;
    // the original leaked the stream on a write failure.
    try (OutputStream out = adlStore.create(testFile)) {
        // Write exactly 1024 bytes (each byte is 97, ASCII 'a').
        for (int i = 0; i < 1024; ++i) {
            out.write(97);
        }
    }
    Assert.assertTrue(adlStore.isFile(testFile));
    // Summary is taken on the parent directory, not the file itself.
    ContentSummary summary = adlStore.getContentSummary(parent);
    Assert.assertEquals(1024, summary.getSpaceConsumed());
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(1, summary.getDirectoryCount());
    Assert.assertEquals(1024, summary.getLength());
}
Also used : Path(org.apache.hadoop.fs.Path) OutputStream(java.io.OutputStream) ContentSummary(org.apache.hadoop.fs.ContentSummary) Test(org.junit.Test)

Example 59 with ContentSummary

use of org.apache.hadoop.fs.ContentSummary in project ignite by apache.

In class IgniteHadoopFileSystem, method getContentSummary.

/** {@inheritDoc} */
@Override
public ContentSummary getContentSummary(Path f) throws IOException {
    A.notNull(f, "f");
    enterBusy();
    try {
        // Fetch aggregate path statistics from the remote IGFS client.
        IgfsPathSummary pathSummary = rmtClient.contentSummary(convert(f));
        long totalLen = pathSummary.totalLength();
        // Quota is reported as -1 (presumably "not set" — confirm against
        // ContentSummary's constructor contract); space quota is taken from
        // the remote filesystem's total space.
        long spaceTotal = rmtClient.fsStatus().spaceTotal();
        return new ContentSummary(totalLen, pathSummary.filesCount(), pathSummary.directoriesCount(), -1, totalLen, spaceTotal);
    } finally {
        // Leave the busy gate even if the remote call throws.
        leaveBusy();
    }
}
Also used : IgfsPathSummary(org.apache.ignite.igfs.IgfsPathSummary) ContentSummary(org.apache.hadoop.fs.ContentSummary)

Example 60 with ContentSummary

use of org.apache.hadoop.fs.ContentSummary in project ignite by apache.

In class IgniteHadoopFileSystemAbstractSelfTest, method compareContent.

/**
     * Compare content of two folders.
     *
     * @param cfg Paths configuration to compare.
     * @throws IOException If failed.
     */
@SuppressWarnings("deprecation")
private void compareContent(Config cfg) throws IOException {
    // Depth-first traversal driven by an explicit deque so we do not hold
    // the full tree in memory (children are pushed to the front).
    Deque<Config> queue = new LinkedList<>();
    queue.add(cfg);
    for (Config c = queue.poll(); c != null; c = queue.poll()) {
        // Evaluate existence once, up front. The original assigned it inside
        // an assertEquals argument — a side effect that is easy to misread.
        boolean exists = c.srcFs.exists(c.src);
        assertEquals("Check existence [src=" + c.src + ", dest=" + c.dest + ']', exists, c.destFs.exists(c.dest));
        assertEquals("Check types (files?) [src=" + c.src + ", dest=" + c.dest + ']', c.srcFs.isFile(c.src), c.destFs.isFile(c.dest));
        if (!exists) {
            // Both filesystems must reject content-summary on a missing path.
            assertContentSummaryFails(c.srcFs, c.src);
            assertContentSummaryFails(c.destFs, c.dest);
            // Nothing else to compare for a non-existent path (this single
            // guard replaces the original's duplicated exists checks).
            continue;
        }
        ContentSummary srcSummary = c.srcFs.getContentSummary(c.src);
        ContentSummary dstSummary = c.destFs.getContentSummary(c.dest);
        assertEquals("Directories number comparison failed", srcSummary.getDirectoryCount(), dstSummary.getDirectoryCount());
        assertEquals("Files number comparison failed", srcSummary.getFileCount(), dstSummary.getFileCount());
        assertEquals("Space consumed comparison failed", srcSummary.getSpaceConsumed(), dstSummary.getSpaceConsumed());
        assertEquals("Length comparison failed", srcSummary.getLength(), dstSummary.getLength());
        // Intentionally skipping quotas checks as they can vary.
        FileStatus[] srcSt = c.srcFs.listStatus(c.src);
        FileStatus[] destSt = c.destFs.listStatus(c.dest);
        assert srcSt != null && destSt != null : "Both not null" + " [srcSt=" + Arrays.toString(srcSt) + ", destSt=" + Arrays.toString(destSt) + ']';
        assertEquals("Check listing [src=" + c.src + ", dest=" + c.dest + ']', srcSt.length, destSt.length);
        // Listing of the file returns the only element with this file.
        if (srcSt.length == 1 && c.src.equals(srcSt[0].getPath())) {
            assertEquals(c.dest, destSt[0].getPath());
            assertTrue("Expects file [src=" + c.src + ", srcSt[0]=" + srcSt[0] + ']', !srcSt[0].isDir());
            assertTrue("Expects file [dest=" + c.dest + ", destSt[0]=" + destSt[0] + ']', !destSt[0].isDir());
            FSDataInputStream srcIn = null;
            FSDataInputStream destIn = null;
            try {
                // Byte-for-byte comparison of the two file streams.
                srcIn = c.srcFs.open(c.src);
                destIn = c.destFs.open(c.dest);
                GridTestIoUtils.assertEqualStreams(srcIn, destIn, srcSt[0].getLen());
            } finally {
                U.closeQuiet(srcIn);
                U.closeQuiet(destIn);
            }
            // Skip the following directories validations.
            continue;
        }
        // Sort both arrays so children are paired positionally.
        Arrays.sort(srcSt, STATUS_COMPARATOR);
        Arrays.sort(destSt, STATUS_COMPARATOR);
        // Dig in deep to the last leaf, instead of collecting full tree in memory.
        for (int i = 0; i < srcSt.length; i++) {
            queue.addFirst(new Config(c.srcFs, srcSt[i].getPath(), c.destFs, destSt[i].getPath()));
        }
        // Add non-existent file to check in the current folder.
        String rndFile = "Non-existent file #" + UUID.randomUUID().toString();
        queue.addFirst(new Config(c.srcFs, new Path(c.src, rndFile), c.destFs, new Path(c.dest, rndFile)));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) IgfsPath(org.apache.ignite.igfs.IgfsPath) FileStatus(org.apache.hadoop.fs.FileStatus) ContentSummary(org.apache.hadoop.fs.ContentSummary) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) LinkedList(java.util.LinkedList)

Aggregations

ContentSummary (org.apache.hadoop.fs.ContentSummary)61 Path (org.apache.hadoop.fs.Path)42 Test (org.junit.Test)38 FileSystem (org.apache.hadoop.fs.FileSystem)10 IOException (java.io.IOException)9 Configuration (org.apache.hadoop.conf.Configuration)8 ArrayList (java.util.ArrayList)6 OutputStream (java.io.OutputStream)5 URI (java.net.URI)5 DSQuotaExceededException (org.apache.hadoop.hdfs.protocol.DSQuotaExceededException)5 QuotaExceededException (org.apache.hadoop.hdfs.protocol.QuotaExceededException)5 WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem)5 JobConf (org.apache.hadoop.mapred.JobConf)5 HttpURLConnection (java.net.HttpURLConnection)4 HashMap (java.util.HashMap)4 Properties (java.util.Properties)4 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)4 NSQuotaExceededException (org.apache.hadoop.hdfs.protocol.NSQuotaExceededException)4 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)4 FileNotFoundException (java.io.FileNotFoundException)3