Use of org.apache.hadoop.fs.ContentSummary in the Apache Hadoop project.
From the class TestWebHDFS, method testContentSummary.
/**
 * Verifies that a storage-type quota set through {@link DistributedFileSystem}
 * is visible in the {@link ContentSummary} returned over WebHDFS.
 *
 * @throws Exception If cluster startup or any filesystem operation fails.
 */
@Test
public void testContentSummary() throws Exception {
    MiniDFSCluster cluster = null;
    final Configuration conf = WebHdfsTestUtil.createConf();
    final Path path = new Path("/QuotaDir");
    try {
        // No datanodes needed: only namespace metadata (the quota) is exercised.
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
        final WebHdfsFileSystem webHdfs =
            WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
        final DistributedFileSystem dfs = cluster.getFileSystem();

        dfs.mkdirs(path);
        dfs.setQuotaByStorageType(path, StorageType.DISK, 100000);

        ContentSummary contentSummary = webHdfs.getContentSummary(path);
        // assertEquals instead of assertTrue(x == y): on failure it reports the
        // expected and actual quota values instead of just "false".
        Assert.assertEquals(100000, contentSummary.getTypeQuota(StorageType.DISK));
    } finally {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
Use of org.apache.hadoop.fs.ContentSummary in the Apache Hadoop project.
From the class TestMetadata, method testContentSummaryOnFile.
/**
 * getContentSummary on a single 1 KiB file: space consumed and length equal
 * the file size, with exactly one file and no directories counted.
 *
 * @throws IOException If the file cannot be created or inspected.
 */
@Test
public void testContentSummaryOnFile() throws IOException {
    Path child = new Path(UUID.randomUUID().toString());
    Path testFile = new Path(parent, child);

    // try-with-resources: the original leaked the stream if a write threw
    // before the explicit close() was reached.
    try (OutputStream out = adlStore.create(testFile)) {
        for (int i = 0; i < 1024; ++i) {
            out.write(97); // ASCII 'a'
        }
    }

    Assert.assertTrue(adlStore.isFile(testFile));

    ContentSummary summary = adlStore.getContentSummary(testFile);
    Assert.assertEquals(1024, summary.getSpaceConsumed());
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(0, summary.getDirectoryCount());
    Assert.assertEquals(1024, summary.getLength());
}
Use of org.apache.hadoop.fs.ContentSummary in the Apache Hadoop project.
From the class TestMetadata, method testContentSummaryOnFolder.
/**
 * getContentSummary on the parent folder of a single 1 KiB file: aggregate
 * length and space consumed equal the file size, with one file and one
 * directory (the parent itself) counted.
 *
 * @throws IOException If the file cannot be created or inspected.
 */
@Test
public void testContentSummaryOnFolder() throws IOException {
    Path child = new Path(UUID.randomUUID().toString());
    Path testFile = new Path(parent, child);

    // try-with-resources: the original leaked the stream if a write threw
    // before the explicit close() was reached.
    try (OutputStream out = adlStore.create(testFile)) {
        for (int i = 0; i < 1024; ++i) {
            out.write(97); // ASCII 'a'
        }
    }

    Assert.assertTrue(adlStore.isFile(testFile));

    // Summary is taken on the parent directory, not the file itself.
    ContentSummary summary = adlStore.getContentSummary(parent);
    Assert.assertEquals(1024, summary.getSpaceConsumed());
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(1, summary.getDirectoryCount());
    Assert.assertEquals(1024, summary.getLength());
}
Use of org.apache.hadoop.fs.ContentSummary in the Apache Ignite project.
From the class IgniteHadoopFileSystem, method getContentSummary.
/** {@inheritDoc} */
@Override
public ContentSummary getContentSummary(Path f) throws IOException {
    A.notNull(f, "f");

    enterBusy();

    try {
        IgfsPathSummary pathSummary = rmtClient.contentSummary(convert(f));

        long totalLen = pathSummary.totalLength();

        // Map the IGFS summary onto Hadoop's ContentSummary. There is no quota
        // concept here, so quota is -1 and space consumed equals total length;
        // the space quota is the total filesystem capacity.
        return new ContentSummary(
            totalLen,                           // length
            pathSummary.filesCount(),           // file count
            pathSummary.directoriesCount(),     // directory count
            -1,                                 // quota (none)
            totalLen,                           // space consumed
            rmtClient.fsStatus().spaceTotal()); // space quota
    }
    finally {
        leaveBusy();
    }
}
Use of org.apache.hadoop.fs.ContentSummary in the Apache Ignite project.
From the class IgniteHadoopFileSystemAbstractSelfTest, method compareContent.
/**
 * Recursively compares two folder trees (source vs. destination): existence,
 * file/directory type, content summaries, directory listings, and — for files —
 * byte-for-byte stream equality. Traversal is iterative over a deque so the
 * full tree is never collected in memory; children are pushed with addFirst,
 * making the walk effectively depth-first.
 *
 * @param cfg Paths configuration to compare (source/destination file systems and paths).
 * @throws IOException If failed.
 */
@SuppressWarnings("deprecation")
private void compareContent(Config cfg) throws IOException {
Deque<Config> queue = new LinkedList<>();
queue.add(cfg);
for (Config c = queue.poll(); c != null; c = queue.poll()) {
boolean exists;
// NOTE: 'exists' is assigned inside the assertEquals argument — the source-side
// existence check both feeds this assertion and drives the branching below.
assertEquals("Check existence [src=" + c.src + ", dest=" + c.dest + ']', exists = c.srcFs.exists(c.src), c.destFs.exists(c.dest));
assertEquals("Check types (files?) [src=" + c.src + ", dest=" + c.dest + ']', c.srcFs.isFile(c.src), c.destFs.isFile(c.dest));
if (exists) {
// Both sides exist: aggregate content summaries must agree.
ContentSummary srcSummary = c.srcFs.getContentSummary(c.src);
ContentSummary dstSummary = c.destFs.getContentSummary(c.dest);
assertEquals("Directories number comparison failed", srcSummary.getDirectoryCount(), dstSummary.getDirectoryCount());
assertEquals("Files number comparison failed", srcSummary.getFileCount(), dstSummary.getFileCount());
assertEquals("Space consumed comparison failed", srcSummary.getSpaceConsumed(), dstSummary.getSpaceConsumed());
assertEquals("Length comparison failed", srcSummary.getLength(), dstSummary.getLength());
// Intentionally skipping quotas checks as they can vary.
} else {
// Neither side exists (asserted equal above): getContentSummary must fail on both.
assertContentSummaryFails(c.srcFs, c.src);
assertContentSummaryFails(c.destFs, c.dest);
}
if (!exists)
continue;
FileStatus[] srcSt = c.srcFs.listStatus(c.src);
FileStatus[] destSt = c.destFs.listStatus(c.dest);
assert srcSt != null && destSt != null : "Both not null" + " [srcSt=" + Arrays.toString(srcSt) + ", destSt=" + Arrays.toString(destSt) + ']';
assertEquals("Check listing [src=" + c.src + ", dest=" + c.dest + ']', srcSt.length, destSt.length);
// Listing of the file returns the only element with this file.
if (srcSt.length == 1 && c.src.equals(srcSt[0].getPath())) {
assertEquals(c.dest, destSt[0].getPath());
assertTrue("Expects file [src=" + c.src + ", srcSt[0]=" + srcSt[0] + ']', !srcSt[0].isDir());
assertTrue("Expects file [dest=" + c.dest + ", destSt[0]=" + destSt[0] + ']', !destSt[0].isDir());
FSDataInputStream srcIn = null;
FSDataInputStream destIn = null;
try {
// Compare file contents byte-for-byte up to the source file's length.
srcIn = c.srcFs.open(c.src);
destIn = c.destFs.open(c.dest);
GridTestIoUtils.assertEqualStreams(srcIn, destIn, srcSt[0].getLen());
} finally {
U.closeQuiet(srcIn);
U.closeQuiet(destIn);
}
// Skip the following directories validations.
continue;
}
// Sort both arrays.
// Sorting makes the i-th source entry correspond to the i-th destination entry.
Arrays.sort(srcSt, STATUS_COMPARATOR);
Arrays.sort(destSt, STATUS_COMPARATOR);
for (int i = 0; i < srcSt.length; i++) // Dig in deep to the last leaf, instead of collecting full tree in memory.
queue.addFirst(new Config(c.srcFs, srcSt[i].getPath(), c.destFs, destSt[i].getPath()));
// Add non-existent file to check in the current folder.
String rndFile = "Non-existent file #" + UUID.randomUUID().toString();
queue.addFirst(new Config(c.srcFs, new Path(c.src, rndFile), c.destFs, new Path(c.dest, rndFile)));
}
}
Aggregations