Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.
Class HttpFSFileSystem, method getContentSummary:
@Override
public ContentSummary getContentSummary(Path f) throws IOException {
  Map<String, String> params = new HashMap<String, String>();
  params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
  HttpURLConnection conn = getConnection(
      Operation.GETCONTENTSUMMARY.getMethod(), params, f, true);
  HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
  JSONObject json = (JSONObject) ((JSONObject) HttpFSUtils.jsonParse(conn))
      .get(CONTENT_SUMMARY_JSON);
  return new ContentSummary.Builder()
      .length((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON))
      .fileCount((Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON))
      .directoryCount((Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON))
      .quota((Long) json.get(CONTENT_SUMMARY_QUOTA_JSON))
      .spaceConsumed((Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON))
      .spaceQuota((Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON))
      .build();
}
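The Builder chain above maps one-to-one onto the fields of the ContentSummary JSON object returned by the HttpFS server. A minimal standalone sketch of the same Builder, with illustrative values that are not taken from the source:

import org.apache.hadoop.fs.ContentSummary;

// Illustrative values only; -1 is the HDFS convention for "no quota set".
ContentSummary cs = new ContentSummary.Builder()
    .length(1024L)         // total bytes across all files under the path
    .fileCount(3L)
    .directoryCount(1L)
    .quota(-1L)            // namespace quota (max file + directory count)
    .spaceConsumed(3072L)  // bytes actually used, replication included
    .spaceQuota(-1L)       // raw disk-space quota in bytes
    .build();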
Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.
Class FSNamesystem, method getContentSummary:
/**
* Get the content summary for a specific file/dir.
*
* @param src The string representation of the path to the file
*
* @throws AccessControlException if access is denied
* @throws UnresolvedLinkException if a symlink is encountered.
* @throws FileNotFoundException if no file exists
* @throws StandbyException
* @throws IOException for issues with writing to the audit log
*
* @return object containing information regarding the file
* or null if file not found
*/
ContentSummary getContentSummary(final String src) throws IOException {
  checkOperation(OperationCategory.READ);
  final String operationName = "contentSummary";
  readLock();
  boolean success = true;
  ContentSummary cs;
  try {
    // Re-check after acquiring the lock: an HA transition to standby may
    // have happened while this thread was waiting for the read lock.
    checkOperation(OperationCategory.READ);
    cs = FSDirStatAndListingOp.getContentSummary(dir, src);
  } catch (AccessControlException ace) {
    success = false;
    logAuditEvent(success, operationName, src);
    throw ace;
  } finally {
    readUnlock(operationName);
  }
  // Audit the successful read outside the lock.
  logAuditEvent(success, operationName, src);
  return cs;
}
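This method is the server-side end of the call chain; clients reach it through the ordinary FileSystem API (DistributedFileSystem forwards getContentSummary over the ClientProtocol RPC). A minimal client sketch, equivalent to running hdfs dfs -count -q on the same path; the hdfs:// URI below is a placeholder:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CountQuota {
  public static void main(String[] args) throws Exception {
    // Placeholder NameNode address; the call below ends up in the
    // FSNamesystem.getContentSummary method shown above.
    FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020"),
        new Configuration());
    ContentSummary cs = fs.getContentSummary(new Path("/user"));
    System.out.println("nsQuota=" + cs.getQuota()
        + " dsQuota=" + cs.getSpaceQuota()
        + " dirs=" + cs.getDirectoryCount()
        + " files=" + cs.getFileCount()
        + " bytes=" + cs.getLength());
  }
}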
Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.
Class TestHDFSConcat, method testConcatWithQuotaDecrease:
/**
 * Make sure the quota is updated correctly after concat.
 */
@Test
public void testConcatWithQuotaDecrease() throws IOException {
  // Note: srcRepl deliberately differs from the suite-wide REPL_FACTOR.
  final short srcRepl = 3;
  final int srcNum = 10;
  final Path foo = new Path("/foo");
  final Path[] srcs = new Path[srcNum];
  final Path target = new Path(foo, "target");
  DFSTestUtil.createFile(dfs, target, blockSize, REPL_FACTOR, 0L);
  dfs.setQuota(foo, Long.MAX_VALUE - 1, Long.MAX_VALUE - 1);
  for (int i = 0; i < srcNum; i++) {
    srcs[i] = new Path(foo, "src" + i);
    DFSTestUtil.createFile(dfs, srcs[i], blockSize * 2, srcRepl, 0L);
  }
  ContentSummary summary = dfs.getContentSummary(foo);
  Assert.assertEquals(11, summary.getFileCount());
  Assert.assertEquals(blockSize * REPL_FACTOR + blockSize * 2 * srcRepl * srcNum,
      summary.getSpaceConsumed());
  dfs.concat(target, srcs);
  summary = dfs.getContentSummary(foo);
  Assert.assertEquals(1, summary.getFileCount());
  Assert.assertEquals(blockSize * REPL_FACTOR + blockSize * 2 * REPL_FACTOR * srcNum,
      summary.getSpaceConsumed());
}
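The two spaceConsumed assertions encode how concat re-replicates blocks: the source files' blocks are adopted by target and drop to target's replication factor. Worked out assuming REPL_FACTOR = 2 (its value is defined elsewhere in this test class and is not shown here; srcRepl = 3, srcNum = 10, each source spans two blocks):

before concat: blockSize * 2 + 10 * (blockSize * 2) * 3 = 62 * blockSize
after concat:  blockSize * 2 + 10 * (blockSize * 2) * 2 = 42 * blockSize

Space consumption therefore decreases, which is what the test name refers to: the quota usage recorded against /foo must be adjusted downward by the concat path.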
Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.
Class TestHDFSConcat, method testConcatWithQuotaIncrease:
@Test
public void testConcatWithQuotaIncrease() throws IOException {
  final short repl = 3;
  final int srcNum = 10;
  final Path foo = new Path("/foo");
  final Path bar = new Path(foo, "bar");
  final Path[] srcs = new Path[srcNum];
  final Path target = new Path(bar, "target");
  DFSTestUtil.createFile(dfs, target, blockSize, repl, 0L);
  final long dsQuota = blockSize * repl + blockSize * srcNum * REPL_FACTOR;
  dfs.setQuota(foo, Long.MAX_VALUE - 1, dsQuota);
  for (int i = 0; i < srcNum; i++) {
    srcs[i] = new Path(bar, "src" + i);
    DFSTestUtil.createFile(dfs, srcs[i], blockSize, REPL_FACTOR, 0L);
  }
  ContentSummary summary = dfs.getContentSummary(bar);
  Assert.assertEquals(11, summary.getFileCount());
  Assert.assertEquals(dsQuota, summary.getSpaceConsumed());
  try {
    dfs.concat(target, srcs);
    fail("QuotaExceededException expected");
  } catch (RemoteException e) {
    Assert.assertTrue(
        e.unwrapRemoteException() instanceof QuotaExceededException);
  }
  dfs.setQuota(foo, Long.MAX_VALUE - 1, Long.MAX_VALUE - 1);
  dfs.concat(target, srcs);
  summary = dfs.getContentSummary(bar);
  Assert.assertEquals(1, summary.getFileCount());
  Assert.assertEquals(blockSize * repl * (srcNum + 1),
      summary.getSpaceConsumed());
}
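Under the same assumption (REPL_FACTOR = 2, defined elsewhere in the test class), the arithmetic behind the expected failure is:

dsQuota (and pre-concat usage): blockSize * 3 + blockSize * 10 * 2 = 23 * blockSize
post-concat usage at repl = 3:  blockSize * 3 * (10 + 1) = 33 * blockSize

Concat must re-replicate the ten source blocks up to the target's factor of 3, which would push usage past the quota, hence the QuotaExceededException. Once the quota is lifted, the concat succeeds and the final assertion checks exactly the 33 * blockSize figure.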
Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.
Class WebHdfsFileSystem, method getContentSummary:
@Override
public ContentSummary getContentSummary(final Path p) throws IOException {
  statistics.incrementReadOps(1);
  storageStatistics.incrementOpCounter(OpType.GET_CONTENT_SUMMARY);
  final HttpOpParam.Op op = GetOpParam.Op.GETCONTENTSUMMARY;
  return new FsPathResponseRunner<ContentSummary>(op, p) {

    @Override
    ContentSummary decodeResponse(Map<?, ?> json) {
      return JsonUtilClient.toContentSummary(json);
    }
  }.run();
}
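On the wire, the runner above issues the standard WebHDFS REST call for this operation and decodes the JSON body via JsonUtilClient. A minimal sketch of driving it through the FileSystem API; host and port below are placeholders:

// REST equivalent of the call below (placeholder host/port):
//   GET http://namenode:9870/webhdfs/v1/tmp?op=GETCONTENTSUMMARY
Configuration conf = new Configuration();
FileSystem webFs = FileSystem.get(URI.create("webhdfs://namenode:9870"), conf);
ContentSummary cs = webFs.getContentSummary(new Path("/tmp"));
System.out.println(cs);  // ContentSummary.toString() prints the summary columns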