Example 6 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class HttpFSFileSystem, method getContentSummary.

@Override
public ContentSummary getContentSummary(Path f) throws IOException {
    Map<String, String> params = new HashMap<String, String>();
    params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
    HttpURLConnection conn = getConnection(Operation.GETCONTENTSUMMARY.getMethod(), params, f, true);
    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
    JSONObject json = (JSONObject) ((JSONObject) HttpFSUtils.jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
    return new ContentSummary.Builder()
        .length((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON))
        .fileCount((Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON))
        .directoryCount((Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON))
        .quota((Long) json.get(CONTENT_SUMMARY_QUOTA_JSON))
        .spaceConsumed((Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON))
        .spaceQuota((Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON))
        .build();
}
Also used: HttpURLConnection (java.net.HttpURLConnection), JSONObject (org.json.simple.JSONObject), HashMap (java.util.HashMap), ContentSummary (org.apache.hadoop.fs.ContentSummary)
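
For context, a client normally reaches this method through the generic FileSystem API rather than by building the HTTP connection itself. A minimal sketch, assuming an HttpFS gateway at httpfs-host:14000 (hostname, port, and path are placeholders; 14000 is HttpFS's usual default port):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HttpFSContentSummaryClient {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // HttpFS speaks the webhdfs protocol; host and port are assumptions.
        FileSystem fs = FileSystem.get(URI.create("webhdfs://httpfs-host:14000"), conf);
        // Dispatches to a getContentSummary implementation like the one above
        // when that FileSystem backs the webhdfs scheme.
        ContentSummary cs = fs.getContentSummary(new Path("/user/example"));
        System.out.println("files=" + cs.getFileCount()
                + " dirs=" + cs.getDirectoryCount()
                + " length=" + cs.getLength()
                + " spaceConsumed=" + cs.getSpaceConsumed());
    }
}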

Example 7 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class FSNamesystem, method getContentSummary.

/**
   * Get the content summary for a specific file/dir.
   *
   * @param src The string representation of the path to the file or directory
   *
   * @throws AccessControlException if access is denied
   * @throws UnresolvedLinkException if a symlink is encountered
   * @throws FileNotFoundException if no file exists
   * @throws StandbyException if the operation is not allowed on a standby NameNode
   * @throws IOException for issues with writing to the audit log
   *
   * @return object containing information regarding the file
   *         or null if file not found
   */
ContentSummary getContentSummary(final String src) throws IOException {
    checkOperation(OperationCategory.READ);
    final String operationName = "contentSummary";
    readLock();
    boolean success = true;
    ContentSummary cs;
    try {
        checkOperation(OperationCategory.READ);
        cs = FSDirStatAndListingOp.getContentSummary(dir, src);
    } catch (AccessControlException ace) {
        success = false;
        logAuditEvent(success, operationName, src);
        throw ace;
    } finally {
        readUnlock(operationName);
    }
    logAuditEvent(success, operationName, src);
    return cs;
}
Also used: ContentSummary (org.apache.hadoop.fs.ContentSummary), AccessControlException (org.apache.hadoop.security.AccessControlException), SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)
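
On the client side, the ContentSummary built from this call carries quota information alongside the usage counts. A small sketch of interpreting those fields; the convention that a negative quota means "no quota set" comes from HDFS quota reporting generally, not from the excerpt above:

import org.apache.hadoop.fs.ContentSummary;

public class QuotaReport {
    static void report(ContentSummary cs) {
        long nsQuota = cs.getQuota();       // namespace quota: files + directories
        long dsQuota = cs.getSpaceQuota();  // disk-space quota in bytes
        if (nsQuota >= 0) {
            long used = cs.getFileCount() + cs.getDirectoryCount();
            System.out.println("namespace: " + used + " of " + nsQuota);
        }
        if (dsQuota >= 0) {
            System.out.println("space: " + cs.getSpaceConsumed()
                    + " of " + dsQuota + " bytes");
        }
    }
}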

Example 8 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class TestHDFSConcat, method testConcatWithQuotaDecrease.

/**
   * Make sure we update the quota correctly after concat, where the
   * source files have a higher replication factor than the target.
   */
@Test
public void testConcatWithQuotaDecrease() throws IOException {
    // note: the source replication factor intentionally differs from REPL_FACTOR
    final short srcRepl = 3;
    final int srcNum = 10;
    final Path foo = new Path("/foo");
    final Path[] srcs = new Path[srcNum];
    final Path target = new Path(foo, "target");
    DFSTestUtil.createFile(dfs, target, blockSize, REPL_FACTOR, 0L);
    dfs.setQuota(foo, Long.MAX_VALUE - 1, Long.MAX_VALUE - 1);
    for (int i = 0; i < srcNum; i++) {
        srcs[i] = new Path(foo, "src" + i);
        DFSTestUtil.createFile(dfs, srcs[i], blockSize * 2, srcRepl, 0L);
    }
    ContentSummary summary = dfs.getContentSummary(foo);
    Assert.assertEquals(11, summary.getFileCount());
    Assert.assertEquals(blockSize * REPL_FACTOR + blockSize * 2 * srcRepl * srcNum, summary.getSpaceConsumed());
    dfs.concat(target, srcs);
    summary = dfs.getContentSummary(foo);
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(blockSize * REPL_FACTOR + blockSize * 2 * REPL_FACTOR * srcNum, summary.getSpaceConsumed());
}
Also used: Path (org.apache.hadoop.fs.Path), ContentSummary (org.apache.hadoop.fs.ContentSummary), Test (org.junit.Test)
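
Why the space consumed drops: concat folds the source blocks into target, so afterwards they count at target's replication factor instead of srcRepl. Symbolically, the two assertions above compare

    before concat: blockSize * REPL_FACTOR + blockSize * 2 * srcRepl * srcNum
    after concat:  blockSize * REPL_FACTOR + blockSize * 2 * REPL_FACTOR * srcNum

so consumption decreases whenever REPL_FACTOR is less than srcRepl. As a worked check, if REPL_FACTOR is 2 (the constant is declared outside this excerpt, so its value here is an assumption), the summary goes from 2 * blockSize + 60 * blockSize = 62 * blockSize down to 2 * blockSize + 40 * blockSize = 42 * blockSize.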

Example 9 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class TestHDFSConcat, method testConcatWithQuotaIncrease.

@Test
public void testConcatWithQuotaIncrease() throws IOException {
    final short repl = 3;
    final int srcNum = 10;
    final Path foo = new Path("/foo");
    final Path bar = new Path(foo, "bar");
    final Path[] srcs = new Path[srcNum];
    final Path target = new Path(bar, "target");
    DFSTestUtil.createFile(dfs, target, blockSize, repl, 0L);
    final long dsQuota = blockSize * repl + blockSize * srcNum * REPL_FACTOR;
    dfs.setQuota(foo, Long.MAX_VALUE - 1, dsQuota);
    for (int i = 0; i < srcNum; i++) {
        srcs[i] = new Path(bar, "src" + i);
        DFSTestUtil.createFile(dfs, srcs[i], blockSize, REPL_FACTOR, 0L);
    }
    ContentSummary summary = dfs.getContentSummary(bar);
    Assert.assertEquals(11, summary.getFileCount());
    Assert.assertEquals(dsQuota, summary.getSpaceConsumed());
    try {
        dfs.concat(target, srcs);
        fail("QuotaExceededException expected");
    } catch (RemoteException e) {
        Assert.assertTrue(e.unwrapRemoteException() instanceof QuotaExceededException);
    }
    dfs.setQuota(foo, Long.MAX_VALUE - 1, Long.MAX_VALUE - 1);
    dfs.concat(target, srcs);
    summary = dfs.getContentSummary(bar);
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(blockSize * repl * (srcNum + 1), summary.getSpaceConsumed());
}
Also used: Path (org.apache.hadoop.fs.Path), QuotaExceededException (org.apache.hadoop.hdfs.protocol.QuotaExceededException), ContentSummary (org.apache.hadoop.fs.ContentSummary), RemoteException (org.apache.hadoop.ipc.RemoteException), Test (org.junit.Test)
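
Here the effect runs the other way: the sources are created at REPL_FACTOR, but after concat their blocks count at target's replication factor repl = 3. The first pair of assertions shows the directory sitting exactly at its space quota, so the extra replicas push it over and the concat is rejected until the quota is raised. As a worked check, assuming REPL_FACTOR is 2 (again declared outside this excerpt):

    dsQuota      = blockSize * 3 + blockSize * 10 * 2 = 23 * blockSize
    after concat = blockSize * 3 * (10 + 1)           = 33 * blockSize  (exceeds dsQuota)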

Example 10 with ContentSummary

Use of org.apache.hadoop.fs.ContentSummary in project hadoop by apache.

From the class WebHdfsFileSystem, method getContentSummary.

@Override
public ContentSummary getContentSummary(final Path p) throws IOException {
    statistics.incrementReadOps(1);
    storageStatistics.incrementOpCounter(OpType.GET_CONTENT_SUMMARY);
    final HttpOpParam.Op op = GetOpParam.Op.GETCONTENTSUMMARY;
    return new FsPathResponseRunner<ContentSummary>(op, p) {

        @Override
        ContentSummary decodeResponse(Map<?, ?> json) {
            return JsonUtilClient.toContentSummary(json);
        }
    }.run();
}
Also used: ContentSummary (org.apache.hadoop.fs.ContentSummary), Op (org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op)
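
Under the hood, GetOpParam.Op.GETCONTENTSUMMARY maps to a plain HTTP GET against the WebHDFS REST endpoint. A minimal sketch of that raw call, with hostname, port, and path as placeholders (9870 is the usual NameNode HTTP port in Hadoop 3):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class RawWebHdfsContentSummary {
    public static void main(String[] args) throws Exception {
        // The documented WebHDFS URL shape: /webhdfs/v1/<path>?op=GETCONTENTSUMMARY
        URL url = new URL(
                "http://namenode-host:9870/webhdfs/v1/user/example?op=GETCONTENTSUMMARY");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            // The body is a JSON object keyed by "ContentSummary", which
            // JsonUtilClient.toContentSummary decodes in the example above.
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}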

Aggregations

ContentSummary (org.apache.hadoop.fs.ContentSummary): 61
Path (org.apache.hadoop.fs.Path): 42
Test (org.junit.Test): 38
FileSystem (org.apache.hadoop.fs.FileSystem): 10
IOException (java.io.IOException): 9
Configuration (org.apache.hadoop.conf.Configuration): 8
ArrayList (java.util.ArrayList): 6
OutputStream (java.io.OutputStream): 5
URI (java.net.URI): 5
DSQuotaExceededException (org.apache.hadoop.hdfs.protocol.DSQuotaExceededException): 5
QuotaExceededException (org.apache.hadoop.hdfs.protocol.QuotaExceededException): 5
WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem): 5
JobConf (org.apache.hadoop.mapred.JobConf): 5
HttpURLConnection (java.net.HttpURLConnection): 4
HashMap (java.util.HashMap): 4
Properties (java.util.Properties): 4
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 4
NSQuotaExceededException (org.apache.hadoop.hdfs.protocol.NSQuotaExceededException): 4
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 4
FileNotFoundException (java.io.FileNotFoundException): 3