Usage example of org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op from the Apache Hadoop project: class WebHdfsFileSystem, method getContentSummary.
/**
 * Fetches the content summary (file/directory counts, space usage) for
 * {@code p} via the WebHDFS GETCONTENTSUMMARY operation.
 *
 * @param p the path whose summary is requested
 * @return the decoded {@link ContentSummary} from the JSON response
 * @throws IOException if the HTTP request fails
 */
@Override
public ContentSummary getContentSummary(final Path p) throws IOException {
  // Read-path bookkeeping for FileSystem.Statistics and storage statistics.
  statistics.incrementReadOps(1);
  storageStatistics.incrementOpCounter(OpType.GET_CONTENT_SUMMARY);
  // GETCONTENTSUMMARY is a GET operation; the runner issues the HTTP call
  // and hands the parsed JSON map to decodeResponse.
  final FsPathResponseRunner<ContentSummary> runner =
      new FsPathResponseRunner<ContentSummary>(GetOpParam.Op.GETCONTENTSUMMARY, p) {
        @Override
        ContentSummary decodeResponse(Map<?, ?> json) {
          return JsonUtilClient.toContentSummary(json);
        }
      };
  return runner.run();
}
Usage example of org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op from the Apache Hadoop project: class WebHdfsFileSystem, method createSnapshot.
/**
 * Creates a snapshot of {@code path} named {@code snapshotName} through the
 * WebHDFS CREATESNAPSHOT (PUT) operation.
 *
 * @param path         the snapshottable directory
 * @param snapshotName the name to give the new snapshot
 * @return the path of the newly created snapshot, decoded from the response
 * @throws IOException if the HTTP request fails
 */
@Override
public Path createSnapshot(final Path path, final String snapshotName) throws IOException {
  // Write-path bookkeeping for FileSystem.Statistics and storage statistics.
  statistics.incrementWriteOps(1);
  storageStatistics.incrementOpCounter(OpType.CREATE_SNAPSHOT);
  final FsPathResponseRunner<Path> runner =
      new FsPathResponseRunner<Path>(
          PutOpParam.Op.CREATESNAPSHOT, path, new SnapshotNameParam(snapshotName)) {
        @Override
        Path decodeResponse(Map<?, ?> json) {
          // The server returns the snapshot path keyed by the simple class
          // name "Path" in the JSON body.
          final String snapshotPath = (String) json.get(Path.class.getSimpleName());
          return new Path(snapshotPath);
        }
      };
  return runner.run();
}
Usage example of org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op from the Apache Hadoop project: class WebHdfsFileSystem, method removeAcl.
/**
 * Removes all ACL entries from {@code path} via the WebHDFS REMOVEACL
 * (PUT) operation. No response body is expected, so a plain
 * {@code FsPathRunner} is used instead of a response-decoding runner.
 *
 * @param path the path whose ACLs are removed
 * @throws IOException if the HTTP request fails
 */
@Override
public void removeAcl(Path path) throws IOException {
  // Write-path bookkeeping for FileSystem.Statistics and storage statistics.
  statistics.incrementWriteOps(1);
  storageStatistics.incrementOpCounter(OpType.REMOVE_ACL);
  new FsPathRunner(PutOpParam.Op.REMOVEACL, path).run();
}
Usage example of org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op from the Apache Hadoop project: class WebHdfsFileSystem, method create.
/**
 * Creates a file at {@code f} via the WebHDFS CREATE (PUT) operation and
 * returns an output stream for writing its contents.
 *
 * @param f           the file to create
 * @param permission  requested permission, combined with the client umask
 * @param overwrite   whether an existing file may be replaced
 * @param bufferSize  client-side buffer size for the output stream
 * @param replication replication factor for the new file
 * @param blockSize   block size for the new file
 * @param progress    progress callback (unused by this implementation)
 * @return a stream for writing the new file's data
 * @throws IOException if the HTTP request fails
 */
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission, final boolean overwrite, final int bufferSize, final short replication, final long blockSize, final Progressable progress) throws IOException {
  // Write-path bookkeeping for FileSystem.Statistics and storage statistics.
  statistics.incrementWriteOps(1);
  storageStatistics.incrementOpCounter(OpType.CREATE);
  // Apply the client umask; both the masked and unmasked permissions are
  // sent so the server can decide which to honor.
  final FsPermission modes = applyUMask(permission);
  final PermissionParam maskedPerm = new PermissionParam(modes.getMasked());
  final UnmaskedPermissionParam unmaskedPerm =
      new UnmaskedPermissionParam(modes.getUnmasked());
  return new FsPathOutputStreamRunner(
      PutOpParam.Op.CREATE,
      f,
      bufferSize,
      maskedPerm,
      unmaskedPerm,
      new OverwriteParam(overwrite),
      new BufferSizeParam(bufferSize),
      new ReplicationParam(replication),
      new BlockSizeParam(blockSize)).run();
}
Usage example of org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op from the Apache Hadoop project: class WebHdfsFileSystem, method getTrashRoot.
/**
 * Returns the trash root for {@code path} by asking the server via the
 * WebHDFS GETTRASHROOT operation. If the request fails, falls back to the
 * superclass's (DFS-compatible) local computation so callers always get a
 * usable trash root.
 *
 * @param path the path whose trash root is requested
 * @return the trash root, qualified against this filesystem's URI
 */
@Override
public Path getTrashRoot(Path path) {
  // Read-path bookkeeping for FileSystem.Statistics and storage statistics.
  statistics.incrementReadOps(1);
  storageStatistics.incrementOpCounter(OpType.GET_TRASH_ROOT);
  final HttpOpParam.Op op = GetOpParam.Op.GETTRASHROOT;
  try {
    String strTrashPath = new FsPathResponseRunner<String>(op, path) {
      @Override
      String decodeResponse(Map<?, ?> json) throws IOException {
        return JsonUtilClient.getPath(json);
      }
    }.run();
    return new Path(strTrashPath).makeQualified(getUri(), null);
  } catch (IOException e) {
    // Parameterized logging avoids eager string concatenation; the
    // exception is passed as the last argument so the stack trace is kept.
    // NOTE(review): assumes LOG is an SLF4J logger — confirm before merge.
    LOG.warn("Cannot find trash root of {}", path, e);
    // Keep the same behavior as DFS: fall back to the default trash root.
    return super.getTrashRoot(path).makeQualified(getUri(), null);
  }
}
Aggregations