Use of org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.HdfsFileStatusProto.FileType in project hadoop by apache.
The convert method of the PBHelperClient class.
public static HdfsFileStatusProto convert(HdfsFileStatus fs) {
  if (fs == null) {
    return null;
  }
  // Map the status to the corresponding protobuf file type.
  FileType fType = FileType.IS_FILE;
  if (fs.isDir()) {
    fType = FileType.IS_DIR;
  } else if (fs.isSymlink()) {
    fType = FileType.IS_SYMLINK;
  }
  HdfsFileStatusProto.Builder builder = HdfsFileStatusProto.newBuilder()
      .setLength(fs.getLen())
      .setFileType(fType)
      .setBlockReplication(fs.getReplication())
      .setBlocksize(fs.getBlockSize())
      .setModificationTime(fs.getModificationTime())
      .setAccessTime(fs.getAccessTime())
      .setPermission(convert(fs.getPermission()))
      .setOwner(fs.getOwner())
      .setGroup(fs.getGroup())
      .setFileId(fs.getFileId())
      .setChildrenNum(fs.getChildrenNum())
      .setPath(getByteString(fs.getLocalNameInBytes()))
      .setStoragePolicy(fs.getStoragePolicy());
  // Optional fields are only set when present on the status.
  if (fs.isSymlink()) {
    builder.setSymlink(getByteString(fs.getSymlinkInBytes()));
  }
  if (fs.getFileEncryptionInfo() != null) {
    builder.setFileEncryptionInfo(convert(fs.getFileEncryptionInfo()));
  }
  if (fs instanceof HdfsLocatedFileStatus) {
    final HdfsLocatedFileStatus lfs = (HdfsLocatedFileStatus) fs;
    LocatedBlocks locations = lfs.getBlockLocations();
    if (locations != null) {
      builder.setLocations(convert(locations));
    }
  }
  if (fs.getErasureCodingPolicy() != null) {
    builder.setEcPolicy(convertErasureCodingPolicy(fs.getErasureCodingPolicy()));
  }
  return builder.build();
}
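
For context, a minimal usage sketch follows. It assumes an HdfsFileStatus instance (here called status) has already been obtained elsewhere, for example from a NameNode file lookup; the variable name and the surrounding handling are illustrative and not part of the Hadoop source.

// Hypothetical sketch: serialize an existing HdfsFileStatus for a protobuf RPC response.
// `status` is assumed to come from elsewhere (e.g. a getFileInfo lookup); a null status
// simply yields a null proto because of the guard at the top of convert().
HdfsFileStatusProto proto = PBHelperClient.convert(status);
if (proto != null) {
  // The proto can now be embedded in an RPC response message or written out.
}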