Example 1 with INodeDirectory

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory in project hadoop by apache.

The getEntry method of the class PBImageDelimitedTextWriter builds one delimited output row for an inode read from the fsimage, filling in the type-specific columns for files, directories, and symlinks and zero-filling the rest:

@Override
public String getEntry(String parent, INode inode) {
    StringBuffer buffer = new StringBuffer();
    String inodeName = inode.getName().toStringUtf8();
    Path path = new Path(parent.isEmpty() ? "/" : parent, inodeName.isEmpty() ? "/" : inodeName);
    buffer.append(path.toString());
    PermissionStatus p = null;
    boolean isDir = false;
    boolean hasAcl = false;
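    // Emit the type-specific columns; fields that do not apply to an inode type are zero-filled.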
    switch(inode.getType()) {
        case FILE:
            INodeFile file = inode.getFile();
            p = getPermission(file.getPermission());
            hasAcl = file.hasAcl() && file.getAcl().getEntriesCount() > 0;
            append(buffer, file.getReplication());
            append(buffer, formatDate(file.getModificationTime()));
            append(buffer, formatDate(file.getAccessTime()));
            append(buffer, file.getPreferredBlockSize());
            append(buffer, file.getBlocksCount());
            append(buffer, FSImageLoader.getFileSize(file));
            // NS_QUOTA
            append(buffer, 0);
            // DS_QUOTA
            append(buffer, 0);
            break;
        case DIRECTORY:
            INodeDirectory dir = inode.getDirectory();
            p = getPermission(dir.getPermission());
            hasAcl = dir.hasAcl() && dir.getAcl().getEntriesCount() > 0;
            // Replication
            append(buffer, 0);
            append(buffer, formatDate(dir.getModificationTime()));
            // Access time.
            append(buffer, formatDate(0));
            // Block size.
            append(buffer, 0);
            // Num blocks.
            append(buffer, 0);
            // Num bytes.
            append(buffer, 0);
            append(buffer, dir.getNsQuota());
            append(buffer, dir.getDsQuota());
            isDir = true;
            break;
        case SYMLINK:
            INodeSymlink s = inode.getSymlink();
            p = getPermission(s.getPermission());
            // Replication
            append(buffer, 0);
            append(buffer, formatDate(s.getModificationTime()));
            append(buffer, formatDate(s.getAccessTime()));
            // Block size.
            append(buffer, 0);
            // Num blocks.
            append(buffer, 0);
            // Num bytes.
            append(buffer, 0);
            // NS_QUOTA
            append(buffer, 0);
            // DS_QUOTA
            append(buffer, 0);
            break;
        default:
            break;
    }
    assert p != null;
    String dirString = isDir ? "d" : "-";
    String aclString = hasAcl ? "+" : "";
    append(buffer, dirString + p.getPermission().toString() + aclString);
    append(buffer, p.getUserName());
    append(buffer, p.getGroupName());
    return buffer.toString();
}
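The append(...) and getPermission(...) calls above are helpers defined elsewhere in PBImageDelimitedTextWriter and its parent class PBImageTextWriter: getPermission decodes the packed permission word from the fsimage into a PermissionStatus (user, group, mode), and each append writes one more column. A minimal, non-authoritative sketch of append, inferred only from how it is used in getEntry (the field name delimiter is an assumption; the Delimited processor's separator defaults to a tab and can be overridden on the command line):

private void append(StringBuffer buffer, long field) {
    // Assumed behavior: write the column separator, then the field value.
    buffer.append(delimiter);
    buffer.append(field);
}

private void append(StringBuffer buffer, String field) {
    buffer.append(delimiter);
    buffer.append(field);
}

The real class provides a few such overloads (int, long, String); this sketch only shows the shape implied by the calls above.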
Also used:
Path (org.apache.hadoop.fs.Path)
PermissionStatus (org.apache.hadoop.fs.permission.PermissionStatus)
INodeDirectory (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeDirectory)
INodeFile (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeFile)
INodeSymlink (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INodeSymlink)
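This writer backs the Delimited processor of the offline image viewer, so the rows produced by getEntry end up in a plain-text dump. A typical invocation (file paths are placeholders):

hdfs oiv -p Delimited -i /path/to/fsimage -o /tmp/fsimage.txt

Each output line then carries the columns in the order getEntry appends them: Path, Replication, ModificationTime, AccessTime, PreferredBlockSize, BlocksCount, FileSize, NSQUOTA, DSQUOTA, Permission, UserName, GroupName.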
