Search in sources :

Example 6 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

The method visit of the class PBImageXmlWriter.

/**
 * Dumps the given fsimage file as XML to {@code out}.
 *
 * <p>The file header is validated first, then each section listed in the
 * {@link FileSummary} is visited in {@link SectionName} ordinal order and
 * serialized by the matching {@code dump*}/{@code load*} helper.
 *
 * @param file the fsimage to read; its descriptor is wrapped in a
 *             FileInputStream that is closed when the visit completes
 * @throws IOException if the file is not a recognized FSImage or an I/O
 *                     error occurs while reading a section
 */
public void visit(RandomAccessFile file) throws IOException {
    if (!FSImageUtil.checkFileFormat(file)) {
        throw new IOException("Unrecognized FSImage");
    }
    FileSummary summary = FSImageUtil.loadSummary(file);
    try (FileInputStream fin = new FileInputStream(file.getFD())) {
        out.print("<?xml version=\"1.0\"?>\n<fsimage>");
        out.print("<version>");
        o("layoutVersion", summary.getLayoutVersion());
        o("onDiskVersion", summary.getOndiskVersion());
        // Output the version of OIV (which is not necessarily the version of
        // the fsimage file).  This could be helpful in the case where a bug
        // in OIV leads to information loss in the XML-- we can quickly tell
        // if a specific fsimage XML file is affected by this bug.
        o("oivRevision", VersionInfo.getRevision());
        out.print("</version>\n");
        ArrayList<FileSummary.Section> sections =
            Lists.newArrayList(summary.getSectionsList());
        // Sort sections by SectionName ordinal; unknown sections sort first.
        Collections.sort(sections, new Comparator<FileSummary.Section>() {

            @Override
            public int compare(FileSummary.Section s1, FileSummary.Section s2) {
                SectionName n1 = SectionName.fromString(s1.getName());
                SectionName n2 = SectionName.fromString(s2.getName());
                if (n1 == null) {
                    return n2 == null ? 0 : -1;
                } else if (n2 == null) {
                    // FIX: was -1, which broke the Comparator contract —
                    // compare(a, b) and compare(b, a) could both be negative.
                    return 1;
                } else {
                    // Integer.compare avoids subtraction (overflow-safe idiom).
                    return Integer.compare(n1.ordinal(), n2.ordinal());
                }
            }
        });
        for (FileSummary.Section s : sections) {
            fin.getChannel().position(s.getOffset());
            InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
                summary.getCodec(),
                new BufferedInputStream(new LimitInputStream(fin, s.getLength())));
            SectionName sectionName = SectionName.fromString(s.getName());
            if (sectionName == null) {
                // FIX: fromString() returns null for unknown section names,
                // which previously caused an NPE when used in the switch.
                // Unknown sections are skipped, matching the old default case.
                continue;
            }
            switch (sectionName) {
                case NS_INFO:
                    dumpNameSection(is);
                    break;
                case STRING_TABLE:
                    loadStringTable(is);
                    break;
                case INODE:
                    dumpINodeSection(is);
                    break;
                case INODE_REFERENCE:
                    dumpINodeReferenceSection(is);
                    break;
                case INODE_DIR:
                    dumpINodeDirectorySection(is);
                    break;
                case FILES_UNDERCONSTRUCTION:
                    dumpFileUnderConstructionSection(is);
                    break;
                case SNAPSHOT:
                    dumpSnapshotSection(is);
                    break;
                case SNAPSHOT_DIFF:
                    dumpSnapshotDiffSection(is);
                    break;
                case SECRET_MANAGER:
                    dumpSecretManagerSection(is);
                    break;
                case CACHE_MANAGER:
                    dumpCacheManagerSection(is);
                    break;
                default:
                    break;
            }
        }
        out.print("</fsimage>\n");
    }
}
Also used : BufferedInputStream(java.io.BufferedInputStream) BufferedInputStream(java.io.BufferedInputStream) LimitInputStream(org.apache.hadoop.util.LimitInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) SectionName(org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName) IOException(java.io.IOException) LimitInputStream(org.apache.hadoop.util.LimitInputStream) CacheManagerSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) INodeSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) SnapshotSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection) StringTableSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection) INodeDirectorySection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection) SnapshotDiffSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection) INodeReferenceSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection) NameSystemSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection) SecretManagerSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection) FileInputStream(java.io.FileInputStream)

Example 7 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

The method loadDirectories of the class PBImageTextWriter.

/**
 * Scans the section list for the INODE section and loads every directory
 * inode found there, logging how long the pass took.
 *
 * @param fin      open stream over the fsimage; repositioned per section
 * @param sections all sections declared in the image summary
 * @param summary  image summary, consulted for the compression codec
 * @param conf     configuration used to resolve the codec
 * @throws IOException on any read or decompression failure
 */
private void loadDirectories(FileInputStream fin, List<FileSummary.Section> sections, FileSummary summary, Configuration conf) throws IOException {
    LOG.info("Loading directories");
    final long begin = Time.monotonicNow();
    for (FileSummary.Section sec : sections) {
        // Only the INODE section holds directory entries; skip the rest.
        if (SectionName.fromString(sec.getName()) != SectionName.INODE) {
            continue;
        }
        fin.getChannel().position(sec.getOffset());
        InputStream sectionIn = FSImageUtil.wrapInputStreamForCompression(
            conf, summary.getCodec(),
            new BufferedInputStream(new LimitInputStream(fin, sec.getLength())));
        loadDirectoriesInINodeSection(sectionIn);
    }
    LOG.info("Finished loading directories in {}ms", Time.monotonicNow() - begin);
}
Also used : BufferedInputStream(java.io.BufferedInputStream) BufferedInputStream(java.io.BufferedInputStream) LimitInputStream(org.apache.hadoop.util.LimitInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) LimitInputStream(org.apache.hadoop.util.LimitInputStream)

Example 8 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

The method visit of the class PBImageTextWriter.

/**
 * Visits the given fsimage: pre-loads the string table and inode-reference
 * list, builds the directory metadata map, then emits the text output.
 *
 * @param file the fsimage to read; its descriptor is wrapped in a
 *             FileInputStream that is closed when the visit completes
 * @throws IOException if the file is not a recognized FSImage or an I/O
 *                     error occurs while reading a section
 */
public void visit(RandomAccessFile file) throws IOException {
    Configuration conf = new Configuration();
    if (!FSImageUtil.checkFileFormat(file)) {
        throw new IOException("Unrecognized FSImage");
    }
    FileSummary summary = FSImageUtil.loadSummary(file);
    try (FileInputStream fin = new FileInputStream(file.getFD())) {
        InputStream is;
        ArrayList<FileSummary.Section> sections =
            Lists.newArrayList(summary.getSectionsList());
        // Sort sections by SectionName ordinal; unknown sections sort first.
        Collections.sort(sections, new Comparator<FileSummary.Section>() {

            @Override
            public int compare(FsImageProto.FileSummary.Section s1, FsImageProto.FileSummary.Section s2) {
                FSImageFormatProtobuf.SectionName n1 = FSImageFormatProtobuf.SectionName.fromString(s1.getName());
                FSImageFormatProtobuf.SectionName n2 = FSImageFormatProtobuf.SectionName.fromString(s2.getName());
                if (n1 == null) {
                    return n2 == null ? 0 : -1;
                } else if (n2 == null) {
                    // FIX: was -1, which broke the Comparator contract —
                    // compare(a, b) and compare(b, a) could both be negative.
                    return 1;
                } else {
                    // Integer.compare avoids subtraction (overflow-safe idiom).
                    return Integer.compare(n1.ordinal(), n2.ordinal());
                }
            }
        });
        ImmutableList<Long> refIdList = null;
        for (FileSummary.Section section : sections) {
            fin.getChannel().position(section.getOffset());
            is = FSImageUtil.wrapInputStreamForCompression(conf,
                summary.getCodec(),
                new BufferedInputStream(new LimitInputStream(fin, section.getLength())));
            SectionName sectionName = SectionName.fromString(section.getName());
            if (sectionName == null) {
                // FIX: fromString() returns null for unknown section names,
                // which previously caused an NPE when used in the switch.
                // Unknown sections are skipped, matching the old default case.
                continue;
            }
            switch (sectionName) {
                case STRING_TABLE:
                    LOG.info("Loading string table");
                    stringTable = FSImageLoader.loadStringTable(is);
                    break;
                case INODE_REFERENCE:
                    // Load INodeReference so that all INodes can be processed.
                    // Snapshots are not handled and will just be ignored for now.
                    LOG.info("Loading inode references");
                    refIdList = FSImageLoader.loadINodeReferenceSection(is);
                    break;
                default:
                    break;
            }
        }
        loadDirectories(fin, sections, summary, conf);
        loadINodeDirSection(fin, sections, summary, conf, refIdList);
        metadataMap.sync();
        output(conf, summary, fin, sections);
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) BufferedInputStream(java.io.BufferedInputStream) LimitInputStream(org.apache.hadoop.util.LimitInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) IOException(java.io.IOException) LimitInputStream(org.apache.hadoop.util.LimitInputStream) INodeSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection) FileInputStream(java.io.FileInputStream) BufferedInputStream(java.io.BufferedInputStream) FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) SectionName(org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName)

Aggregations

FileSummary (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary)8 BufferedInputStream (java.io.BufferedInputStream)6 FileInputStream (java.io.FileInputStream)6 InputStream (java.io.InputStream)6 LimitInputStream (org.apache.hadoop.util.LimitInputStream)6 IOException (java.io.IOException)5 SectionName (org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName)2 INodeSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection)2 ByteString (com.google.protobuf.ByteString)1 ByteArrayInputStream (java.io.ByteArrayInputStream)1 HashSet (java.util.HashSet)1 XMLEvent (javax.xml.stream.events.XMLEvent)1 Configuration (org.apache.hadoop.conf.Configuration)1 CacheManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection)1 INodeDirectorySection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection)1 INodeReferenceSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection)1 NameSystemSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection)1 SecretManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection)1 SnapshotDiffSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection)1 SnapshotSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection)1