
Example 1 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

From the class PBImageTextWriter, method loadINodeDirSection:

private void loadINodeDirSection(FileInputStream fin, List<FileSummary.Section> sections, FileSummary summary, Configuration conf, List<Long> refIdList) throws IOException {
    LOG.info("Loading INode directory section.");
    long startTime = Time.monotonicNow();
    for (FileSummary.Section section : sections) {
        if (SectionName.fromString(section.getName()) == SectionName.INODE_DIR) {
            fin.getChannel().position(section.getOffset());
            InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(), new BufferedInputStream(new LimitInputStream(fin, section.getLength())));
            buildNamespace(is, refIdList);
        }
    }
    long timeTaken = Time.monotonicNow() - startTime;
    LOG.info("Finished loading INode directory section in {}ms", timeTaken);
}
Also used: BufferedInputStream(java.io.BufferedInputStream) LimitInputStream(org.apache.hadoop.util.LimitInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary)
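
The pattern in loadINodeDirSection recurs throughout the offline image viewer: position the channel at the section's offset, bound the stream to the section's length with LimitInputStream, then wrap it for decompression. A minimal sketch of that pattern as a standalone helper (the name openSection is ours for illustration, not part of Hadoop):

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
import org.apache.hadoop.util.LimitInputStream;

// Illustrative helper: open one fsimage section as a decompressed stream
// that yields exactly section.getLength() bytes of section data.
static InputStream openSection(FileInputStream fin, FileSummary summary, FileSummary.Section section, Configuration conf) throws IOException {
    // Seek the underlying channel to where the section starts on disk.
    fin.getChannel().position(section.getOffset());
    // LimitInputStream stops reads at the section boundary; the codec
    // wrapper decompresses on the fly when summary.getCodec() is set.
    return FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(), new BufferedInputStream(new LimitInputStream(fin, section.getLength())));
}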

Example 2 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

From the class PBImageTextWriter, method output:

private void output(Configuration conf, FileSummary summary, FileInputStream fin, ArrayList<FileSummary.Section> sections) throws IOException {
    InputStream is;
    long startTime = Time.monotonicNow();
    out.println(getHeader());
    for (FileSummary.Section section : sections) {
        if (SectionName.fromString(section.getName()) == SectionName.INODE) {
            fin.getChannel().position(section.getOffset());
            is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(), new BufferedInputStream(new LimitInputStream(fin, section.getLength())));
            outputINodes(is);
        }
    }
    long timeTaken = Time.monotonicNow() - startTime;
    LOG.debug("Time to output inodes: {}ms", timeTaken);
}
Also used: BufferedInputStream(java.io.BufferedInputStream) LimitInputStream(org.apache.hadoop.util.LimitInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary)
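
LimitInputStream does quiet but essential work in both examples: without it, the decompression codec or protobuf parser could read past the end of the current section into the next one. A small hedged demonstration of the bound, using an in-memory stream in place of the fsimage file:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.apache.hadoop.util.LimitInputStream;

public class LimitDemo {
    public static void main(String[] args) throws IOException {
        byte[] data = new byte[100];
        // Bound the stream to the first 10 bytes, just as the examples
        // above bound each section reader to section.getLength().
        LimitInputStream in = new LimitInputStream(new ByteArrayInputStream(data), 10);
        byte[] buf = new byte[100];
        System.out.println(in.read(buf)); // prints 10: reads stop at the limit
        System.out.println(in.read());    // prints -1: the limit acts as EOF
    }
}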

Example 3 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

From the class FSImageUtil, method loadSummary:

public static FileSummary loadSummary(RandomAccessFile file) throws IOException {
    final int FILE_LENGTH_FIELD_SIZE = 4;
    long fileLength = file.length();
    file.seek(fileLength - FILE_LENGTH_FIELD_SIZE);
    int summaryLength = file.readInt();
    if (summaryLength <= 0) {
        throw new IOException("Negative or zero length of the FileSummary section: " + summaryLength);
    }
    file.seek(fileLength - FILE_LENGTH_FIELD_SIZE - summaryLength);
    byte[] summaryBytes = new byte[summaryLength];
    file.readFully(summaryBytes);
    FileSummary summary = FileSummary.parseDelimitedFrom(new ByteArrayInputStream(summaryBytes));
    if (summary.getOndiskVersion() != FILE_VERSION) {
        throw new IOException("Unsupported file version " + summary.getOndiskVersion());
    }
    if (!NameNodeLayoutVersion.supports(Feature.PROTOBUF_FORMAT, summary.getLayoutVersion())) {
        throw new IOException("Unsupported layout version " + summary.getLayoutVersion());
    }
    return summary;
}
Also used: ByteArrayInputStream(java.io.ByteArrayInputStream) FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) IOException(java.io.IOException)
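
loadSummary depends on the fsimage trailer layout: the file ends with the varint-delimited FileSummary message followed by its length as a fixed-size 4-byte big-endian integer. A hedged sketch of just the offset arithmetic (plain JDK types, no Hadoop dependencies; the helper name summaryStartOffset is ours):

import java.io.IOException;
import java.io.RandomAccessFile;

// Illustrative: compute where the FileSummary begins inside an fsimage.
// Layout at the end of the file:
//   [sections ...][FileSummary, varint-delimited][4-byte big-endian length]
static long summaryStartOffset(RandomAccessFile file) throws IOException {
    final int FILE_LENGTH_FIELD_SIZE = 4;
    long fileLength = file.length();
    file.seek(fileLength - FILE_LENGTH_FIELD_SIZE);
    int summaryLength = file.readInt(); // readInt() is big-endian
    return fileLength - FILE_LENGTH_FIELD_SIZE - summaryLength;
}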

Example 4 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

From the class FileDistributionCalculator, method visit:

void visit(RandomAccessFile file) throws IOException {
    if (!FSImageUtil.checkFileFormat(file)) {
        throw new IOException("Unrecognized FSImage");
    }
    FileSummary summary = FSImageUtil.loadSummary(file);
    try (FileInputStream in = new FileInputStream(file.getFD())) {
        for (FileSummary.Section s : summary.getSectionsList()) {
            if (SectionName.fromString(s.getName()) != SectionName.INODE) {
                continue;
            }
            in.getChannel().position(s.getOffset());
            InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(), new BufferedInputStream(new LimitInputStream(in, s.getLength())));
            run(is);
            output();
        }
    }
}
Also used: BufferedInputStream(java.io.BufferedInputStream) FileInputStream(java.io.FileInputStream) LimitInputStream(org.apache.hadoop.util.LimitInputStream) InputStream(java.io.InputStream) FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) IOException(java.io.IOException)
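
Examples 1, 2, and 4 all repeat the same find-the-section-by-name loop over summary.getSectionsList(). A hedged refactoring sketch that pulls the lookup out (the helper name findSection is ours, not Hadoop's):

import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;

// Illustrative helper: return the first section matching the given name,
// or null if the image does not contain one.
static FileSummary.Section findSection(FileSummary summary, SectionName name) {
    for (FileSummary.Section s : summary.getSectionsList()) {
        if (SectionName.fromString(s.getName()) == name) {
            return s;
        }
    }
    return null;
}

Combined with the openSection sketch after Example 1, each visitor body would reduce to a lookup followed by a single wrap-and-process call.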

Example 5 with FileSummary

Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.

From the class OfflineImageReconstructor, method processXml:

/**
 * Processes the XML file back into an fsimage.
 */
private void processXml() throws Exception {
    LOG.debug("Loading <fsimage>.");
    expectTag("fsimage", false);
    // Read the <version> tag.
    readVersion();
    // Write the HDFSIMG1 magic number which begins the fsimage file.
    out.write(FSImageUtil.MAGIC_HEADER);
    // Write a series of fsimage sections.
    sectionStartOffset = FSImageUtil.MAGIC_HEADER.length;
    final HashSet<String> unprocessedSections = new HashSet<>(sections.keySet());
    while (!unprocessedSections.isEmpty()) {
        XMLEvent ev = expectTag("[section header]", true);
        if (ev.getEventType() == XMLStreamConstants.END_ELEMENT) {
            if (ev.asEndElement().getName().getLocalPart().equals("fsimage")) {
                throw new IOException("FSImage XML ended prematurely, without " + "including section(s) " + StringUtils.join(", ", unprocessedSections));
            }
            throw new IOException("Got unexpected tag end event for " + ev.asEndElement().getName().getLocalPart() + " while looking " + "for section header tag.");
        } else if (ev.getEventType() != XMLStreamConstants.START_ELEMENT) {
            throw new IOException("Expected section header START_ELEMENT; " + "got event of type " + ev.getEventType());
        }
        String sectionName = ev.asStartElement().getName().getLocalPart();
        if (!unprocessedSections.contains(sectionName)) {
            throw new IOException("Unknown or duplicate section found for " + sectionName);
        }
        SectionProcessor sectionProcessor = sections.get(sectionName);
        if (sectionProcessor == null) {
            throw new IOException("Unknown FSImage section " + sectionName + ".  Valid section names are [" + StringUtils.join(", ", sections.keySet()) + "]");
        }
        unprocessedSections.remove(sectionName);
        sectionProcessor.process();
    }
    // Write the StringTable section to disk.
    // This has to be done after the other sections, since some of them
    // add entries to the string table.
    writeStringTableSection();
    // Write the FileSummary section to disk.
    // This section is always last.
    long prevOffset = out.getCount();
    FileSummary fileSummary = fileSummaryBld.build();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Writing FileSummary: {" + TextFormat.printToString(fileSummary) + "}");
    }
    // Even though the last 4 bytes of the file gives the FileSummary length,
    // we still write a varint first that also contains the length.
    fileSummary.writeDelimitedTo(out);
    // Write the length of the FileSummary section as a fixed-size big
    // endian 4-byte quantity.
    int summaryLen = Ints.checkedCast(out.getCount() - prevOffset);
    byte[] summaryLenBytes = new byte[4];
    ByteBuffer.wrap(summaryLenBytes).asIntBuffer().put(summaryLen);
    out.write(summaryLenBytes);
}
Also used: FileSummary(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary) XMLEvent(javax.xml.stream.events.XMLEvent) ByteString(com.google.protobuf.ByteString) IOException(java.io.IOException) HashSet(java.util.HashSet)
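
The ByteBuffer sequence at the end of processXml writes summaryLen as a big-endian 4-byte integer, which is exactly the field loadSummary (Example 3) later reads back with readInt(). A small hedged round-trip check, with DataInputStream standing in for the RandomAccessFile reader:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class TrailerRoundTrip {
    public static void main(String[] args) throws IOException {
        int summaryLen = 12345;
        // Writer side (this example): ByteBuffer defaults to big-endian.
        byte[] summaryLenBytes = new byte[4];
        ByteBuffer.wrap(summaryLenBytes).asIntBuffer().put(summaryLen);
        // Reader side (Example 3): readInt() is also big-endian.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(summaryLenBytes));
        System.out.println(in.readInt() == summaryLen); // prints true
    }
}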

Aggregations

FileSummary (org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary): 8
BufferedInputStream (java.io.BufferedInputStream): 6
FileInputStream (java.io.FileInputStream): 6
InputStream (java.io.InputStream): 6
LimitInputStream (org.apache.hadoop.util.LimitInputStream): 6
IOException (java.io.IOException): 5
SectionName (org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName): 2
INodeSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection): 2
ByteString (com.google.protobuf.ByteString): 1
ByteArrayInputStream (java.io.ByteArrayInputStream): 1
HashSet (java.util.HashSet): 1
XMLEvent (javax.xml.stream.events.XMLEvent): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
CacheManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection): 1
INodeDirectorySection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection): 1
INodeReferenceSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection): 1
NameSystemSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection): 1
SecretManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection): 1
SnapshotDiffSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection): 1
SnapshotSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection): 1