Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.
The class PBImageTextWriter, method loadINodeDirSection.
private void loadINodeDirSection(FileInputStream fin,
    List<FileSummary.Section> sections, FileSummary summary,
    Configuration conf, List<Long> refIdList) throws IOException {
  LOG.info("Loading INode directory section.");
  long startTime = Time.monotonicNow();
  for (FileSummary.Section section : sections) {
    if (SectionName.fromString(section.getName()) == SectionName.INODE_DIR) {
      fin.getChannel().position(section.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(fin, section.getLength())));
      buildNamespace(is, refIdList);
    }
  }
  long timeTaken = Time.monotonicNow() - startTime;
  LOG.info("Finished loading INode directory section in {}ms", timeTaken);
}
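Note that this pass produces no text output: the decoded INODE_DIR entries feed buildNamespace, which records parent/child relationships so that the later INODE pass (the output method below) can resolve full paths. The refIdList argument carries the INodeReference ids that PBImageTextWriter loads elsewhere from the fsimage's INodeReference section, used to resolve directory children stored as references rather than as plain inode ids.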
Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.
The class PBImageTextWriter, method output.
private void output(Configuration conf, FileSummary summary,
    FileInputStream fin, ArrayList<FileSummary.Section> sections)
    throws IOException {
  InputStream is;
  long startTime = Time.monotonicNow();
  out.println(getHeader());
  for (FileSummary.Section section : sections) {
    if (SectionName.fromString(section.getName()) == SectionName.INODE) {
      fin.getChannel().position(section.getOffset());
      is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(),
          new BufferedInputStream(
              new LimitInputStream(fin, section.getLength())));
      outputINodes(is);
    }
  }
  long timeTaken = Time.monotonicNow() - startTime;
  LOG.debug("Time to output inodes: {}ms", timeTaken);
}
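Both private methods above expect the caller to have already loaded the FileSummary and located the file's sections. The sketch below shows how such a driver could wire them together; it is a hypothetical runTextDump method for illustration, not the actual PBImageTextWriter.visit, and the empty refIdList is a placeholder for the reference ids the real writer loads from the INodeReference section first (imports omitted, as in the snippets above).

// Hypothetical driver, for illustration only; the real PBImageTextWriter does
// more work here (directory metadata, INodeReference ids, error handling).
public void runTextDump(RandomAccessFile file) throws IOException {
  Configuration conf = new Configuration();
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }
  FileSummary summary = FSImageUtil.loadSummary(file);
  try (FileInputStream fin = new FileInputStream(file.getFD())) {
    // Sections carry explicit offsets; sort them so the file is read
    // front to back.
    ArrayList<FileSummary.Section> sections =
        new ArrayList<>(summary.getSectionsList());
    sections.sort(Comparator.comparingLong(FileSummary.Section::getOffset));
    List<Long> refIdList = new ArrayList<>();  // placeholder, see lead-in
    loadINodeDirSection(fin, sections, summary, conf, refIdList);
    output(conf, summary, fin, sections);
  }
}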
Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.
The class FSImageUtil, method loadSummary.
public static FileSummary loadSummary(RandomAccessFile file)
    throws IOException {
  final int FILE_LENGTH_FIELD_SIZE = 4;
  long fileLength = file.length();
  file.seek(fileLength - FILE_LENGTH_FIELD_SIZE);
  int summaryLength = file.readInt();
  if (summaryLength <= 0) {
    throw new IOException("Negative length of the file");
  }
  file.seek(fileLength - FILE_LENGTH_FIELD_SIZE - summaryLength);
  byte[] summaryBytes = new byte[summaryLength];
  file.readFully(summaryBytes);
  FileSummary summary = FileSummary.parseDelimitedFrom(
      new ByteArrayInputStream(summaryBytes));
  if (summary.getOndiskVersion() != FILE_VERSION) {
    throw new IOException("Unsupported file version " +
        summary.getOndiskVersion());
  }
  if (!NameNodeLayoutVersion.supports(Feature.PROTOBUF_FORMAT,
      summary.getLayoutVersion())) {
    throw new IOException("Unsupported layout version " +
        summary.getLayoutVersion());
  }
  return summary;
}
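Taken together with the writer shown in the last example below, this read path implies the fsimage trailer layout: the final 4 bytes of the file hold the FileSummary length as a big-endian int, and the summary itself, written with writeDelimitedTo and therefore prefixed by its own varint length, occupies the summaryLength bytes immediately before them. loadSummary first seeks 4 bytes from the end to read that length, then seeks back 4 + summaryLength bytes to read and parse the delimited message.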
Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.
The class FileDistributionCalculator, method visit.
void visit(RandomAccessFile file) throws IOException {
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }
  FileSummary summary = FSImageUtil.loadSummary(file);
  try (FileInputStream in = new FileInputStream(file.getFD())) {
    for (FileSummary.Section s : summary.getSectionsList()) {
      if (SectionName.fromString(s.getName()) != SectionName.INODE) {
        continue;
      }
      in.getChannel().position(s.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(in, s.getLength())));
      run(is);
      output();
    }
  }
}
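All three readers above repeat the same idiom: position the FileChannel at the section offset recorded in the FileSummary, bound the read with a LimitInputStream of the section length, and let FSImageUtil.wrapInputStreamForCompression apply the codec named in the summary. A small helper along the following lines could factor that out; the name openSection and its signature are illustrative only, not part of Hadoop (imports omitted, as in the snippets above).

// Illustrative helper (not a Hadoop API): position the stream at the first
// section with the given name and return a decompressed, length-limited view.
private static InputStream openSection(FileInputStream fin, Configuration conf,
    FileSummary summary, SectionName target) throws IOException {
  for (FileSummary.Section s : summary.getSectionsList()) {
    if (SectionName.fromString(s.getName()) == target) {
      fin.getChannel().position(s.getOffset());
      return FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(),
          new BufferedInputStream(new LimitInputStream(fin, s.getLength())));
    }
  }
  throw new IOException("Section " + target + " not found in FileSummary");
}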
Use of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary in project hadoop by apache.
The class OfflineImageReconstructor, method processXml.
/**
 * Processes the XML file back into an fsimage.
 */
private void processXml() throws Exception {
  LOG.debug("Loading <fsimage>.");
  expectTag("fsimage", false);
  // Read the <version> tag.
  readVersion();
  // Write the HDFSIMG1 magic number which begins the fsimage file.
  out.write(FSImageUtil.MAGIC_HEADER);
  // Write a series of fsimage sections.
  sectionStartOffset = FSImageUtil.MAGIC_HEADER.length;
  final HashSet<String> unprocessedSections =
      new HashSet<>(sections.keySet());
  while (!unprocessedSections.isEmpty()) {
    XMLEvent ev = expectTag("[section header]", true);
    if (ev.getEventType() == XMLStreamConstants.END_ELEMENT) {
      if (ev.asEndElement().getName().getLocalPart().equals("fsimage")) {
        throw new IOException("FSImage XML ended prematurely, without " +
            "including section(s) " +
            StringUtils.join(", ", unprocessedSections));
      }
      throw new IOException("Got unexpected tag end event for " +
          ev.asEndElement().getName().getLocalPart() +
          " while looking for section header tag.");
    } else if (ev.getEventType() != XMLStreamConstants.START_ELEMENT) {
      throw new IOException("Expected section header START_ELEMENT; " +
          "got event of type " + ev.getEventType());
    }
    String sectionName = ev.asStartElement().getName().getLocalPart();
    if (!unprocessedSections.contains(sectionName)) {
      throw new IOException("Unknown or duplicate section found for " +
          sectionName);
    }
    SectionProcessor sectionProcessor = sections.get(sectionName);
    if (sectionProcessor == null) {
      throw new IOException("Unknown FSImage section " + sectionName +
          ". Valid section names are [" +
          StringUtils.join(", ", sections.keySet()) + "]");
    }
    unprocessedSections.remove(sectionName);
    sectionProcessor.process();
  }
  // Write the StringTable section to disk.
  // This has to be done after the other sections, since some of them
  // add entries to the string table.
  writeStringTableSection();
  // Write the FileSummary section to disk.
  // This section is always last.
  long prevOffset = out.getCount();
  FileSummary fileSummary = fileSummaryBld.build();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Writing FileSummary: {" +
        TextFormat.printToString(fileSummary) + "}");
  }
  // Even though the last 4 bytes of the file gives the FileSummary length,
  // we still write a varint first that also contains the length.
  fileSummary.writeDelimitedTo(out);
  // Write the length of the FileSummary section as a fixed-size big
  // endian 4-byte quantity.
  int summaryLen = Ints.checkedCast(out.getCount() - prevOffset);
  byte[] summaryLenBytes = new byte[4];
  ByteBuffer.wrap(summaryLenBytes).asIntBuffer().put(summaryLen);
  out.write(summaryLenBytes);
}
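ByteBuffer's default byte order is big-endian, so the 4-byte trailer written here with asIntBuffer().put(summaryLen) is exactly what FSImageUtil.loadSummary reads back via RandomAccessFile.readInt(), which also decodes big-endian. A minimal standalone sketch of that round trip (plain Java, not Hadoop code):

// Standalone sketch: the big-endian int written via ByteBuffer is decoded
// unchanged by DataInputStream.readInt(), which uses the same byte order as
// RandomAccessFile.readInt() in loadSummary.
static void checkSummaryLenRoundTrip() throws IOException {
  int summaryLen = 123456;
  byte[] summaryLenBytes = new byte[4];
  ByteBuffer.wrap(summaryLenBytes).asIntBuffer().put(summaryLen);  // big-endian by default
  try (DataInputStream in =
      new DataInputStream(new ByteArrayInputStream(summaryLenBytes))) {
    if (in.readInt() != summaryLen) {
      throw new IllegalStateException("byte order mismatch");
    }
  }
}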