Use of org.apache.hadoop.util.LimitInputStream in project hadoop by apache.
The class FileDistributionCalculator, method visit:
void visit(RandomAccessFile file) throws IOException {
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }
  FileSummary summary = FSImageUtil.loadSummary(file);
  try (FileInputStream in = new FileInputStream(file.getFD())) {
    for (FileSummary.Section s : summary.getSectionsList()) {
      if (SectionName.fromString(s.getName()) != SectionName.INODE) {
        continue;
      }
      in.getChannel().position(s.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(in, s.getLength())));
      run(is);
      output();
    }
  }
}
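The wrapping above is the point of LimitInputStream: it makes the shared FileInputStream look like a stream that contains only the current section. A minimal, self-contained sketch of that guarantee (not from the Hadoop source; the byte array and the limit of 4 are made up for illustration):

import java.io.ByteArrayInputStream;
import java.io.IOException;

import org.apache.hadoop.util.LimitInputStream;

public class LimitInputStreamDemo {

  public static void main(String[] args) throws IOException {
    // 16 bytes of data, but the limit is 4: reads stop at the limit even
    // though the underlying stream still has bytes left.
    byte[] data = new byte[16];
    try (LimitInputStream in =
        new LimitInputStream(new ByteArrayInputStream(data), 4)) {
      byte[] buf = new byte[16];
      int n = in.read(buf);   // n == 4: the bulk read is capped at the limit
      int next = in.read();   // -1: the limited stream reports EOF here
      System.out.println(n + " " + next);
    }
  }
}

This is why each section parser in the visitors on this page can read until EOF without any risk of consuming bytes that belong to the next section.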
Use of org.apache.hadoop.util.LimitInputStream in project hadoop by apache.
The class PBImageXmlWriter, method visit:
public void visit(RandomAccessFile file) throws IOException {
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }
  FileSummary summary = FSImageUtil.loadSummary(file);
  try (FileInputStream fin = new FileInputStream(file.getFD())) {
    out.print("<?xml version=\"1.0\"?>\n<fsimage>");
    out.print("<version>");
    o("layoutVersion", summary.getLayoutVersion());
    o("onDiskVersion", summary.getOndiskVersion());
    // Output the version of OIV (which is not necessarily the version of
    // the fsimage file). This could be helpful in the case where a bug
    // in OIV leads to information loss in the XML-- we can quickly tell
    // if a specific fsimage XML file is affected by this bug.
    o("oivRevision", VersionInfo.getRevision());
    out.print("</version>\n");
    ArrayList<FileSummary.Section> sections =
        Lists.newArrayList(summary.getSectionsList());
    Collections.sort(sections, new Comparator<FileSummary.Section>() {
      @Override
      public int compare(FileSummary.Section s1, FileSummary.Section s2) {
        SectionName n1 = SectionName.fromString(s1.getName());
        SectionName n2 = SectionName.fromString(s2.getName());
        if (n1 == null) {
          return n2 == null ? 0 : -1;
        } else if (n2 == null) {
          return -1;
        } else {
          return n1.ordinal() - n2.ordinal();
        }
      }
    });
    for (FileSummary.Section s : sections) {
      fin.getChannel().position(s.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(fin, s.getLength())));
      switch (SectionName.fromString(s.getName())) {
        case NS_INFO:
          dumpNameSection(is);
          break;
        case STRING_TABLE:
          loadStringTable(is);
          break;
        case INODE:
          dumpINodeSection(is);
          break;
        case INODE_REFERENCE:
          dumpINodeReferenceSection(is);
          break;
        case INODE_DIR:
          dumpINodeDirectorySection(is);
          break;
        case FILES_UNDERCONSTRUCTION:
          dumpFileUnderConstructionSection(is);
          break;
        case SNAPSHOT:
          dumpSnapshotSection(is);
          break;
        case SNAPSHOT_DIFF:
          dumpSnapshotDiffSection(is);
          break;
        case SECRET_MANAGER:
          dumpSecretManagerSection(is);
          break;
        case CACHE_MANAGER:
          dumpCacheManagerSection(is);
          break;
        default:
          break;
      }
    }
    out.print("</fsimage>\n");
  }
}
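The anonymous Comparator above sorts sections into the declaration order of SectionName, so the XML is always emitted in a fixed section order regardless of where the sections physically sit in the image file. As a rough sketch (assuming the same Hadoop classes referenced above), the intent can be expressed with java.util.Comparator helpers; note this is not byte-for-byte equivalent, since the original returns -1 when only n2 is null while nullsFirst is symmetric:

import java.util.Comparator;

import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;

public class SectionOrder {

  // Sketch only: orders sections by SectionName declaration order and puts
  // sections whose name is not recognized (fromString returns null) first.
  static final Comparator<FileSummary.Section> BY_SECTION_NAME =
      Comparator.comparing(
          (FileSummary.Section s) -> SectionName.fromString(s.getName()),
          Comparator.nullsFirst(Comparator.comparingInt(SectionName::ordinal)));
}

A caller would then sort with Collections.sort(sections, SectionOrder.BY_SECTION_NAME) instead of the anonymous class.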
Use of org.apache.hadoop.util.LimitInputStream in project hadoop by apache.
The class PBImageTextWriter, method loadDirectories:
/** Load the directories in the INode section. */
private void loadDirectories(FileInputStream fin,
    List<FileSummary.Section> sections, FileSummary summary,
    Configuration conf) throws IOException {
  LOG.info("Loading directories");
  long startTime = Time.monotonicNow();
  for (FileSummary.Section section : sections) {
    if (SectionName.fromString(section.getName()) == SectionName.INODE) {
      fin.getChannel().position(section.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(fin, section.getLength())));
      loadDirectoriesInINodeSection(is);
    }
  }
  long timeTaken = Time.monotonicNow() - startTime;
  LOG.info("Finished loading directories in {}ms", timeTaken);
}
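The same three steps recur in every snippet on this page: position the shared channel at the section offset, bound reads to the section length with LimitInputStream, and let FSImageUtil add decompression if the image declares a codec. A hypothetical helper capturing that pattern might look like the sketch below (wrapSection is not part of Hadoop; it only illustrates the recurring shape):

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
import org.apache.hadoop.util.LimitInputStream;

public class SectionStreams {

  /**
   * Hypothetical helper: seek the shared stream to the given section and
   * return an InputStream bounded to that section, decompressed if needed.
   */
  static InputStream wrapSection(FileInputStream fin, FileSummary summary,
      FileSummary.Section section, Configuration conf) throws IOException {
    fin.getChannel().position(section.getOffset());
    return FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(),
        new BufferedInputStream(
            new LimitInputStream(fin, section.getLength())));
  }
}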
Use of org.apache.hadoop.util.LimitInputStream in project hadoop by apache.
The class PBImageTextWriter, method visit:
public void visit(RandomAccessFile file) throws IOException {
  Configuration conf = new Configuration();
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }
  FileSummary summary = FSImageUtil.loadSummary(file);
  try (FileInputStream fin = new FileInputStream(file.getFD())) {
    InputStream is;
    ArrayList<FileSummary.Section> sections =
        Lists.newArrayList(summary.getSectionsList());
    Collections.sort(sections, new Comparator<FileSummary.Section>() {
      @Override
      public int compare(FsImageProto.FileSummary.Section s1,
          FsImageProto.FileSummary.Section s2) {
        FSImageFormatProtobuf.SectionName n1 =
            FSImageFormatProtobuf.SectionName.fromString(s1.getName());
        FSImageFormatProtobuf.SectionName n2 =
            FSImageFormatProtobuf.SectionName.fromString(s2.getName());
        if (n1 == null) {
          return n2 == null ? 0 : -1;
        } else if (n2 == null) {
          return -1;
        } else {
          return n1.ordinal() - n2.ordinal();
        }
      }
    });
    ImmutableList<Long> refIdList = null;
    for (FileSummary.Section section : sections) {
      fin.getChannel().position(section.getOffset());
      is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(fin, section.getLength())));
      switch (SectionName.fromString(section.getName())) {
        case STRING_TABLE:
          LOG.info("Loading string table");
          stringTable = FSImageLoader.loadStringTable(is);
          break;
        case INODE_REFERENCE:
          // Load INodeReference so that all INodes can be processed.
          // Snapshots are not handled and will just be ignored for now.
          LOG.info("Loading inode references");
          refIdList = FSImageLoader.loadINodeReferenceSection(is);
          break;
        default:
          break;
      }
    }
    loadDirectories(fin, sections, summary, conf);
    loadINodeDirSection(fin, sections, summary, conf, refIdList);
    metadataMap.sync();
    output(conf, summary, fin, sections);
  }
}
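Taken together, the PBImageTextWriter passes show why the bounding matters: the same FileInputStream is walked several times over the sorted section list (string table and INode references here, then directories via loadDirectories, then the INode directory section, and finally output). Because every pass re-positions the channel and re-wraps it with a fresh LimitInputStream, a parser for one section can never read into the bytes of the next one.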