Use of java.io.FileInputStream in Apache Hadoop.
From class PBImageTextWriter, method loadINodeDirSection:
private void loadINodeDirSection(FileInputStream fin,
    List<FileSummary.Section> sections, FileSummary summary,
    Configuration conf, List<Long> refIdList) throws IOException {
  LOG.info("Loading INode directory section.");
  long startTime = Time.monotonicNow();
  for (FileSummary.Section section : sections) {
    if (SectionName.fromString(section.getName()) == SectionName.INODE_DIR) {
      // Seek the shared stream to the section start, then bound reads to
      // the section length and decompress if the image declares a codec.
      fin.getChannel().position(section.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(fin, section.getLength())));
      buildNamespace(is, refIdList);
    }
  }
  long timeTaken = Time.monotonicNow() - startTime;
  LOG.info("Finished loading INode directory section in {}ms", timeTaken);
}
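The wrapping order here is the key detail: the LimitInputStream sits inside the BufferedInputStream, so readers see EOF exactly at the section boundary even though the underlying stream spans the whole file. A minimal, self-contained sketch of that bounding idea, using only JDK classes (BoundedStream is a hypothetical stand-in for Hadoop's LimitInputStream, not a Hadoop class):

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

// Hypothetical stand-in for Hadoop's LimitInputStream: returns EOF after
// 'limit' bytes so readers cannot run past a section boundary.
class BoundedStream extends FilterInputStream {
  private long remaining;

  BoundedStream(InputStream in, long limit) {
    super(in);
    this.remaining = limit;
  }

  @Override
  public int read() throws IOException {
    if (remaining <= 0) {
      return -1; // section exhausted, even if the file has more bytes
    }
    int b = in.read();
    if (b >= 0) {
      remaining--;
    }
    return b;
  }

  @Override
  public int read(byte[] buf, int off, int len) throws IOException {
    if (remaining <= 0) {
      return -1;
    }
    int n = in.read(buf, off, (int) Math.min(len, remaining));
    if (n > 0) {
      remaining -= n;
    }
    return n;
  }
}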
Use of java.io.FileInputStream in Apache Hadoop.
From class PBImageTextWriter, method output:
private void output(Configuration conf, FileSummary summary,
    FileInputStream fin, ArrayList<FileSummary.Section> sections)
    throws IOException {
  long startTime = Time.monotonicNow();
  out.println(getHeader());
  for (FileSummary.Section section : sections) {
    if (SectionName.fromString(section.getName()) == SectionName.INODE) {
      // Same seek-and-bound pattern as loadINodeDirSection, this time
      // over the INODE section.
      fin.getChannel().position(section.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(fin, section.getLength())));
      outputINodes(is);
    }
  }
  long timeTaken = Time.monotonicNow() - startTime;
  LOG.debug("Time to output inodes: {}ms", timeTaken);
}
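Both methods reuse a single FileInputStream and seek it through its channel; this works because a FileInputStream and the FileChannel obtained from it share one file position, so repositioning the channel also moves the stream's next read. A small illustration of that coupling (class name and path are placeholders):

import java.io.FileInputStream;
import java.io.IOException;

public class ChannelSeekDemo {
  public static void main(String[] args) throws IOException {
    try (FileInputStream fin = new FileInputStream("some-image-file")) {
      fin.getChannel().position(128); // seek via the channel...
      int b =;          // ...reads the byte at offset 128, not offset 0
      System.out.println(b);
    }
  }
}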
Use of java.io.FileInputStream in Apache Hadoop.
From class OfflineImageReconstructor, method run:
/**
* Run the OfflineImageReconstructor.
*
* @param inputPath The input path to use.
* @param outputPath The output path to use.
*
* @throws Exception On error.
*/
public static void run(String inputPath, String outputPath) throws Exception {
  MessageDigest digester = MD5Hash.getDigester();
  FileOutputStream fout = null;
  File foutHash = new File(outputPath + ".md5");
  // Delete any stale .md5 sidecar before writing a new image.
  Files.deleteIfExists(foutHash.toPath());
  CountingOutputStream out = null;
  FileInputStream fis = null;
  InputStreamReader reader = null;
  try {
    Files.deleteIfExists(Paths.get(outputPath));
    fout = new FileOutputStream(outputPath);
    fis = new FileInputStream(inputPath);
    reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
    // Write path: count -> hash -> buffer -> file, so the MD5 covers
    // every byte of the reconstructed image.
    out = new CountingOutputStream(
        new DigestOutputStream(new BufferedOutputStream(fout), digester));
    OfflineImageReconstructor oir = new OfflineImageReconstructor(out, reader);
    oir.processXml();
  } finally {
    IOUtils.cleanup(LOG, reader, fis, out, fout);
  }
  // Write the md5 sidecar for the reconstructed image.
  MD5FileUtils.saveMD5File(new File(outputPath),
      new MD5Hash(digester.digest()));
}
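The stream layering in run() is worth imitating whenever an output file needs a checksum sidecar: hash the bytes as they are written instead of re-reading the file afterwards. A self-contained sketch of the same idiom with only JDK classes (the class name, file name, and payload are placeholders):

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DigestWhileWriting {
  public static void main(String[] args)
      throws IOException, NoSuchAlgorithmException {
    MessageDigest md5 = MessageDigest.getInstance("MD5");
    // Digest above the buffer: every byte is hashed before buffering,
    // so after close() the digest matches the file on disk exactly.
    try (OutputStream out = new DigestOutputStream(
        new BufferedOutputStream(new FileOutputStream("image.out")), md5)) {
      out.write("example payload".getBytes(StandardCharsets.UTF_8));
    }
    StringBuilder hex = new StringBuilder();
    for (byte b : md5.digest()) {
      hex.append(String.format("%02x", b));
    }
    System.out.println("md5 = " + hex);
  }
}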
Use of java.io.FileInputStream in Apache Hadoop.
From class OfflineImageViewer, method go:
/**
* Process image file.
*/
public void go() throws IOException {
  DataInputStream in = null;
  PositionTrackingInputStream tracker = null;
  ImageLoader fsip = null;
  boolean done = false;
  try {
    // Track the byte offset so failures can report where loading stopped.
    tracker = new PositionTrackingInputStream(
        new BufferedInputStream(new FileInputStream(new File(inputFile))));
    in = new DataInputStream(tracker);
    int imageVersionFile = findImageVersion(in);
    fsip = ImageLoader.LoaderFactory.getLoader(imageVersionFile);
    if (fsip == null) {
      throw new IOException("No image processor to read version "
          + imageVersionFile + " is available.");
    }
    fsip.loadImage(in, processor, skipBlocks);
    done = true;
  } finally {
    if (!done) {
      if (tracker != null) {
        LOG.error("image loading failed at offset " + tracker.getPos());
      } else {
        LOG.error("Failed to load image file.");
      }
    }
    IOUtils.cleanup(LOG, in, tracker);
  }
}
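The PositionTrackingInputStream exists purely for diagnostics: it counts bytes as they pass so the finally block can report the exact offset where loading stopped. A minimal sketch of that idea (TrackingStream is illustrative, not the real Hadoop class, and omits skip()/mark() handling for brevity):

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

// Counts bytes as they are read so callers can report a file offset
// in error messages.
class TrackingStream extends FilterInputStream {
  private long pos;

  TrackingStream(InputStream in) {
    super(in);
  }

  @Override
  public int read() throws IOException {
    int b = in.read();
    if (b >= 0) {
      pos++;
    }
    return b;
  }

  @Override
  public int read(byte[] buf, int off, int len) throws IOException {
    int n = in.read(buf, off, len);
    if (n > 0) {
      pos += n;
    }
    return n;
  }

  long getPos() {
    return pos;
  }
}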
Use of java.io.FileInputStream in Apache Hadoop.
From class FSImageLoader, method load:
/**
 * Load an fsimage into memory.
 * @param inputFile the path of the fsimage to load.
 * @return the populated FSImageLoader.
 * @throws IOException if the fsimage cannot be loaded.
 */
static FSImageLoader load(String inputFile) throws IOException {
  Configuration conf = new Configuration();
  RandomAccessFile file = new RandomAccessFile(inputFile, "r");
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }
  FsImageProto.FileSummary summary = FSImageUtil.loadSummary(file);
  try (FileInputStream fin = new FileInputStream(file.getFD())) {
    // List recording, for each INodeReference, the id of the referred inode.
    ImmutableList<Long> refIdList = null;
    String[] stringTable = null;
    byte[][] inodes = null;
    Map<Long, long[]> dirmap = null;
    ArrayList<FsImageProto.FileSummary.Section> sections =
        Lists.newArrayList(summary.getSectionsList());
    // Sort sections by SectionName ordinal so dependencies load first
    // (refIdList must be filled from INODE_REFERENCE before INODE_DIR).
    Collections.sort(sections,
        new Comparator<FsImageProto.FileSummary.Section>() {
          @Override
          public int compare(FsImageProto.FileSummary.Section s1,
              FsImageProto.FileSummary.Section s2) {
            FSImageFormatProtobuf.SectionName n1 =
                FSImageFormatProtobuf.SectionName.fromString(s1.getName());
            FSImageFormatProtobuf.SectionName n2 =
                FSImageFormatProtobuf.SectionName.fromString(s2.getName());
            if (n1 == null) {
              return n2 == null ? 0 : -1;
            } else if (n2 == null) {
              // Return 1 (not -1) so the comparator stays antisymmetric:
              // unknown section names consistently sort first.
              return 1;
            } else {
              return n1.ordinal() - n2.ordinal();
            }
          }
        });
    for (FsImageProto.FileSummary.Section s : sections) {
      fin.getChannel().position(s.getOffset());
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(
              new LimitInputStream(fin, s.getLength())));
      if (LOG.isDebugEnabled()) {
        LOG.debug("Loading section " + s.getName() + " length: "
            + s.getLength());
      }
      switch (FSImageFormatProtobuf.SectionName.fromString(s.getName())) {
        case STRING_TABLE:
          stringTable = loadStringTable(is);
          break;
        case INODE:
          inodes = loadINodeSection(is);
          break;
        case INODE_REFERENCE:
          refIdList = loadINodeReferenceSection(is);
          break;
        case INODE_DIR:
          dirmap = loadINodeDirectorySection(is, refIdList);
          break;
        default:
          break;
      }
    }
    return new FSImageLoader(stringTable, inodes, dirmap);
  }
}
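The comparator's only job is to order sections by SectionName ordinal so that each loader's inputs are ready before it runs. On Java 8+ the same null-first, ordinal-order policy can be written as a keyed comparator, which is antisymmetric by construction (a sketch, not the Hadoop source):

// Unknown section names map to -1 and therefore sort first;
// known names sort by enum ordinal.
sections.sort(Comparator.comparingInt(s -> {
  FSImageFormatProtobuf.SectionName n =
      FSImageFormatProtobuf.SectionName.fromString(s.getName());
  return n == null ? -1 : n.ordinal();
}));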