Example 26 with RandomAccessFile

Use of java.io.RandomAccessFile in project hadoop by apache.

The class Storage, method writeProperties.

public static void writeProperties(File to, Properties props) throws IOException {
    try (RandomAccessFile file = new RandomAccessFile(to, "rws");
        FileOutputStream out = new FileOutputStream(file.getFD())) {
        file.seek(0);
        /*
         * If the server is interrupted before this line,
         * the version file will remain unchanged.
         */
        props.store(out, null);
        /*
         * Now the new fields are flushed to the head of the file, but the file
         * length can still be larger than required, so the end of the file can
         * contain whole or corrupted fields from its old contents. If the server
         * is interrupted here and restarted later, these extra fields either
         * should not affect server behavior or should be handled correctly by
         * the server.
         */
        file.setLength(out.getChannel().position());
    }
}
Also used: RandomAccessFile (java.io.RandomAccessFile), FileOutputStream (java.io.FileOutputStream)
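
The method above rewrites a small metadata file in place and then truncates it, so a crash leaves either the old or the new contents rather than a mix. Below is a minimal standalone sketch of the same write-then-truncate pattern; the property keys and the target path are hypothetical, not taken from the Hadoop source.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Properties;

public class VersionFileDemo {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        props.setProperty("layoutVersion", "-63");      // hypothetical value
        props.setProperty("storageType", "NAME_NODE");  // hypothetical value
        File versionFile = new File("/tmp/VERSION");    // hypothetical path

        // "rws" writes content and metadata synchronously, so an interrupted
        // update is not left sitting only in an OS cache.
        try (RandomAccessFile file = new RandomAccessFile(versionFile, "rws");
             FileOutputStream out = new FileOutputStream(file.getFD())) {
            file.seek(0);
            props.store(out, null);
            // Trim any leftover bytes from a previous, longer version of the file.
            file.setLength(out.getChannel().position());
        }
    }
}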

Example 27 with RandomAccessFile

Use of java.io.RandomAccessFile in project hadoop by apache.

The class OfflineImageViewerPB, method run.

public static int run(String[] args) throws Exception {
    Options options = buildOptions();
    if (args.length == 0) {
        printUsage();
        return 0;
    }
    // print help and exit with zero exit code
    if (args.length == 1 && isHelpOption(args[0])) {
        printUsage();
        return 0;
    }
    CommandLineParser parser = new PosixParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println("Error parsing command-line options: ");
        printUsage();
        return -1;
    }
    if (cmd.hasOption("h")) {
        // print help and exit with a non-zero exit code, since help is not
        // expected to be combined with other options.
        printUsage();
        return -1;
    }
    String inputFile = cmd.getOptionValue("i");
    String processor = cmd.getOptionValue("p", "Web");
    String outputFile = cmd.getOptionValue("o", "-");
    String delimiter = cmd.getOptionValue("delimiter", PBImageDelimitedTextWriter.DEFAULT_DELIMITER);
    String tempPath = cmd.getOptionValue("t", "");
    Configuration conf = new Configuration();
    try (PrintStream out = outputFile.equals("-") ? System.out : new PrintStream(outputFile, "UTF-8")) {
        switch(processor) {
            case "FileDistribution":
                long maxSize = Long.parseLong(cmd.getOptionValue("maxSize", "0"));
                int step = Integer.parseInt(cmd.getOptionValue("step", "0"));
                boolean formatOutput = cmd.hasOption("format");
                new FileDistributionCalculator(conf, maxSize, step, formatOutput, out).visit(new RandomAccessFile(inputFile, "r"));
                break;
            case "XML":
                new PBImageXmlWriter(conf, out).visit(new RandomAccessFile(inputFile, "r"));
                break;
            case "ReverseXML":
                try {
                    OfflineImageReconstructor.run(inputFile, outputFile);
                } catch (Exception e) {
                    System.err.println("OfflineImageReconstructor failed: " + e.getMessage());
                    e.printStackTrace(System.err);
                    System.exit(1);
                }
                break;
            case "Web":
                String addr = cmd.getOptionValue("addr", "localhost:5978");
                try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr(addr))) {
                    viewer.start(inputFile);
                }
                break;
            case "Delimited":
                try (PBImageDelimitedTextWriter writer = new PBImageDelimitedTextWriter(out, delimiter, tempPath)) {
                    writer.visit(new RandomAccessFile(inputFile, "r"));
                }
                break;
            default:
                System.err.println("Invalid processor specified : " + processor);
                printUsage();
                return -1;
        }
        return 0;
    } catch (EOFException e) {
        System.err.println("Input file ended unexpectedly. Exiting");
    } catch (IOException e) {
        System.err.println("Encountered exception.  Exiting: " + e.getMessage());
        e.printStackTrace(System.err);
    }
    return -1;
}
Also used: Options (org.apache.commons.cli.Options), PrintStream (java.io.PrintStream), Configuration (org.apache.hadoop.conf.Configuration), PosixParser (org.apache.commons.cli.PosixParser), IOException (java.io.IOException), EOFException (java.io.EOFException), ParseException (org.apache.commons.cli.ParseException), CommandLine (org.apache.commons.cli.CommandLine), RandomAccessFile (java.io.RandomAccessFile), CommandLineParser (org.apache.commons.cli.CommandLineParser)
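
For orientation, here is a minimal sketch of driving the viewer programmatically through the -i, -p and -o options that the method above reads via cmd.getOptionValue(...). The fsimage and output paths are hypothetical, and the sketch assumes OfflineImageViewerPB is available on the classpath.

import org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewerPB;

public class OivXmlDemo {
    public static void main(String[] args) throws Exception {
        String[] oivArgs = {
            "-i", "/tmp/fsimage_0000000000000000042",  // hypothetical input image
            "-p", "XML",                               // one of the processors handled in the switch above
            "-o", "/tmp/fsimage.xml"                   // "-" would write to stdout instead
        };
        int exitCode = OfflineImageViewerPB.run(oivArgs);
        System.exit(exitCode);
    }
}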

Example 28 with RandomAccessFile

Use of java.io.RandomAccessFile in project hadoop by apache.

The class FSImageLoader, method load.

/**
   * Load an fsimage into memory.
   * @param inputFile the path of the fsimage file to load.
   * @return the FSImageLoader holding the loaded image.
   * @throws IOException if the fsimage cannot be loaded.
   */
static FSImageLoader load(String inputFile) throws IOException {
    Configuration conf = new Configuration();
    RandomAccessFile file = new RandomAccessFile(inputFile, "r");
    if (!FSImageUtil.checkFileFormat(file)) {
        throw new IOException("Unrecognized FSImage");
    }
    FsImageProto.FileSummary summary = FSImageUtil.loadSummary(file);
    try (FileInputStream fin = new FileInputStream(file.getFD())) {
        // List used to record the referred inode id for each INodeReference
        ImmutableList<Long> refIdList = null;
        String[] stringTable = null;
        byte[][] inodes = null;
        Map<Long, long[]> dirmap = null;
        ArrayList<FsImageProto.FileSummary.Section> sections = Lists.newArrayList(summary.getSectionsList());
        Collections.sort(sections, new Comparator<FsImageProto.FileSummary.Section>() {

            @Override
            public int compare(FsImageProto.FileSummary.Section s1, FsImageProto.FileSummary.Section s2) {
                FSImageFormatProtobuf.SectionName n1 = FSImageFormatProtobuf.SectionName.fromString(s1.getName());
                FSImageFormatProtobuf.SectionName n2 = FSImageFormatProtobuf.SectionName.fromString(s2.getName());
                if (n1 == null) {
                    return n2 == null ? 0 : -1;
                } else if (n2 == null) {
                    return -1;
                } else {
                    return n1.ordinal() - n2.ordinal();
                }
            }
        });
        for (FsImageProto.FileSummary.Section s : sections) {
            fin.getChannel().position(s.getOffset());
            InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(), new BufferedInputStream(new LimitInputStream(fin, s.getLength())));
            if (LOG.isDebugEnabled()) {
                LOG.debug("Loading section " + s.getName() + " length: " + s.getLength());
            }
            switch(FSImageFormatProtobuf.SectionName.fromString(s.getName())) {
                case STRING_TABLE:
                    stringTable = loadStringTable(is);
                    break;
                case INODE:
                    inodes = loadINodeSection(is);
                    break;
                case INODE_REFERENCE:
                    refIdList = loadINodeReferenceSection(is);
                    break;
                case INODE_DIR:
                    dirmap = loadINodeDirectorySection(is, refIdList);
                    break;
                default:
                    break;
            }
        }
        return new FSImageLoader(stringTable, inodes, dirmap);
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), BufferedInputStream (java.io.BufferedInputStream), LimitInputStream (org.apache.hadoop.util.LimitInputStream), FileInputStream (java.io.FileInputStream), CodedInputStream (com.google.protobuf.CodedInputStream), InputStream (java.io.InputStream), IOException (java.io.IOException), RandomAccessFile (java.io.RandomAccessFile), FsImageProto (org.apache.hadoop.hdfs.server.namenode.FsImageProto)
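
A minimal sketch of calling the single entry point shown above. Since load(...) is declared with default visibility, the caller is assumed to sit in the same package as FSImageLoader; the image path is hypothetical.

import java.io.IOException;

public class FsImageLoadDemo {
    public static void main(String[] args) throws IOException {
        String imagePath = "/tmp/fsimage_0000000000000000042";  // hypothetical
        FSImageLoader loader = FSImageLoader.load(imagePath);
        // At this point the string table, inode blobs and directory map are in
        // memory, ready to serve queries (e.g. from WebImageViewer).
        System.out.println("Loaded fsimage " + imagePath + " into " + loader);
    }
}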

Example 29 with RandomAccessFile

Use of java.io.RandomAccessFile in project hadoop by apache.

The class FSImageTestUtil, method getImageFileMD5IgnoringTxId.

/**
   * Calculate the md5sum of an image after zeroing out the transaction ID
   * field in the header. This is useful for tests that want to verify
   * that two checkpoints have identical namespaces.
   */
public static String getImageFileMD5IgnoringTxId(File imageFile) throws IOException {
    File tmpFile = File.createTempFile("hadoop_imagefile_tmp", "fsimage");
    tmpFile.deleteOnExit();
    try {
        Files.copy(imageFile, tmpFile);
        RandomAccessFile raf = new RandomAccessFile(tmpFile, "rw");
        try {
            raf.seek(IMAGE_TXID_POS);
            raf.writeLong(0);
        } finally {
            IOUtils.closeStream(raf);
        }
        return getFileMD5(tmpFile);
    } finally {
        tmpFile.delete();
    }
}
Also used: RandomAccessFile (java.io.RandomAccessFile), EditLogFile (org.apache.hadoop.hdfs.server.namenode.FileJournalManager.EditLogFile), FSImageFile (org.apache.hadoop.hdfs.server.namenode.FSImageStorageInspector.FSImageFile), File (java.io.File)
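
The javadoc above says this helper exists so tests can check that two checkpoints describe the same namespace. A minimal sketch of that comparison follows, assuming FSImageTestUtil is on the test classpath; the two image paths are hypothetical.

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;

public class CompareCheckpoints {
    public static void main(String[] args) throws IOException {
        File image1 = new File("/tmp/name1/current/fsimage_42");  // hypothetical checkpoint
        File image2 = new File("/tmp/name2/current/fsimage_42");  // hypothetical checkpoint
        String md5a = FSImageTestUtil.getImageFileMD5IgnoringTxId(image1);
        String md5b = FSImageTestUtil.getImageFileMD5IgnoringTxId(image2);
        if (!md5a.equals(md5b)) {
            throw new AssertionError("Checkpoints differ: " + md5a + " vs " + md5b);
        }
        System.out.println("Namespaces match: " + md5a);
    }
}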

Example 30 with RandomAccessFile

Use of java.io.RandomAccessFile in project hadoop by apache.

The class TestFadvisedFileRegion, method testCustomShuffleTransfer.

@Test(timeout = 100000)
public void testCustomShuffleTransfer() throws IOException {
    File absLogDir = new File("target", TestFadvisedFileRegion.class.getSimpleName() + "LocDir").getAbsoluteFile();
    String testDirPath = StringUtils.join(Path.SEPARATOR, new String[] { absLogDir.getAbsolutePath(), "testCustomShuffleTransfer" });
    File testDir = new File(testDirPath);
    testDir.mkdirs();
    System.out.println(testDir.getAbsolutePath());
    File inFile = new File(testDir, "fileIn.out");
    File outFile = new File(testDir, "fileOut.out");
    //Initialize input file
    byte[] initBuff = new byte[FILE_SIZE];
    Random rand = new Random();
    rand.nextBytes(initBuff);
    FileOutputStream out = new FileOutputStream(inFile);
    try {
        out.write(initBuff);
    } finally {
        IOUtils.cleanup(LOG, out);
    }
    //define position and count to read from a file region.
    int position = 2 * 1024 * 1024;
    int count = 4 * 1024 * 1024 - 1;
    RandomAccessFile inputFile = null;
    RandomAccessFile targetFile = null;
    WritableByteChannel target = null;
    FadvisedFileRegion fileRegion = null;
    try {
        inputFile = new RandomAccessFile(inFile.getAbsolutePath(), "r");
        targetFile = new RandomAccessFile(outFile.getAbsolutePath(), "rw");
        target = targetFile.getChannel();
        Assert.assertEquals(FILE_SIZE, inputFile.length());
        //create FadvisedFileRegion
        fileRegion = new FadvisedFileRegion(inputFile, position, count, false, 0, null, null, 1024, false);
        //test corner cases
        customShuffleTransferCornerCases(fileRegion, target, count);
        long pos = 0;
        long size;
        while ((size = fileRegion.customShuffleTransfer(target, pos)) > 0) {
            pos += size;
        }
        //assert size
        Assert.assertEquals(count, (int) pos);
        Assert.assertEquals(count, targetFile.length());
    } finally {
        if (fileRegion != null) {
            fileRegion.releaseExternalResources();
        }
        IOUtils.cleanup(LOG, target);
        IOUtils.cleanup(LOG, targetFile);
        IOUtils.cleanup(LOG, inputFile);
    }
    //Read the target file and verify that copy is done correctly
    byte[] buff = new byte[FILE_SIZE];
    FileInputStream in = new FileInputStream(outFile);
    try {
        int total = in.read(buff, 0, count);
        Assert.assertEquals(count, total);
        for (int i = 0; i < count; i++) {
            Assert.assertEquals(initBuff[position + i], buff[i]);
        }
    } finally {
        IOUtils.cleanup(LOG, in);
    }
    //delete files and folders
    inFile.delete();
    outFile.delete();
    testDir.delete();
    absLogDir.delete();
}
Also used: Random (java.util.Random), RandomAccessFile (java.io.RandomAccessFile), FileOutputStream (java.io.FileOutputStream), WritableByteChannel (java.nio.channels.WritableByteChannel), File (java.io.File), FileInputStream (java.io.FileInputStream), Test (org.junit.Test)
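
The test above goes through the Hadoop-internal FadvisedFileRegion. As a rough standalone equivalent, here is a sketch of the same region copy done with plain java.nio FileChannel.transferTo; the paths, position and count are hypothetical, and this is not the method the test itself exercises.

import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;

public class RegionCopyDemo {
    public static void main(String[] args) throws IOException {
        long position = 2L * 1024 * 1024;   // hypothetical start offset in the source file
        long count = 4L * 1024 * 1024 - 1;  // hypothetical number of bytes to copy
        try (RandomAccessFile in = new RandomAccessFile("/tmp/fileIn.out", "r");
             RandomAccessFile out = new RandomAccessFile("/tmp/fileOut.out", "rw");
             FileChannel src = in.getChannel();
             FileChannel dst = out.getChannel()) {
            long transferred = 0;
            // transferTo may copy fewer bytes than asked for, so loop until done.
            while (transferred < count) {
                long n = src.transferTo(position + transferred, count - transferred, dst);
                if (n <= 0) {
                    break;  // end of source reached before count bytes were copied
                }
                transferred += n;
            }
            System.out.println("Copied " + transferred + " bytes");
        }
    }
}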

Aggregations

RandomAccessFile (java.io.RandomAccessFile): 866
IOException (java.io.IOException): 425
File (java.io.File): 349
FileChannel (java.nio.channels.FileChannel): 133
FileNotFoundException (java.io.FileNotFoundException): 84
ByteBuffer (java.nio.ByteBuffer): 78
Test (org.junit.Test): 78
FileLock (java.nio.channels.FileLock): 64
EOFException (java.io.EOFException): 50
FileOutputStream (java.io.FileOutputStream): 47
FileInputStream (java.io.FileInputStream): 40
InputStream (java.io.InputStream): 36
MappedByteBuffer (java.nio.MappedByteBuffer): 33
Random (java.util.Random): 26
ByteArrayInputStream (java.io.ByteArrayInputStream): 24
BufferedInputStream (java.io.BufferedInputStream): 21
DataInputStream (java.io.DataInputStream): 19
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 17
Configuration (org.apache.hadoop.conf.Configuration): 16
AtomicFile (android.util.AtomicFile): 12