
Example 21 with BufferedWriter

Use of java.io.BufferedWriter in project hadoop by apache.

The class TestAvroFSInput, method testAFSInput.

public void testAFSInput() throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path dir = getInputPath();
    if (!fs.exists(dir)) {
        fs.mkdirs(dir);
    }
    Path filePath = new Path(dir, "foo");
    if (fs.exists(filePath)) {
        fs.delete(filePath, false);
    }
    FSDataOutputStream ostream = fs.create(filePath);
    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(ostream));
    w.write("0123456789");
    w.close();
    // Create the stream
    FileContext fc = FileContext.getFileContext(conf);
    AvroFSInput avroFSIn = new AvroFSInput(fc, filePath);
    assertEquals(10, avroFSIn.length());
    // Check initial position
    byte[] buf = new byte[1];
    assertEquals(0, avroFSIn.tell());
    // Check a read from that position.
    avroFSIn.read(buf, 0, 1);
    assertEquals(1, avroFSIn.tell());
    assertEquals('0', (char) buf[0]);
    // Check a seek + read
    avroFSIn.seek(4);
    assertEquals(4, avroFSIn.tell());
    avroFSIn.read(buf, 0, 1);
    assertEquals('4', (char) buf[0]);
    assertEquals(5, avroFSIn.tell());
    avroFSIn.close();
}
Also used: Configuration(org.apache.hadoop.conf.Configuration) OutputStreamWriter(java.io.OutputStreamWriter) BufferedWriter(java.io.BufferedWriter)
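
For comparison, the write-then-close sequence above can be expressed with try-with-resources and an explicit charset, which guarantees the writer is closed even if write() throws. This is a minimal standalone sketch against the local file system (the file name is hypothetical), not part of the Hadoop test:

import java.io.BufferedWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class BufferedWriterSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical local path; the test above writes through an FSDataOutputStream instead.
        try (BufferedWriter w = Files.newBufferedWriter(Paths.get("foo"), StandardCharsets.UTF_8)) {
            // Ten ASCII characters, one byte each in UTF-8, matching the length assertion in the test.
            w.write("0123456789");
        }
    }
}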

Example 22 with BufferedWriter

Use of java.io.BufferedWriter in project hadoop by apache.

The class TestCodec, method testGzipCodecWrite.

private void testGzipCodecWrite(boolean useNative) throws IOException {
    // Create a gzipped file using a compressor from the CodecPool,
    // and try to read it back via the regular GZIPInputStream.
    // Use native libs per the parameter
    Configuration conf = new Configuration();
    if (useNative) {
        assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
    } else {
        assertFalse("ZlibFactory is using native libs against request", ZlibFactory.isNativeZlibLoaded(conf));
    }
    // Ensure that the CodecPool has a BuiltInZlibDeflater in it.
    Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
    assertNotNull("zlibCompressor is null!", zlibCompressor);
    assertTrue("ZlibFactory returned unexpected deflator", useNative ? zlibCompressor instanceof ZlibCompressor : zlibCompressor instanceof BuiltInZlibDeflater);
    CodecPool.returnCompressor(zlibCompressor);
    // Create a GZIP text file via the Compressor interface.
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
    assertTrue("Codec for .gz file is not GzipCodec", codec instanceof GzipCodec);
    final String msg = "This is the message we are going to compress.";
    final String fileName = new Path(GenericTestUtils.getTempPath("testGzipCodecWrite.txt.gz")).toString();
    BufferedWriter w = null;
    Compressor gzipCompressor = CodecPool.getCompressor(codec);
    if (null != gzipCompressor) {
        // If it gives us back a Compressor, we should be able to use this
        // to write files we can then read back with Java's gzip tools.
        OutputStream os = new CompressorStream(new FileOutputStream(fileName), gzipCompressor);
        w = new BufferedWriter(new OutputStreamWriter(os));
        w.write(msg);
        w.close();
        CodecPool.returnCompressor(gzipCompressor);
        verifyGzipFile(fileName, msg);
    }
    // Create a gzip text file via codec.getOutputStream().
    w = new BufferedWriter(new OutputStreamWriter(codec.createOutputStream(new FileOutputStream(fileName))));
    w.write(msg);
    w.close();
    verifyGzipFile(fileName, msg);
}
Also used: Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) ZlibCompressor(org.apache.hadoop.io.compress.zlib.ZlibCompressor) DataOutputStream(java.io.DataOutputStream) GZIPOutputStream(java.util.zip.GZIPOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) BufferedOutputStream(java.io.BufferedOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) BuiltInZlibDeflater(org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater) BufferedWriter(java.io.BufferedWriter) OutputStreamWriter(java.io.OutputStreamWriter)
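
The helper verifyGzipFile is referenced above but not reproduced on this page. A plausible minimal version, assuming the file holds a single line of text, reads the file back through the JDK's own GZIPInputStream, independent of the Hadoop codec classes (a sketch, not the actual Hadoop helper; it needs java.util.zip.GZIPInputStream, the java.io reader classes, and org.junit.Assert.assertEquals):

private static void verifyGzipFile(String fileName, String expected) throws IOException {
    // Decompress with the plain JDK gzip stream to confirm the Hadoop-written file is valid gzip.
    try (BufferedReader r = new BufferedReader(new InputStreamReader(
            new GZIPInputStream(new FileInputStream(fileName))))) {
        assertEquals("Compressed data did not round-trip", expected, r.readLine());
    }
}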

Example 23 with BufferedWriter

Use of java.io.BufferedWriter in project hadoop by apache.

The class TestCodec, method testGzipCodecRead.

@Test
public void testGzipCodecRead() throws IOException {
    // Create a gzipped file and try to read it back, using a decompressor
    // from the CodecPool.
    // Don't use native libs for this test.
    Configuration conf = new Configuration();
    ZlibFactory.setNativeZlibLoaded(false);
    // Ensure that the CodecPool has a BuiltInZlibInflater in it.
    Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
    assertTrue("ZlibFactory returned unexpected inflator", zlibDecompressor instanceof BuiltInZlibInflater);
    CodecPool.returnDecompressor(zlibDecompressor);
    // Now create a GZip text file.
    Path f = new Path(GenericTestUtils.getTempPath("testGzipCodecRead.txt.gz"));
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(f.toString()))));
    final String msg = "This is the message in the file!";
    bw.write(msg);
    bw.close();
    // Now read it back, using the CodecPool to establish the
    // decompressor to use.
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(f);
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream is = fs.open(f);
    is = codec.createInputStream(is, decompressor);
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    String line = br.readLine();
    assertEquals("Didn't get the same message back!", msg, line);
    br.close();
}
Also used: BuiltInZlibInflater(org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater) Path(org.apache.hadoop.fs.Path) BuiltInGzipDecompressor(org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor) Configuration(org.apache.hadoop.conf.Configuration) InputStreamReader(java.io.InputStreamReader) GZIPInputStream(java.util.zip.GZIPInputStream) BufferedInputStream(java.io.BufferedInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) DataInputStream(java.io.DataInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) BufferedWriter(java.io.BufferedWriter) GZIPOutputStream(java.util.zip.GZIPOutputStream) FileOutputStream(java.io.FileOutputStream) FileSystem(org.apache.hadoop.fs.FileSystem) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) Test(org.junit.Test)
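
One detail worth noting: the snippet borrows a Decompressor from the CodecPool but never returns it. In longer-lived code the usual discipline is to hand the instance back in a finally block so the pool can reuse it. A sketch of that pattern, assumed good practice rather than taken from the test:

private static String readFirstLine(FileSystem fs, Path f, CompressionCodec codec) throws IOException {
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    try (BufferedReader br = new BufferedReader(new InputStreamReader(
            codec.createInputStream(fs.open(f), decompressor)))) {
        return br.readLine();
    } finally {
        // Return the pooled instance so other callers can reuse it.
        CodecPool.returnDecompressor(decompressor);
    }
}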

Example 24 with BufferedWriter

Use of java.io.BufferedWriter in project hadoop by apache.

The class TestCodec, method testGzipLongOverflow.

@Test
public void testGzipLongOverflow() throws IOException {
    LOG.info("testGzipLongOverflow");
    // Don't use native libs for this test.
    Configuration conf = new Configuration();
    ZlibFactory.setNativeZlibLoaded(false);
    assertFalse("ZlibFactory is using native libs against request", ZlibFactory.isNativeZlibLoaded(conf));
    // Ensure that the CodecPool has a BuiltInZlibInflater in it.
    Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
    assertTrue("ZlibFactory returned unexpected inflator", zlibDecompressor instanceof BuiltInZlibInflater);
    CodecPool.returnDecompressor(zlibDecompressor);
    // Now create a GZip text file.
    Path f = new Path(GenericTestUtils.getTempPath("testGzipLongOverflow.bin.gz"));
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(f.toString()))));
    final int NBUF = 1024 * 4 + 1;
    final char[] buf = new char[1024 * 1024];
    for (int i = 0; i < buf.length; i++) buf[i] = '\0';
    for (int i = 0; i < NBUF; i++) {
        bw.write(buf);
    }
    bw.close();
    // Now read it back, using the CodecPool to establish the
    // decompressor to use.
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(f);
    Decompressor decompressor = CodecPool.getDecompressor(codec);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream is = fs.open(f);
    is = codec.createInputStream(is, decompressor);
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    for (int j = 0; j < NBUF; j++) {
        int n = br.read(buf);
        assertEquals("got wrong read length!", n, buf.length);
        for (int i = 0; i < buf.length; i++) assertEquals("got wrong byte!", buf[i], '\0');
    }
    br.close();
}
Also used: BuiltInZlibInflater(org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater) Path(org.apache.hadoop.fs.Path) BuiltInGzipDecompressor(org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor) Configuration(org.apache.hadoop.conf.Configuration) InputStreamReader(java.io.InputStreamReader) GZIPInputStream(java.util.zip.GZIPInputStream) BufferedInputStream(java.io.BufferedInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) DataInputStream(java.io.DataInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) BufferedWriter(java.io.BufferedWriter) GZIPOutputStream(java.util.zip.GZIPOutputStream) FileOutputStream(java.io.FileOutputStream) FileSystem(org.apache.hadoop.fs.FileSystem) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) Test(org.junit.Test)
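
The sizing is what makes this a "long overflow" test: 4097 buffers of 1,048,576 NUL characters each come to 4,296,015,872 bytes of uncompressed data (each '\0' encodes as a single byte in any common charset), just past the 32-bit boundary of 4,294,967,296. Any code path that tracks the uncompressed length in a 32-bit integer would therefore wrap; gzip's ISIZE trailer field, for instance, only stores the uncompressed size modulo 2^32.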

Example 25 with BufferedWriter

Use of java.io.BufferedWriter in project hadoop by apache.

The class TestProcfsBasedProcessTree, method writeStatFiles.

/**
   * Write stat files under the specified pid directories with the data set up
   * in the corresponding ProcessStatInfo objects.
   *
   * @param procfsRootDir
   *          root directory of the procfs file system
   * @param pids
   *          the PID directories under which to create the stat files
   * @param procs
   *          corresponding ProcessStatInfo objects whose data should be written
   *          to the stat files
   * @param smaps
   *          corresponding ProcessTreeSmapMemInfo objects whose data should be
   *          written to the smaps files, or null to skip writing smaps files
   * @throws IOException
   *           if the stat files could not be written
   */
public static void writeStatFiles(File procfsRootDir, String[] pids, ProcessStatInfo[] procs, ProcessTreeSmapMemInfo[] smaps) throws IOException {
    for (int i = 0; i < pids.length; i++) {
        File statFile = new File(new File(procfsRootDir, pids[i]), ProcfsBasedProcessTree.PROCFS_STAT_FILE);
        BufferedWriter bw = null;
        try {
            FileWriter fw = new FileWriter(statFile);
            bw = new BufferedWriter(fw);
            bw.write(procs[i].getStatLine());
            LOG.info("wrote stat file for " + pids[i] + " with contents: " + procs[i].getStatLine());
        } finally {
            // not handling exception - will throw an error and fail the test.
            if (bw != null) {
                bw.close();
            }
        }
        if (smaps != null) {
            File smapFile = new File(new File(procfsRootDir, pids[i]), ProcfsBasedProcessTree.SMAPS);
            bw = null;
            try {
                FileWriter fw = new FileWriter(smapFile);
                bw = new BufferedWriter(fw);
                bw.write(smaps[i].toString());
                bw.flush();
                LOG.info("wrote smap file for " + pids[i] + " with contents: " + smaps[i].toString());
            } finally {
                // not handling exception - will throw an error and fail the test.
                if (bw != null) {
                    bw.close();
                }
            }
        }
    }
}
Also used: FileWriter(java.io.FileWriter) File(java.io.File) BufferedWriter(java.io.BufferedWriter)
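
The null-check-and-close boilerplate above predates try-with-resources; the same close-on-exit guarantee can be written more compactly. A sketch of the stat-file branch only, with behavior otherwise unchanged:

try (BufferedWriter bw = new BufferedWriter(new FileWriter(statFile))) {
    bw.write(procs[i].getStatLine());
    LOG.info("wrote stat file for " + pids[i] + " with contents: " + procs[i].getStatLine());
}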

Aggregations

BufferedWriter (java.io.BufferedWriter): 1557
IOException (java.io.IOException): 726
FileWriter (java.io.FileWriter): 706
File (java.io.File): 629
OutputStreamWriter (java.io.OutputStreamWriter): 603
FileOutputStream (java.io.FileOutputStream): 291
BufferedReader (java.io.BufferedReader): 206
Writer (java.io.Writer): 138
InputStreamReader (java.io.InputStreamReader): 124
PrintWriter (java.io.PrintWriter): 117
ArrayList (java.util.ArrayList): 102
FileNotFoundException (java.io.FileNotFoundException): 101
Test (org.junit.Test): 99
Path (org.apache.hadoop.fs.Path): 80
FileReader (java.io.FileReader): 78
OutputStream (java.io.OutputStream): 65
StringWriter (java.io.StringWriter): 58
Path (java.nio.file.Path): 58
Date (java.util.Date): 57
FileInputStream (java.io.FileInputStream): 54