
Example 1 with BlockDecompressorStream

Use of org.apache.hadoop.io.compress.BlockDecompressorStream in the Apache Hadoop project.

From the class TestLz4CompressorDecompressor, method testCompressorDecompressorEmptyStreamLogic:

// test compress/decompress with empty stream
@Test
public void testCompressorDecompressorEmptyStreamLogic() {
    ByteArrayInputStream bytesIn = null;
    ByteArrayOutputStream bytesOut = null;
    byte[] buf = null;
    BlockDecompressorStream blockDecompressorStream = null;
    try {
        // compress empty stream
        bytesOut = new ByteArrayOutputStream();
        BlockCompressorStream blockCompressorStream = new BlockCompressorStream(bytesOut, new Lz4Compressor(), 1024, 0);
        // close without write
        blockCompressorStream.close();
        // check compressed output
        buf = bytesOut.toByteArray();
        assertEquals("empty stream compressed output size != 4", 4, buf.length);
        // use compressed output as input for decompression
        bytesIn = new ByteArrayInputStream(buf);
        // create decompression stream
        blockDecompressorStream = new BlockDecompressorStream(bytesIn, new Lz4Decompressor(), 1024);
        // no byte is available because nothing was written before the stream was closed
        assertEquals("return value is not -1", -1, blockDecompressorStream.read());
    } catch (Exception e) {
        fail("testCompressorDecompressorEmptyStreamLogic ex error !!!" + e.getMessage());
    } finally {
        if (blockDecompressorStream != null)
            try {
                bytesIn.close();
                bytesOut.close();
                blockDecompressorStream.close();
            } catch (IOException e) {
            }
    }
}
Also used: Lz4Compressor (org.apache.hadoop.io.compress.lz4.Lz4Compressor), Lz4Decompressor (org.apache.hadoop.io.compress.lz4.Lz4Decompressor), ByteArrayInputStream (java.io.ByteArrayInputStream), BlockCompressorStream (org.apache.hadoop.io.compress.BlockCompressorStream), BlockDecompressorStream (org.apache.hadoop.io.compress.BlockDecompressorStream), ByteArrayOutputStream (java.io.ByteArrayOutputStream), IOException (java.io.IOException), Test (org.junit.Test)
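
Why exactly 4 bytes for an empty stream: BlockCompressorStream prefixes every block with the uncompressed length written as a big-endian int, and closing without writing leaves only that header (value 0) in the output. BlockDecompressorStream reads the same header, sees a zero-length block, and reports end of stream, which is why read() returns -1 above. A minimal sketch (not part of the Hadoop test) that checks this interpretation, reusing buf from the test and adding java.io.DataInputStream:

// Decode the 4-byte header left behind by the empty-stream compression above.
// DataInputStream.readInt() is big-endian, matching the header layout.
try (DataInputStream header = new DataInputStream(new ByteArrayInputStream(buf))) {
    assertEquals("header should record an uncompressed length of 0", 0, header.readInt());
}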

Example 2 with BlockDecompressorStream

Use of org.apache.hadoop.io.compress.BlockDecompressorStream in the Apache Hadoop project.

From the class TestLz4CompressorDecompressor, method testCompressorDecopressorLogicWithCompressionStreams:

// test the compress/decompress round trip through the CompressionOutputStream/CompressionInputStream API
@Test
public void testCompressorDecopressorLogicWithCompressionStreams() {
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int BYTE_SIZE = 1024 * 100;
    byte[] bytes = generate(BYTE_SIZE);
    int bufferSize = 262144;
    int compressionOverhead = (bufferSize / 6) + 32;
    try {
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new BlockCompressorStream(compressedDataBuffer, new Lz4Compressor(bufferSize), bufferSize, compressionOverhead);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());
        CompressionInputStream inflateFilter = new BlockDecompressorStream(deCompressedDataBuffer, new Lz4Decompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[BYTE_SIZE];
        inflateIn.read(result);
        assertArrayEquals("original array not equals compress/decompressed array", result, bytes);
    } catch (IOException e) {
        fail("testLz4CompressorDecopressorLogicWithCompressionStreams ex error !!!");
    } finally {
        try {
            if (deflateOut != null)
                deflateOut.close();
            if (inflateIn != null)
                inflateIn.close();
        } catch (Exception e) {
        }
    }
}
Also used: CompressionOutputStream (org.apache.hadoop.io.compress.CompressionOutputStream), Lz4Compressor (org.apache.hadoop.io.compress.lz4.Lz4Compressor), CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream), DataOutputStream (java.io.DataOutputStream), BlockDecompressorStream (org.apache.hadoop.io.compress.BlockDecompressorStream), IOException (java.io.IOException), DataInputStream (java.io.DataInputStream), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), Lz4Decompressor (org.apache.hadoop.io.compress.lz4.Lz4Decompressor), BlockCompressorStream (org.apache.hadoop.io.compress.BlockCompressorStream), BufferedInputStream (java.io.BufferedInputStream), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), BufferedOutputStream (java.io.BufferedOutputStream), Test (org.junit.Test)
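
One caveat in the example above: the bare inflateIn.read(result) call is allowed to return before filling the whole buffer, so the assertion could in principle compare a partially filled array. A more defensive version of that step, sketched here with Hadoop's org.apache.hadoop.io.IOUtils (an extra import not used by the original test):

// Read exactly BYTE_SIZE bytes, looping internally until the buffer is full;
// throws an IOException if the stream ends early.
byte[] result = new byte[BYTE_SIZE];
IOUtils.readFully(inflateIn, result, 0, result.length);
assertArrayEquals("original array not equals compress/decompressed array", result, bytes);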

Example 3 with BlockDecompressorStream

Use of org.apache.hadoop.io.compress.BlockDecompressorStream in the Apache Hadoop project.

From the class TestSnappyCompressorDecompressor, method testSnappyCompressorDecopressorLogicWithCompressionStreams:

@Test
public void testSnappyCompressorDecopressorLogicWithCompressionStreams() {
    int BYTE_SIZE = 1024 * 100;
    byte[] bytes = BytesGenerator.get(BYTE_SIZE);
    int bufferSize = 262144;
    int compressionOverhead = (bufferSize / 6) + 32;
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    try {
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new BlockCompressorStream(compressedDataBuffer, new SnappyCompressor(bufferSize), bufferSize, compressionOverhead);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());
        CompressionInputStream inflateFilter = new BlockDecompressorStream(deCompressedDataBuffer, new SnappyDecompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[BYTE_SIZE];
        inflateIn.read(result);
        Assert.assertArrayEquals("original array not equals compress/decompressed array", result, bytes);
    } catch (IOException e) {
        fail("testSnappyCompressorDecopressorLogicWithCompressionStreams ex error !!!");
    } finally {
        try {
            if (deflateOut != null)
                deflateOut.close();
            if (inflateIn != null)
                inflateIn.close();
        } catch (Exception e) {
        }
    }
}
Also used: CompressionOutputStream (org.apache.hadoop.io.compress.CompressionOutputStream), CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream), DataOutputStream (java.io.DataOutputStream), BlockDecompressorStream (org.apache.hadoop.io.compress.BlockDecompressorStream), IOException (java.io.IOException), DataInputStream (java.io.DataInputStream), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), BlockCompressorStream (org.apache.hadoop.io.compress.BlockCompressorStream), BufferedInputStream (java.io.BufferedInputStream), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), BufferedOutputStream (java.io.BufferedOutputStream), Test (org.junit.Test)
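
The same round trip can also be driven through the CompressionCodec API instead of wiring up the block streams by hand; SnappyCodec creates a BlockCompressorStream/BlockDecompressorStream pair internally. A minimal sketch under the assumption that Snappy support is available in the build (the real tests guard this with an assumption), using org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.compress.SnappyCodec, java.io.ByteArrayInputStream/ByteArrayOutputStream, and org.apache.hadoop.io.IOUtils in addition to the imports above:

// Round trip the generated test data through the codec API.
SnappyCodec codec = new SnappyCodec();
codec.setConf(new Configuration());

ByteArrayOutputStream compressed = new ByteArrayOutputStream();
try (CompressionOutputStream out = codec.createOutputStream(compressed)) {
    out.write(bytes, 0, bytes.length);
    out.finish();
}

byte[] roundTripped = new byte[bytes.length];
try (CompressionInputStream in =
        codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()))) {
    IOUtils.readFully(in, roundTripped, 0, roundTripped.length);
}
Assert.assertArrayEquals(bytes, roundTripped);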

Example 4 with BlockDecompressorStream

Use of org.apache.hadoop.io.compress.BlockDecompressorStream in the Apache Hadoop project.

From the class TestSnappyCompressorDecompressor, method testCompressorDecompressorEmptyStreamLogic:

@Test
public void testCompressorDecompressorEmptyStreamLogic() {
    ByteArrayInputStream bytesIn = null;
    ByteArrayOutputStream bytesOut = null;
    byte[] buf = null;
    BlockDecompressorStream blockDecompressorStream = null;
    try {
        // compress empty stream
        bytesOut = new ByteArrayOutputStream();
        BlockCompressorStream blockCompressorStream = new BlockCompressorStream(bytesOut, new SnappyCompressor(), 1024, 0);
        // close without write
        blockCompressorStream.close();
        // check compressed output
        buf = bytesOut.toByteArray();
        assertEquals("empty stream compressed output size != 4", 4, buf.length);
        // use compressed output as input for decompression
        bytesIn = new ByteArrayInputStream(buf);
        // create decompression stream
        blockDecompressorStream = new BlockDecompressorStream(bytesIn, new SnappyDecompressor(), 1024);
        // no byte is available because nothing was written before the stream was closed
        assertEquals("return value is not -1", -1, blockDecompressorStream.read());
    } catch (Exception e) {
        fail("testCompressorDecompressorEmptyStreamLogic ex error !!!" + e.getMessage());
    } finally {
        if (blockDecompressorStream != null)
            try {
                bytesIn.close();
                bytesOut.close();
                blockDecompressorStream.close();
            } catch (IOException e) {
            }
    }
}
Also used: ByteArrayInputStream (java.io.ByteArrayInputStream), BlockCompressorStream (org.apache.hadoop.io.compress.BlockCompressorStream), BlockDecompressorStream (org.apache.hadoop.io.compress.BlockDecompressorStream), ByteArrayOutputStream (java.io.ByteArrayOutputStream), IOException (java.io.IOException), Test (org.junit.Test)
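
The finally block in this test (and in the LZ4 variant in Example 1) only runs its cleanup when blockDecompressorStream was actually created, and it has to close three resources by hand. The same check can be written with try-with-resources so cleanup is implicit; a sketch of that restructuring (behaviour unchanged, not the project's code, with the test method declaring throws IOException instead of catching):

ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
// compress an empty stream: open and close without writing
try (BlockCompressorStream compressor =
        new BlockCompressorStream(bytesOut, new SnappyCompressor(), 1024, 0)) {
    // intentionally no writes
}
byte[] buf = bytesOut.toByteArray();
assertEquals("empty stream compressed output size != 4", 4, buf.length);

// decompress: the zero-length block header should produce EOF immediately
try (BlockDecompressorStream decompressor = new BlockDecompressorStream(
        new ByteArrayInputStream(buf), new SnappyDecompressor(), 1024)) {
    assertEquals("return value is not -1", -1, decompressor.read());
}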

Aggregations

Types shared across the examples above, with the number of examples using each:

IOException (java.io.IOException): 4
BlockCompressorStream (org.apache.hadoop.io.compress.BlockCompressorStream): 4
BlockDecompressorStream (org.apache.hadoop.io.compress.BlockDecompressorStream): 4
Test (org.junit.Test): 4
BufferedInputStream (java.io.BufferedInputStream): 2
BufferedOutputStream (java.io.BufferedOutputStream): 2
ByteArrayInputStream (java.io.ByteArrayInputStream): 2
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 2
DataInputStream (java.io.DataInputStream): 2
DataOutputStream (java.io.DataOutputStream): 2
DataInputBuffer (org.apache.hadoop.io.DataInputBuffer): 2
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 2
CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream): 2
CompressionOutputStream (org.apache.hadoop.io.compress.CompressionOutputStream): 2
Lz4Compressor (org.apache.hadoop.io.compress.lz4.Lz4Compressor): 2
Lz4Decompressor (org.apache.hadoop.io.compress.lz4.Lz4Decompressor): 2