Search in sources:

Example 1 with Lz4Compressor

Use of org.apache.hadoop.io.compress.lz4.Lz4Compressor in the Apache Hadoop project.

From the class TestCompressorDecompressor, the method testCompressorDecompressorWithExeedBufferLimit:

@Test
public void testCompressorDecompressorWithExeedBufferLimit() {
    // 100 KiB of generated input data.
    final int dataSize = 100 * 1024;
    final byte[] rawData = generate(dataSize);
    try {
        // Snappy is given buffers 1.5x the data size; LZ4 exactly the data size.
        CompressDecompressTester.of(rawData)
            .withCompressDecompressPair(
                new SnappyCompressor(dataSize + dataSize / 2),
                new SnappyDecompressor(dataSize + dataSize / 2))
            .withCompressDecompressPair(
                new Lz4Compressor(dataSize),
                new Lz4Decompressor(dataSize))
            .withTestCases(ImmutableSet.of(
                CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
            .test();
    } catch (Exception ex) {
        GenericTestUtils.assertExceptionContains(
            "testCompressorDecompressorWithExeedBufferLimit error !!!", ex);
    }
}
Also used : SnappyDecompressor(org.apache.hadoop.io.compress.snappy.SnappyDecompressor) SnappyCompressor(org.apache.hadoop.io.compress.snappy.SnappyCompressor) Lz4Compressor(org.apache.hadoop.io.compress.lz4.Lz4Compressor) Lz4Decompressor(org.apache.hadoop.io.compress.lz4.Lz4Decompressor) Test(org.junit.Test)

Example 2 with Lz4Compressor

Use of org.apache.hadoop.io.compress.lz4.Lz4Compressor in the Apache Hadoop project.

From the class TestLz4CompressorDecompressor, the method testCompressorDecompressorEmptyStreamLogic:

// test compress/decompress with empty stream
@Test
public void testCompressorDecompressorEmptyStreamLogic() {
    ByteArrayInputStream bytesIn = null;
    ByteArrayOutputStream bytesOut = null;
    byte[] buf = null;
    BlockDecompressorStream blockDecompressorStream = null;
    try {
        // compress empty stream
        bytesOut = new ByteArrayOutputStream();
        BlockCompressorStream blockCompressorStream = new BlockCompressorStream(bytesOut, new Lz4Compressor(), 1024, 0);
        // close without write
        blockCompressorStream.close();
        // check compressed output
        buf = bytesOut.toByteArray();
        assertEquals("empty stream compressed output size != 4", 4, buf.length);
        // use compressed output as input for decompression
        bytesIn = new ByteArrayInputStream(buf);
        // create decompression stream
        blockDecompressorStream = new BlockDecompressorStream(bytesIn, new Lz4Decompressor(), 1024);
        // no byte is available because stream was closed
        assertEquals("return value is not -1", -1, blockDecompressorStream.read());
    } catch (Exception e) {
        fail("testCompressorDecompressorEmptyStreamLogic ex error !!!" + e.getMessage());
    } finally {
        // Close each resource independently. The original guarded all three
        // closes behind a single blockDecompressorStream null check, so
        // bytesIn/bytesOut were never closed if decompressor-stream
        // construction failed (harmless for byte-array streams, but wrong form).
        if (bytesIn != null) {
            try {
                bytesIn.close();
            } catch (IOException ignored) {
                // best-effort cleanup in a test
            }
        }
        if (bytesOut != null) {
            try {
                bytesOut.close();
            } catch (IOException ignored) {
                // best-effort cleanup in a test
            }
        }
        if (blockDecompressorStream != null) {
            try {
                blockDecompressorStream.close();
            } catch (IOException ignored) {
                // best-effort cleanup in a test
            }
        }
    }
}
Also used : Lz4Compressor(org.apache.hadoop.io.compress.lz4.Lz4Compressor) Lz4Decompressor(org.apache.hadoop.io.compress.lz4.Lz4Decompressor) ByteArrayInputStream(java.io.ByteArrayInputStream) BlockCompressorStream(org.apache.hadoop.io.compress.BlockCompressorStream) BlockDecompressorStream(org.apache.hadoop.io.compress.BlockDecompressorStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) IOException(java.io.IOException) IOException(java.io.IOException) Test(org.junit.Test)

Example 3 with Lz4Compressor

Use of org.apache.hadoop.io.compress.lz4.Lz4Compressor in the Apache Hadoop project.

From the class TestLz4CompressorDecompressor, the method testCompressorDecopressorLogicWithCompressionStreams:

// test compress/decompress process through CompressionOutputStream/CompressionInputStream api 
@Test
public void testCompressorDecopressorLogicWithCompressionStreams() {
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int BYTE_SIZE = 1024 * 100;
    byte[] bytes = generate(BYTE_SIZE);
    int bufferSize = 262144;
    int compressionOverhead = (bufferSize / 6) + 32;
    try {
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new BlockCompressorStream(compressedDataBuffer, new Lz4Compressor(bufferSize), bufferSize, compressionOverhead);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());
        CompressionInputStream inflateFilter = new BlockDecompressorStream(deCompressedDataBuffer, new Lz4Decompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[BYTE_SIZE];
        // readFully guarantees the whole buffer is populated. The original used
        // read(result) and ignored its return value; read() may legally return
        // fewer than BYTE_SIZE bytes, which would make the array comparison
        // below fail spuriously.
        inflateIn.readFully(result);
        assertArrayEquals("original array not equals compress/decompressed array", result, bytes);
    } catch (IOException e) {
        fail("testLz4CompressorDecopressorLogicWithCompressionStreams ex error !!!");
    } finally {
        try {
            if (deflateOut != null)
                deflateOut.close();
            if (inflateIn != null)
                inflateIn.close();
        } catch (Exception e) {
        }
    }
}
Also used : CompressionOutputStream(org.apache.hadoop.io.compress.CompressionOutputStream) Lz4Compressor(org.apache.hadoop.io.compress.lz4.Lz4Compressor) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) DataOutputStream(java.io.DataOutputStream) BlockDecompressorStream(org.apache.hadoop.io.compress.BlockDecompressorStream) IOException(java.io.IOException) DataInputStream(java.io.DataInputStream) IOException(java.io.IOException) DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) Lz4Decompressor(org.apache.hadoop.io.compress.lz4.Lz4Decompressor) BlockCompressorStream(org.apache.hadoop.io.compress.BlockCompressorStream) BufferedInputStream(java.io.BufferedInputStream) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) BufferedOutputStream(java.io.BufferedOutputStream) Test(org.junit.Test)

Example 4 with Lz4Compressor

Use of org.apache.hadoop.io.compress.lz4.Lz4Compressor in the Apache Hadoop project.

From the class TestLz4CompressorDecompressor, the method testCompressDecompress:

// test compress/decompress process 
@Test
public void testCompressDecompress() {
    int BYTE_SIZE = 1024 * 54;
    byte[] bytes = generate(BYTE_SIZE);
    Lz4Compressor compressor = new Lz4Compressor();
    try {
        compressor.setInput(bytes, 0, bytes.length);
        // bytes are counted as read on setInput, but nothing is written yet
        assertTrue("Lz4CompressDecompress getBytesRead error !!!", compressor.getBytesRead() > 0);
        assertTrue("Lz4CompressDecompress getBytesWritten before compress error !!!", compressor.getBytesWritten() == 0);
        byte[] compressed = new byte[BYTE_SIZE];
        int cSize = compressor.compress(compressed, 0, compressed.length);
        assertTrue("Lz4CompressDecompress getBytesWritten after compress error !!!", compressor.getBytesWritten() > 0);
        Lz4Decompressor decompressor = new Lz4Decompressor();
        // set as input for decompressor only compressed data indicated with cSize
        decompressor.setInput(compressed, 0, cSize);
        byte[] decompressed = new byte[BYTE_SIZE];
        decompressor.decompress(decompressed, 0, decompressed.length);
        // round trip must consume all input and reproduce the original bytes
        assertTrue("testLz4CompressDecompress finished error !!!", decompressor.finished());
        assertArrayEquals(bytes, decompressed);
        compressor.reset();
        decompressor.reset();
        assertTrue("decompressor getRemaining error !!!", decompressor.getRemaining() == 0);
    } catch (Exception e) {
        // Include the exception detail: the original fail() swallowed it,
        // making any failure here undiagnosable from the test report.
        fail("testLz4CompressDecompress ex error!!! " + e.getMessage());
    }
}
Also used : Lz4Compressor(org.apache.hadoop.io.compress.lz4.Lz4Compressor) Lz4Decompressor(org.apache.hadoop.io.compress.lz4.Lz4Decompressor) IOException(java.io.IOException) Test(org.junit.Test)

Example 5 with Lz4Compressor

Use of net.jpountz.lz4.LZ4Compressor in the SilverKing project by Morgan Stanley.

From the class LZ4, the method compress:

/**
 * Compresses {@code length} bytes of {@code rawValue} starting at {@code offset}
 * using the LZ4 fast compressor.
 *
 * @param rawValue source buffer
 * @param offset   index in {@code rawValue} at which the data to compress begins
 * @param length   number of bytes to compress
 * @return a newly allocated array containing exactly the compressed bytes
 * @throws IOException declared for caller compatibility
 */
public byte[] compress(byte[] rawValue, int offset, int length) throws IOException {
    LZ4Compressor compressor = factory.fastCompressor();
    int maxCompressedLength = compressor.maxCompressedLength(length);
    byte[] compressed = new byte[maxCompressedLength];
    // Bug fix: the original passed 0 as the source offset, silently ignoring
    // the offset parameter and compressing the wrong bytes whenever offset != 0.
    int compressedLength = compressor.compress(rawValue, offset, length, compressed, 0, maxCompressedLength);
    // Shrink to the actual compressed size.
    // FUTURE - eliminate the copy
    byte[] buf = new byte[compressedLength];
    System.arraycopy(compressed, 0, buf, 0, compressedLength);
    return buf;
}
Also used : LZ4Compressor(net.jpountz.lz4.LZ4Compressor)

Aggregations

IOException (java.io.IOException)11 LZ4Compressor (net.jpountz.lz4.LZ4Compressor)10 Lz4Compressor (org.apache.hadoop.io.compress.lz4.Lz4Compressor)10 Test (org.junit.Test)10 Lz4Decompressor (org.apache.hadoop.io.compress.lz4.Lz4Decompressor)5 ByteBuffer (java.nio.ByteBuffer)3 Lz4Compressor (io.airlift.compress.lz4.Lz4Compressor)2 ByteArrayOutputStream (java.io.ByteArrayOutputStream)2 DataInputStream (java.io.DataInputStream)2 DataOutputStream (java.io.DataOutputStream)2 LZ4Exception (net.jpountz.lz4.LZ4Exception)2 BlockCompressorStream (org.apache.hadoop.io.compress.BlockCompressorStream)2 BlockDecompressorStream (org.apache.hadoop.io.compress.BlockDecompressorStream)2 DeflateCompressor (com.facebook.presto.orc.zlib.DeflateCompressor)1 InflateDecompressor (com.facebook.presto.orc.zlib.InflateDecompressor)1 ZstdJniCompressor (com.facebook.presto.orc.zstd.ZstdJniCompressor)1 ZstdJniDecompressor (com.facebook.presto.orc.zstd.ZstdJniDecompressor)1 PrestoException (com.facebook.presto.spi.PrestoException)1 PageCompressor (com.facebook.presto.spi.page.PageCompressor)1 PageDecompressor (com.facebook.presto.spi.page.PageDecompressor)1