Search in sources :

Example 11 with CompressionInputStream

use of org.apache.hadoop.io.compress.CompressionInputStream in project presto by prestodb.

The class HadoopDecompressor defines the method decompress:

@Override
public void decompress(Slice compressed, Slice uncompressed) throws RcFileCorruptionException {
    // Refuse to operate on a codec whose native resources were already released.
    checkState(!destroyed, "Codec has been destroyed");
    decompressor.reset();
    // The Hadoop codec wraps the compressed slice in a decompressing stream;
    // try-with-resources guarantees the stream is closed on every path.
    try (CompressionInputStream input = codec.createInputStream(compressed.getInput(), decompressor)) {
        // Copy exactly uncompressed.length() bytes into the output slice.
        uncompressed.setBytes(0, input, uncompressed.length());
    }
    catch (IndexOutOfBoundsException | IOException e) {
        // Either exception here means the compressed payload ended early.
        throw new RcFileCorruptionException(e, "Compressed stream is truncated");
    }
}
Also used : CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) IOException(java.io.IOException)

Example 12 with CompressionInputStream

use of org.apache.hadoop.io.compress.CompressionInputStream in project hbase by apache.

The class CellBlockBuilder defines the method decompress:

/**
 * Decompresses a cell block read from {@code cellBlockStream} using the given codec.
 *
 * @param compressor the codec used to create the decompressing stream
 * @param cellBlockStream raw compressed bytes of the cell block
 * @param osInitialSize initial capacity hint for the output buffer
 * @return a ByteBuffer containing the decompressed cell block
 * @throws IOException if decompression fails
 */
private ByteBuffer decompress(CompressionCodec compressor, InputStream cellBlockStream, int osInitialSize) throws IOException {
    // GZIPCodec fails w/ NPE if no configuration.
    if (compressor instanceof Configurable) {
        ((Configurable) compressor).setConf(this.conf);
    }
    Decompressor poolDecompressor = CodecPool.getDecompressor(compressor);
    // FIX: the CompressionInputStream was previously never closed, leaking any
    // resources it holds; try-with-resources closes it on every path.
    try (CompressionInputStream cis = compressor.createInputStream(cellBlockStream, poolDecompressor)) {
        // TODO: This is ugly. The buffer will be resized on us if we guess wrong.
        // TODO: Reuse buffers.
        ByteBufferOutputStream bbos = new ByteBufferOutputStream(osInitialSize);
        IOUtils.copy(cis, bbos);
        // Close before getByteBuffer() to preserve the original flush/flip ordering.
        bbos.close();
        return bbos.getByteBuffer();
    } finally {
        // Always return the pooled decompressor, even on failure.
        CodecPool.returnDecompressor(poolDecompressor);
    }
}
Also used : Decompressor(org.apache.hadoop.io.compress.Decompressor) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) ByteBufferOutputStream(org.apache.hadoop.hbase.io.ByteBufferOutputStream) Configurable(org.apache.hadoop.conf.Configurable)

Example 13 with CompressionInputStream

use of org.apache.hadoop.io.compress.CompressionInputStream in project hbase by apache.

The class CompressionTestBase defines the method codecTest:

/**
 * Round-trips {@code input} through {@code codec}: compresses all chunks, optionally
 * asserts the compressed size, then decompresses and verifies byte-for-byte equality.
 *
 * @param codec the codec under test (must be Configurable)
 * @param input chunks of plaintext to compress
 * @param expectedCompressedSize expected compressed byte count, or null to skip the check
 */
protected void codecTest(final CompressionCodec codec, final byte[][] input, final Integer expectedCompressedSize) throws Exception {
    // We do this in Compression.java
    ((Configurable) codec).getConf().setInt("io.file.buffer.size", 32 * 1024);
    // Compress
    long start = EnvironmentEdgeManager.currentTime();
    Compressor compressor = codec.createCompressor();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int inLen = 0;
    // FIX: try-with-resources so the compression stream is closed even if write() throws.
    try (CompressionOutputStream out = codec.createOutputStream(baos, compressor)) {
        for (int i = 0; i < input.length; i++) {
            out.write(input[i]);
            inLen += input[i].length;
        }
    }
    long end = EnvironmentEdgeManager.currentTime();
    final byte[] compressed = baos.toByteArray();
    LOG.info("{} compressed {} bytes to {} bytes in {} ms", codec.getClass().getSimpleName(), inLen, compressed.length, end - start);
    if (expectedCompressedSize != null) {
        // intValue() makes the unboxing explicit; Integer == int would unbox anyway,
        // but this avoids any reading of it as a reference comparison.
        assertTrue("Expected compressed size does not match: (expected=" + expectedCompressedSize + ", actual=" + compressed.length + ")", expectedCompressedSize.intValue() == compressed.length);
    }
    // Decompress
    final byte[] plain = new byte[inLen];
    Decompressor decompressor = codec.createDecompressor();
    // FIX: previously the input stream leaked if readFully threw.
    try (CompressionInputStream in = codec.createInputStream(new ByteArrayInputStream(compressed), decompressor)) {
        start = EnvironmentEdgeManager.currentTime();
        IOUtils.readFully(in, plain, 0, plain.length);
    }
    end = EnvironmentEdgeManager.currentTime();
    LOG.info("{} decompressed {} bytes to {} bytes in {} ms", codec.getClass().getSimpleName(), compressed.length, plain.length, end - start);
    // Decompressed bytes should equal the original
    int offset = 0;
    for (int i = 0; i < input.length; i++) {
        assertTrue("Comparison failed at offset " + offset, Bytes.compareTo(plain, offset, input[i].length, input[i], 0, input[i].length) == 0);
        offset += input[i].length;
    }
}
Also used : CompressionOutputStream(org.apache.hadoop.io.compress.CompressionOutputStream) Decompressor(org.apache.hadoop.io.compress.Decompressor) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) Compressor(org.apache.hadoop.io.compress.Compressor) ByteArrayOutputStream(java.io.ByteArrayOutputStream)

Example 14 with CompressionInputStream

use of org.apache.hadoop.io.compress.CompressionInputStream in project hadoop by apache.

The class TestZStandardCompressorDecompressor defines the method testCompressorDecompressorLogicWithCompressionStreams:

// test compress/decompress process through
// CompressionOutputStream/CompressionInputStream api
@Test
public void testCompressorDecompressorLogicWithCompressionStreams() throws Exception {
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int byteSize = 1024 * 100;
    byte[] bytes = generate(byteSize);
    int bufferSize = IO_FILE_BUFFER_SIZE_DEFAULT;
    try {
        // Compress the generated data through a ZStandard CompressorStream.
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new CompressorStream(compressedDataBuffer, new ZStandardCompressor(), bufferSize);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();
        // Feed the compressed bytes back through a DecompressorStream.
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());
        CompressionInputStream inflateFilter = new DecompressorStream(deCompressedDataBuffer, new ZStandardDecompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[byteSize];
        // FIX: read() may return fewer bytes than requested, which would make the
        // array comparison below flaky; readFully blocks until the buffer is full.
        inflateIn.readFully(result);
        assertArrayEquals("original array not equals compress/decompressed array", result, bytes);
    } finally {
        IOUtils.closeQuietly(deflateOut);
        IOUtils.closeQuietly(inflateIn);
    }
}
Also used : DecompressorStream(org.apache.hadoop.io.compress.DecompressorStream) CompressionOutputStream(org.apache.hadoop.io.compress.CompressionOutputStream) CompressorStream(org.apache.hadoop.io.compress.CompressorStream) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) DataOutputStream(java.io.DataOutputStream) DataInputStream(java.io.DataInputStream) DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) BufferedInputStream(java.io.BufferedInputStream) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) BufferedOutputStream(java.io.BufferedOutputStream) Test(org.junit.Test)

Example 15 with CompressionInputStream

use of org.apache.hadoop.io.compress.CompressionInputStream in project hadoop by apache.

The class TestZStandardCompressorDecompressor defines the method testCompressionCompressesCorrectly:

@Test
public void testCompressionCompressesCorrectly() throws Exception {
    int uncompressedSize = (int) FileUtils.sizeOf(uncompressedFile);
    byte[] bytes = FileUtils.readFileToByteArray(uncompressedFile);
    assertEquals(uncompressedSize, bytes.length);
    Configuration conf = new Configuration();
    ZStandardCodec codec = new ZStandardCodec();
    codec.setConf(conf);
    // Compress one byte at a time to exercise the single-byte write path.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Compressor compressor = codec.createCompressor();
    CompressionOutputStream outputStream = codec.createOutputStream(baos, compressor);
    for (byte aByte : bytes) {
        outputStream.write(aByte);
    }
    outputStream.finish();
    outputStream.close();
    assertEquals(uncompressedSize, compressor.getBytesRead());
    assertTrue(compressor.finished());
    // just make sure we can decompress the file
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    Decompressor decompressor = codec.createDecompressor();
    // FIX: close the decompressing stream (previously leaked).
    try (CompressionInputStream inputStream = codec.createInputStream(bais, decompressor)) {
        byte[] buffer = new byte[100];
        int n;
        // FIX: the original reused the previous read count as the next max length
        // (while ((n = read(buffer, 0, n)) ...)), so a short read permanently
        // shrank every subsequent read. Always request the full buffer.
        while ((n = inputStream.read(buffer, 0, buffer.length)) != -1) {
            byteArrayOutputStream.write(buffer, 0, n);
        }
    }
    assertArrayEquals(bytes, byteArrayOutputStream.toByteArray());
}
Also used : CompressionOutputStream(org.apache.hadoop.io.compress.CompressionOutputStream) Decompressor(org.apache.hadoop.io.compress.Decompressor) Configuration(org.apache.hadoop.conf.Configuration) ByteArrayInputStream(java.io.ByteArrayInputStream) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) Compressor(org.apache.hadoop.io.compress.Compressor) ZStandardCodec(org.apache.hadoop.io.compress.ZStandardCodec) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Test(org.junit.Test)

Aggregations

CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream)20 Test (org.junit.Test)13 CompressionOutputStream (org.apache.hadoop.io.compress.CompressionOutputStream)9 ByteArrayOutputStream (java.io.ByteArrayOutputStream)8 ByteArrayInputStream (java.io.ByteArrayInputStream)7 Configuration (org.apache.hadoop.conf.Configuration)7 Decompressor (org.apache.hadoop.io.compress.Decompressor)7 CompressionCodec (org.apache.hadoop.io.compress.CompressionCodec)6 FileInputStream (java.io.FileInputStream)5 IOException (java.io.IOException)5 BufferedOutputStream (java.io.BufferedOutputStream)4 ZStandardCodec (org.apache.hadoop.io.compress.ZStandardCodec)4 BufferedInputStream (java.io.BufferedInputStream)3 DataInputStream (java.io.DataInputStream)3 DataOutputStream (java.io.DataOutputStream)3 InputStream (java.io.InputStream)3 FileSystem (org.apache.hadoop.fs.FileSystem)3 DataInputBuffer (org.apache.hadoop.io.DataInputBuffer)3 DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer)3 Compressor (org.apache.hadoop.io.compress.Compressor)3