Example 6 with Decompressor

Use of org.apache.hadoop.io.compress.Decompressor in project hadoop by apache.

Class CompressionEmulationUtil, method getPossiblyDecompressedInputStream.

/**
   * Returns an {@link InputStream} for a file that might be compressed.
   */
static InputStream getPossiblyDecompressedInputStream(Path file, Configuration conf, long offset) throws IOException {
    FileSystem fs = file.getFileSystem(conf);
    if (isCompressionEmulationEnabled(conf) && isInputCompressionEmulationEnabled(conf)) {
        CompressionCodecFactory compressionCodecs = new CompressionCodecFactory(conf);
        CompressionCodec codec = compressionCodecs.getCodec(file);
        if (codec != null) {
            Decompressor decompressor = CodecPool.getDecompressor(codec);
            if (decompressor != null) {
                // Decompress from the start of the file; 'offset' is applied only
                // on the uncompressed fallback path below.
                CompressionInputStream in = codec.createInputStream(fs.open(file), decompressor);
                // Use SplittableCompressionCodec?
                return in;
            }
        }
    }
    FSDataInputStream in = fs.open(file);
    in.seek(offset);
    return in;
}
Also used: Decompressor (org.apache.hadoop.io.compress.Decompressor), CompressionCodecFactory (org.apache.hadoop.io.compress.CompressionCodecFactory), CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), InputStream (java.io.InputStream), FileSystem (org.apache.hadoop.fs.FileSystem), CompressionCodec (org.apache.hadoop.io.compress.CompressionCodec)
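Below is a minimal, self-contained sketch of the same codec-detection pattern, not taken from the Hadoop sources above; the class and method names are invented for illustration. It picks a codec from the file extension via CompressionCodecFactory, falls back to the raw stream when none matches, and releases the pooled Decompressor if opening fails. As in the method above, returning the decompressor after a successful read is left to the caller.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;

public class MaybeCompressedOpener {

    /** Opens 'file', decompressing it when its extension maps to a registered codec. */
    public static InputStream open(Path file, Configuration conf) throws IOException {
        FileSystem fs = file.getFileSystem(conf);
        CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(file);
        if (codec == null) {
            // No codec matched the file extension: hand back the raw stream.
            return fs.open(file);
        }
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        try {
            return decompressor != null
                ? codec.createInputStream(fs.open(file), decompressor)
                : codec.createInputStream(fs.open(file));
        } catch (IOException e) {
            if (decompressor != null) {
                // Don't leak the pooled instance if the stream cannot be created.
                CodecPool.returnDecompressor(decompressor);
            }
            throw e;
        }
    }
}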

Example 7 with Decompressor

Use of org.apache.hadoop.io.compress.Decompressor in project hbase by apache.

Class CellBlockBuilder, method decompress.

private ByteBuffer decompress(CompressionCodec compressor, InputStream cellBlockStream, int osInitialSize) throws IOException {
    // Despite its name, 'compressor' is the CompressionCodec to decode with.
    // GZIPCodec fails w/ NPE if no configuration.
    if (compressor instanceof Configurable) {
        ((Configurable) compressor).setConf(this.conf);
    }
    Decompressor poolDecompressor = CodecPool.getDecompressor(compressor);
    CompressionInputStream cis = compressor.createInputStream(cellBlockStream, poolDecompressor);
    ByteBufferOutputStream bbos;
    try {
        // TODO: This is ugly. The buffer will be resized on us if we guess wrong.
        // TODO: Reuse buffers.
        bbos = new ByteBufferOutputStream(osInitialSize);
        IOUtils.copy(cis, bbos);
        bbos.close();
        return bbos.getByteBuffer();
    } finally {
        CodecPool.returnDecompressor(poolDecompressor);
    }
}
Also used: Decompressor (org.apache.hadoop.io.compress.Decompressor), CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream), ByteBufferOutputStream (org.apache.hadoop.hbase.io.ByteBufferOutputStream), Configurable (org.apache.hadoop.conf.Configurable)
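As a usage illustration, and not part of the HBase sources above, the same borrow/copy/return pattern can be exercised end to end against in-memory byte arrays. DefaultCodec is chosen here only because it needs no native libraries; the class and method names are invented.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class PooledRoundTrip {

    public static byte[] roundTrip(byte[] payload) throws IOException {
        Configuration conf = new Configuration();
        // ReflectionUtils.newInstance also injects the Configuration (Configurable).
        CompressionCodec codec = ReflectionUtils.newInstance(DefaultCodec.class, conf);

        // Compress with a pooled Compressor, returning it in a finally block.
        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        Compressor compressor = CodecPool.getCompressor(codec);
        try (OutputStream out = codec.createOutputStream(compressed, compressor)) {
            out.write(payload);
        } finally {
            CodecPool.returnCompressor(compressor);
        }

        // Decompress with a pooled Decompressor, mirroring CellBlockBuilder.decompress.
        ByteArrayOutputStream decompressed = new ByteArrayOutputStream();
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        try (InputStream in = codec.createInputStream(
                new ByteArrayInputStream(compressed.toByteArray()), decompressor)) {
            byte[] buf = new byte[4096];
            for (int n = in.read(buf); n != -1; n = in.read(buf)) {
                decompressed.write(buf, 0, n);
            }
        } finally {
            CodecPool.returnDecompressor(decompressor);
        }
        return decompressed.toByteArray();
    }
}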

Example 8 with Decompressor

Use of org.apache.hadoop.io.compress.Decompressor in project hbase by apache.

Class Compression, method decompress.

/**
   * Decompresses data from the given stream using the configured compression
   * algorithm. It will throw an exception if the dest buffer does not have
   * enough space to hold the decompressed data.
   *
   * @param dest
   *          the output bytes buffer
   * @param destOffset
   *          start writing position of the output buffer
   * @param bufferedBoundedStream
   *          a stream to read compressed data from, bounded to the exact amount
   *          of compressed data
   * @param compressedSize
   *          compressed data size, header not included
   * @param uncompressedSize
   *          uncompressed data size, header not included
   * @param compressAlgo
   *          compression algorithm used
   * @throws IOException if the compressed data cannot be read or decompressed fully
   */
public static void decompress(byte[] dest, int destOffset, InputStream bufferedBoundedStream, int compressedSize, int uncompressedSize, Compression.Algorithm compressAlgo) throws IOException {
    if (dest.length - destOffset < uncompressedSize) {
        throw new IllegalArgumentException("Output buffer does not have enough space to hold " + uncompressedSize + " decompressed bytes, available: " + (dest.length - destOffset));
    }
    Decompressor decompressor = null;
    try {
        decompressor = compressAlgo.getDecompressor();
        // 'bufferedBoundedStream' is already limited to 'compressedSize' bytes of
        // input, so only 'uncompressedSize' is needed to read the result fully.
        InputStream is = compressAlgo.createDecompressionStream(bufferedBoundedStream, decompressor, 0);
        IOUtils.readFully(is, dest, destOffset, uncompressedSize);
        is.close();
    } finally {
        if (decompressor != null) {
            compressAlgo.returnDecompressor(decompressor);
        }
    }
}
Also used: Decompressor (org.apache.hadoop.io.compress.Decompressor), BufferedInputStream (java.io.BufferedInputStream), CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream), InputStream (java.io.InputStream)
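A hypothetical caller might look like the sketch below; the sizes are assumed to come from a block header read elsewhere, Algorithm.GZ stands in for whatever algorithm the data was written with, and the class and method names are invented.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.hbase.io.compress.Compression;

public class BlockBodyReader {

    /** Decompresses one block body whose sizes were read from its header. */
    static byte[] readBlockBody(byte[] compressedBytes, int compressedSize,
                                int uncompressedSize) throws IOException {
        byte[] dest = new byte[uncompressedSize];
        // Bound the stream to exactly the compressed bytes, as the method expects.
        InputStream bounded = new ByteArrayInputStream(compressedBytes, 0, compressedSize);
        Compression.decompress(dest, 0, bounded, compressedSize, uncompressedSize,
            Compression.Algorithm.GZ);
        return dest;
    }
}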

Example 9 with Decompressor

Use of org.apache.hadoop.io.compress.Decompressor in project hive by apache.

Class CodecPool, method getDecompressor.

/**
   * Get a {@link Decompressor} for the given {@link CompressionCodec} from the
   * pool or a new one.
   * 
   * @param codec
   *          the <code>CompressionCodec</code> for which to get the
   *          <code>Decompressor</code>
   * @return <code>Decompressor</code> for the given
   *         <code>CompressionCodec</code>, from the pool or a new one
   */
public static Decompressor getDecompressor(CompressionCodec codec) {
    Decompressor decompressor = borrow(DECOMPRESSOR_POOL, codec.getDecompressorType());
    if (decompressor == null) {
        decompressor = codec.createDecompressor();
        LOG.info("Got brand-new decompressor");
    } else {
        LOG.debug("Got recycled decompressor");
    }
    return decompressor;
}
Also used: Decompressor (org.apache.hadoop.io.compress.Decompressor)
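The contract that makes pooling worthwhile is that every borrowed Decompressor is eventually handed back. The sketch below shows that discipline with Hadoop's own org.apache.hadoop.io.compress.CodecPool, which the Hive copy above mirrors; the surrounding class and method are invented, and the codec and raw stream are assumed to be supplied by the caller.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;

public class PooledRead {

    static long countDecompressedBytes(CompressionCodec codec, InputStream rawStream)
            throws IOException {
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        try (InputStream in = codec.createInputStream(rawStream, decompressor)) {
            long total = 0;
            byte[] buf = new byte[4096];
            for (int n = in.read(buf); n != -1; n = in.read(buf)) {
                total += n;
            }
            return total;
        } finally {
            // Hand the instance back so later callers hit the "recycled" path above.
            CodecPool.returnDecompressor(decompressor);
        }
    }
}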

Example 10 with Decompressor

Use of org.apache.hadoop.io.compress.Decompressor in project hadoop by apache.

Class TestZlibCompressorDecompressor, method testZlibCompressorDecompressorSetDictionary.

@Test
public void testZlibCompressorDecompressorSetDictionary() {
    Configuration conf = new Configuration();
    if (ZlibFactory.isNativeZlibLoaded(conf)) {
        Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
        Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
        checkSetDictionaryNullPointerException(zlibCompressor);
        checkSetDictionaryNullPointerException(zlibDecompressor);
        checkSetDictionaryArrayIndexOutOfBoundsException(zlibDecompressor);
        checkSetDictionaryArrayIndexOutOfBoundsException(zlibCompressor);
    } else {
        // Native zlib is not loaded; fail explicitly instead of silently skipping
        // the dictionary checks above.
        assertTrue("ZlibFactory is using native libs against request", ZlibFactory.isNativeZlibLoaded(conf));
    }
}
Also used: ZlibDirectDecompressor (org.apache.hadoop.io.compress.zlib.ZlibDecompressor.ZlibDirectDecompressor), Decompressor (org.apache.hadoop.io.compress.Decompressor), Configuration (org.apache.hadoop.conf.Configuration), Compressor (org.apache.hadoop.io.compress.Compressor), Test (org.junit.Test)
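The helper methods called above are not shown on this page; the sketch below is only a guess at what checkSetDictionaryNullPointerException might assert for a Compressor, and the actual Hadoop test code may differ.

import static org.junit.Assert.fail;

import org.apache.hadoop.io.compress.Compressor;

// ... inside a JUnit test class ...
private void checkSetDictionaryNullPointerException(Compressor compressor) {
    try {
        compressor.setDictionary(null, 0, 0);
        fail("NullPointerException expected for a null dictionary");
    } catch (NullPointerException expected) {
        // expected: zlib-backed compressors reject a null dictionary buffer
    }
}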

Aggregations

Decompressor (org.apache.hadoop.io.compress.Decompressor): 14
Test (org.junit.Test): 9
Configuration (org.apache.hadoop.conf.Configuration): 7
CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream): 6
Compressor (org.apache.hadoop.io.compress.Compressor): 5
ZlibDirectDecompressor (org.apache.hadoop.io.compress.zlib.ZlibDecompressor.ZlibDirectDecompressor): 4
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 3
IOException (java.io.IOException): 3
InputStream (java.io.InputStream): 3
ZStandardCodec (org.apache.hadoop.io.compress.ZStandardCodec): 3
ByteArrayInputStream (java.io.ByteArrayInputStream): 2
File (java.io.File): 2
URL (java.net.URL): 2
HashSet (java.util.HashSet): 2
Path (org.apache.hadoop.fs.Path): 2
BZip2Codec (org.apache.hadoop.io.compress.BZip2Codec): 2
CompressionOutputStream (org.apache.hadoop.io.compress.CompressionOutputStream): 2
BufferedInputStream (java.io.BufferedInputStream): 1
FileInputStream (java.io.FileInputStream): 1
Configurable (org.apache.hadoop.conf.Configurable): 1