Search in sources :

Example 1 with DecompressorStream

Use of org.apache.hadoop.io.compress.DecompressorStream in the Apache Hadoop project.

The method testBuiltInGzipDecompressorExceptions from the class TestZlibCompressorDecompressor.

@Test
public void testBuiltInGzipDecompressorExceptions() {
    BuiltInGzipDecompressor decompresser = new BuiltInGzipDecompressor();
    // setInput must reject a null buffer. The original test silently passed
    // when no exception was thrown at all; the fail() after the call fixes that.
    try {
        decompresser.setInput(null, 0, 1);
        fail("testBuiltInGzipDecompressorExceptions npe error: no exception thrown");
    } catch (NullPointerException ex) {
        // expected
    } catch (Exception ex) {
        fail("testBuiltInGzipDecompressorExceptions npe error " + ex);
    }
    // setInput must reject a negative length.
    try {
        decompresser.setInput(new byte[] { 0 }, 0, -1);
        fail("testBuiltInGzipDecompressorExceptions aioob error: no exception thrown");
    } catch (ArrayIndexOutOfBoundsException ex) {
        // expected
    } catch (Exception ex) {
        fail("testBuiltInGzipDecompressorExceptions aioob error" + ex);
    }
    // A decompressor that has consumed no input reports zero progress.
    assertTrue("decompresser.getBytesRead error", decompresser.getBytesRead() == 0);
    assertTrue("decompresser.getRemaining error", decompresser.getRemaining() == 0);
    decompresser.reset();
    decompresser.end();

    // Each malformed gzip header below must be rejected with an IOException.
    // invalid 0 and 1 bytes, must be 31, -117 (the gzip magic number)
    expectGzipHeaderFailure("invalid 0 and 1 byte in gzip stream",
        new byte[] { 0, 0, 1, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
    // invalid 2 byte, must be 8 (the deflate compression method)
    expectGzipHeaderFailure("invalid 2 byte in gzip stream",
        new byte[] { 31, -117, 7, 1, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
    // invalid 3 byte (flags field with reserved bits set)
    expectGzipHeaderFailure("invalid 3 byte in gzip stream",
        new byte[] { 31, -117, 8, -32, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
    // 3 byte sets FEXTRA, but the stream ends before a valid extra field
    expectGzipHeaderFailure("invalid 3 byte make hasExtraField",
        new byte[] { 31, -117, 8, 4, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
}

/**
 * Feeds {@code len} bytes of {@code gzipBytes} to a fresh
 * {@link BuiltInGzipDecompressor} through a {@link DecompressorStream} and
 * asserts that reading it fails with an {@link IOException}. The stream is
 * closed via try-with-resources (the original copy-pasted blocks leaked
 * every stream they opened and passed silently when no exception occurred).
 *
 * @param message   failure message identifying which header byte is invalid
 * @param gzipBytes the malformed gzip header bytes
 * @param len       number of bytes of {@code gzipBytes} to expose to the stream
 */
private static void expectGzipHeaderFailure(String message, byte[] gzipBytes, int len) {
    byte[] buffer = new byte[1024];
    DataInputBuffer gzbuf = new DataInputBuffer();
    gzbuf.reset(gzipBytes, len);
    Decompressor decompressor = new BuiltInGzipDecompressor();
    try (InputStream decompStream = new DecompressorStream(gzbuf, decompressor)) {
        decompStream.read(buffer);
        fail(message + ": no IOException thrown");
    } catch (IOException ioex) {
        // expected
    } catch (Exception ex) {
        fail(message + ex);
    }
}
Also used : DecompressorStream(org.apache.hadoop.io.compress.DecompressorStream) ZlibDirectDecompressor(org.apache.hadoop.io.compress.zlib.ZlibDecompressor.ZlibDirectDecompressor) Decompressor(org.apache.hadoop.io.compress.Decompressor) DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) InputStream(java.io.InputStream) IOException(java.io.IOException) IOException(java.io.IOException) Test(org.junit.Test)

Example 2 with DecompressorStream

Use of org.apache.hadoop.io.compress.DecompressorStream in the Apache Hadoop project.

The method testCompressorDecompressorLogicWithCompressionStreams from the class TestZStandardCompressorDecompressor.

// test compress/decompress process through
// CompressionOutputStream/CompressionInputStream api
@Test
public void testCompressorDecompressorLogicWithCompressionStreams() throws Exception {
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int byteSize = 1024 * 100;
    byte[] bytes = generate(byteSize);
    int bufferSize = IO_FILE_BUFFER_SIZE_DEFAULT;
    try {
        // Compress the generated bytes into an in-memory buffer.
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new CompressorStream(
            compressedDataBuffer, new ZStandardCompressor(), bufferSize);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();

        // Decompress the buffer and verify the round trip restores the input.
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
            compressedDataBuffer.getLength());
        CompressionInputStream inflateFilter = new DecompressorStream(
            deCompressedDataBuffer, new ZStandardDecompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[byteSize];
        // readFully, not read: a single read() is not guaranteed to fill a
        // 100 KB buffer, which made the original assertion flaky.
        inflateIn.readFully(result);
        // assertArrayEquals takes (message, expected, actual): the original
        // input goes first (the original call had them swapped).
        assertArrayEquals("original array not equals compress/decompressed array",
            bytes, result);
    } finally {
        IOUtils.closeQuietly(deflateOut);
        IOUtils.closeQuietly(inflateIn);
    }
}
Also used : DecompressorStream(org.apache.hadoop.io.compress.DecompressorStream) CompressionOutputStream(org.apache.hadoop.io.compress.CompressionOutputStream) CompressorStream(org.apache.hadoop.io.compress.CompressorStream) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) DataOutputStream(java.io.DataOutputStream) DataInputStream(java.io.DataInputStream) DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) BufferedInputStream(java.io.BufferedInputStream) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) BufferedOutputStream(java.io.BufferedOutputStream) Test(org.junit.Test)

Aggregations

DataInputBuffer (org.apache.hadoop.io.DataInputBuffer)2 DecompressorStream (org.apache.hadoop.io.compress.DecompressorStream)2 Test (org.junit.Test)2 BufferedInputStream (java.io.BufferedInputStream)1 BufferedOutputStream (java.io.BufferedOutputStream)1 DataInputStream (java.io.DataInputStream)1 DataOutputStream (java.io.DataOutputStream)1 IOException (java.io.IOException)1 InputStream (java.io.InputStream)1 DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer)1 CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream)1 CompressionOutputStream (org.apache.hadoop.io.compress.CompressionOutputStream)1 CompressorStream (org.apache.hadoop.io.compress.CompressorStream)1 Decompressor (org.apache.hadoop.io.compress.Decompressor)1 ZlibDirectDecompressor (org.apache.hadoop.io.compress.zlib.ZlibDecompressor.ZlibDirectDecompressor)1