Use of java.util.zip.Inflater in the Apache Camel project: the LumberjackFrameDecoder class, method handleCompressedFrame.
/**
 * Decodes one compressed lumberjack frame: reads the 4-byte compressed payload
 * length, inflates the payload, and forwards each decompressed chunk downstream.
 *
 * @param ctx the channel handler context, used to allocate output buffers
 * @param in  the input buffer positioned just past the compressed-frame header byte
 * @param out the list that receives the decoded (decompressed) {@link ByteBuf}s
 * @return {@code true} when a complete compressed frame was consumed,
 *         {@code false} when more input bytes are required
 * @throws Exception if the payload is not a valid DEFLATE stream
 */
private boolean handleCompressedFrame(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
    if (!in.isReadable(FRAME_COMPRESS_HEADER_LENGTH)) {
        return false;
    }
    int compressedPayloadLength = in.readInt();
    if (!in.isReadable(compressedPayloadLength)) {
        return false;
    }
    // decompress payload
    Inflater inflater = new Inflater();
    try {
        if (in.hasArray()) {
            // Zero-copy path: feed the buffer's backing array directly to the inflater
            inflater.setInput(in.array(), in.arrayOffset() + in.readerIndex(), compressedPayloadLength);
            in.skipBytes(compressedPayloadLength);
        } else {
            byte[] array = new byte[compressedPayloadLength];
            in.readBytes(array);
            inflater.setInput(array);
        }
        while (!inflater.finished()) {
            ByteBuf decompressed = ctx.alloc().heapBuffer(1024, 1024);
            byte[] outArray = decompressed.array();
            int count = inflater.inflate(outArray, decompressed.arrayOffset(), decompressed.writableBytes());
            decompressed.writerIndex(count);
            // put data in the pipeline
            out.add(decompressed);
        }
    } finally {
        // Release native zlib memory eagerly; relying on finalization leaks
        // native memory under load (one Inflater per compressed frame).
        inflater.end();
    }
    return true;
}
Use of java.util.zip.Inflater in the Apache Hadoop project: the TestConcatenatedCompressedInput class, method testPrototypeInflaterGzip.
/**
 * Test using the raw Inflater codec for reading gzip files.
 *
 * <p>Manually parses the gzip member header (magic bytes, compression method,
 * flag-dependent optional fields) and then hands the remaining raw DEFLATE
 * stream to a {@code new Inflater(true)} (nowrap mode), mirroring what
 * BuiltInGzipDecompressor does internally.
 */
@Test
public void testPrototypeInflaterGzip() throws IOException {
    // used only for file extension
    CompressionCodec gzip = new GzipCodec();
    // localFs = FileSystem instance
    localFs.delete(workDir, true);
    System.out.println(COLOR_BR_BLUE + "testPrototypeInflaterGzip() using " + "non-native/Java Inflater and manual gzip header/trailer parsing" + COLOR_NORMAL);
    // copy prebuilt (correct!) version of concat.gz to HDFS
    final String fn = "concat" + gzip.getDefaultExtension();
    Path fnLocal = new Path(System.getProperty("test.concat.data", "/tmp"), fn);
    Path fnHDFS = new Path(workDir, fn);
    localFs.copyFromLocalFile(fnLocal, fnHDFS);
    // try-with-resources: the original closed the stream only on the success
    // path, leaking the file descriptor whenever an assertion failed mid-test
    try (FileInputStream in = new FileInputStream(fnLocal.toString())) {
        assertEquals("concat bytes available", 148, in.available());
        // should wrap all of this header-reading stuff in a running-CRC wrapper
        // (did so in BuiltInGzipDecompressor; see below)
        byte[] compressedBuf = new byte[256];
        int numBytesRead = in.read(compressedBuf, 0, 10);
        assertEquals("header bytes read", 10, numBytesRead);
        assertEquals("1st byte", 0x1f, compressedBuf[0] & 0xff);
        assertEquals("2nd byte", 0x8b, compressedBuf[1] & 0xff);
        assertEquals("3rd byte (compression method)", 8, compressedBuf[2] & 0xff);
        byte flags = (byte) (compressedBuf[3] & 0xff);
        if ((flags & 0x04) != 0) {
            // FEXTRA: 2-byte little-endian XLEN followed by XLEN payload bytes
            numBytesRead = in.read(compressedBuf, 0, 2);
            assertEquals("XLEN bytes read", 2, numBytesRead);
            int xlen = ((compressedBuf[1] << 8) | compressedBuf[0]) & 0xffff;
            in.skip(xlen);
        }
        if ((flags & 0x08) != 0) {
            // FNAME: zero-terminated original filename
            while ((numBytesRead = in.read()) != 0) {
                assertFalse("unexpected end-of-file while reading filename", numBytesRead == -1);
            }
        }
        if ((flags & 0x10) != 0) {
            // FCOMMENT: zero-terminated comment string
            while ((numBytesRead = in.read()) != 0) {
                assertFalse("unexpected end-of-file while reading comment", numBytesRead == -1);
            }
        }
        if ((flags & 0xe0) != 0) {
            // reserved bits must be zero per RFC 1952
            assertTrue("reserved bits are set??", (flags & 0xe0) == 0);
        }
        if ((flags & 0x02) != 0) {
            // FHCRC: consume the 2-byte header CRC16 (value intentionally not verified here)
            numBytesRead = in.read(compressedBuf, 0, 2);
            assertEquals("CRC16 bytes read", 2, numBytesRead);
        }
        // ready to go! next bytes should be start of deflated stream, suitable
        // for Inflater
        numBytesRead = in.read(compressedBuf);
        // Inflater docs refer to a "dummy byte": no clue what that's about;
        // appears to work fine without one
        byte[] uncompressedBuf = new byte[256];
        Inflater inflater = new Inflater(true);
        inflater.setInput(compressedBuf, 0, numBytesRead);
        try {
            int numBytesUncompressed = inflater.inflate(uncompressedBuf);
            String outString = new String(uncompressedBuf, 0, numBytesUncompressed, "UTF-8");
            System.out.println("uncompressed data of first gzip member = [" + outString + "]");
        } catch (java.util.zip.DataFormatException ex) {
            throw new IOException(ex.getMessage());
        } finally {
            // release native zlib memory held by the inflater
            inflater.end();
        }
    }
}
Use of java.util.zip.Inflater in the Dropwizard project: the BiDiGzipHandler class, method buildInflater.
/**
 * Obtains an {@link Inflater}, preferring an instance cached in the
 * thread-local storage over allocating a fresh one.
 *
 * @return a reset, ready-to-use inflater
 */
private Inflater buildInflater() {
    final Inflater cached = localInflater.get();
    if (cached == null) {
        // Nothing cached for this thread: allocate a new inflater
        return new Inflater(inflateNoWrap);
    }
    // The request could fail in the middle of decompressing, so potentially we can get
    // a broken inflater in the thread local storage. That's why we need to clear the storage.
    localInflater.set(null);
    // Reuse the inflater from the thread local storage
    cached.reset();
    return cached;
}
Use of java.util.zip.Inflater in the Eclipse Jetty project: the DeflateFrameExtensionTest class, method testInflateBasics.
/**
 * Verifies that a raw (nowrap) Inflater can decode the DEFLATE bytes produced
 * by pywebsocket into the expected {@code "info:"} text.
 */
@Test
public void testInflateBasics() throws Exception {
    // should result in "info:" text if properly inflated
    // what pywebsocket produces
    byte[] rawbuf = TypeUtil.fromHexString("CaCc4bCbB70200");
    // byte rawbuf[] = TypeUtil.fromHexString("CbCc4bCbB70200"); // what java produces
    // nowrap=true: the input is a bare DEFLATE stream with no zlib header/trailer
    Inflater inflater = new Inflater(true);
    // (redundant reset() removed: a freshly constructed Inflater is already in its initial state)
    inflater.setInput(rawbuf, 0, rawbuf.length);
    byte[] outbuf = new byte[64];
    int len = inflater.inflate(outbuf);
    inflater.end();
    Assert.assertThat("Inflated length", len, greaterThan(4));
    String actual = StringUtil.toUTF8String(outbuf, 0, len);
    Assert.assertThat("Inflated text", actual, is("info:"));
}
Use of java.util.zip.Inflater in the Elasticsearch project (Elastic): the DeflateCompressor class, method streamInput.
/**
 * Wraps the given stream in a decompressing view after validating that it
 * begins with the expected DEFLATE header bytes.
 *
 * @param in the compressed input stream, positioned at the header
 * @return a buffered, decompressing {@link StreamInput} that releases the
 *         inflater's native memory exactly once on close
 * @throws IOException if reading the header fails
 * @throws IllegalArgumentException if the header does not match {@code HEADER}
 */
@Override
public StreamInput streamInput(StreamInput in) throws IOException {
    final byte[] headerBytes = new byte[HEADER.length];
    // Read exactly HEADER.length bytes, tolerating short reads
    int len = 0;
    while (len < headerBytes.length) {
        final int read = in.read(headerBytes, len, headerBytes.length - len);
        if (read == -1) {
            break;
        }
        len += read;
    }
    // Reject streams that are truncated or carry the wrong magic bytes
    if (len != HEADER.length || Arrays.equals(headerBytes, HEADER) == false) {
        throw new IllegalArgumentException("Input stream is not compressed with DEFLATE!");
    }
    // nowrap=true: payload is a raw DEFLATE stream (no zlib header/checksum)
    final boolean nowrap = true;
    final Inflater inflater = new Inflater(nowrap);
    InputStream decompressedIn = new InflaterInputStream(in, inflater, BUFFER_SIZE);
    decompressedIn = new BufferedInputStream(decompressedIn, BUFFER_SIZE);
    return new InputStreamStreamInput(decompressedIn) {
        // Guards against double-close so inflater.end() runs exactly once
        final AtomicBoolean closed = new AtomicBoolean(false);

        @Override
        public void close() throws IOException {
            try {
                super.close();
            } finally {
                if (closed.compareAndSet(false, true)) {
                    // important to release native memory
                    inflater.end();
                }
            }
        }
    };
}
End of aggregated java.util.zip.Inflater usage examples.