use of net.jpountz.lz4.LZ4Exception in project vespa by vespa-engine.
the class NormalSketch method onSerialize.
/**
 * Serializes the Sketch.
 *
 * Serialization format
 * ====================
 * Original size:   4 bytes
 * Compressed size: 4 bytes
 * Compressed data: N * 1 bytes
 *
 * Invariant:
 *     compressed size <= original size
 *
 * Special case:
 *     compressed size == original size => data is uncompressed
 *
 * @param buf Serializer
 */
@Override
protected void onSerialize(Serializer buf) {
    super.onSerialize(buf);
    buf.putInt(null, data.length);
    try {
        LZ4Compressor c = LZ4Factory.safeInstance().highCompressor();
        byte[] compressedData = new byte[data.length];
        int compressedSize = c.compress(data, compressedData);
        serializeDataArray(compressedData, compressedSize, buf);
    } catch (LZ4Exception e) {
        // LZ4Compressor.compress will throw this exception if it is unable to compress
        // into compressedData (when compressed size >= original size)
        serializeDataArray(data, data.length, buf);
    }
}
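The special case in the javadoc (equal sizes mean the writer fell back to storing the raw array) implies the reader has to branch on the two length fields. Below is a minimal deserialization sketch written against plain lz4-java and a ByteBuffer; the class, method, and byte order here are illustrative assumptions, since the real NormalSketch reads through vespa's own Serializer/Deserializer abstraction.

import java.nio.ByteBuffer;

import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4SafeDecompressor;

final class SketchPayloadReader {

    /**
     * Reads a payload laid out as [original size][stored size][stored bytes]
     * and returns the original, uncompressed data. Illustrative only.
     */
    static byte[] read(ByteBuffer buf) {
        int originalSize = buf.getInt();
        int storedSize = buf.getInt();
        byte[] stored = new byte[storedSize];
        buf.get(stored);

        if (storedSize == originalSize) {
            // Special case from the format doc: equal sizes mean the writer
            // stored the raw, uncompressed array.
            return stored;
        }

        // Otherwise the stored bytes are LZ4-compressed; inflate to the original size.
        byte[] original = new byte[originalSize];
        LZ4SafeDecompressor decompressor = LZ4Factory.safeInstance().safeDecompressor();
        decompressor.decompress(stored, 0, storedSize, original, 0);
        return original;
    }
}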
use of net.jpountz.lz4.LZ4Exception in project cassandra by apache.
the class LZ4Compressor method compress.
public void compress(ByteBuffer input, ByteBuffer output) throws IOException {
    int len = input.remaining();
    output.put((byte) len);
    output.put((byte) (len >>> 8));
    output.put((byte) (len >>> 16));
    output.put((byte) (len >>> 24));
    try {
        compressor.compress(input, output);
    } catch (LZ4Exception e) {
        throw new IOException(e);
    }
}
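For readers unfamiliar with the shifting idiom: the four single-byte puts simply write len as a 4-byte little-endian length prefix ahead of the compressed block. An equivalent, purely illustrative way to write the same header:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class LengthHeader {
    /** Writes the same 4-byte little-endian length prefix as the four byte puts above. */
    static void put(ByteBuffer output, int len) {
        ByteOrder previous = output.order();
        output.order(ByteOrder.LITTLE_ENDIAN).putInt(len); // bytes: len, len >>> 8, len >>> 16, len >>> 24
        output.order(previous);
    }
}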
use of net.jpountz.lz4.LZ4Exception in project cassandra by apache.
the class LZ4Compressor method uncompress.
public int uncompress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset) throws IOException {
    final int decompressedLength = (input[inputOffset] & 0xFF)
            | ((input[inputOffset + 1] & 0xFF) << 8)
            | ((input[inputOffset + 2] & 0xFF) << 16)
            | ((input[inputOffset + 3] & 0xFF) << 24);
    final int writtenLength;
    try {
        writtenLength = decompressor.decompress(input, inputOffset + INTEGER_BYTES, inputLength - INTEGER_BYTES,
                                                output, outputOffset, decompressedLength);
    } catch (LZ4Exception e) {
        throw new IOException(e);
    }
    if (writtenLength != decompressedLength) {
        throw new IOException("Decompressed lengths mismatch");
    }
    return decompressedLength;
}
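Both Cassandra methods follow the same pattern: the compressor/decompressor is driven directly, the original length is carried out of band (here in the 4-byte prefix), and LZ4Exception is translated into IOException. A small standalone round trip with lz4-java showing that pattern; the class and method names are illustrative and not part of Cassandra.

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import net.jpountz.lz4.LZ4Compressor;
import net.jpountz.lz4.LZ4Exception;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4FastDecompressor;

public final class Lz4RoundTrip {

    public static void main(String[] args) throws IOException {
        byte[] original = "lz4 round trip example".getBytes(StandardCharsets.UTF_8);
        LZ4Factory factory = LZ4Factory.fastestInstance();

        // Compress. The caller must remember the original length to decompress later,
        // which is exactly what the 4-byte header in the Cassandra code preserves.
        LZ4Compressor compressor = factory.fastCompressor();
        byte[] compressed = new byte[compressor.maxCompressedLength(original.length)];
        int compressedLength = compressor.compress(original, 0, original.length, compressed, 0, compressed.length);

        // Decompress, translating LZ4Exception (malformed input) into IOException,
        // mirroring the wrapper methods above.
        byte[] restored = new byte[original.length];
        LZ4FastDecompressor decompressor = factory.fastDecompressor();
        try {
            decompressor.decompress(compressed, 0, restored, 0, restored.length);
        } catch (LZ4Exception e) {
            throw new IOException(e);
        }

        System.out.println(new String(restored, StandardCharsets.UTF_8));
    }
}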
use of net.jpountz.lz4.LZ4Exception in project netty by netty.
the class Lz4FrameDecoder method decode.
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
    try {
        switch (currentState) {
            case INIT_BLOCK:
                if (in.readableBytes() < HEADER_LENGTH) {
                    break;
                }
                final long magic = in.readLong();
                if (magic != MAGIC_NUMBER) {
                    throw new DecompressionException("unexpected block identifier");
                }
                final int token = in.readByte();
                final int compressionLevel = (token & 0x0F) + COMPRESSION_LEVEL_BASE;
                int blockType = token & 0xF0;
                int compressedLength = Integer.reverseBytes(in.readInt());
                if (compressedLength < 0 || compressedLength > MAX_BLOCK_SIZE) {
                    throw new DecompressionException(String.format(
                            "invalid compressedLength: %d (expected: 0-%d)", compressedLength, MAX_BLOCK_SIZE));
                }
                int decompressedLength = Integer.reverseBytes(in.readInt());
                final int maxDecompressedLength = 1 << compressionLevel;
                if (decompressedLength < 0 || decompressedLength > maxDecompressedLength) {
                    throw new DecompressionException(String.format(
                            "invalid decompressedLength: %d (expected: 0-%d)", decompressedLength, maxDecompressedLength));
                }
                if (decompressedLength == 0 && compressedLength != 0
                        || decompressedLength != 0 && compressedLength == 0
                        || blockType == BLOCK_TYPE_NON_COMPRESSED && decompressedLength != compressedLength) {
                    throw new DecompressionException(String.format(
                            "stream corrupted: compressedLength(%d) and decompressedLength(%d) mismatch",
                            compressedLength, decompressedLength));
                }
                int currentChecksum = Integer.reverseBytes(in.readInt());
                if (decompressedLength == 0 && compressedLength == 0) {
                    if (currentChecksum != 0) {
                        throw new DecompressionException("stream corrupted: checksum error");
                    }
                    currentState = State.FINISHED;
                    decompressor = null;
                    checksum = null;
                    break;
                }
                this.blockType = blockType;
                this.compressedLength = compressedLength;
                this.decompressedLength = decompressedLength;
                this.currentChecksum = currentChecksum;
                currentState = State.DECOMPRESS_DATA;
                // fall through
            case DECOMPRESS_DATA:
                blockType = this.blockType;
                compressedLength = this.compressedLength;
                decompressedLength = this.decompressedLength;
                currentChecksum = this.currentChecksum;
                if (in.readableBytes() < compressedLength) {
                    break;
                }
                final ByteBufChecksum checksum = this.checksum;
                ByteBuf uncompressed = null;
                try {
                    switch (blockType) {
                        case BLOCK_TYPE_NON_COMPRESSED:
                            // Just pass through; we do not update the readerIndex yet, as that is done
                            // outside of the switch statement.
                            uncompressed = in.retainedSlice(in.readerIndex(), decompressedLength);
                            break;
                        case BLOCK_TYPE_COMPRESSED:
                            uncompressed = ctx.alloc().buffer(decompressedLength, decompressedLength);
                            decompressor.decompress(CompressionUtil.safeNioBuffer(in),
                                    uncompressed.internalNioBuffer(uncompressed.writerIndex(), decompressedLength));
                            // Update the writerIndex now to reflect what we decompressed.
                            uncompressed.writerIndex(uncompressed.writerIndex() + decompressedLength);
                            break;
                        default:
                            throw new DecompressionException(String.format(
                                    "unexpected blockType: %d (expected: %d or %d)",
                                    blockType, BLOCK_TYPE_NON_COMPRESSED, BLOCK_TYPE_COMPRESSED));
                    }
                    // Skip inbound bytes after we processed them.
                    in.skipBytes(compressedLength);
                    if (checksum != null) {
                        CompressionUtil.checkChecksum(checksum, uncompressed, currentChecksum);
                    }
                    out.add(uncompressed);
                    uncompressed = null;
                    currentState = State.INIT_BLOCK;
                } catch (LZ4Exception e) {
                    throw new DecompressionException(e);
                } finally {
                    if (uncompressed != null) {
                        uncompressed.release();
                    }
                }
                break;
            case FINISHED:
            case CORRUPTED:
                in.skipBytes(in.readableBytes());
                break;
            default:
                throw new IllegalStateException();
        }
    } catch (Exception e) {
        currentState = State.CORRUPTED;
        throw e;
    }
}
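In practice this decoder is installed together with Lz4FrameEncoder, which emits the same block layout the decoder parses above (magic number, token, compressed and decompressed lengths, checksum). A minimal pipeline-wiring sketch; the handler names passed to addLast are illustrative.

import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.compression.Lz4FrameDecoder;
import io.netty.handler.codec.compression.Lz4FrameEncoder;

public final class Lz4PipelineInitializer extends ChannelInitializer<SocketChannel> {
    @Override
    protected void initChannel(SocketChannel ch) {
        ch.pipeline()
          .addLast("lz4-encoder", new Lz4FrameEncoder())   // writes the block header parsed above
          .addLast("lz4-decoder", new Lz4FrameDecoder());  // the decoder shown in this snippet
        // Application handlers that consume the decompressed ByteBufs are added after these.
    }
}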
use of net.jpountz.lz4.LZ4Exception in project oap by oaplatform.
the class KafkaLZ4BlockInputStream method readBlock.
/**
 * Decompresses (if necessary) buffered data, optionally computes and validates an XXHash32 checksum,
 * and writes the result to a buffer.
 *
 * @throws IOException
 */
private void readBlock() throws IOException {
    int blockSize = KafkaLZ4BlockUtils.readUnsignedIntLE(in);

    // Check for EndMark
    if (blockSize == 0) {
        finished = true;
        if (flg.isContentChecksumSet())
            // TODO: verify this content checksum
            KafkaLZ4BlockUtils.readUnsignedIntLE(in);
        return;
    } else if (blockSize > maxBlockSize) {
        throw new IOException(String.format("Block size %s exceeded max: %s", blockSize, maxBlockSize));
    }

    boolean compressed = (blockSize & LZ4_FRAME_INCOMPRESSIBLE_MASK) == 0;
    byte[] bufferToRead;
    if (compressed) {
        bufferToRead = compressedBuffer;
    } else {
        blockSize &= ~LZ4_FRAME_INCOMPRESSIBLE_MASK;
        bufferToRead = buffer;
        bufferSize = blockSize;
    }

    if (in.read(bufferToRead, 0, blockSize) != blockSize) {
        throw new IOException(PREMATURE_EOS);
    }

    // verify checksum
    if (flg.isBlockChecksumSet()
            && KafkaLZ4BlockUtils.readUnsignedIntLE(in) != checksum.hash(bufferToRead, 0, blockSize, 0)) {
        throw new IOException(BLOCK_HASH_MISMATCH);
    }

    if (compressed) {
        try {
            bufferSize = decompressor.decompress(compressedBuffer, 0, blockSize, buffer, 0, maxBlockSize);
        } catch (LZ4Exception e) {
            throw new IOException(e);
        }
    }

    bufferOffset = 0;
}
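readBlock relies on a little-endian reader for the block-size and checksum fields, since the LZ4 frame format stores them as 4-byte little-endian unsigned integers. A sketch of what such a helper typically looks like; this is an assumption about KafkaLZ4BlockUtils.readUnsignedIntLE, whose actual implementation in the project may differ.

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

final class LittleEndian {
    /** Reads a 4-byte little-endian unsigned integer from the stream (illustrative helper). */
    static int readUnsignedIntLE(InputStream in) throws IOException {
        int b0 = in.read();
        int b1 = in.read();
        int b2 = in.read();
        int b3 = in.read();
        if ((b0 | b1 | b2 | b3) < 0) {
            throw new EOFException();
        }
        return b0 | (b1 << 8) | (b2 << 16) | (b3 << 24);
    }
}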