Use of tech.pegasys.teku.networking.eth2.rpc.core.encodings.compression.exceptions.CompressionException in the ConsenSys teku project, as shown in the SnappyCompressorTest method uncompress_truncatedPayload.
@Test
public void uncompress_truncatedPayload() throws CompressionException {
  // Serialize a random state and record its full (expected) uncompressed length.
  final BeaconState beaconState = dataStructureUtil.randomBeaconState(0);
  final Bytes fullPayload = beaconState.sszSerialize();
  final int expectedUncompressedSize = fullPayload.size();

  // Compress everything except the first byte: the stream itself decodes
  // cleanly, but yields fewer bytes than the decompressor was told to expect.
  final Bytes shortCompressed = compressor.compress(fullPayload.slice(1));

  final Decompressor decompressor =
      new SnappyFramedCompressor().createDecompressor(expectedUncompressedSize);

  // No complete message can be produced from the truncated payload...
  assertThat(decompressor.decodeOneMessage(Utils.toByteBuf(shortCompressed))).isEmpty();
  // ...and finishing the stream must report the shortfall.
  assertThatThrownBy(decompressor::complete)
      .isInstanceOf(PayloadSmallerThanExpectedException.class);
}
Use of tech.pegasys.teku.networking.eth2.rpc.core.encodings.compression.exceptions.CompressionException in the ConsenSys teku project, as shown in the LengthPrefixedPayloadDecoder method decodeOneMessage.
@Override
public Optional<T> decodeOneMessage(final ByteBuf in) throws RpcException {
  // Guard clauses: a disposed decoder must not be reused, an unreadable buffer
  // yields nothing, and data after a completed message is a protocol violation.
  if (disposed) {
    throw new IllegalStateException("Trying to reuse disposed LengthPrefixedPayloadDecoder");
  }
  if (!in.isReadable()) {
    return Optional.empty();
  }
  if (decoded) {
    throw new RpcException.ExtraDataAppendedException();
  }

  // Lazily create the decompressor once the varint length prefix is available.
  if (decompressor.isEmpty()) {
    final Optional<Integer> prefixedLength = readLengthPrefixHeader(in);
    if (prefixedLength.isEmpty()) {
      // Prefix not fully received yet — wait for more bytes.
      return Optional.empty();
    }
    final int expectedLength = prefixedLength.get();
    if (!payloadEncoder.isLengthWithinBounds(expectedLength)) {
      throw new LengthOutOfBoundsException();
    }
    decompressor = Optional.of(compressor.createDecompressor(expectedLength));
  }

  // Feed the available bytes in, translating compression-layer failures into
  // the corresponding RPC-layer exceptions.
  final Optional<ByteBuf> frame;
  try {
    frame = decompressor.get().decodeOneMessage(in);
  } catch (PayloadSmallerThanExpectedException e) {
    throw new PayloadTruncatedException();
  } catch (PayloadLargerThanExpectedException e) {
    throw new ExtraDataAppendedException();
  } catch (CompressionException e) {
    throw new DecompressFailedException();
  }

  if (frame.isEmpty()) {
    // Not enough input for a full message yet.
    return Optional.empty();
  }

  decompressor = Optional.empty();
  final ByteBuf decompressed = frame.get();
  try {
    // Copy into a standalone array: a Bytes.wrapByteBuf(buf).slice(...) view
    // would become invalid once the underlying [in] buffer is released.
    final byte[] payloadBytes = new byte[decompressed.readableBytes()];
    decompressed.readBytes(payloadBytes);
    final Bytes payload = Bytes.wrap(payloadBytes);
    decoded = true;
    return Optional.of(payloadEncoder.decode(payload));
  } finally {
    decompressed.release();
  }
}
Use of tech.pegasys.teku.networking.eth2.rpc.core.encodings.compression.exceptions.CompressionException in the ConsenSys teku project, as shown in the SnappyFrameDecoder method decodeOneImpl.
/**
 * Decodes at most one chunk of a snappy-framed stream from {@code in}.
 *
 * <p>Returns an empty Optional when more input is needed; returns a data buffer only for
 * UNCOMPRESSED_DATA / COMPRESSED_DATA chunks. Once any chunk is found to be corrupt, all
 * subsequent input is drained and ignored.
 *
 * <p>Fix: the oversize COMPRESSED_DATA check previously threw netty's unchecked
 * {@code DecompressionException}, which is not a {@code CompressionException} and therefore
 * escaped the catch block below without setting the {@code corrupted} flag, and bypassed the
 * declared {@code throws} contract. It now throws {@code CompressionException} like every other
 * invalid-chunk path in this method.
 */
@Override
protected Optional<ByteBuf> decodeOneImpl(ByteBuf in) throws CompressionException {
  if (corrupted) {
    // Stream already known to be broken: consume and discard everything.
    in.skipBytes(in.readableBytes());
    return Optional.empty();
  }
  ByteBuf ret = null;
  try {
    int idx = in.readerIndex();
    final int inSize = in.readableBytes();
    if (inSize < 4) {
      // We need at least the chunk type tag (1 byte)
      // and the length of the chunk (3 bytes) in order to proceed
      return Optional.empty();
    }
    final int chunkTypeVal = in.getUnsignedByte(idx);
    final ChunkType chunkType = mapChunkType((byte) chunkTypeVal);
    final int chunkLength = in.getUnsignedMediumLE(idx + 1);
    switch(chunkType) {
      case STREAM_IDENTIFIER:
        // Must appear exactly once, first, and carry the literal "sNaPpY" magic.
        if (started) {
          throw new CompressionException("Extra Snappy stream identifier");
        }
        if (chunkLength != SNAPPY_IDENTIFIER_LEN) {
          throw new CompressionException("Unexpected length of stream identifier: " + chunkLength);
        }
        if (inSize < 4 + SNAPPY_IDENTIFIER_LEN) {
          break;
        }
        in.skipBytes(4);
        int offset = in.readerIndex();
        in.skipBytes(SNAPPY_IDENTIFIER_LEN);
        checkByte(in.getByte(offset++), (byte) 's');
        checkByte(in.getByte(offset++), (byte) 'N');
        checkByte(in.getByte(offset++), (byte) 'a');
        checkByte(in.getByte(offset++), (byte) 'P');
        checkByte(in.getByte(offset++), (byte) 'p');
        checkByte(in.getByte(offset), (byte) 'Y');
        started = true;
        break;
      case RESERVED_SKIPPABLE:
        if (!started) {
          throw new CompressionException("Received RESERVED_SKIPPABLE tag before STREAM_IDENTIFIER");
        }
        if (inSize < 4 + chunkLength) {
          // Wait until the whole skippable chunk has arrived before skipping it.
          return Optional.empty();
        }
        in.skipBytes(4 + chunkLength);
        break;
      case RESERVED_UNSKIPPABLE:
        // The spec mandates that reserved unskippable chunks must immediately
        // terminate decoding — we cannot process them correctly
        throw new CompressionException("Found reserved unskippable chunk type: 0x" + Integer.toHexString(chunkTypeVal));
      case UNCOMPRESSED_DATA:
        if (!started) {
          throw new CompressionException("Received UNCOMPRESSED_DATA tag before STREAM_IDENTIFIER");
        }
        if (chunkLength > MAX_UNCOMPRESSED_DATA_SIZE) {
          throw new CompressionException("Received UNCOMPRESSED_DATA larger than 65540 bytes");
        }
        if (inSize < 4 + chunkLength) {
          return Optional.empty();
        }
        in.skipBytes(4);
        if (validateChecksums) {
          // First 4 bytes of the chunk body are the masked CRC32C of the data.
          int checksum = in.readIntLE();
          validateChecksum(checksum, in, in.readerIndex(), chunkLength - 4);
        } else {
          in.skipBytes(4);
        }
        ret = in.readRetainedSlice(chunkLength - 4);
        break;
      case COMPRESSED_DATA:
        if (!started) {
          throw new CompressionException("Received COMPRESSED_DATA tag before STREAM_IDENTIFIER");
        }
        if (chunkLength > MAX_COMPRESSED_CHUNK_SIZE) {
          // Was DecompressionException (unchecked, not caught below) — now
          // CompressionException so the corrupted flag is set consistently.
          throw new CompressionException("Received COMPRESSED_DATA that contains" + " chunk that exceeds " + MAX_COMPRESSED_CHUNK_SIZE + " bytes");
        }
        if (inSize < 4 + chunkLength) {
          return Optional.empty();
        }
        in.skipBytes(4);
        int checksum = in.readIntLE();
        ByteBuf uncompressed = Unpooled.buffer(chunkLength, MAX_DECOMPRESSED_DATA_SIZE);
        try {
          if (validateChecksums) {
            // Temporarily cap the writer index so snappy only sees this chunk's
            // payload (chunkLength minus the 4 checksum bytes already read).
            int oldWriterIndex = in.writerIndex();
            try {
              in.writerIndex(in.readerIndex() + chunkLength - 4);
              snappy.decode(in, uncompressed);
            } finally {
              in.writerIndex(oldWriterIndex);
            }
            validateChecksum(checksum, uncompressed, 0, uncompressed.writerIndex());
          } else {
            snappy.decode(in.readSlice(chunkLength - 4), uncompressed);
          }
          // Hand ownership of the buffer to the caller; null it so the finally
          // block does not release it on the success path.
          ret = uncompressed;
          uncompressed = null;
        } finally {
          if (uncompressed != null) {
            uncompressed.release();
          }
        }
        snappy.reset();
        break;
    }
  } catch (CompressionException e) {
    // Any framing error permanently poisons the stream.
    corrupted = true;
    throw e;
  }
  return Optional.ofNullable(ret);
}
Use of tech.pegasys.teku.networking.eth2.rpc.core.encodings.compression.exceptions.CompressionException in the ConsenSys teku project, as shown in the SnappyCompressorTest method uncompress_invalidData.
@Test
public void uncompress_invalidData() {
  final BeaconState state = dataStructureUtil.randomBeaconState(0);
  final Bytes rawSsz = state.sszSerialize();

  // Raw (uncompressed) SSZ bytes are not a valid snappy-framed stream, so every
  // sliced delivery of them must eventually trigger a CompressionException.
  for (List<ByteBuf> slices : Utils.generateTestSlices(rawSsz)) {
    final Decompressor decompressor =
        new SnappyFramedCompressor().createDecompressor(rawSsz.size());
    boolean failed = false;
    for (ByteBuf slice : slices) {
      if (!failed) {
        try {
          decompressor.decodeOneMessage(slice);
        } catch (CompressionException e) {
          failed = true;
        }
      }
      slice.release();
    }
    assertThat(failed).isTrue();
    // Every buffer must be fully released regardless of where decoding failed.
    assertThat(slices).allSatisfy(b -> assertThat(b.refCnt()).isEqualTo(0));
  }
}
Use of tech.pegasys.teku.networking.eth2.rpc.core.encodings.compression.exceptions.CompressionException in the ConsenSys teku project, as shown in the SnappyCompressorTest method uncompress_maliciousBytes.
// netty compressor doesn't check that assumption
@Test
public void uncompress_maliciousBytes() {
  // Number of real uncompressed bytes the payload claims to encode
  final int uncompressedByteCount = 4;

  // Build a payload consisting of a single snappy header followed by one
  // valid single-byte frame per claimed uncompressed byte.
  final Bytes compressedByte = compressor.compress(Bytes.of(0x01));
  final Bytes singleByteFrame = compressedByte.slice(SNAPPY_HEADER.size());
  final Bytes[] parts = new Bytes[uncompressedByteCount + 1];
  parts[0] = SNAPPY_HEADER;
  for (int i = 1; i <= uncompressedByteCount; i++) {
    parts[i] = singleByteFrame;
  }
  final Bytes maliciousPayload = Bytes.concatenate(parts);

  // Sanity-check the premise: the crafted stream of valid frames is larger
  // than any honest compression of uncompressedByteCount bytes could be.
  final int maxExpectedCompressedBytes = compressor.getMaxCompressedLength(uncompressedByteCount);
  assertThat(maliciousPayload.size()).isGreaterThan(maxExpectedCompressedBytes);

  for (List<ByteBuf> slices : Utils.generateTestSlices(maliciousPayload)) {
    final Decompressor decompressor =
        new SnappyFramedCompressor().createDecompressor(uncompressedByteCount);
    boolean failed = false;
    for (ByteBuf slice : slices) {
      if (!failed) {
        try {
          decompressor.decodeOneMessage(slice);
        } catch (CompressionException e) {
          failed = true;
        }
      }
      slice.release();
    }
    assertThat(failed).isTrue();
    // Every buffer must be fully released regardless of where decoding failed.
    assertThat(slices).allSatisfy(b -> assertThat(b.refCnt()).isEqualTo(0));
  }
}
Aggregations