Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project alluxio by Alluxio.
The class GrpcSerializationUtils, method getByteBufFromReadableBuffer.
/**
* Gets a Netty buffer directly from a gRPC ReadableBuffer.
*
* @param buffer the input buffer
* @return the raw ByteBuf, or null if the ByteBuf cannot be extracted
*/
public static ByteBuf getByteBufFromReadableBuffer(ReadableBuffer buffer) {
  if (!sZeroCopyReceiveSupported) {
    return null;
  }
  try {
    if (buffer instanceof CompositeReadableBuffer) {
      Queue<ReadableBuffer> buffers = (Queue<ReadableBuffer>) sCompositeBuffers.get(buffer);
      if (buffers.size() == 1) {
        return getByteBufFromReadableBuffer(buffers.peek());
      } else {
        CompositeByteBuf buf = PooledByteBufAllocator.DEFAULT.compositeBuffer();
        for (ReadableBuffer readableBuffer : buffers) {
          ByteBuf subBuffer = getByteBufFromReadableBuffer(readableBuffer);
          if (subBuffer == null) {
            return null;
          }
          buf.addComponent(true, subBuffer);
        }
        return buf;
      }
    } else if (buffer.getClass().equals(sReadableByteBuf.getDeclaringClass())) {
      return (ByteBuf) sReadableByteBuf.get(buffer);
    }
  } catch (Exception e) {
    LOG.warn("Failed to get data buffer from stream: {}.", e.toString());
    return null;
  }
  return null;
}
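The reflection fields (sCompositeBuffers, sReadableByteBuf) are internal to Alluxio's gRPC integration, but the composition step itself is plain Netty: sub-buffers are stitched into one logical buffer without copying their contents. A minimal, self-contained sketch of that pattern, using the unshaded io.netty.buffer package and hypothetical literal data in place of gRPC buffers:

import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.util.CharsetUtil;

public final class CompositeExample {
  public static void main(String[] args) {
    // Stitch two independently allocated buffers into one logical buffer without copying.
    CompositeByteBuf composite = PooledByteBufAllocator.DEFAULT.compositeBuffer();
    composite.addComponent(true, Unpooled.copiedBuffer("hello ", CharsetUtil.UTF_8));
    composite.addComponent(true, Unpooled.copiedBuffer("world", CharsetUtil.UTF_8));

    // The composite exposes the readable bytes of all components as one sequence.
    System.out.println(composite.toString(CharsetUtil.UTF_8)); // prints "hello world"

    // Releasing the composite also releases the components it took ownership of.
    composite.release();
  }
}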
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project netty by netty.
The class HttpObjectAggregatorTest, method checkContentBuffer.
private static void checkContentBuffer(FullHttpRequest aggregatedMessage) {
    CompositeByteBuf buffer = (CompositeByteBuf) aggregatedMessage.content();
    assertEquals(2, buffer.numComponents());
    List<ByteBuf> buffers = buffer.decompose(0, buffer.capacity());
    assertEquals(2, buffers.size());
    for (ByteBuf buf : buffers) {
        // None of these should be composites: the buffer was decomposed above, so there is no deep hierarchy.
        assertFalse(buf instanceof CompositeByteBuf);
    }
    aggregatedMessage.release();
}
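The assertions above depend on decompose(offset, length) flattening the aggregated content into the buffers it was built from. A small sketch of that behaviour, independent of the HTTP test fixture (the exact slice types returned by decompose are a Netty implementation detail):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import java.util.List;

public final class DecomposeExample {
  public static void main(String[] args) {
    CompositeByteBuf whole = Unpooled.compositeBuffer();
    whole.addComponent(true, Unpooled.wrappedBuffer(new byte[] { 1, 2, 3 }));
    whole.addComponent(true, Unpooled.wrappedBuffer(new byte[] { 4, 5 }));

    // decompose returns views over the individual components rather than a nested composite.
    List<ByteBuf> parts = whole.decompose(0, whole.readableBytes());
    for (ByteBuf part : parts) {
      System.out.println(part.readableBytes()); // 3, then 2
    }

    // As in the test above, only the composite is released; the decomposed
    // buffers are views that go away with it.
    whole.release();
  }
}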
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project netty by netty.
The class SnappyFrameEncoderTest, method testStreamStartIsOnlyWrittenOnce.
@Test
public void testStreamStartIsOnlyWrittenOnce() throws Exception {
    ByteBuf in = Unpooled.wrappedBuffer(new byte[] { 'n', 'e', 't', 't', 'y' });
    channel.writeOutbound(in.retain());
    // rewind the buffer to write the same data
    in.resetReaderIndex();
    channel.writeOutbound(in);
    assertTrue(channel.finish());

    ByteBuf expected = Unpooled.wrappedBuffer(new byte[] {
            (byte) 0xff, 0x06, 0x00, 0x00, 0x73, 0x4e, 0x61, 0x50, 0x70, 0x59,
            0x01, 0x09, 0x00, 0x00, 0x6f, -0x68, 0x2e, -0x47, 'n', 'e', 't', 't', 'y',
            0x01, 0x09, 0x00, 0x00, 0x6f, -0x68, 0x2e, -0x47, 'n', 'e', 't', 't', 'y'
    });

    CompositeByteBuf actual = Unpooled.compositeBuffer();
    for (;;) {
        ByteBuf m = channel.readOutbound();
        if (m == null) {
            break;
        }
        actual.addComponent(true, m);
    }
    assertEquals(expected, actual);
    expected.release();
    actual.release();
}
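The final assertEquals(expected, actual) holds because ByteBuf equality is defined over readable content, not over how the bytes are laid out internally, so a composite assembled from several outbound chunks compares equal to one flat buffer with the same bytes. A short sketch of that property, separate from the Snappy fixture:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;

public final class ContentEqualityExample {
  public static void main(String[] args) {
    ByteBuf flat = Unpooled.wrappedBuffer(new byte[] { 1, 2, 3, 4 });

    CompositeByteBuf split = Unpooled.compositeBuffer();
    split.addComponent(true, Unpooled.wrappedBuffer(new byte[] { 1, 2 }));
    split.addComponent(true, Unpooled.wrappedBuffer(new byte[] { 3, 4 }));

    // ByteBuf#equals compares readable bytes, not internal layout,
    // so a composite and a flat buffer with the same content are equal.
    System.out.println(flat.equals(split)); // true

    flat.release();
    split.release();
  }
}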
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project netty by netty.
The class BrotliEncoderTest, method readDecompressed.
@Override
protected ByteBuf readDecompressed(final int dataLength) throws Exception {
    CompositeByteBuf decompressed = Unpooled.compositeBuffer();
    ByteBuf msg;
    while ((msg = channel.readOutbound()) != null) {
        if (msg.isReadable()) {
            decompressed.addComponent(true, decompress(msg, -1));
        } else {
            msg.release();
        }
    }
    return decompressed;
}
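Note that addComponent(true, ...) transfers ownership of each decompressed chunk to the composite, so the caller of readDecompressed only has to release the returned buffer. A hedged sketch of that ownership transfer with a throwaway buffer:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;

public final class OwnershipExample {
  public static void main(String[] args) {
    ByteBuf chunk = Unpooled.wrappedBuffer(new byte[] { 1, 2 });

    CompositeByteBuf aggregate = Unpooled.compositeBuffer();
    // addComponent does not call retain(): the composite takes over chunk's reference.
    aggregate.addComponent(true, chunk);

    // Releasing the composite therefore releases the chunk as well.
    aggregate.release();
    System.out.println(chunk.refCnt()); // 0
  }
}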
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project netty by netty.
The class MessageAggregator, method decode.
@Override
protected void decode(final ChannelHandlerContext ctx, I msg, List<Object> out) throws Exception {
    assert aggregating;

    if (isStartMessage(msg)) {
        handlingOversizedMessage = false;
        if (currentMessage != null) {
            currentMessage.release();
            currentMessage = null;
            throw new MessageAggregationException();
        }

        @SuppressWarnings("unchecked")
        S m = (S) msg;

        // Send the continue response if necessary (e.g. 'Expect: 100-continue' header)
        // Check before content length. Failing an expectation may result in a different response being sent.
        Object continueResponse = newContinueResponse(m, maxContentLength, ctx.pipeline());
        if (continueResponse != null) {
            // Cache the write listener for reuse.
            ChannelFutureListener listener = continueResponseWriteListener;
            if (listener == null) {
                continueResponseWriteListener = listener = new ChannelFutureListener() {
                    @Override
                    public void operationComplete(ChannelFuture future) throws Exception {
                        if (!future.isSuccess()) {
                            ctx.fireExceptionCaught(future.cause());
                        }
                    }
                };
            }

            // Make sure to call this before writing, otherwise reference counts may be invalid.
            boolean closeAfterWrite = closeAfterContinueResponse(continueResponse);
            handlingOversizedMessage = ignoreContentAfterContinueResponse(continueResponse);

            final ChannelFuture future = ctx.writeAndFlush(continueResponse).addListener(listener);

            if (closeAfterWrite) {
                future.addListener(ChannelFutureListener.CLOSE);
                return;
            }
            if (handlingOversizedMessage) {
                return;
            }
        } else if (isContentLengthInvalid(m, maxContentLength)) {
            // If the content length is set, preemptively close if it is too large.
            invokeHandleOversizedMessage(ctx, m);
            return;
        }

        if (m instanceof DecoderResultProvider && !((DecoderResultProvider) m).decoderResult().isSuccess()) {
            O aggregated;
            if (m instanceof ByteBufHolder) {
                aggregated = beginAggregation(m, ((ByteBufHolder) m).content().retain());
            } else {
                aggregated = beginAggregation(m, EMPTY_BUFFER);
            }
            finishAggregation0(aggregated);
            out.add(aggregated);
            return;
        }

        // A streamed message - initialize the cumulative buffer, and wait for incoming chunks.
        CompositeByteBuf content = ctx.alloc().compositeBuffer(maxCumulationBufferComponents);
        if (m instanceof ByteBufHolder) {
            appendPartialContent(content, ((ByteBufHolder) m).content());
        }
        currentMessage = beginAggregation(m, content);
    } else if (isContentMessage(msg)) {
        if (currentMessage == null) {
            // The start message was already discarded (for example because it was oversized),
            // so keep discarding content until the beginning of the next request/response.
            return;
        }

        // Merge the received chunk into the content of the current message.
        CompositeByteBuf content = (CompositeByteBuf) currentMessage.content();

        @SuppressWarnings("unchecked")
        final C m = (C) msg;

        // Handle oversized message.
        if (content.readableBytes() > maxContentLength - m.content().readableBytes()) {
            // By convention, full message type extends first message type.
            @SuppressWarnings("unchecked")
            S s = (S) currentMessage;
            invokeHandleOversizedMessage(ctx, s);
            return;
        }

        // Append the content of the chunk.
        appendPartialContent(content, m.content());

        // Give the subtypes a chance to merge additional information such as trailing headers.
        aggregate(currentMessage, m);

        final boolean last;
        if (m instanceof DecoderResultProvider) {
            DecoderResult decoderResult = ((DecoderResultProvider) m).decoderResult();
            if (!decoderResult.isSuccess()) {
                if (currentMessage instanceof DecoderResultProvider) {
                    ((DecoderResultProvider) currentMessage).setDecoderResult(
                            DecoderResult.failure(decoderResult.cause()));
                }
                last = true;
            } else {
                last = isLastContentMessage(m);
            }
        } else {
            last = isLastContentMessage(m);
        }

        if (last) {
            finishAggregation0(currentMessage);

            // All done
            out.add(currentMessage);
            currentMessage = null;
        }
    } else {
        throw new MessageAggregationException();
    }
}
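appendPartialContent is not part of this excerpt. Conceptually it only needs to retain a readable chunk and add it to the composite; a sketch of such a helper (details may differ from the actual Netty source) could look like this:

private static void appendPartialContent(CompositeByteBuf content, ByteBuf partialContent) {
    if (partialContent.isReadable()) {
        // Retain the chunk because the composite takes over one reference, and
        // grow the writer index so the bytes become readable through the composite.
        content.addComponent(true, partialContent.retain());
    }
}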