Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project drill by axbaretto.
The class ChunkCreationHandler, method encode:
@Override
protected void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("ChunkCreationHandler called with msg {} of size {} with chunkSize {}",
                msg, msg.readableBytes(), chunkSize);
    }

    if (!ctx.channel().isOpen()) {
        logger.debug("Channel closed, skipping encode inside {}.", RpcConstants.CHUNK_CREATION_HANDLER);
        msg.release();
        return;
    }

    // Calculate the number of chunks based on the configured chunk size and the input msg size.
    int numChunks = (int) Math.ceil((double) msg.readableBytes() / chunkSize);

    // Initialize a composite buffer to hold numChunks chunks.
    final CompositeByteBuf cbb = ctx.alloc().compositeBuffer(numChunks);
    int cbbWriteIndex = 0;
    int currentChunkLen = min(msg.readableBytes(), chunkSize);

    // Create slices of up to chunkSize bytes from the input msg and add them to the composite buffer.
    while (numChunks > 0) {
        final ByteBuf chunkBuf = msg.slice(msg.readerIndex(), currentChunkLen);
        chunkBuf.retain();
        cbb.addComponent(chunkBuf);
        cbbWriteIndex += currentChunkLen;
        msg.skipBytes(currentChunkLen);
        --numChunks;
        currentChunkLen = min(msg.readableBytes(), chunkSize);
    }

    // Update the writerIndex of the composite buffer; Netty doesn't do it automatically here.
    cbb.writerIndex(cbbWriteIndex);

    // Add the final composite ByteBuf to the output list.
    out.add(cbb);
}
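Note the manual cbb.writerIndex(cbbWriteIndex) call above: the single-argument addComponent(ByteBuf) appends a component without advancing the composite's writerIndex, whereas the addComponent(true, ByteBuf) overload advances it automatically. A minimal, self-contained sketch of the difference (the class name CompositeWriterIndexDemo is ours):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;

public class CompositeWriterIndexDemo {
    public static void main(String[] args) {
        ByteBuf a = Unpooled.copiedBuffer(new byte[] { 1, 2, 3 });
        ByteBuf b = Unpooled.copiedBuffer(new byte[] { 4, 5 });

        // addComponent(ByteBuf) leaves writerIndex at 0, so nothing is readable
        // until we advance it ourselves, as ChunkCreationHandler does.
        CompositeByteBuf manual = Unpooled.compositeBuffer();
        manual.addComponent(a.retainedSlice());
        manual.addComponent(b.retainedSlice());
        System.out.println(manual.readableBytes()); // 0
        manual.writerIndex(5);
        System.out.println(manual.readableBytes()); // 5

        // addComponent(true, ByteBuf) advances writerIndex automatically.
        CompositeByteBuf auto = Unpooled.compositeBuffer();
        auto.addComponent(true, a);
        auto.addComponent(true, b);
        System.out.println(auto.readableBytes()); // 5

        manual.release();
        auto.release();
    }
}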
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project spring-security by spring-projects.
The class AuthenticationPayloadInterceptorTests, method createRequestPayload:
private Payload createRequestPayload() {
    UsernamePasswordMetadata credentials = new UsernamePasswordMetadata("user", "password");
    BasicAuthenticationEncoder encoder = new BasicAuthenticationEncoder();
    DefaultDataBufferFactory factory = new DefaultDataBufferFactory();
    ResolvableType elementType = ResolvableType.forClass(UsernamePasswordMetadata.class);
    MimeType mimeType = UsernamePasswordMetadata.BASIC_AUTHENTICATION_MIME_TYPE;
    Map<String, Object> hints = null;
    DataBuffer dataBuffer = encoder.encodeValue(credentials, factory, elementType, mimeType, hints);
    ByteBufAllocator allocator = ByteBufAllocator.DEFAULT;
    CompositeByteBuf metadata = allocator.compositeBuffer();
    CompositeMetadataCodec.encodeAndAddMetadata(metadata, allocator, mimeType.toString(),
            NettyDataBufferFactory.toByteBuf(dataBuffer));
    return DefaultPayload.create(allocator.buffer(), metadata);
}
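To inspect such a payload on the receiving side, the metadata buffer can be walked with RSocket's CompositeMetadata, which iterates the (MIME type, content) entries that encodeAndAddMetadata wrote. A hedged sketch; the MetadataDump class is a hypothetical helper, not part of the test:

import io.netty.buffer.ByteBuf;
import io.rsocket.Payload;
import io.rsocket.metadata.CompositeMetadata;

final class MetadataDump {
    // Hypothetical helper: prints each metadata entry's MIME type and length
    // for a payload such as the one built by createRequestPayload().
    static void dump(Payload payload) {
        ByteBuf metadata = payload.sliceMetadata();
        // 'false': iterate over plain slices without retaining them.
        for (CompositeMetadata.Entry entry : new CompositeMetadata(metadata, false)) {
            System.out.println(entry.getMimeType() + " -> "
                    + entry.getContent().readableBytes() + " bytes");
        }
    }
}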
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project netty by netty.
The class CompositeBufferGatheringWriteTest, method testCompositeBufferPartialWriteDoesNotCorruptData:
public void testCompositeBufferPartialWriteDoesNotCorruptData(ServerBootstrap sb, Bootstrap cb) throws Throwable {
    // The scenario is the following:
    // Limit SO_SNDBUF so that a single buffer can be written, and part of a CompositeByteBuf at the
    // same time. We then write the single buffer, the CompositeByteBuf, and another single buffer and
    // verify the data is not corrupted when we read it on the other side.
    Channel serverChannel = null;
    Channel clientChannel = null;
    try {
        Random r = new Random();
        final int soSndBuf = 1024;
        ByteBufAllocator alloc = ByteBufAllocator.DEFAULT;
        final ByteBuf expectedContent = alloc.buffer(soSndBuf * 2);
        expectedContent.writeBytes(newRandomBytes(expectedContent.writableBytes(), r));
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicReference<Object> clientReceived = new AtomicReference<Object>();
        sb.childOption(ChannelOption.SO_SNDBUF, soSndBuf)
          .childHandler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                    @Override
                    public void channelActive(ChannelHandlerContext ctx) throws Exception {
                        compositeBufferPartialWriteDoesNotCorruptDataInitServerConfig(
                                ctx.channel().config(), soSndBuf);
                        // First single write
                        int offset = soSndBuf - 100;
                        ctx.write(expectedContent.retainedSlice(expectedContent.readerIndex(), offset));
                        // Build and write CompositeByteBuf
                        CompositeByteBuf compositeByteBuf = ctx.alloc().compositeBuffer();
                        compositeByteBuf.addComponent(true,
                                expectedContent.retainedSlice(expectedContent.readerIndex() + offset, 50));
                        offset += 50;
                        compositeByteBuf.addComponent(true,
                                expectedContent.retainedSlice(expectedContent.readerIndex() + offset, 200));
                        offset += 200;
                        ctx.write(compositeByteBuf);
                        // Write a single buffer that is smaller than the second component of the
                        // CompositeByteBuf above but small enough to fit in the remaining space
                        // allowed by the soSndBuf amount.
                        ctx.write(expectedContent.retainedSlice(expectedContent.readerIndex() + offset, 50));
                        offset += 50;
                        // Write the remainder of the content
                        ctx.writeAndFlush(expectedContent.retainedSlice(
                                expectedContent.readerIndex() + offset,
                                expectedContent.readableBytes() - expectedContent.readerIndex() - offset))
                           .addListener(ChannelFutureListener.CLOSE);
                    }

                    @Override
                    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
                        // IOException is fine as it will also close the channel and may just be a
                        // connection reset.
                        if (!(cause instanceof IOException)) {
                            clientReceived.set(cause);
                            latch.countDown();
                        }
                    }
                });
            }
        });
        cb.handler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
                    private ByteBuf aggregator;

                    @Override
                    public void handlerAdded(ChannelHandlerContext ctx) {
                        aggregator = ctx.alloc().buffer(expectedContent.readableBytes());
                    }

                    @Override
                    public void channelRead(ChannelHandlerContext ctx, Object msg) {
                        try {
                            if (msg instanceof ByteBuf) {
                                aggregator.writeBytes((ByteBuf) msg);
                            }
                        } finally {
                            ReferenceCountUtil.release(msg);
                        }
                    }

                    @Override
                    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
                        // IOException is fine as it will also close the channel and may just be a
                        // connection reset.
                        if (!(cause instanceof IOException)) {
                            clientReceived.set(cause);
                            latch.countDown();
                        }
                    }

                    @Override
                    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
                        if (clientReceived.compareAndSet(null, aggregator)) {
                            try {
                                assertEquals(expectedContent.readableBytes(), aggregator.readableBytes());
                            } catch (Throwable cause) {
                                aggregator.release();
                                aggregator = null;
                                clientReceived.set(cause);
                            } finally {
                                latch.countDown();
                            }
                        }
                    }
                });
            }
        });
        serverChannel = sb.bind().syncUninterruptibly().channel();
        clientChannel = cb.connect(serverChannel.localAddress()).syncUninterruptibly().channel();
        latch.await();
        Object received = clientReceived.get();
        if (received instanceof ByteBuf) {
            ByteBuf actual = (ByteBuf) received;
            assertEquals(expectedContent, actual);
            expectedContent.release();
            actual.release();
        } else {
            expectedContent.release();
            throw (Throwable) received;
        }
    } finally {
        if (clientChannel != null) {
            clientChannel.close().sync();
        }
        if (serverChannel != null) {
            serverChannel.close().sync();
        }
    }
}
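The server side above carves every write out of the single expectedContent buffer with retainedSlice, which returns an independent view that shares the underlying memory and reference count without moving the parent's readerIndex. A small sketch of those semantics (the class name RetainedSliceDemo is ours):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.nio.charset.StandardCharsets;

public class RetainedSliceDemo {
    public static void main(String[] args) {
        ByteBuf content = Unpooled.copiedBuffer("hello world", StandardCharsets.US_ASCII);
        System.out.println(content.refCnt()); // 1

        // retainedSlice(index, length) shares memory with the parent, bumps the
        // shared reference count, and leaves the parent's readerIndex untouched.
        ByteBuf slice = content.retainedSlice(0, 5);
        System.out.println(content.refCnt());      // 2
        System.out.println(content.readerIndex()); // still 0
        System.out.println(slice.toString(StandardCharsets.US_ASCII)); // "hello"

        slice.release();   // drops the shared count back to 1
        content.release(); // frees the memory
    }
}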
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project netty by netty.
The class AbstractEncoderTest, method readDecompressed:
protected ByteBuf readDecompressed(final int dataLength) throws Exception {
    CompositeByteBuf compressed = Unpooled.compositeBuffer();
    ByteBuf msg;
    while ((msg = channel.readOutbound()) != null) {
        compressed.addComponent(true, msg);
    }
    return decompress(compressed, dataLength);
}
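The loop above is a common EmbeddedChannel idiom: drain every outbound buffer into one composite so the whole encoded stream can be inspected as a single ByteBuf. A self-contained sketch of the same pattern (SnappyFrameEncoder is just a stand-in handler here; any outbound encoder would do):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.compression.SnappyFrameEncoder;

public class DrainOutboundDemo {
    public static void main(String[] args) {
        EmbeddedChannel channel = new EmbeddedChannel(new SnappyFrameEncoder());
        channel.writeOutbound(Unpooled.copiedBuffer(new byte[256]));
        channel.finish();

        // Drain every outbound ByteBuf into one composite; addComponent(true, ...)
        // takes ownership of each component and advances the writerIndex.
        CompositeByteBuf whole = Unpooled.compositeBuffer();
        ByteBuf msg;
        while ((msg = channel.readOutbound()) != null) {
            whole.addComponent(true, msg);
        }
        System.out.println("encoded to " + whole.readableBytes() + " bytes");
        whole.release();
    }
}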
Use of org.apache.flink.shaded.netty4.io.netty.buffer.CompositeByteBuf in project netty by netty.
The class FastLzIntegrationTest, method testIdentity:
// Test batched flow of data.
@Override
protected void testIdentity(final byte[] data, boolean heapBuffer) {
    initChannels();
    final ByteBuf original = heapBuffer
            ? Unpooled.wrappedBuffer(data)
            : Unpooled.directBuffer(data.length).writeBytes(data);
    final CompositeByteBuf compressed = Unpooled.compositeBuffer();
    final CompositeByteBuf decompressed = Unpooled.compositeBuffer();
    try {
        int written = 0, length = rand.nextInt(100);
        while (written + length < data.length) {
            ByteBuf in = Unpooled.wrappedBuffer(data, written, length);
            encoder.writeOutbound(in);
            written += length;
            length = rand.nextInt(100);
        }
        ByteBuf in = Unpooled.wrappedBuffer(data, written, data.length - written);
        encoder.writeOutbound(in);
        encoder.finish();
        ByteBuf msg;
        while ((msg = encoder.readOutbound()) != null) {
            compressed.addComponent(true, msg);
        }
        assertThat(compressed, is(notNullValue()));
        final byte[] compressedArray = new byte[compressed.readableBytes()];
        compressed.readBytes(compressedArray);
        written = 0;
        length = rand.nextInt(100);
        while (written + length < compressedArray.length) {
            in = Unpooled.wrappedBuffer(compressedArray, written, length);
            decoder.writeInbound(in);
            written += length;
            length = rand.nextInt(100);
        }
        in = Unpooled.wrappedBuffer(compressedArray, written, compressedArray.length - written);
        decoder.writeInbound(in);
        assertFalse(compressed.isReadable());
        while ((msg = decoder.readInbound()) != null) {
            decompressed.addComponent(true, msg);
        }
        assertEquals(original, decompressed);
    } finally {
        compressed.release();
        decompressed.release();
        original.release();
        closeChannels();
    }
}
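The random-sized chunking used twice in testIdentity is a reusable fuzzing idiom for codec tests. Pulled out as a standalone helper it might look like the sketch below (the ChunkFeeder class and method name are ours, not part of the test):

import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
import java.util.Random;

final class ChunkFeeder {
    // Hypothetical helper: replays data into the channel as inbound chunks of
    // random length 0..99, mirroring the batched-flow loops in testIdentity.
    static void feedInRandomChunks(EmbeddedChannel channel, byte[] data, Random rand) {
        int written = 0;
        int length = rand.nextInt(100);
        while (written + length < data.length) {
            channel.writeInbound(Unpooled.wrappedBuffer(data, written, length));
            written += length;
            length = rand.nextInt(100);
        }
        // The final chunk carries whatever remains.
        channel.writeInbound(Unpooled.wrappedBuffer(data, written, data.length - written));
    }
}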