Use of io.netty.buffer.CompositeByteBuf in project Drill by Apache.
The class SaslEncryptionHandler, method encode.
public void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws IOException {
  if (!ctx.channel().isOpen()) {
    logger.debug("In " + RpcConstants.SASL_ENCRYPTION_HANDLER + " and channel is not open. "
        + "So releasing msg memory before encryption.");
    msg.release();
    return;
  }
  try {
    // If encryption is enabled then this handler will always get a ByteBuf of type CompositeByteBuf
    assert (msg instanceof CompositeByteBuf);
    final CompositeByteBuf cbb = (CompositeByteBuf) msg;
    final int numComponents = cbb.numComponents();
    // Get all the components inside the CompositeByteBuf for encryption
    for (int currentIndex = 0; currentIndex < numComponents; ++currentIndex) {
      final ByteBuf component = cbb.component(currentIndex);
      // The RPC message was already broken into chunks of at most wrapSizeLimit upstream, so every
      // component must fit within that limit.
      if (component.readableBytes() > wrapSizeLimit) {
        throw new RpcException(String.format("Component chunk size: %d is greater than the wrapSizeLimit: %d",
            component.readableBytes(), wrapSizeLimit));
      }
      // Uncomment the code below if msg can contain both direct and heap ByteBufs. Currently Drill only supports
      // direct ByteBufs, so the condition below would always be false. If the msg is always a heap ByteBuf then
      // also remove the allocation of origMsgBuffer from the constructor.
      /*if (component.hasArray()) {
        origMsg = component.array();
      } else {
        if (RpcConstants.EXTRA_DEBUGGING) {
          logger.trace("The input bytebuf is not backed by a byte array so allocating a new one");
        }*/
      final byte[] origMsg = origMsgBuffer;
      component.getBytes(component.readerIndex(), origMsg, 0, component.readableBytes());
      if (logger.isTraceEnabled()) {
        logger.trace("Trying to encrypt chunk of size: {} with wrapSizeLimit: {}",
            component.readableBytes(), wrapSizeLimit);
      }
      // The length to encrypt is the component length, not the origMsg length, since the latter can be greater.
      final byte[] wrappedMsg = saslCodec.wrap(origMsg, 0, component.readableBytes());
      if (logger.isTraceEnabled()) {
        logger.trace("Successfully encrypted message, original size: {} final size: {}",
            component.readableBytes(), wrappedMsg.length);
      }
      // Allocate a direct buffer for the encrypted byte array plus 4 octets for the length of the encrypted
      // message. A direct buffer is preferred because a non-direct buffer would later be copied by the channel
      // into thread-cached temporary direct memory sized to the largest message sent.
      final ByteBuf encryptedBuf = ctx.alloc().buffer(wrappedMsg.length + RpcConstants.LENGTH_FIELD_LENGTH);
      // Per SASL (RFC 2222/4422) the encrypted buffer must be prefixed with its length as 4 octets in network
      // byte order. The SASL framework provided by the JDK doesn't do this by default and leaves it up to the
      // application, whereas the Cyrus SASL implementation of sasl_encode does take care of it.
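      // Illustrative frame layout (added for clarity, not in the original source): for a 1000-byte wrapped
      // chunk, the bytes written below are [0x00, 0x00, 0x03, 0xE8] followed by the 1000 wrapped bytes.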
      lengthOctets.putInt(wrappedMsg.length);
      encryptedBuf.writeBytes(lengthOctets.array());
      // Reset the position for re-use in the next round
      lengthOctets.rewind();
      // Write the encrypted bytes into the buffer
      encryptedBuf.writeBytes(wrappedMsg);
      // Update the msg and component reader indices
      msg.skipBytes(component.readableBytes());
      component.skipBytes(component.readableBytes());
      // Add the encrypted buffer to the output to send it over the wire.
      out.add(encryptedBuf);
    }
  } catch (OutOfMemoryException e) {
    logger.warn("Failure allocating buffer on incoming stream due to memory limits.");
    msg.resetReaderIndex();
    outOfMemoryHandler.handle();
  } catch (IOException e) {
    logger.error("Something went wrong while wrapping the message: {} with MaxRawWrapSize: {} and error: {}",
        msg, wrapSizeLimit, e.getMessage());
    throw e;
  }
}
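The handler above writes each encrypted chunk as a 4-octet big-endian length followed by the wrapped bytes. Below is a minimal decode-side sketch (not the Drill source; the SaslFrameReader class and its saslClient field are illustrative assumptions) showing how such frames could be parsed and unwrapped with the JDK SASL API.

import io.netty.buffer.ByteBuf;

import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;

public class SaslFrameReader {

  private final SaslClient saslClient;

  public SaslFrameReader(SaslClient saslClient) {
    this.saslClient = saslClient;
  }

  /**
   * Reads one length-prefixed SASL frame from the buffer and returns the unwrapped bytes,
   * or null if a complete frame is not yet available.
   */
  public byte[] readFrame(ByteBuf in) throws SaslException {
    if (in.readableBytes() < 4) {
      return null;                            // length prefix not fully received yet
    }
    in.markReaderIndex();
    final int wrappedLength = in.readInt();   // 4 octets, network byte order
    if (in.readableBytes() < wrappedLength) {
      in.resetReaderIndex();                  // wait until the rest of the frame arrives
      return null;
    }
    final byte[] wrapped = new byte[wrappedLength];
    in.readBytes(wrapped);
    return saslClient.unwrap(wrapped, 0, wrappedLength);
  }
}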
Use of io.netty.buffer.CompositeByteBuf in project Drill by Apache.
The class ChunkCreationHandler, method encode.
@Override
protected void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
  if (RpcConstants.EXTRA_DEBUGGING) {
    logger.debug("ChunkCreationHandler called with msg {} of size {} with chunkSize {}",
        msg, msg.readableBytes(), chunkSize);
  }
  if (!ctx.channel().isOpen()) {
    logger.debug("Channel closed, skipping encode inside {}.", RpcConstants.CHUNK_CREATION_HANDLER);
    msg.release();
    return;
  }
  // Calculate the number of chunks based on configured chunk size and input msg size
  int numChunks = (int) Math.ceil((double) msg.readableBytes() / chunkSize);
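  // Worked example (illustrative values, not from the source): a 100,000-byte msg with a 32,768-byte
  // chunkSize gives ceil(100000 / 32768) = 4 chunks: three full 32,768-byte slices plus one 1,696-byte slice.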
  // Initialize a composite buffer to hold numChunks chunks.
  final CompositeByteBuf cbb = ctx.alloc().compositeBuffer(numChunks);
  int cbbWriteIndex = 0;
  int currentChunkLen = min(msg.readableBytes(), chunkSize);
  // Create slices of chunkSize from the input msg and add them to the composite buffer.
  while (numChunks > 0) {
    final ByteBuf chunkBuf = msg.slice(msg.readerIndex(), currentChunkLen);
    chunkBuf.retain();
    cbb.addComponent(chunkBuf);
    cbbWriteIndex += currentChunkLen;
    msg.skipBytes(currentChunkLen);
    --numChunks;
    currentChunkLen = min(msg.readableBytes(), chunkSize);
  }
  // Update the writerIndex of the composite byte buffer. Netty doesn't do it automatically.
  cbb.writerIndex(cbbWriteIndex);
  // Add the final composite ByteBuf to the output list to send it on the wire.
  out.add(cbb);
}
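Below is a minimal standalone sketch (not the Drill source; the ChunkingSketch class name, payload, and chunkSize are illustrative assumptions) of the same slicing scheme: the payload is cut into chunkSize slices, each slice is retained and added to a CompositeByteBuf, and the composite's writerIndex is set by hand because addComponent() does not advance it.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;

public class ChunkingSketch {

  public static void main(String[] args) {
    final int chunkSize = 4;
    final ByteBuf msg = Unpooled.wrappedBuffer(new byte[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9});

    int numChunks = (int) Math.ceil((double) msg.readableBytes() / chunkSize);   // ceil(10 / 4) = 3 chunks
    final CompositeByteBuf cbb = Unpooled.compositeBuffer(numChunks);

    int cbbWriteIndex = 0;
    int currentChunkLen = Math.min(msg.readableBytes(), chunkSize);
    while (numChunks > 0) {
      final ByteBuf chunkBuf = msg.slice(msg.readerIndex(), currentChunkLen);
      chunkBuf.retain();                 // the slice shares msg's memory, so keep it alive for the composite
      cbb.addComponent(chunkBuf);
      cbbWriteIndex += currentChunkLen;
      msg.skipBytes(currentChunkLen);
      --numChunks;
      currentChunkLen = Math.min(msg.readableBytes(), chunkSize);
    }
    cbb.writerIndex(cbbWriteIndex);      // addComponent() does not advance the writer index

    System.out.println("components = " + cbb.numComponents());   // 3
    System.out.println("readable   = " + cbb.readableBytes());   // 10
    cbb.release();                       // releases the retained slices
    msg.release();                       // releases the original wrapped buffer
  }
}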