Example 46 with CompositeByteBuf

use of io.netty.buffer.CompositeByteBuf in project intellij-community by JetBrains.

the class Decoder method getBufferIfSufficient.

@Nullable
protected final ByteBuf getBufferIfSufficient(@NotNull ByteBuf input, int requiredLength, @NotNull ChannelHandlerContext context) {
    if (!input.isReadable()) {
        return null;
    }
    if (cumulation == null) {
        if (input.readableBytes() < requiredLength) {
            // not enough data yet - retain the input and keep it as the cumulation
            cumulation = input;
            input.retain();
            input.touch();
            return null;
        } else {
            return input;
        }
    } else {
        int currentAccumulatedByteCount = cumulation.readableBytes();
        if ((currentAccumulatedByteCount + input.readableBytes()) < requiredLength) {
            CompositeByteBuf compositeByteBuf;
            if ((cumulation instanceof CompositeByteBuf)) {
                compositeByteBuf = (CompositeByteBuf) cumulation;
            } else {
                compositeByteBuf = context.alloc().compositeBuffer(DEFAULT_MAX_COMPOSITE_BUFFER_COMPONENTS);
                compositeByteBuf.addComponent(cumulation);
                cumulation = compositeByteBuf;
            }
            compositeByteBuf.addComponent(input);
            input.retain();
            input.touch();
            return null;
        } else {
            CompositeByteBuf buffer;
            if (cumulation instanceof CompositeByteBuf) {
                buffer = (CompositeByteBuf) cumulation;
                buffer.addComponent(input);
            } else {
                // the client may use this composite to cumulate more data - don't impose an artificial limit of 2 components
                buffer = context.alloc().compositeBuffer(DEFAULT_MAX_COMPOSITE_BUFFER_COMPONENTS);
                buffer.addComponents(cumulation, input);
            }
            // addComponent does not advance writerIndex, so set it explicitly to requiredLength here
            buffer.writerIndex(requiredLength);
            input.skipBytes(requiredLength - currentAccumulatedByteCount);
            input.retain();
            input.touch();
            cumulation = null;
            return buffer;
        }
    }
}
Also used : CompositeByteBuf(io.netty.buffer.CompositeByteBuf) Nullable(org.jetbrains.annotations.Nullable)
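
A typical caller drives getBufferIfSufficient once per read and acts only when it returns non-null. A minimal sketch of such a subclass, assuming Decoder dispatches reads to a messageReceived callback as Netty-style decoders commonly do; FixedLengthDecoder, MESSAGE_LENGTH, and handleFrame are illustrative names, not IntelliJ API.

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;

final class FixedLengthDecoder extends Decoder {

    // illustrative fixed frame size
    private static final int MESSAGE_LENGTH = 16;

    @Override
    protected void messageReceived(ChannelHandlerContext context, ByteBuf input) throws Exception {
        // null until MESSAGE_LENGTH bytes have accumulated; partial reads are
        // cumulated into a CompositeByteBuf internally by getBufferIfSufficient
        ByteBuf buffer = getBufferIfSufficient(input, MESSAGE_LENGTH, context);
        if (buffer != null) {
            handleFrame(buffer.readSlice(MESSAGE_LENGTH));
        }
    }

    private void handleFrame(ByteBuf frame) {
        // application-specific handling of one complete frame
    }
}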

Example 47 with CompositeByteBuf

use of io.netty.buffer.CompositeByteBuf in project drill by axbaretto.

the class SaslEncryptionHandler method encode.

public void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws IOException {
    if (!ctx.channel().isOpen()) {
        logger.debug("In " + RpcConstants.SASL_ENCRYPTION_HANDLER + " and channel is not open. " + "So releasing msg memory before encryption.");
        msg.release();
        return;
    }
    try {
        // If encryption is enabled, this handler always receives a ByteBuf of type CompositeByteBuf
        assert (msg instanceof CompositeByteBuf);
        final CompositeByteBuf cbb = (CompositeByteBuf) msg;
        final int numComponents = cbb.numComponents();
        // Get all the components inside the Composite ByteBuf for encryption
        for (int currentIndex = 0; currentIndex < numComponents; ++currentIndex) {
            final ByteBuf component = cbb.component(currentIndex);
            // An earlier handler in the pipeline breaks the RPC message into chunks of at most wrapSizeLimit,
            // so a larger component indicates a bug.
            if (component.readableBytes() > wrapSizeLimit) {
                throw new RpcException(String.format("Component Chunk size: %d is greater than the wrapSizeLimit: %d", component.readableBytes(), wrapSizeLimit));
            }
            // Uncomment the code below if msg can contain both Direct and Heap ByteBufs. Currently Drill only supports
            // DirectByteBuf, so the condition below will always be false. If the msgs are always HeapByteBufs, then
            // also remove the allocation of origMsgBuffer from the constructor.
            /*
            if (component.hasArray()) {
                origMsg = component.array();
            } else {
                if (RpcConstants.EXTRA_DEBUGGING) {
                    logger.trace("The input bytebuf is not backed by a byte array so allocating a new one");
                }
            */
            final byte[] origMsg = origMsgBuffer;
            component.getBytes(component.readerIndex(), origMsg, 0, component.readableBytes());
            if (logger.isTraceEnabled()) {
                logger.trace("Trying to encrypt chunk of size:{} with wrapSizeLimit:{} and chunkMode: {}", component.readableBytes(), wrapSizeLimit);
            }
            // Length to encrypt will be component length not origMsg length since that can be greater.
            final byte[] wrappedMsg = saslCodec.wrap(origMsg, 0, component.readableBytes());
            if (logger.isTraceEnabled()) {
                logger.trace("Successfully encrypted message, original size: {} Final Size: {}", component.readableBytes(), wrappedMsg.length);
            }
            // Allocate the buffer (a direct ByteBuf) for copying the encrypted byte array plus 4 octets for the length
            // of the encrypted message. Direct memory is preferred because a non-direct buffer would later be copied by
            // the channel into a temporary direct buffer cached per thread, sized to the largest message sent.
            final ByteBuf encryptedBuf = ctx.alloc().buffer(wrappedMsg.length + RpcConstants.LENGTH_FIELD_LENGTH);
            // Per SASL framing (RFC 2222/4422), the first 4 octets hold the length of the encrypted buffer in network
            // byte order. The SASL framework in the JDK doesn't write this by default and leaves it up to the
            // application, whereas the Cyrus SASL implementation of sasl_encode does take care of it.
            lengthOctets.putInt(wrappedMsg.length);
            encryptedBuf.writeBytes(lengthOctets.array());
            // reset the position for re-use in next round
            lengthOctets.rewind();
            // Write the encrypted bytes inside the buffer
            encryptedBuf.writeBytes(wrappedMsg);
            // Update the msg and component reader index
            msg.skipBytes(component.readableBytes());
            component.skipBytes(component.readableBytes());
            // Add the encrypted buffer into the output to send it on wire.
            out.add(encryptedBuf);
        }
    } catch (OutOfMemoryException e) {
        logger.warn("Failure allocating buffer on incoming stream due to memory limits.");
        msg.resetReaderIndex();
        outOfMemoryHandler.handle();
    } catch (IOException e) {
        logger.error("Something went wrong while wrapping the message: {} with MaxRawWrapSize: {}, ChunkMode: {} " + "and error: {}", msg, wrapSizeLimit, e.getMessage());
        throw e;
    }
}
Also used : CompositeByteBuf(io.netty.buffer.CompositeByteBuf) IOException(java.io.IOException) ByteBuf(io.netty.buffer.ByteBuf) OutOfMemoryException(org.apache.drill.exec.exception.OutOfMemoryException)
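
On the receiving side, the 4-octet prefix written above must be stripped before unwrapping. A minimal sketch of that decode step, assuming a saslCodec.unwrap(byte[], int, int) counterpart to wrap; decodeChunk and its surrounding usage are illustrative, not Drill's actual decryption handler.

import java.io.IOException;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;

// Strip the 4-octet big-endian length prefix, then unwrap that many bytes.
ByteBuf decodeChunk(ChannelHandlerContext ctx, ByteBuf in) throws IOException {
    if (in.readableBytes() < RpcConstants.LENGTH_FIELD_LENGTH) {
        return null; // wait for the complete length prefix
    }
    in.markReaderIndex();
    final int wrappedLength = in.readInt(); // network byte order, per the SASL framing above
    if (in.readableBytes() < wrappedLength) {
        in.resetReaderIndex(); // wait for the complete encrypted chunk
        return null;
    }
    final byte[] wrapped = new byte[wrappedLength];
    in.readBytes(wrapped);
    // assumed mirror of saslCodec.wrap(...) used in encode()
    final byte[] plain = saslCodec.unwrap(wrapped, 0, wrappedLength);
    final ByteBuf decryptedBuf = ctx.alloc().buffer(plain.length);
    decryptedBuf.writeBytes(plain);
    return decryptedBuf;
}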

Example 48 with CompositeByteBuf

use of io.netty.buffer.CompositeByteBuf in project drill by axbaretto.

the class RpcEncoder method encode.

@Override
protected void encode(ChannelHandlerContext ctx, OutboundRpcMessage msg, List<Object> out) throws Exception {
    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("Rpc Encoder called with msg {}", msg);
    }
    if (!ctx.channel().isOpen()) {
        // output.add(ctx.alloc().buffer(0));
        logger.debug("Channel closed, skipping encode.");
        msg.release();
        return;
    }
    try {
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Encoding outbound message {}", msg);
        }
        // first we build the RpcHeader
        RpcHeader header = RpcHeader.newBuilder()
                .setMode(msg.mode)
                .setCoordinationId(msg.coordinationId)
                .setRpcType(msg.rpcType)
                .build();
        // figure out the full length
        int headerLength = header.getSerializedSize();
        int protoBodyLength = msg.pBody.getSerializedSize();
        int rawBodyLength = msg.getRawBodySize();
        int fullLength = HEADER_TAG_LENGTH + getRawVarintSize(headerLength) + headerLength
                + PROTOBUF_BODY_TAG_LENGTH + getRawVarintSize(protoBodyLength) + protoBodyLength;
        if (rawBodyLength > 0) {
            fullLength += (RAW_BODY_TAG_LENGTH + getRawVarintSize(rawBodyLength) + rawBodyLength);
        }
        ByteBuf buf = ctx.alloc().buffer();
        OutputStream os = new ByteBufOutputStream(buf);
        CodedOutputStream cos = CodedOutputStream.newInstance(os);
        // write full length first (this is length delimited stream).
        cos.writeRawVarint32(fullLength);
        // write header
        cos.writeRawVarint32(HEADER_TAG);
        cos.writeRawVarint32(headerLength);
        header.writeTo(cos);
        // write protobuf body length and body
        cos.writeRawVarint32(PROTOBUF_BODY_TAG);
        cos.writeRawVarint32(protoBodyLength);
        msg.pBody.writeTo(cos);
        // if exists, write data body and tag.
        if (msg.getRawBodySize() > 0) {
            if (RpcConstants.EXTRA_DEBUGGING) {
                logger.debug("Writing raw body of size {}", msg.getRawBodySize());
            }
            cos.writeRawVarint32(RAW_BODY_TAG);
            cos.writeRawVarint32(rawBodyLength);
            // flush so that the data bodies land after the header in case cos is buffering
            cos.flush();
            final CompositeByteBuf cbb = ctx.alloc().compositeBuffer(msg.dBodies.length + 1);
            cbb.addComponent(buf);
            int bufLength = buf.readableBytes();
            for (ByteBuf b : msg.dBodies) {
                cbb.addComponent(b);
                bufLength += b.readableBytes();
            }
            cbb.writerIndex(bufLength);
            out.add(cbb);
        } else {
            cos.flush();
            out.add(buf);
        }
        if (RpcConstants.SOME_DEBUGGING) {
            logger.debug("Wrote message length {}:{} bytes (head:body).  Message: " + msg, getRawVarintSize(fullLength), fullLength);
        }
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Sent message.  Ending writer index was {}.", buf.writerIndex());
        }
    } finally {
        // make sure to release the RPC message's underlying byte buffers
        // msg.release();
    }
}
Also used : CompositeByteBuf(io.netty.buffer.CompositeByteBuf) RpcHeader(org.apache.drill.exec.proto.GeneralRPCProtos.RpcHeader) ByteBufOutputStream(io.netty.buffer.ByteBufOutputStream) OutputStream(java.io.OutputStream) CodedOutputStream(com.google.protobuf.CodedOutputStream) ByteBuf(io.netty.buffer.ByteBuf)
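
The manual bufLength bookkeeping above is needed because addComponent(ByteBuf) does not advance the composite's writerIndex. On Netty 4.1+, the addComponents(boolean, ByteBuf...) overload can advance it automatically; a minimal sketch, with header and bodies standing in for buf and msg.dBodies:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;

// 'header' and 'bodies' are illustrative placeholders
ByteBuf header = Unpooled.wrappedBuffer(new byte[] { 1, 2, 3, 4 });
ByteBuf[] bodies = { Unpooled.wrappedBuffer(new byte[] { 5, 6 }) };

CompositeByteBuf cbb = Unpooled.compositeBuffer(bodies.length + 1);
// passing true advances the composite's writerIndex past each added component,
// so no manual length accounting or explicit writerIndex(...) call is needed
cbb.addComponents(true, header);
cbb.addComponents(true, bodies);
assert cbb.readableBytes() == 6;
cbb.release();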

Example 49 with CompositeByteBuf

use of io.netty.buffer.CompositeByteBuf in project bookkeeper by apache.

the class IdentityCompressionCodec method compress.

@Override
public ByteBuf compress(ByteBuf uncompressed, int headerLen) {
    checkNotNull(uncompressed);
    checkArgument(uncompressed.readableBytes() >= 0);
    if (headerLen == 0) {
        return uncompressed.retain();
    } else {
        // reserve an empty headerLen-byte buffer in front of the retained payload
        CompositeByteBuf composited = PooledByteBufAllocator.DEFAULT.compositeBuffer(2);
        composited.addComponent(PooledByteBufAllocator.DEFAULT.buffer(headerLen, headerLen));
        composited.addComponent(uncompressed.retain());
        return composited;
    }
}
Also used : CompositeByteBuf(io.netty.buffer.CompositeByteBuf)
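
A minimal check of the zero-header fast path, assuming codec refers to an IdentityCompressionCodec instance; the refCnt assertions just make the retain-and-return contract visible:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

// With headerLen == 0, compress() retains and returns the same buffer, so the
// caller ends up holding two references and must release both eventually.
ByteBuf payload = Unpooled.copiedBuffer(new byte[] { 10, 20, 30 });
ByteBuf compressed = codec.compress(payload, 0);

assert compressed == payload;    // identity: no copy, no wrapping
assert compressed.refCnt() == 2; // retain() added a reference
compressed.release();
payload.release();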

Example 50 with CompositeByteBuf

use of io.netty.buffer.CompositeByteBuf in project jeesuite-libs by vakinge.

the class FileOperationEncoder method encode.

@Override
public List<Object> encode(ByteBufAllocator alloc) {
    ByteBuf meta = metadata(alloc);
    ByteBuf head = alloc.buffer(FDFS_HEAD_LEN);
    head.writeLong(meta.readableBytes() + size);
    head.writeByte(cmd());
    head.writeByte(ERRNO_OK);
    CompositeByteBuf cbb = alloc.compositeBuffer();
    cbb.addComponents(head, meta);
    // addComponents does not advance writerIndex, so set it to the combined length
    cbb.writerIndex(head.readableBytes() + meta.readableBytes());
    List<Object> requests = new LinkedList<>();
    requests.add(cbb);
    requests.add(content);
    return requests;
}
Also used : CompositeByteBuf(io.netty.buffer.CompositeByteBuf) ByteBuf(io.netty.buffer.ByteBuf) LinkedList(java.util.LinkedList)
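
The head buffer above carries a 10-byte header: an 8-byte body length (metadata length plus file size), a 1-byte command code, and a 1-byte status. An illustrative parse of that header on the receiving side (parseHead is a hypothetical helper):

import io.netty.buffer.ByteBuf;

// readLong/readByte mirror the writeLong/writeByte calls in encode()
static void parseHead(ByteBuf head) {
    long bodyLength = head.readLong(); // metadata length + file size
    byte command = head.readByte();    // protocol command code, cmd() above
    byte status = head.readByte();     // ERRNO_OK on outbound requests
    System.out.printf("body=%d cmd=%d status=%d%n", bodyLength, command, status);
}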

Aggregations

CompositeByteBuf (io.netty.buffer.CompositeByteBuf)86 ByteBuf (io.netty.buffer.ByteBuf)65 IOException (java.io.IOException)11 ArrayList (java.util.ArrayList)10 Test (org.junit.Test)8 ByteBuffer (java.nio.ByteBuffer)7 ChannelFuture (io.netty.channel.ChannelFuture)6 Channel (io.netty.channel.Channel)5 ChannelFutureListener (io.netty.channel.ChannelFutureListener)5 ChannelHandlerContext (io.netty.channel.ChannelHandlerContext)5 EmbeddedChannel (io.netty.channel.embedded.EmbeddedChannel)4 Test (org.junit.jupiter.api.Test)4 ChannelInboundHandlerAdapter (io.netty.channel.ChannelInboundHandlerAdapter)3 CodecException (io.netty.handler.codec.CodecException)3 SelfSignedCertificate (io.netty.handler.ssl.util.SelfSignedCertificate)3 InetSocketAddress (java.net.InetSocketAddress)3 ClosedChannelException (java.nio.channels.ClosedChannelException)3 List (java.util.List)3 ExecutionException (java.util.concurrent.ExecutionException)3 AtomicReference (java.util.concurrent.atomic.AtomicReference)3