Example 6 with CompositeByteBuf

Use of io.netty.buffer.CompositeByteBuf in project pravega by pravega.

From the class DirectMemoryBufferTests, method read:

private BufferView read(DirectMemoryBuffer buffer, int blockId) {
    val readBuffers = new ArrayList<ByteBuf>();
    buffer.read(blockId, readBuffers);
    // read() collects the block buffers in reverse order, so flip the list before
    // stitching them into a single composite view (no bytes are copied).
    ByteBuf result = readBuffers.size() == 1
            ? readBuffers.get(0)
            : new CompositeByteBuf(this.allocator, false, readBuffers.size(), Lists.reverse(readBuffers));
    return new ByteBufWrapper(result);
}
Also used: lombok.val (lombok.val), ArrayList (java.util.ArrayList), ByteBuf (io.netty.buffer.ByteBuf), CompositeByteBuf (io.netty.buffer.CompositeByteBuf), ByteBufWrapper (io.pravega.shared.protocol.netty.ByteBufWrapper)
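
For orientation, a minimal, self-contained sketch (hypothetical; it assumes only a plain Netty dependency) of the pattern the read() helper relies on: a CompositeByteBuf presents several component buffers as one logical buffer without copying any bytes.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;

public class CompositeViewSketch {
    public static void main(String[] args) {
        ByteBuf first = Unpooled.copiedBuffer(new byte[] { 1, 2, 3 });
        ByteBuf second = Unpooled.copiedBuffer(new byte[] { 4, 5 });
        // Passing true advances the writer index so the component bytes are readable.
        CompositeByteBuf composite = ByteBufAllocator.DEFAULT.compositeBuffer(2)
                .addComponent(true, first)
                .addComponent(true, second);
        System.out.println(composite.readableBytes()); // prints 5; no bytes were copied
        // Releasing the composite releases the components it now owns.
        composite.release();
    }
}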

Example 7 with CompositeByteBuf

Use of io.netty.buffer.CompositeByteBuf in project ambry by linkedin.

From the class RetainingAsyncWritableChannelTest, method basicsTestWithNettyByteBuf:

/**
 * Test that {@link RetainingAsyncWritableChannel} behaves as expected: chunks are copied, and the callback is
 * completed immediately after the {@link RetainingAsyncWritableChannel#write} method completes.
 */
@Test
public void basicsTestWithNettyByteBuf() throws Exception {
    for (boolean useCompositeByteBuf : Arrays.asList(false, true)) {
        List<byte[]> inputBuffers = getBuffers(1000, 20, 201, 0, 79, 1005);
        RetainingAsyncWritableChannel channel = new RetainingAsyncWritableChannel();
        for (int i = 0; i < inputBuffers.size(); i++) {
            byte[] data = inputBuffers.get(i);
            ByteBuf chunk;
            if (data.length == 0) {
                chunk = Unpooled.wrappedBuffer(data);
            } else if (!useCompositeByteBuf) {
                chunk = ByteBufAllocator.DEFAULT.heapBuffer(data.length);
                chunk.writeBytes(data);
            } else {
                CompositeByteBuf composite = ByteBufAllocator.DEFAULT.compositeHeapBuffer(100);
                ByteBuf c = ByteBufAllocator.DEFAULT.heapBuffer(data.length / 2);
                c.writeBytes(data, 0, data.length / 2);
                composite.addComponent(true, c);
                c = ByteBufAllocator.DEFAULT.heapBuffer(data.length - data.length / 2);
                c.writeBytes(data, data.length / 2, data.length - data.length / 2);
                composite.addComponent(true, c);
                chunk = composite;
            }
            writeAndCheckCallback(chunk, channel, chunk.readableBytes(), null, null);
        }
        checkStream(inputBuffers, channel);
        channel.close();
        writeAndCheckCallback(ByteBuffer.allocate(0), channel, 0, ClosedChannelException.class, null);
    }
}
Also used: ByteBuf (io.netty.buffer.ByteBuf), CompositeByteBuf (io.netty.buffer.CompositeByteBuf), Test (org.junit.Test)
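
Distilled from the composite branch of the test above, a hypothetical helper (the name and the split-in-half choice are ours, not Ambry's) that exposes a byte array as a two-component heap composite:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.CompositeByteBuf;

// Hypothetical helper: expose data as one ByteBuf backed by two heap components.
static ByteBuf toTwoComponentComposite(byte[] data) {
    CompositeByteBuf composite = ByteBufAllocator.DEFAULT.compositeHeapBuffer(100);
    int mid = data.length / 2;
    ByteBuf firstHalf = ByteBufAllocator.DEFAULT.heapBuffer(mid);
    firstHalf.writeBytes(data, 0, mid);
    composite.addComponent(true, firstHalf); // composite now owns firstHalf
    ByteBuf secondHalf = ByteBufAllocator.DEFAULT.heapBuffer(data.length - mid);
    secondHalf.writeBytes(data, mid, data.length - mid);
    composite.addComponent(true, secondHalf); // composite now owns secondHalf
    return composite; // caller is responsible for release()
}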

Example 8 with CompositeByteBuf

Use of io.netty.buffer.CompositeByteBuf in project ambry by linkedin.

From the class MessageReadSetIndexInputStream, method fetchDataFromReadSet:

/**
 * Fetch the data from the MessageReadSet that needs to be sent over the network, based on the type of data
 * requested as indicated by the flag.
 */
private void fetchDataFromReadSet() throws MessageFormatException {
    // get size
    int messageCount = readSet.count();
    ArrayList<ByteBuf> dataFromReadSet = new ArrayList<>(messageCount);
    try {
        // for each message, determine the offset and size that needs to be sent based on the flag
        sendInfoList = new ArrayList<>(messageCount);
        messageMetadataList = new ArrayList<>(messageCount);
        logger.trace("Calculate offsets of messages for one partition, MessageFormatFlag : {} number of messages : {}", flag, messageCount);
        for (int i = 0; i < messageCount; i++) {
            if (flag == MessageFormatFlags.All || flag == MessageFormatFlags.Blob) {
                // Just copy over the total size and use a relative offset of 0.
                // We do not have to check any version in this case, as we don't
                // need to read any data to deserialize anything.
                readSet.doPrefetch(i, 0, readSet.sizeInBytes(i));
                dataFromReadSet.add(readSet.getPrefetchedData(i));
                if (flag == MessageFormatFlags.All) {
                    sendInfoList.add(i, new SendInfo(0, readSet.sizeInBytes(i)));
                    messageMetadataList.add(i, null);
                    totalSizeToWrite += readSet.sizeInBytes(i);
                } else if (flag == MessageFormatFlags.Blob) {
                    ByteBuf blobAll = readSet.getPrefetchedData(i);
                    InputStream blobInputStream = new ByteBufInputStream(blobAll);
                    MessageHeader_Format headerFormat = parseHeaderAndVerifyStoreKey(blobInputStream, i);
                    MessageMetadata messageMetadata = null;
                    if (headerFormat.hasEncryptionKeyRecord()) {
                        // If encryption key exists, MessageMetadata with encryption key is needed.
                        ByteBuf duplicatedByteBuf = blobAll.duplicate();
                        duplicatedByteBuf.setIndex(headerFormat.getBlobEncryptionKeyRecordRelativeOffset(), headerFormat.getBlobEncryptionKeyRecordRelativeOffset() + headerFormat.getBlobEncryptionKeyRecordSize());
                        messageMetadata = new MessageMetadata(deserializeBlobEncryptionKey(new ByteBufInputStream(duplicatedByteBuf)));
                    }
                    messageMetadataList.add(messageMetadata);
                    sendInfoList.add(i, new SendInfo(headerFormat.getBlobRecordRelativeOffset(), headerFormat.getBlobRecordSize()));
                    totalSizeToWrite += headerFormat.getBlobRecordSize();
                    // Adjust underlying ByteBuf reader and writer index.
                    blobAll.setIndex(headerFormat.getBlobRecordRelativeOffset(), (int) (headerFormat.getBlobRecordRelativeOffset() + headerFormat.getBlobRecordSize()));
                }
            } else {
                BufferedInputStream bufferedInputStream = new BufferedInputStream(new MessageReadSetIndexInputStream(readSet, i, 0), BUFFERED_INPUT_STREAM_BUFFER_SIZE);
                MessageHeader_Format headerFormat = parseHeaderAndVerifyStoreKey(bufferedInputStream, i);
                long startTime = SystemTime.getInstance().milliseconds();
                if (flag == MessageFormatFlags.BlobProperties) {
                    sendInfoList.add(i, new SendInfo(headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize()));
                    messageMetadataList.add(null);
                    readSet.doPrefetch(i, headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize());
                    totalSizeToWrite += headerFormat.getBlobPropertiesRecordSize();
                    logger.trace("Calculate offsets, get total size of blob properties time: {}", SystemTime.getInstance().milliseconds() - startTime);
                    logger.trace("Sending blob properties for message relativeOffset : {} size : {}", sendInfoList.get(i).relativeOffset(), sendInfoList.get(i).sizetoSend());
                } else if (flag == MessageFormatFlags.BlobUserMetadata) {
                    messageMetadataList.add(headerFormat.hasEncryptionKeyRecord() ? new MessageMetadata(extractEncryptionKey(i, headerFormat.getBlobEncryptionKeyRecordRelativeOffset(), headerFormat.getBlobEncryptionKeyRecordSize())) : null);
                    sendInfoList.add(i, new SendInfo(headerFormat.getUserMetadataRecordRelativeOffset(), headerFormat.getUserMetadataRecordSize()));
                    readSet.doPrefetch(i, headerFormat.getUserMetadataRecordRelativeOffset(), headerFormat.getUserMetadataRecordSize());
                    totalSizeToWrite += headerFormat.getUserMetadataRecordSize();
                    logger.trace("Calculate offsets, get total size of user metadata time: {}", SystemTime.getInstance().milliseconds() - startTime);
                    logger.trace("Sending user metadata for message relativeOffset : {} size : {}", sendInfoList.get(i).relativeOffset(), sendInfoList.get(i).sizetoSend());
                } else if (flag == MessageFormatFlags.BlobInfo) {
                    messageMetadataList.add(headerFormat.hasEncryptionKeyRecord() ? new MessageMetadata(extractEncryptionKey(i, headerFormat.getBlobEncryptionKeyRecordRelativeOffset(), headerFormat.getBlobEncryptionKeyRecordSize())) : null);
                    sendInfoList.add(i, new SendInfo(headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize() + headerFormat.getUserMetadataRecordSize()));
                    readSet.doPrefetch(i, headerFormat.getBlobPropertiesRecordRelativeOffset(), headerFormat.getBlobPropertiesRecordSize() + headerFormat.getUserMetadataRecordSize());
                    totalSizeToWrite += headerFormat.getBlobPropertiesRecordSize() + headerFormat.getUserMetadataRecordSize();
                    logger.trace("Calculate offsets, get total size of blob info time: {}", SystemTime.getInstance().milliseconds() - startTime);
                    logger.trace("Sending blob info (blob properties + user metadata) for message relativeOffset : {} " + "size : {}", sendInfoList.get(i).relativeOffset(), sendInfoList.get(i).sizetoSend());
                } else {
                    throw new MessageFormatException("Unknown flag in request " + flag, MessageFormatErrorCodes.IO_Error);
                }
                dataFromReadSet.add(readSet.getPrefetchedData(i));
            }
        }
        if (messageCount == 0) {
            messageContent = Unpooled.EMPTY_BUFFER;
        } else if (messageCount == 1) {
            messageContent = dataFromReadSet.get(0);
        } else {
            CompositeByteBuf compositeByteBuf = dataFromReadSet.get(0).alloc().compositeHeapBuffer(messageCount);
            for (ByteBuf data : dataFromReadSet) {
                compositeByteBuf.addComponent(true, data);
            }
            messageContent = compositeByteBuf;
        }
    } catch (IOException e) {
        for (ByteBuf data : dataFromReadSet) {
            data.release();
        }
        logger.trace("IOError when calculating offsets");
        throw new MessageFormatException("IOError when calculating offsets ", e, MessageFormatErrorCodes.IO_Error);
    }
}
Also used: DataInputStream (java.io.DataInputStream), BufferedInputStream (java.io.BufferedInputStream), ByteBufInputStream (io.netty.buffer.ByteBufInputStream), ByteBufferInputStream (com.github.ambry.utils.ByteBufferInputStream), InputStream (java.io.InputStream), ArrayList (java.util.ArrayList), IOException (java.io.IOException), ByteBuf (io.netty.buffer.ByteBuf), CompositeByteBuf (io.netty.buffer.CompositeByteBuf)
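
The aggregation step at the end of fetchDataFromReadSet() is a reusable pattern in its own right. A hypothetical standalone version (the method name is ours), showing the empty, single-buffer, and multi-buffer cases:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import java.util.List;

// Hypothetical distillation: combine prefetched buffers into one ByteBuf
// without copying, special-casing the empty and single-buffer lists.
static ByteBuf aggregate(List<ByteBuf> buffers) {
    if (buffers.isEmpty()) {
        return Unpooled.EMPTY_BUFFER;
    }
    if (buffers.size() == 1) {
        return buffers.get(0);
    }
    CompositeByteBuf composite = buffers.get(0).alloc().compositeHeapBuffer(buffers.size());
    for (ByteBuf buffer : buffers) {
        composite.addComponent(true, buffer); // the composite takes ownership
    }
    return composite;
}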

Example 9 with CompositeByteBuf

Use of io.netty.buffer.CompositeByteBuf in project ambry by linkedin.

From the class RequestResponseTest, method doPutRequestTest:

/**
 * Test Put requests in a specific version.
 * @param testVersion the version to use for the Put requests. If -1, uses the default version.
 * @param clusterMap the cluster map to use.
 * @param correlationId the correlation id associated with the request.
 * @param clientId the client id associated with the request.
 * @param blobId the blob id in the request.
 * @param blobProperties the {@link BlobProperties} associated with the request.
 * @param userMetadata the user metadata associated with the request.
 * @param blobType the {@link BlobType} associated with the request.
 * @param blob the blob content.
 * @param blobSize the size of the blob.
 * @param blobKey the encryption key of the blob.
 * @param expectedKey the expected encryption key from the deserialized Put request.
 */
private void doPutRequestTest(short testVersion, MockClusterMap clusterMap, int correlationId, String clientId, BlobId blobId, BlobProperties blobProperties, byte[] userMetadata, BlobType blobType, byte[] blob, int blobSize, byte[] blobKey, byte[] expectedKey) throws IOException {
    // This PutRequest is created just to get the size.
    PutRequest putRequest = new PutRequest(correlationId, clientId, blobId, blobProperties, ByteBuffer.wrap(userMetadata), Unpooled.wrappedBuffer(blob), blobSize, blobType, blobKey == null ? null : ByteBuffer.wrap(blobKey));
    int sizeInBytes = (int) putRequest.sizeInBytes();
    putRequest.release();
    // Initialize channel write limits in such a way that writeTo() may or may not be able to write out all the
    // data at once.
    int[] channelWriteLimits = { sizeInBytes, 2 * sizeInBytes, sizeInBytes / 2, sizeInBytes / (TestUtils.RANDOM.nextInt(sizeInBytes - 1) + 1) };
    int sizeInBlobProperties = (int) blobProperties.getBlobSize();
    DataInputStream requestStream;
    for (int allocationSize : channelWriteLimits) {
        for (boolean useCompositeBlob : new boolean[] { true, false }) {
            PutRequest request = null;
            switch(testVersion) {
                case InvalidVersionPutRequest.Put_Request_Invalid_version:
                    request = new InvalidVersionPutRequest(correlationId, clientId, blobId, blobProperties, ByteBuffer.wrap(userMetadata), Unpooled.wrappedBuffer(blob), sizeInBlobProperties, BlobType.DataBlob);
                    requestStream = serAndPrepForRead(request, -1, true);
                    try {
                        PutRequest.readFrom(requestStream, clusterMap);
                        Assert.fail("Deserialization of PutRequest with invalid version should have thrown an exception.");
                    } catch (IllegalStateException e) {
                        // Expected: deserialization must reject the invalid version.
                    }
                    request.release();
                    break;
                default:
                    if (request == null) {
                        ByteBuf blobBuf = Unpooled.wrappedBuffer(blob);
                        if (useCompositeBlob) {
                            // Break the blob into three ByteBufs and build a composite blob.
                            int start = 0, end = blob.length / 3;
                            ByteBuf blob1 = PooledByteBufAllocator.DEFAULT.heapBuffer(end - start);
                            blob1.writeBytes(blob, start, end - start);
                            start = end;
                            end += blob.length / 3;
                            ByteBuf blob2 = PooledByteBufAllocator.DEFAULT.heapBuffer(end - start);
                            blob2.writeBytes(blob, start, end - start);
                            start = end;
                            end = blob.length;
                            ByteBuf blob3 = PooledByteBufAllocator.DEFAULT.heapBuffer(end - start);
                            blob3.writeBytes(blob, start, end - start);
                            blobBuf = PooledByteBufAllocator.DEFAULT.compositeHeapBuffer(3);
                            ((CompositeByteBuf) blobBuf).addComponent(true, blob1);
                            ((CompositeByteBuf) blobBuf).addComponent(true, blob2);
                            ((CompositeByteBuf) blobBuf).addComponent(true, blob3);
                        }
                        request = new PutRequest(correlationId, clientId, blobId, blobProperties, ByteBuffer.wrap(userMetadata), blobBuf, blobSize, blobType, blobKey == null ? null : ByteBuffer.wrap(blobKey));
                    }
                    requestStream = serAndPrepForRead(request, allocationSize, true);
                    PutRequest deserializedPutRequest = PutRequest.readFrom(requestStream, clusterMap);
                    Assert.assertEquals(blobId, deserializedPutRequest.getBlobId());
                    Assert.assertEquals(sizeInBlobProperties, deserializedPutRequest.getBlobProperties().getBlobSize());
                    Assert.assertEquals(userMetadata.length, deserializedPutRequest.getUsermetadata().remaining());
                    byte[] deserializedUserMetadata = new byte[userMetadata.length];
                    deserializedPutRequest.getUsermetadata().get(deserializedUserMetadata);
                    Assert.assertArrayEquals(userMetadata, deserializedUserMetadata);
                    Assert.assertEquals(blobSize, deserializedPutRequest.getBlobSize());
                    Assert.assertEquals(blobType, deserializedPutRequest.getBlobType());
                    if (expectedKey == null) {
                        Assert.assertNull(deserializedPutRequest.getBlobEncryptionKey());
                    } else {
                        Assert.assertArrayEquals(expectedKey, deserializedPutRequest.getBlobEncryptionKey().array());
                    }
                    byte[] blobRead = new byte[blobSize];
                    deserializedPutRequest.getBlobStream().read(blobRead);
                    Assert.assertArrayEquals(blob, blobRead);
                    request.release();
                    break;
            }
        }
    }
}
Also used: CompositeByteBuf (io.netty.buffer.CompositeByteBuf), ByteBuf (io.netty.buffer.ByteBuf), DataInputStream (java.io.DataInputStream), NettyByteBufDataInputStream (com.github.ambry.utils.NettyByteBufDataInputStream)
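
The three-way split in the test generalizes naturally. A hypothetical sketch (n-way split; names are ours) of turning a byte array into a pooled heap composite:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.PooledByteBufAllocator;

// Hypothetical generalization: slice a byte array into n pooled heap
// components held under one CompositeByteBuf.
static ByteBuf splitIntoComposite(byte[] data, int n) {
    CompositeByteBuf composite = PooledByteBufAllocator.DEFAULT.compositeHeapBuffer(n);
    int start = 0;
    for (int i = 0; i < n; i++) {
        int end = (i == n - 1) ? data.length : start + data.length / n;
        ByteBuf component = PooledByteBufAllocator.DEFAULT.heapBuffer(end - start);
        component.writeBytes(data, start, end - start);
        composite.addComponent(true, component);
        start = end;
    }
    return composite; // release() frees every pooled component
}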

Example 10 with CompositeByteBuf

Use of io.netty.buffer.CompositeByteBuf in project ambry by linkedin.

From the class CryptoServiceTest, method fromByteArrayToCompositeByteBuf:

/**
 * Create a {@link CompositeByteBuf} from the given byte array.
 * @param data the byte array.
 * @return A {@link CompositeByteBuf}.
 */
private CompositeByteBuf fromByteArrayToCompositeByteBuf(byte[] data) {
    int size = data.length;
    ByteBuf toEncrypt = Unpooled.wrappedBuffer(data);
    CompositeByteBuf composite = new CompositeByteBuf(toEncrypt.alloc(), toEncrypt.isDirect(), size);
    int start = 0;
    int end = 0;
    for (int j = 0; j < 3; j++) {
        start = end;
        end = TestUtils.RANDOM.nextInt(size / 2 - 1) + end;
        if (j == 2) {
            end = size;
        }
        ByteBuf c = Unpooled.buffer(end - start);
        c.writeBytes(data, start, end - start);
        composite.addComponent(true, c);
    }
    return composite;
}
Also used: ByteBuf (io.netty.buffer.ByteBuf), CompositeByteBuf (io.netty.buffer.CompositeByteBuf)
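
A hypothetical companion test for the helper above (the test name is ours; TestUtils and Assert are used as in the surrounding test class): wherever the random component boundaries fall, the composite must read back the original bytes.

@Test
public void compositeRoundTrip() {
    byte[] data = new byte[64];
    TestUtils.RANDOM.nextBytes(data);
    CompositeByteBuf composite = fromByteArrayToCompositeByteBuf(data);
    // addComponent(true, ...) advanced the writer index, so all bytes are readable.
    byte[] roundTrip = new byte[composite.readableBytes()];
    composite.readBytes(roundTrip);
    Assert.assertArrayEquals(data, roundTrip);
    composite.release(); // also releases the three components
}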

Aggregations

CompositeByteBuf (io.netty.buffer.CompositeByteBuf): 86 usages
ByteBuf (io.netty.buffer.ByteBuf): 65 usages
IOException (java.io.IOException): 11 usages
ArrayList (java.util.ArrayList): 10 usages
Test (org.junit.Test): 8 usages
ByteBuffer (java.nio.ByteBuffer): 7 usages
ChannelFuture (io.netty.channel.ChannelFuture): 6 usages
Channel (io.netty.channel.Channel): 5 usages
ChannelFutureListener (io.netty.channel.ChannelFutureListener): 5 usages
ChannelHandlerContext (io.netty.channel.ChannelHandlerContext): 5 usages
EmbeddedChannel (io.netty.channel.embedded.EmbeddedChannel): 4 usages
Test (org.junit.jupiter.api.Test): 4 usages
ChannelInboundHandlerAdapter (io.netty.channel.ChannelInboundHandlerAdapter): 3 usages
CodecException (io.netty.handler.codec.CodecException): 3 usages
SelfSignedCertificate (io.netty.handler.ssl.util.SelfSignedCertificate): 3 usages
InetSocketAddress (java.net.InetSocketAddress): 3 usages
ClosedChannelException (java.nio.channels.ClosedChannelException): 3 usages
List (java.util.List): 3 usages
ExecutionException (java.util.concurrent.ExecutionException): 3 usages
AtomicReference (java.util.concurrent.atomic.AtomicReference): 3 usages