Use of io.netty.handler.codec.http.HttpContent in project ambry by linkedin.
In the class CopyForcingByteBuf, method doBackPressureTest:
/**
* Does the backpressure test by ensuring that {@link Channel#read()} isn't called when the number of bytes buffered
* is above the {@link NettyRequest#bufferWatermark}. Also ensures that {@link Channel#read()} is called correctly
* when the number of buffered bytes falls below the {@link NettyRequest#bufferWatermark}.
* @param digestAlgorithm the digest algorithm to use. Can be empty or {@code null} if digest checking is not
* required.
* @param content the complete content.
* @param httpContents {@code content} in parts and as {@link HttpContent}. Should contain all the data in
* {@code content}.
* @param numChunksToAddBeforeRead the number of {@link HttpContent} to add before making the
* {@link NettyRequest#readInto(AsyncWritableChannel, Callback)} call.
* @param method the HTTP method of the request.
* @throws Exception
*/
private void doBackPressureTest(String digestAlgorithm, ByteBuffer content, List<HttpContent> httpContents,
    int numChunksToAddBeforeRead, HttpMethod method) throws Exception {
  if (numChunksToAddBeforeRead < 0 || numChunksToAddBeforeRead > httpContents.size()) {
    throw new IllegalArgumentException("Illegal value of numChunksToAddBeforeRead");
  }
  MockChannel channel = new MockChannel();
  final NettyRequest nettyRequest = createNettyRequest(method, "/", null, channel);
  byte[] wholeDigest = null;
  if (digestAlgorithm != null && !digestAlgorithm.isEmpty()) {
    MessageDigest digest = MessageDigest.getInstance(digestAlgorithm);
    digest.update(content);
    wholeDigest = digest.digest();
    content.rewind();
    nettyRequest.setDigestAlgorithm(digestAlgorithm);
  }
  final AtomicInteger queuedReads = new AtomicInteger(0);
  ByteBufferAsyncWritableChannel writeChannel = new ByteBufferAsyncWritableChannel();
  ReadIntoCallback callback = new ReadIntoCallback();
  channel.setChannelReadCallback(new MockChannel.ChannelReadCallback() {
    @Override
    public void onRead() {
      queuedReads.incrementAndGet();
    }
  });
  int addedCount = 0;
  Future<Long> future = null;
  boolean suspended = false;
  int bytesToVerify = 0;
  while (addedCount < httpContents.size()) {
    if (suspended) {
      assertEquals("There should have been no reads queued when over buffer watermark", 0, queuedReads.get());
      if (future == null) {
        future = nettyRequest.readInto(writeChannel, callback);
      }
      int chunksRead = readAndVerify(bytesToVerify, writeChannel, content);
      assertEquals("Number of reads triggered is not as expected", chunksRead, queuedReads.get());
      // collapse many reads into one
      queuedReads.set(1);
      bytesToVerify = 0;
      suspended = false;
    } else {
      assertEquals("There should have been only one read queued", 1, queuedReads.get());
      queuedReads.set(0);
      if (future == null && addedCount == numChunksToAddBeforeRead) {
        future = nettyRequest.readInto(writeChannel, callback);
      }
      final HttpContent httpContent = httpContents.get(addedCount);
      bytesToVerify += (httpContent.content().readableBytes());
      suspended = bytesToVerify >= NettyRequest.bufferWatermark;
      addedCount++;
      nettyRequest.addContent(httpContent);
      int expectedRefCountOnAdd = future == null || httpContent.content().nioBufferCount() > 0 ? 2 : 1;
      assertEquals("Reference count is not as expected", expectedRefCountOnAdd, httpContent.refCnt());
    }
  }
  if (future == null) {
    future = nettyRequest.readInto(writeChannel, callback);
  }
  readAndVerify(bytesToVerify, writeChannel, content);
  verifyRefCnts(httpContents);
  writeChannel.close();
  callback.awaitCallback();
  if (callback.exception != null) {
    throw callback.exception;
  }
  long futureBytesRead = future.get(1, TimeUnit.SECONDS);
  assertEquals("Total bytes read does not match (callback)", content.limit(), callback.bytesRead);
  assertEquals("Total bytes read does not match (future)", content.limit(), futureBytesRead);
  // check twice to make sure the same digest is returned every time
  for (int i = 0; i < 2; i++) {
    assertArrayEquals("Part by part digest should match digest of whole", wholeDigest, nettyRequest.getDigest());
  }
  closeRequestAndValidate(nettyRequest, channel);
}
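The loop above relies on a readAndVerify helper that is not reproduced in this snippet. The following is a minimal sketch of what such a helper could look like, assuming ByteBufferAsyncWritableChannel exposes getNextChunk() and resolveOldestChunk(Exception) as in ambry's utility classes and that the usual JUnit asserts are in scope; the actual ambry helper may differ.

// Hypothetical sketch, not the actual ambry helper: drains chunks from writeChannel
// until bytesToVerify bytes have been compared against the source buffer, resolves
// each chunk so that NettyRequest can queue further channel reads, and returns the
// number of chunks consumed.
private int readAndVerify(int bytesToVerify, ByteBufferAsyncWritableChannel writeChannel, ByteBuffer content)
    throws InterruptedException {
  int chunksRead = 0;
  int bytesVerified = 0;
  while (bytesVerified < bytesToVerify) {
    ByteBuffer chunk = writeChannel.getNextChunk();
    while (chunk.hasRemaining()) {
      assertEquals("Content data does not match", content.get(), chunk.get());
      bytesVerified++;
    }
    writeChannel.resolveOldestChunk(null);
    chunksRead++;
  }
  return chunksRead;
}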
Use of io.netty.handler.codec.http.HttpContent in project ambry by linkedin.
In the class CopyForcingByteBuf, method sizeInHeaderLessThanContentTest:
/**
* Tests the reaction of {@link NettyRequest} when the actual content size is more than the size specified in the headers.
* @throws Exception
*/
private void sizeInHeaderLessThanContentTest() throws Exception {
  List<HttpContent> httpContents = new ArrayList<HttpContent>();
  ByteBuffer content = generateContent(httpContents);
  HttpHeaders httpHeaders = new DefaultHttpHeaders();
  int lastHttpContentSize = httpContents.get(httpContents.size() - 1).content().readableBytes();
  httpHeaders.set(HttpHeaderNames.CONTENT_LENGTH, content.limit() - lastHttpContentSize - 1);
  doHeaderAndContentSizeMismatchTest(httpHeaders, httpContents);
}
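The mirror-image case, where the header declares more bytes than the content supplies, can be built the same way. Below is a hypothetical companion sketch that reuses the generateContent and doHeaderAndContentSizeMismatchTest helpers referenced above; the corresponding ambry test may be structured differently.

// Hypothetical companion sketch: the Content-Length header claims one byte more than
// the generated content actually contains, so the request should be flagged as a
// header/content size mismatch.
private void sizeInHeaderMoreThanContentTest() throws Exception {
  List<HttpContent> httpContents = new ArrayList<HttpContent>();
  ByteBuffer content = generateContent(httpContents);
  HttpHeaders httpHeaders = new DefaultHttpHeaders();
  httpHeaders.set(HttpHeaderNames.CONTENT_LENGTH, content.limit() + 1);
  doHeaderAndContentSizeMismatchTest(httpHeaders, httpContents);
}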
Use of io.netty.handler.codec.http.HttpContent in project ambry by linkedin.
In the class CopyForcingByteBuf, method backPressureTest:
/**
* Tests backpressure support in {@link NettyRequest} for different values of {@link NettyRequest#bufferWatermark}.
* @param digestAlgorithm the digest algorithm to use. Can be empty or {@code null} if digest checking is not
* required.
* @param useCopyForcingByteBuf if {@code true}, uses {@link CopyForcingByteBuf} instead of the default
* {@link ByteBuf}.
* @param method the HTTP method of the request.
* @throws Exception
*/
private void backPressureTest(String digestAlgorithm, boolean useCopyForcingByteBuf, HttpMethod method)
    throws Exception {
  List<HttpContent> httpContents = new ArrayList<HttpContent>();
  byte[] contentBytes = TestUtils.getRandomBytes(GENERATED_CONTENT_SIZE);
  ByteBuffer content = ByteBuffer.wrap(contentBytes);
  splitContent(contentBytes, GENERATED_CONTENT_PART_COUNT, httpContents, useCopyForcingByteBuf);
  int chunkSize = httpContents.get(0).content().readableBytes();
  int[] bufferWatermarks = {1, chunkSize - 1, chunkSize, chunkSize + 1, chunkSize * httpContents.size() / 2,
      content.limit() - 1, content.limit(), content.limit() + 1};
  for (int bufferWatermark : bufferWatermarks) {
    NettyRequest.bufferWatermark = bufferWatermark;
    // start reading before addition of content
    httpContents.clear();
    content.rewind();
    splitContent(contentBytes, GENERATED_CONTENT_PART_COUNT, httpContents, useCopyForcingByteBuf);
    doBackPressureTest(digestAlgorithm, content, httpContents, 0, method);
    // start reading in the middle of content add
    httpContents.clear();
    content.rewind();
    splitContent(contentBytes, GENERATED_CONTENT_PART_COUNT, httpContents, useCopyForcingByteBuf);
    doBackPressureTest(digestAlgorithm, content, httpContents, httpContents.size() / 2, method);
    // start reading after all content added
    httpContents.clear();
    content.rewind();
    splitContent(contentBytes, GENERATED_CONTENT_PART_COUNT, httpContents, useCopyForcingByteBuf);
    doBackPressureTest(digestAlgorithm, content, httpContents, httpContents.size(), method);
  }
}
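backPressureTest depends on a splitContent helper that breaks the random bytes into equal-sized HttpContent parts. A rough sketch of such a helper is shown below, using Netty's Unpooled, DefaultHttpContent and DefaultLastHttpContent; the CopyForcingByteBuf variant that the real helper supports is omitted, so this is an illustration rather than ambry's implementation.

// Hypothetical sketch, not the actual ambry helper: slices contentBytes into numParts
// equal pieces, wraps each slice in an HttpContent and marks the final slice as a
// LastHttpContent. The useCopyForcingByteBuf flag is accepted but ignored in this sketch.
private void splitContent(byte[] contentBytes, int numParts, List<HttpContent> httpContents,
    boolean useCopyForcingByteBuf) {
  int partSize = contentBytes.length / numParts;
  for (int i = 0; i < numParts; i++) {
    ByteBuf part = Unpooled.wrappedBuffer(contentBytes, i * partSize, partSize);
    httpContents.add(i == numParts - 1 ? new DefaultLastHttpContent(part) : new DefaultHttpContent(part));
  }
}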
Use of io.netty.handler.codec.http.HttpContent in project ambry by linkedin.
In the class ChannelWriteCallback, method errorResponseTest:
/**
* Tests that error responses are correctly formed.
*/
@Test
public void errorResponseTest() {
  EmbeddedChannel channel = createEmbeddedChannel();
  for (RestServiceErrorCode errorCode : RestServiceErrorCode.values()) {
    HttpHeaders httpHeaders = new DefaultHttpHeaders();
    httpHeaders.set(MockNettyMessageProcessor.REST_SERVICE_ERROR_CODE_HEADER_NAME, errorCode);
    channel.writeInbound(RestTestUtils.createRequest(HttpMethod.HEAD,
        TestingUri.OnResponseCompleteWithRestException.toString(), httpHeaders));
    HttpResponse response = channel.readOutbound();
    HttpResponseStatus expectedStatus = getExpectedHttpResponseStatus(errorCode);
    assertEquals("Unexpected response status", expectedStatus, response.status());
    boolean containsFailureReasonHeader = response.headers().contains(NettyResponseChannel.FAILURE_REASON_HEADER);
    if (expectedStatus == HttpResponseStatus.BAD_REQUEST) {
      assertTrue("Could not find failure reason header.", containsFailureReasonHeader);
    } else {
      assertFalse("Should not have found failure reason header.", containsFailureReasonHeader);
    }
    if (HttpStatusClass.CLIENT_ERROR.contains(response.status().code())) {
      assertEquals("Wrong error code", errorCode,
          RestServiceErrorCode.valueOf(response.headers().get(NettyResponseChannel.ERROR_CODE_HEADER)));
    } else {
      assertFalse("Should not have found error code header",
          response.headers().contains(NettyResponseChannel.ERROR_CODE_HEADER));
    }
    if (response instanceof FullHttpResponse) {
      // assert that there is no content
      assertEquals("The response should not contain content", 0,
          ((FullHttpResponse) response).content().readableBytes());
    } else {
      HttpContent content = channel.readOutbound();
      assertTrue("End marker should be received", content instanceof LastHttpContent);
    }
    assertNull("There should be no more data in the channel", channel.readOutbound());
    boolean shouldBeAlive = !NettyResponseChannel.CLOSE_CONNECTION_ERROR_STATUSES.contains(expectedStatus);
    assertEquals("Channel state (open/close) not as expected", shouldBeAlive, channel.isActive());
    assertEquals("Connection header should be consistent with channel state", shouldBeAlive,
        HttpUtil.isKeepAlive(response));
    if (!shouldBeAlive) {
      channel = createEmbeddedChannel();
    }
  }
  channel.close();
}
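For readers unfamiliar with EmbeddedChannel, the snippet below is a standalone illustration (not ambry code) of the mechanics this test relies on: write a request inbound, read the handler's response outbound, and check the keep-alive decision. The inline handler is a stand-in for MockNettyMessageProcessor, and the usual io.netty.* imports and JUnit asserts are assumed.

// Standalone illustration of EmbeddedChannel request/response mechanics; the inline
// handler is a stand-in, not MockNettyMessageProcessor.
EmbeddedChannel channel = new EmbeddedChannel(new SimpleChannelInboundHandler<FullHttpRequest>() {
  @Override
  protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest request) {
    // respond with 400 and ask for the connection to be closed
    FullHttpResponse response =
        new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.BAD_REQUEST);
    HttpUtil.setKeepAlive(response, false);
    ctx.writeAndFlush(response);
  }
});
channel.writeInbound(
    new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.HEAD, "/", Unpooled.EMPTY_BUFFER));
HttpResponse response = channel.readOutbound();
assertEquals(HttpResponseStatus.BAD_REQUEST, response.status());
assertFalse(HttpUtil.isKeepAlive(response));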
Use of io.netty.handler.codec.http.HttpContent in project ambry by linkedin.
In the class FrontendIntegrationTest, method getContent:
/**
* Combines all the parts in {@code contents} into one {@link ByteBuffer}.
* @param contents the content of the response.
* @param expectedContentLength the length of the contents in bytes.
* @return a {@link ByteBuffer} that contains all the data in {@code contents}.
*/
private ByteBuffer getContent(Queue<HttpObject> contents, long expectedContentLength) {
  ByteBuffer buffer = ByteBuffer.allocate((int) expectedContentLength);
  boolean endMarkerFound = false;
  for (HttpObject object : contents) {
    assertFalse("There should have been no more data after the end marker was found", endMarkerFound);
    HttpContent content = (HttpContent) object;
    buffer.put(content.content().nioBuffer());
    endMarkerFound = object instanceof LastHttpContent;
    ReferenceCountUtil.release(content);
  }
  assertEquals("Content length did not match expected", expectedContentLength, buffer.position());
  assertTrue("End marker was not found", endMarkerFound);
  buffer.flip();
  return buffer;
}
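A typical way such a helper might be used is sketched below. This is a hypothetical usage example: it assumes the response head has already been read, that the Content-Length header reflects the payload size, and that the response parts are drained from an EmbeddedChannel; in FrontendIntegrationTest the queue actually comes from the project's test HTTP client, which is not shown here.

// Hypothetical usage sketch: read the response head, drain the remaining HttpContent
// pieces into a queue and reassemble them into a single ByteBuffer.
HttpResponse response = channel.readOutbound();
long contentLength = HttpUtil.getContentLength(response, 0);
Queue<HttpObject> contents = new LinkedList<>();
HttpObject piece;
while ((piece = channel.readOutbound()) != null) {
  contents.add(piece);
}
ByteBuffer payload = getContent(contents, contentLength);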