use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpContent in project crate by crate.
the class HttpBlobHandler method channelRead0.
@Override
protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
    if (msg instanceof HttpRequest) {
        HttpRequest request = currentMessage = (HttpRequest) msg;
        String uri = request.uri();
        if (!uri.startsWith(BLOBS_ENDPOINT)) {
            reset();
            ctx.fireChannelRead(msg);
            return;
        }
        Matcher matcher = blobsMatcher.reset(uri);
        if (!matcher.matches()) {
            simpleResponse(request, HttpResponseStatus.NOT_FOUND);
            return;
        }
        digestBlob = null;
        index = BlobIndex.fullIndexName(matcher.group(1));
        digest = matcher.group(2);
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("matches index:{} digest:{}", index, digest);
            LOGGER.trace("HTTPMessage:%n{}", request);
        }
        handleBlobRequest(request, null);
    } else if (msg instanceof HttpContent) {
        if (currentMessage == null) {
            // the chunk is probably from a regular non-blob request.
            reset();
            ctx.fireChannelRead(msg);
            return;
        }
        handleBlobRequest(currentMessage, (HttpContent) msg);
    } else {
        // Neither HttpRequest nor HttpContent
        reset();
        ctx.fireChannelRead(msg);
    }
}
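The dispatch shape above is common to every handler on this page: an HttpRequest opens the exchange, subsequent HttpContent chunks are fed to the same method, and anything else is forwarded down the pipeline. A minimal, self-contained sketch of that shape, assuming plain io.netty imports instead of the flink-shaded package path and a hypothetical BlobWriter sink (neither is crate's actual code), might look like this:

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpObject;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.util.ReferenceCountUtil;

// Hypothetical handler: not crate's HttpBlobHandler, just the request-then-chunks dispatch it uses.
public class BlobUploadSketchHandler extends SimpleChannelInboundHandler<HttpObject> {

    // Assumed sink; append() is expected to copy the chunk's bytes before channelRead0 returns.
    interface BlobWriter {
        void append(HttpContent chunk);
        void finish();
    }

    private final BlobWriter writer;
    private HttpRequest currentRequest;

    public BlobUploadSketchHandler(BlobWriter writer) {
        this.writer = writer;
    }

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) {
        if (msg instanceof HttpRequest) {
            // First message of the exchange: keep it so later chunks have context.
            currentRequest = (HttpRequest) msg;
        } else if (msg instanceof HttpContent) {
            if (currentRequest == null) {
                // Chunk without a matching request: not ours, retain and pass it along.
                ctx.fireChannelRead(ReferenceCountUtil.retain(msg));
                return;
            }
            writer.append((HttpContent) msg);
            if (msg instanceof LastHttpContent) {
                writer.finish();
                currentRequest = null;
            }
        } else {
            // Neither a request nor a content chunk: let the next handler deal with it.
            ctx.fireChannelRead(ReferenceCountUtil.retain(msg));
        }
    }
}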
use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpContent in project cloudstack by apache.
the class HttpUploadServerHandler method channelRead0.
@Override
public void channelRead0(ChannelHandlerContext ctx, HttpObject msg) throws Exception {
    if (msg instanceof HttpRequest) {
        if (logger.isTraceEnabled()) {
            logger.trace(String.format("HTTP request: %s", msg));
        }
        HttpRequest request = this.request = (HttpRequest) msg;
        responseContent.setLength(0);
        if (request.getMethod().equals(HttpMethod.POST)) {
            URI uri = new URI(request.getUri());
            String signature = null;
            String expires = null;
            String metadata = null;
            String hostname = null;
            long contentLength = 0;
            for (Entry<String, String> entry : request.headers()) {
                switch (entry.getKey()) {
                    case HEADER_SIGNATURE:
                        signature = entry.getValue();
                        break;
                    case HEADER_METADATA:
                        metadata = entry.getValue();
                        break;
                    case HEADER_EXPIRES:
                        expires = entry.getValue();
                        break;
                    case HEADER_HOST:
                        hostname = entry.getValue();
                        break;
                    case HttpHeaders.Names.CONTENT_LENGTH:
                        contentLength = Long.parseLong(entry.getValue());
                        break;
                }
            }
            logger.info("HEADER: signature=" + signature);
            logger.info("HEADER: metadata=" + metadata);
            logger.info("HEADER: expires=" + expires);
            logger.info("HEADER: hostname=" + hostname);
            logger.info("HEADER: Content-Length=" + contentLength);
            QueryStringDecoder decoderQuery = new QueryStringDecoder(uri);
            Map<String, List<String>> uriAttributes = decoderQuery.parameters();
            uuid = uriAttributes.get("uuid").get(0);
            logger.info("URI: uuid=" + uuid);
            UploadEntity uploadEntity = null;
            try {
                // Validate the request here
                storageResource.validatePostUploadRequest(signature, metadata, expires, hostname, contentLength, uuid);
                // create an upload entity. This will fail if entity already exists.
                uploadEntity = storageResource.createUploadEntity(uuid, metadata, contentLength);
            } catch (InvalidParameterValueException ex) {
                logger.error("post request validation failed", ex);
                responseContent.append(ex.getMessage());
                writeResponse(ctx.channel(), HttpResponseStatus.BAD_REQUEST);
                requestProcessed = true;
                return;
            }
            if (uploadEntity == null) {
                logger.error("Unable to create upload entity. An exception occurred.");
                responseContent.append("Internal Server Error");
                writeResponse(ctx.channel(), HttpResponseStatus.INTERNAL_SERVER_ERROR);
                requestProcessed = true;
                return;
            }
            // set the base directory to download the file
            DiskFileUpload.baseDirectory = uploadEntity.getInstallPathPrefix();
            this.processTimeout = uploadEntity.getProcessTimeout();
            logger.info("base directory: " + DiskFileUpload.baseDirectory);
            try {
                // initialize the decoder
                decoder = new HttpPostRequestDecoder(factory, request);
            } catch (ErrorDataDecoderException | IncompatibleDataDecoderException e) {
                logger.error("exception while initialising the decoder", e);
                responseContent.append(e.getMessage());
                writeResponse(ctx.channel(), HttpResponseStatus.INTERNAL_SERVER_ERROR);
                requestProcessed = true;
                return;
            }
        } else {
            logger.warn("received a get request");
            responseContent.append("only post requests are allowed");
            writeResponse(ctx.channel(), HttpResponseStatus.BAD_REQUEST);
            requestProcessed = true;
            return;
        }
    }
    // check if the decoder was constructed before
    if (decoder != null) {
        if (msg instanceof HttpContent) {
            // New chunk is received
            HttpContent chunk = (HttpContent) msg;
            try {
                decoder.offer(chunk);
            } catch (ErrorDataDecoderException e) {
                logger.error("data decoding exception", e);
                responseContent.append(e.getMessage());
                writeResponse(ctx.channel(), HttpResponseStatus.INTERNAL_SERVER_ERROR);
                requestProcessed = true;
                return;
            }
            if (chunk instanceof LastHttpContent) {
                writeResponse(ctx.channel(), readFileUploadData());
                reset();
            }
        }
    }
}
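The snippet only shows chunks being offered to the HttpPostRequestDecoder; the drain step behind readFileUploadData() is not included. As an illustration of the usual drain pattern once LastHttpContent has arrived (the helper name drainDecoderSketch, the application hand-off, and the returned status are assumptions, not cloudstack's code; plain io.netty imports are used), a sketch might look like:

import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.multipart.FileUpload;
import io.netty.handler.codec.http.multipart.HttpPostRequestDecoder;
import io.netty.handler.codec.http.multipart.InterfaceHttpData;

// Hypothetical drain step: walk the parts the decoder accumulated and hand any completed upload off.
private HttpResponseStatus drainDecoderSketch(HttpPostRequestDecoder decoder) {
    try {
        while (decoder.hasNext()) {
            InterfaceHttpData data = decoder.next();
            if (data != null && data.getHttpDataType() == InterfaceHttpData.HttpDataType.FileUpload) {
                FileUpload fileUpload = (FileUpload) data;
                if (fileUpload.isCompleted()) {
                    // Hand the uploaded file to application code here (assumed step).
                    return HttpResponseStatus.OK;
                }
            }
        }
        return HttpResponseStatus.BAD_REQUEST;
    } catch (HttpPostRequestDecoder.EndOfDataDecoderException e) {
        // hasNext()/next() signal exhaustion of the body with this exception.
        return HttpResponseStatus.OK;
    } finally {
        // Release temporary files and buffers held by the decoder.
        decoder.destroy();
    }
}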
use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpContent in project rest.li by linkedin.
the class RAPStreamResponseDecoder method channelRead0.
@Override
protected void channelRead0(final ChannelHandlerContext ctx, HttpObject msg) throws Exception {
    if (msg instanceof HttpResponse) {
        HttpResponse m = (HttpResponse) msg;
        _shouldCloseConnection = !HttpUtil.isKeepAlive(m);
        if (HttpUtil.is100ContinueExpected(m)) {
            ctx.writeAndFlush(CONTINUE).addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) throws Exception {
                    if (!future.isSuccess()) {
                        ctx.fireExceptionCaught(future.cause());
                    }
                }
            });
        }
        if (!m.decoderResult().isSuccess()) {
            ctx.fireExceptionCaught(m.decoderResult().cause());
            return;
        }
        // remove chunked encoding.
        if (HttpUtil.isTransferEncodingChunked(m)) {
            HttpUtil.setTransferEncodingChunked(m, false);
        }
        Timeout<None> timeout = ctx.channel().attr(TIMEOUT_ATTR_KEY).getAndSet(null);
        if (timeout == null) {
            LOG.debug("dropped a response after channel inactive or exception had happened.");
            return;
        }
        final TimeoutBufferedWriter writer = new TimeoutBufferedWriter(ctx, _maxContentLength, BUFFER_HIGH_WATER_MARK, BUFFER_LOW_WATER_MARK, timeout);
        EntityStream entityStream = EntityStreams.newEntityStream(writer);
        _chunkedMessageWriter = writer;
        // Refactored duplicate code to new code pipeline.
        StreamResponseBuilder builder = HttpMessageDecoders.ResponseDecoder.buildStreamResponse(m);
        ctx.fireChannelRead(builder.build(entityStream));
    } else if (msg instanceof HttpContent) {
        HttpContent chunk = (HttpContent) msg;
        TimeoutBufferedWriter currentWriter = _chunkedMessageWriter;
        // Sanity check
        if (currentWriter == null) {
            throw new IllegalStateException("received " + HttpContent.class.getSimpleName() + " without " + HttpResponse.class.getSimpleName());
        }
        if (!chunk.decoderResult().isSuccess()) {
            this.exceptionCaught(ctx, chunk.decoderResult().cause());
        }
        currentWriter.processHttpChunk(chunk);
        if (chunk instanceof LastHttpContent) {
            _chunkedMessageWriter = null;
        }
    } else {
        // something must be wrong, but let's proceed so that
        // handler after us has a chance to process it.
        ctx.fireChannelRead(msg);
    }
}
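The CONTINUE constant written when a 100-continue is expected is defined outside this snippet. In Netty it is conventionally a pre-built, empty "100 Continue" response; a plausible definition, assuming plain io.netty imports (the field name comes from the snippet, the exact construction is an assumption), would be:

import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;

// Shared, empty "100 Continue" response flushed back to the peer before the request body is sent.
private static final FullHttpResponse CONTINUE =
        new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.CONTINUE, Unpooled.EMPTY_BUFFER);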
use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpContent in project riposte by Nike-Inc.
the class RequestFilterHandlerTest method doChannelRead_does_nothing_and_returns_CONTINUE_when_msg_is_not_first_or_last_chunk.
@Test
public void doChannelRead_does_nothing_and_returns_CONTINUE_when_msg_is_not_first_or_last_chunk() throws Exception {
    // given
    HttpContent contentChunkMsg = mock(HttpContent.class);

    // when
    PipelineContinuationBehavior result = handlerSpy.doChannelRead(ctxMock, contentChunkMsg);

    // then
    assertThat(result).isEqualTo(CONTINUE);
    verify(handlerSpy, never()).handleFilterLogic(any(), any(), any(), any(), any());
}
use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpContent in project riposte by Nike-Inc.
the class ProcessFinalResponseOutputHandlerTest method write_does_nothing_to_finalContentLength_if_msg_is_HttpContent_but_state_is_null.
@Test
public void write_does_nothing_to_finalContentLength_if_msg_is_HttpContent_but_state_is_null() throws Exception {
    // given
    HttpContent msgMock = mock(HttpContent.class);
    ByteBuf contentMock = mock(ByteBuf.class);
    int contentBytes = (int) (Math.random() * 10000);
    doReturn(contentMock).when(msgMock).content();
    doReturn(contentBytes).when(contentMock).readableBytes();
    doReturn(null).when(stateAttrMock).get();
    assertThat(responseInfo.getFinalContentLength()).isNull();

    // when
    handler.write(ctxMock, msgMock, promiseMock);

    // then
    assertThat(responseInfo.getFinalContentLength()).isNull();
}