Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.LastHttpContent in project cloudstack by apache.
The class HttpUploadServerHandler, method channelRead0.
@Override
public void channelRead0(ChannelHandlerContext ctx, HttpObject msg) throws Exception {
    if (msg instanceof HttpRequest) {
        if (logger.isTraceEnabled()) {
            logger.trace(String.format("HTTP request: %s", msg));
        }
        HttpRequest request = this.request = (HttpRequest) msg;
        responseContent.setLength(0);
        if (request.getMethod().equals(HttpMethod.POST)) {
            URI uri = new URI(request.getUri());
            String signature = null;
            String expires = null;
            String metadata = null;
            String hostname = null;
            long contentLength = 0;
            for (Entry<String, String> entry : request.headers()) {
                switch (entry.getKey()) {
                    case HEADER_SIGNATURE:
                        signature = entry.getValue();
                        break;
                    case HEADER_METADATA:
                        metadata = entry.getValue();
                        break;
                    case HEADER_EXPIRES:
                        expires = entry.getValue();
                        break;
                    case HEADER_HOST:
                        hostname = entry.getValue();
                        break;
                    case HttpHeaders.Names.CONTENT_LENGTH:
                        contentLength = Long.parseLong(entry.getValue());
                        break;
                }
            }
            logger.info("HEADER: signature=" + signature);
            logger.info("HEADER: metadata=" + metadata);
            logger.info("HEADER: expires=" + expires);
            logger.info("HEADER: hostname=" + hostname);
            logger.info("HEADER: Content-Length=" + contentLength);
            QueryStringDecoder decoderQuery = new QueryStringDecoder(uri);
            Map<String, List<String>> uriAttributes = decoderQuery.parameters();
            uuid = uriAttributes.get("uuid").get(0);
            logger.info("URI: uuid=" + uuid);
            UploadEntity uploadEntity = null;
            try {
                // Validate the request here
                storageResource.validatePostUploadRequest(signature, metadata, expires, hostname, contentLength, uuid);
                // create an upload entity. This will fail if entity already exists.
                uploadEntity = storageResource.createUploadEntity(uuid, metadata, contentLength);
            } catch (InvalidParameterValueException ex) {
                logger.error("post request validation failed", ex);
                responseContent.append(ex.getMessage());
                writeResponse(ctx.channel(), HttpResponseStatus.BAD_REQUEST);
                requestProcessed = true;
                return;
            }
            if (uploadEntity == null) {
                logger.error("Unable to create upload entity. An exception occurred.");
                responseContent.append("Internal Server Error");
                writeResponse(ctx.channel(), HttpResponseStatus.INTERNAL_SERVER_ERROR);
                requestProcessed = true;
                return;
            }
            // set the base directory to download the file
            DiskFileUpload.baseDirectory = uploadEntity.getInstallPathPrefix();
            this.processTimeout = uploadEntity.getProcessTimeout();
            logger.info("base directory: " + DiskFileUpload.baseDirectory);
            try {
                // initialize the decoder
                decoder = new HttpPostRequestDecoder(factory, request);
            } catch (ErrorDataDecoderException | IncompatibleDataDecoderException e) {
                logger.error("exception while initialising the decoder", e);
                responseContent.append(e.getMessage());
                writeResponse(ctx.channel(), HttpResponseStatus.INTERNAL_SERVER_ERROR);
                requestProcessed = true;
                return;
            }
        } else {
            logger.warn("received a get request");
            responseContent.append("only post requests are allowed");
            writeResponse(ctx.channel(), HttpResponseStatus.BAD_REQUEST);
            requestProcessed = true;
            return;
        }
    }
    // check if the decoder was constructed before
    if (decoder != null) {
        if (msg instanceof HttpContent) {
            // New chunk is received
            HttpContent chunk = (HttpContent) msg;
            try {
                decoder.offer(chunk);
            } catch (ErrorDataDecoderException e) {
                logger.error("data decoding exception", e);
                responseContent.append(e.getMessage());
                writeResponse(ctx.channel(), HttpResponseStatus.INTERNAL_SERVER_ERROR);
                requestProcessed = true;
                return;
            }
            if (chunk instanceof LastHttpContent) {
                writeResponse(ctx.channel(), readFileUploadData());
                reset();
            }
        }
    }
}
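A handler like the one above only sees separate HttpRequest and HttpContent chunks if the pipeline decodes HTTP without aggregating it. The following is a minimal sketch of how such an upload handler is typically wired into a Netty server pipeline; the initializer class name and the no-argument HttpUploadServerHandler constructor are illustrative assumptions, not CloudStack's actual bootstrap code, and the Netty classes are shown without the shaded package prefix.
public class UploadServerInitializer extends ChannelInitializer<SocketChannel> {
    @Override
    protected void initChannel(SocketChannel ch) {
        // Decode raw bytes into HttpRequest/HttpContent objects; without an
        // HttpObjectAggregator, the handler receives each chunk separately and
        // can stream it into its HttpPostRequestDecoder.
        ch.pipeline().addLast(new HttpRequestDecoder());
        ch.pipeline().addLast(new HttpResponseEncoder());
        ch.pipeline().addLast(new HttpUploadServerHandler());
    }
}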
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.LastHttpContent in project rest.li by linkedin.
The class RAPStreamResponseDecoder, method channelRead0.
@Override
protected void channelRead0(final ChannelHandlerContext ctx, HttpObject msg) throws Exception {
    if (msg instanceof HttpResponse) {
        HttpResponse m = (HttpResponse) msg;
        _shouldCloseConnection = !HttpUtil.isKeepAlive(m);
        if (HttpUtil.is100ContinueExpected(m)) {
            ctx.writeAndFlush(CONTINUE).addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) throws Exception {
                    if (!future.isSuccess()) {
                        ctx.fireExceptionCaught(future.cause());
                    }
                }
            });
        }
        if (!m.decoderResult().isSuccess()) {
            ctx.fireExceptionCaught(m.decoderResult().cause());
            return;
        }
        // remove chunked encoding.
        if (HttpUtil.isTransferEncodingChunked(m)) {
            HttpUtil.setTransferEncodingChunked(m, false);
        }
        Timeout<None> timeout = ctx.channel().attr(TIMEOUT_ATTR_KEY).getAndSet(null);
        if (timeout == null) {
            LOG.debug("dropped a response after channel inactive or exception had happened.");
            return;
        }
        final TimeoutBufferedWriter writer = new TimeoutBufferedWriter(ctx, _maxContentLength, BUFFER_HIGH_WATER_MARK, BUFFER_LOW_WATER_MARK, timeout);
        EntityStream entityStream = EntityStreams.newEntityStream(writer);
        _chunkedMessageWriter = writer;
        // Refactored duplicate code to new code pipeline.
        StreamResponseBuilder builder = HttpMessageDecoders.ResponseDecoder.buildStreamResponse(m);
        ctx.fireChannelRead(builder.build(entityStream));
    } else if (msg instanceof HttpContent) {
        HttpContent chunk = (HttpContent) msg;
        TimeoutBufferedWriter currentWriter = _chunkedMessageWriter;
        // Sanity check
        if (currentWriter == null) {
            throw new IllegalStateException("received " + HttpContent.class.getSimpleName() + " without " + HttpResponse.class.getSimpleName());
        }
        if (!chunk.decoderResult().isSuccess()) {
            this.exceptionCaught(ctx, chunk.decoderResult().cause());
        }
        currentWriter.processHttpChunk(chunk);
        if (chunk instanceof LastHttpContent) {
            _chunkedMessageWriter = null;
        }
    } else {
        // something must be wrong, but let's proceed so that
        // handler after us has a chance to process it.
        ctx.fireChannelRead(msg);
    }
}
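The same LastHttpContent check appears in a much simpler form when a client does not need streaming. The sketch below is not rest.li code; the class name and the in-memory StringBuilder accumulation are illustrative assumptions. It shows the basic pattern: buffer HttpContent chunks and treat LastHttpContent as the end-of-message marker.
public class SimpleResponseHandler extends SimpleChannelInboundHandler<HttpObject> {

    private final StringBuilder body = new StringBuilder();

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) {
        if (msg instanceof HttpResponse) {
            // A new response starts; reset any previously buffered content.
            body.setLength(0);
        }
        if (msg instanceof HttpContent) {
            body.append(((HttpContent) msg).content().toString(CharsetUtil.UTF_8));
            if (msg instanceof LastHttpContent) {
                // LastHttpContent terminates the message and may carry trailing headers.
                System.out.println("complete body: " + body);
            }
        }
    }
}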
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.LastHttpContent in project sidewinder by srotya.
The class HTTPDataPointDecoder, method channelRead0.
@Override
protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
    try {
        if (ResourceMonitor.getInstance().isReject()) {
            logger.warning("Write rejected, insufficient memory");
            if (writeResponse(request, ctx)) {
                ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
            }
            return;
        }
        if (msg instanceof HttpRequest) {
            HttpRequest request = this.request = (HttpRequest) msg;
            if (HttpUtil.is100ContinueExpected(request)) {
                send100Continue(ctx);
            }
            QueryStringDecoder queryStringDecoder = new QueryStringDecoder(request.uri());
            path = queryStringDecoder.path();
            Map<String, List<String>> params = queryStringDecoder.parameters();
            if (!params.isEmpty()) {
                for (Entry<String, List<String>> p : params.entrySet()) {
                    String key = p.getKey();
                    if (key.equalsIgnoreCase("db")) {
                        dbName = p.getValue().get(0);
                    }
                }
            }
            if (path != null && path.contains("query")) {
                Gson gson = new Gson();
                JsonObject obj = new JsonObject();
                JsonArray ary = new JsonArray();
                ary.add(new JsonObject());
                obj.add("results", ary);
                responseString.append(gson.toJson(obj));
            }
        }
        if (msg instanceof HttpContent) {
            HttpContent httpContent = (HttpContent) msg;
            ByteBuf byteBuf = httpContent.content();
            if (byteBuf.isReadable()) {
                requestBuffer.append(byteBuf.toString(CharsetUtil.UTF_8));
            }
            if (msg instanceof LastHttpContent) {
                if (dbName == null) {
                    responseString.append("Invalid database null");
                    logger.severe("Invalid database null");
                } else {
                    String payload = requestBuffer.toString();
                    logger.fine("Request:" + payload);
                    List<Point> dps = InfluxDecoder.pointsFromString(dbName, payload);
                    meter.inc(dps.size());
                    for (Point dp : dps) {
                        try {
                            engine.writeDataPointWithLock(dp, false);
                            logger.fine("Accepted:" + dp + "\t" + new Date(dp.getTimestamp()));
                        } catch (IOException e) {
                            logger.fine("Dropped:" + dp + "\t" + e.getMessage());
                            responseString.append("Dropped:" + dp);
                        }
                    }
                }
                if (writeResponse(request, ctx)) {
                    ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
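The snippet calls a send100Continue(ctx) helper that is not shown above. A typical Netty implementation of such a helper (an assumption here, not necessarily sidewinder's exact code) simply writes an empty 100 Continue response so the client knows it may start sending the request body:
private static void send100Continue(ChannelHandlerContext ctx) {
    // Acknowledge the "Expect: 100-continue" header with an empty interim response.
    FullHttpResponse response = new DefaultFullHttpResponse(
            HttpVersion.HTTP_1_1, HttpResponseStatus.CONTINUE, Unpooled.EMPTY_BUFFER);
    ctx.write(response);
}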
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.LastHttpContent in project apn-proxy by apn-proxy.
The class TestHttpClientHandler, method channelRead0.
@Override
public void channelRead0(ChannelHandlerContext ctx, HttpObject msg) throws Exception {
    if (msg instanceof HttpResponse) {
        HttpResponse response = (HttpResponse) msg;
        TestResultHolder.httpStatusCode(response.getStatus().code());
    }
    if (msg instanceof LastHttpContent) {
        ctx.close();
    }
}
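Closing the channel on LastHttpContent only works because the response is not aggregated upstream. A minimal client pipeline that would feed this handler might look like the sketch below; apart from the standard Netty classes, the bootstrap code and the no-argument TestHttpClientHandler constructor are illustrative assumptions, not apn-proxy's actual test setup.
Bootstrap bootstrap = new Bootstrap();
bootstrap.group(new NioEventLoopGroup())
        .channel(NioSocketChannel.class)
        .handler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) {
                // No HttpObjectAggregator: the response arrives as an HttpResponse
                // followed by HttpContent chunks, the last being a LastHttpContent.
                ch.pipeline().addLast(new HttpClientCodec());
                ch.pipeline().addLast(new TestHttpClientHandler());
            }
        });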
Use of org.apache.flink.shaded.netty4.io.netty.handler.codec.http.LastHttpContent in project vert.x by eclipse.
The class VertxHttpHandler, method safeObject.
@Override
protected Object safeObject(Object msg, ByteBufAllocator allocator) throws Exception {
    if (msg instanceof HttpContent) {
        HttpContent content = (HttpContent) msg;
        ByteBuf buf = content.content();
        if (buf != Unpooled.EMPTY_BUFFER && buf.isDirect()) {
            ByteBuf newBuf = safeBuffer(content, allocator);
            if (msg instanceof LastHttpContent) {
                LastHttpContent last = (LastHttpContent) msg;
                return new AssembledLastHttpContent(newBuf, last.trailingHeaders(), last.getDecoderResult());
            } else {
                return new DefaultHttpContent(newBuf);
            }
        }
    } else if (msg instanceof WebSocketFrame) {
        ByteBuf payload = safeBuffer((WebSocketFrame) msg, allocator);
        boolean isFinal = ((WebSocketFrame) msg).isFinalFragment();
        FrameType frameType;
        if (msg instanceof BinaryWebSocketFrame) {
            frameType = FrameType.BINARY;
        } else if (msg instanceof CloseWebSocketFrame) {
            frameType = FrameType.CLOSE;
        } else if (msg instanceof PingWebSocketFrame) {
            frameType = FrameType.PING;
        } else if (msg instanceof PongWebSocketFrame) {
            frameType = FrameType.PONG;
        } else if (msg instanceof TextWebSocketFrame) {
            frameType = FrameType.TEXT;
        } else if (msg instanceof ContinuationWebSocketFrame) {
            frameType = FrameType.CONTINUATION;
        } else {
            throw new IllegalStateException("Unsupported websocket msg " + msg);
        }
        return new WebSocketFrameImpl(frameType, payload, isFinal);
    }
    return msg;
}
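safeBuffer is a Vert.x-internal helper not shown here; conceptually it copies a direct (off-heap) buffer onto the heap so the content remains valid after the original pipeline buffer is released. A rough sketch of that idea, an assumption rather than Vert.x's actual implementation:
static ByteBuf copyToHeap(ByteBuf direct, ByteBufAllocator allocator) {
    // Copy the readable bytes of a direct buffer into a heap buffer so user code can
    // keep the data after the original direct buffer has been released.
    ByteBuf heap = allocator.heapBuffer(direct.readableBytes());
    heap.writeBytes(direct, direct.readerIndex(), direct.readableBytes());
    return heap;
}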