Search in sources:
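
All five examples below share the same pattern, so a minimal orientation sketch may help first: SimpleChannelInboundHandler<T> accepts only inbound messages assignable to T, hands each one to channelRead0, and releases it automatically afterwards; non-matching messages are forwarded to the next handler in the pipeline. The handler name and message type here are illustrative, not taken from any of the projects below.

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

public class EchoHandler extends SimpleChannelInboundHandler<String> {

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, String msg) {
        // No manual ReferenceCountUtil.release(msg) needed: the superclass
        // releases the message after this method returns.
        ctx.writeAndFlush("echo: " + msg);
    }
}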

Example 1 with SimpleChannelInboundHandler

Use of io.netty.channel.SimpleChannelInboundHandler in project netty by netty.

The class DatagramUnicastTest, method testSimpleSend0.

@SuppressWarnings("deprecation")
private void testSimpleSend0(Bootstrap sb, Bootstrap cb, ByteBuf buf, boolean bindClient, final byte[] bytes, int count) throws Throwable {
    final CountDownLatch latch = new CountDownLatch(count);
    sb.handler(new ChannelInitializer<Channel>() {

        @Override
        protected void initChannel(Channel ch) throws Exception {
            ch.pipeline().addLast(new SimpleChannelInboundHandler<DatagramPacket>() {

                @Override
                public void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
                    ByteBuf buf = msg.content();
                    assertEquals(bytes.length, buf.readableBytes());
                    for (byte b : bytes) {
                        assertEquals(b, buf.readByte());
                    }
                    latch.countDown();
                }
            });
        }
    });
    cb.handler(new SimpleChannelInboundHandler<Object>() {

        @Override
        public void channelRead0(ChannelHandlerContext ctx, Object msgs) throws Exception {
            // Nothing will be sent.
        }
    });
    Channel sc = null;
    BindException bindFailureCause = null;
    for (int i = 0; i < 3; i++) {
        try {
            sc = sb.bind().sync().channel();
            break;
        } catch (Exception e) {
            if (e instanceof BindException) {
                logger.warn("Failed to bind to a free port; trying again", e);
                bindFailureCause = (BindException) e;
                refreshLocalAddress(sb);
            } else {
                throw e;
            }
        }
    }
    if (sc == null) {
        throw bindFailureCause;
    }
    Channel cc;
    if (bindClient) {
        cc = cb.bind().sync().channel();
    } else {
        cb.option(ChannelOption.DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION, true);
        cc = cb.register().sync().channel();
    }
    for (int i = 0; i < count; i++) {
        cc.write(new DatagramPacket(buf.retain().duplicate(), addr));
    }
    // release as we used buf.retain() before
    buf.release();
    cc.flush();
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    sc.close().sync();
    cc.close().sync();
}
Also used : SimpleChannelInboundHandler(io.netty.channel.SimpleChannelInboundHandler) Channel(io.netty.channel.Channel) BindException(java.net.BindException) ChannelHandlerContext(io.netty.channel.ChannelHandlerContext) CountDownLatch(java.util.concurrent.CountDownLatch) CompositeByteBuf(io.netty.buffer.CompositeByteBuf) ByteBuf(io.netty.buffer.ByteBuf) DatagramPacket(io.netty.channel.socket.DatagramPacket)
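
The buf.retain().duplicate() idiom in the send loop above is the subtle part: writing a DatagramPacket consumes one reference when Netty releases the packet after sending, while duplicate() shares the content and reference count with the parent buffer without copying the bytes. A stripped-down sketch of the reference-counting arithmetic, assuming channel, recipient, bytes, and count are already set up:

ByteBuf buf = Unpooled.wrappedBuffer(bytes);          // refCnt == 1 (our reference)
for (int i = 0; i < count; i++) {
    // retain() bumps the shared refCnt; duplicate() shares content with the parent
    // but has independent reader/writer indexes. Each write releases one reference
    // once its datagram has been sent.
    channel.write(new DatagramPacket(buf.retain().duplicate(), recipient));
}
buf.release();   // drop our own reference; refCnt reaches 0 after all writes complete
channel.flush(); // actually send the queued packets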

Example 2 with SimpleChannelInboundHandler

Use of io.netty.channel.SimpleChannelInboundHandler in project netty by netty.

The class NioSocketChannelTest, method testChannelReRegisterRead.

private static void testChannelReRegisterRead(final boolean sameEventLoop) throws Exception {
    final EventLoopGroup group = new NioEventLoopGroup(2);
    final CountDownLatch latch = new CountDownLatch(1);
    // Just some random bytes
    byte[] bytes = new byte[1024];
    PlatformDependent.threadLocalRandom().nextBytes(bytes);
    Channel sc = null;
    Channel cc = null;
    ServerBootstrap b = new ServerBootstrap();
    try {
        b.group(group).channel(NioServerSocketChannel.class).childOption(ChannelOption.SO_KEEPALIVE, true).childHandler(new ChannelInitializer<Channel>() {

            @Override
            protected void initChannel(Channel ch) throws Exception {
                ChannelPipeline pipeline = ch.pipeline();
                pipeline.addLast(new SimpleChannelInboundHandler<ByteBuf>() {

                    @Override
                    protected void channelRead0(ChannelHandlerContext ctx, ByteBuf byteBuf) {
                        // We were able to read something from the Channel after re-registration.
                        latch.countDown();
                    }

                    @Override
                    public void channelActive(final ChannelHandlerContext ctx) throws Exception {
                        final EventLoop loop = group.next();
                        if (sameEventLoop) {
                            deregister(ctx, loop);
                        } else {
                            loop.execute(new Runnable() {

                                @Override
                                public void run() {
                                    deregister(ctx, loop);
                                }
                            });
                        }
                    }

                    private void deregister(ChannelHandlerContext ctx, final EventLoop loop) {
                        // As soon as the channel becomes active re-register it to another
                        // EventLoop. After this is done we should still receive the data that
                        // was written to the channel.
                        ctx.deregister().addListener(new ChannelFutureListener() {

                            @Override
                            public void operationComplete(ChannelFuture cf) {
                                Channel channel = cf.channel();
                                assertNotSame(loop, channel.eventLoop());
                                group.next().register(channel);
                            }
                        });
                    }
                });
            }
        });
        sc = b.bind(0).syncUninterruptibly().channel();
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.group(group).channel(NioSocketChannel.class);
        bootstrap.handler(new ChannelInboundHandlerAdapter());
        cc = bootstrap.connect(sc.localAddress()).syncUninterruptibly().channel();
        cc.writeAndFlush(Unpooled.wrappedBuffer(bytes)).syncUninterruptibly();
        latch.await();
    } finally {
        if (cc != null) {
            cc.close();
        }
        if (sc != null) {
            sc.close();
        }
        group.shutdownGracefully();
    }
}
Also used : SimpleChannelInboundHandler(io.netty.channel.SimpleChannelInboundHandler) ChannelFuture(io.netty.channel.ChannelFuture) NioSocketChannel(io.netty.channel.socket.nio.NioSocketChannel) NioServerSocketChannel(io.netty.channel.socket.nio.NioServerSocketChannel) Channel(io.netty.channel.Channel) ChannelHandlerContext(io.netty.channel.ChannelHandlerContext) CountDownLatch(java.util.concurrent.CountDownLatch) ByteBuf(io.netty.buffer.ByteBuf) ChannelFutureListener(io.netty.channel.ChannelFutureListener) ServerBootstrap(io.netty.bootstrap.ServerBootstrap) ClosedChannelException(java.nio.channels.ClosedChannelException) ChannelPipeline(io.netty.channel.ChannelPipeline) EventLoopGroup(io.netty.channel.EventLoopGroup) EventLoop(io.netty.channel.EventLoop) Bootstrap(io.netty.bootstrap.Bootstrap) ChannelInboundHandlerAdapter(io.netty.channel.ChannelInboundHandlerAdapter)
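
The heart of the test above is the deregister-then-register hand-off. Pulled out of the test scaffolding, the idiom reduces to the sketch below, where group is assumed to be a running EventLoopGroup and ch an already-registered Channel:

// Move a live Channel to another event loop. Inbound data that arrives while the
// channel is deregistered is delivered once the new registration completes.
ch.deregister().addListener((ChannelFutureListener) future -> {
    if (future.isSuccess()) {
        group.next().register(future.channel());
    }
});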

Example 3 with SimpleChannelInboundHandler

Use of io.netty.channel.SimpleChannelInboundHandler in project riposte by Nike-Inc.

The class StreamingAsyncHttpClient, method prepChannelForDownstreamCall.

protected void prepChannelForDownstreamCall(
        ChannelPool pool, Channel ch, StreamingCallback callback,
        Deque<Span> distributedSpanStackToUse, Map<String, String> mdcContextToUse,
        boolean isSecureHttpsCall, boolean relaxedHttpsValidation,
        boolean performSubSpanAroundDownstreamCalls, long downstreamCallTimeoutMillis,
        ObjectHolder<Boolean> callActiveHolder,
        ObjectHolder<Boolean> lastChunkSentDownstreamHolder) throws SSLException, NoSuchAlgorithmException, KeyStoreException {
    ChannelHandler chunkSenderHandler = new SimpleChannelInboundHandler<HttpObject>() {

        @Override
        protected void channelRead0(ChannelHandlerContext downstreamCallCtx, HttpObject msg) throws Exception {
            try {
                // Only react if the call is still active. Messages that arrive after
                // the call is fully processed should not trigger the behavior a second time.
                if (callActiveHolder.heldObject) {
                    if (msg instanceof LastHttpContent) {
                        lastChunkSentDownstreamHolder.heldObject = true;
                        if (performSubSpanAroundDownstreamCalls) {
                            // Complete the subspan.
                            runnableWithTracingAndMdc(() -> {
                                if (distributedSpanStackToUse == null || distributedSpanStackToUse.size() < 2)
                                    Tracer.getInstance().completeRequestSpan();
                                else
                                    Tracer.getInstance().completeSubSpan();
                            }, distributedSpanStackToUse, mdcContextToUse).run();
                        }
                    }
                    HttpObject msgToPass = msg;
                    if (msg instanceof HttpResponse) {
                        // We can't pass the original HttpResponse back to the callback due to intricacies of how
                        // Netty handles determining the last chunk. If we do, and the callback ends up writing
                        // the message out to the client (which happens during proxy routing for example), then
                        // msg's headers might get modified - potentially causing this channel pipeline to
                        // never send a LastHttpContent, which will in turn cause an indefinite hang.
                        HttpResponse origHttpResponse = (HttpResponse) msg;
                        HttpResponse httpResponse = (msg instanceof FullHttpResponse)
                            ? new DefaultFullHttpResponse(origHttpResponse.getProtocolVersion(), origHttpResponse.getStatus(), ((FullHttpResponse) msg).content())
                            : new DefaultHttpResponse(origHttpResponse.getProtocolVersion(), origHttpResponse.getStatus());
                        httpResponse.headers().add(origHttpResponse.headers());
                        msgToPass = httpResponse;
                    }
                    callback.messageReceived(msgToPass);
                } else {
                    if (shouldLogBadMessagesAfterRequestFinishes) {
                        runnableWithTracingAndMdc(() -> logger.warn("Received HttpObject msg when call was not active: {}", String.valueOf(msg)), distributedSpanStackToUse, mdcContextToUse).run();
                    }
                }
            } finally {
                if (msg instanceof LastHttpContent) {
                    releaseChannelBackToPoolIfCallIsActive(ch, pool, callActiveHolder, "last content chunk sent", distributedSpanStackToUse, mdcContextToUse);
                }
            }
        }
    };
    Consumer<Throwable> doErrorHandlingConsumer = (cause) -> {
        Pair<Deque<Span>, Map<String, String>> originalThreadInfo = null;
        try {
            // Setup tracing and MDC so our log messages have the correct distributed trace info, etc.
            originalThreadInfo = linkTracingAndMdcToCurrentThread(distributedSpanStackToUse, mdcContextToUse);
            // Only react if the call is still active. Errors that arrive after the
            // call is fully processed should not trigger the behavior a second time.
            if (callActiveHolder.heldObject) {
                if (performSubSpanAroundDownstreamCalls) {
                    if (distributedSpanStackToUse == null || distributedSpanStackToUse.size() < 2)
                        Tracer.getInstance().completeRequestSpan();
                    else
                        Tracer.getInstance().completeSubSpan();
                }
                Tracer.getInstance().unregisterFromThread();
                if (cause instanceof Errors.NativeIoException) {
                    // NativeIoExceptions are often set up to not have stack traces, which is bad for debugging.
                    // Wrap it in a NativeIoExceptionWrapper that maps to a 503 since this is likely a busted
                    // connection and a second attempt should work.
                    cause = new NativeIoExceptionWrapper("Caught a NativeIoException in the downstream streaming call pipeline. Wrapped it in a " + "NativeIoExceptionWrapper so that it maps to a 503 and provides a usable stack trace " + "in the logs.", (Errors.NativeIoException) cause);
                }
                callback.unrecoverableErrorOccurred(cause, true);
            } else {
                if (cause instanceof DownstreamIdleChannelTimeoutException) {
                    logger.debug("A channel used for downstream calls will be closed because it was idle too long. " + "This is normal behavior and does not indicate a downstream call failure: {}", cause.toString());
                } else {
                    logger.warn("Received exception in downstream call pipeline after the call was finished. " + "Not necessarily anything to worry about but in case it helps debugging the " + "exception was: {}", cause.toString());
                }
            }
        } finally {
            // Mark the channel as broken so it will be closed and removed from the pool when it is returned.
            markChannelAsBroken(ch);
            // Release it back to the pool if possible/necessary so the pool can do its usual cleanup.
            releaseChannelBackToPoolIfCallIsActive(ch, pool, callActiveHolder, "error received in downstream pipeline: " + cause.toString(), distributedSpanStackToUse, mdcContextToUse);
            // No matter what the cause is we want to make sure the channel is closed. Doing this raw ch.close()
            // here will catch the cases where this channel does not have an active call but still needs to be
            // closed (e.g. an idle channel timeout that happens in-between calls).
            ch.close();
            // Unhook the tracing and MDC stuff from this thread now that we're done.
            unlinkTracingAndMdcFromCurrentThread(originalThreadInfo);
        }
    };
    ChannelHandler errorHandler = new ChannelInboundHandlerAdapter() {

        @Override
        public void exceptionCaught(ChannelHandlerContext downstreamCallCtx, Throwable cause) throws Exception {
            doErrorHandlingConsumer.accept(cause);
        }

        @Override
        public void channelInactive(ChannelHandlerContext ctx) throws Exception {
            if (logger.isDebugEnabled()) {
                runnableWithTracingAndMdc(() -> logger.debug("Downstream channel closing. call_active={}, last_chunk_sent_downstream={}, channel_id={}", callActiveHolder.heldObject, lastChunkSentDownstreamHolder.heldObject, ctx.channel().toString()), distributedSpanStackToUse, mdcContextToUse).run();
            }
            // We only care if the channel was closed while the call was active.
            if (callActiveHolder.heldObject)
                doErrorHandlingConsumer.accept(new DownstreamChannelClosedUnexpectedlyException(ch));
            super.channelInactive(ctx);
        }
    };
    // Set up the HTTP client pipeline.
    ChannelPipeline p = ch.pipeline();
    List<String> registeredHandlerNames = p.names();
    // Remove the idle-channel timeout handler if a previous cleanup left it in the pipeline; it
    // couldn't be removed at that time because it wasn't in the channel's eventLoop.
    if (registeredHandlerNames.contains(DOWNSTREAM_IDLE_CHANNEL_TIMEOUT_HANDLER_NAME)) {
        ChannelHandler idleHandler = p.get(DOWNSTREAM_IDLE_CHANNEL_TIMEOUT_HANDLER_NAME);
        if (idleHandler != null)
            p.remove(idleHandler);
    }
    if (debugChannelLifecycleLoggingEnabled && !registeredHandlerNames.contains(DEBUG_LOGGER_HANDLER_NAME)) {
        // Add the channel debug logger if desired.
        p.addFirst(DEBUG_LOGGER_HANDLER_NAME, new LoggingHandler(DOWNSTREAM_CLIENT_CHANNEL_DEBUG_LOGGER_NAME, LogLevel.DEBUG));
    }
    // Add/replace a downstream call timeout detector.
    addOrReplacePipelineHandler(new DownstreamIdleChannelTimeoutHandler(downstreamCallTimeoutMillis, () -> callActiveHolder.heldObject, true, "StreamingAsyncHttpClientChannel-call-timeout", distributedSpanStackToUse, mdcContextToUse), DOWNSTREAM_CALL_TIMEOUT_HANDLER_NAME, p, registeredHandlerNames);
    if (isSecureHttpsCall) {
        // SSL call. Make sure we add the SSL handler if necessary.
        if (!registeredHandlerNames.contains(SSL_HANDLER_NAME)) {
            if (clientSslCtx == null) {
                if (relaxedHttpsValidation) {
                    clientSslCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build();
                } else {
                    TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
                    tmf.init((KeyStore) null);
                    clientSslCtx = SslContextBuilder.forClient().trustManager(tmf).build();
                }
            }
            p.addAfter(DOWNSTREAM_CALL_TIMEOUT_HANDLER_NAME, SSL_HANDLER_NAME, clientSslCtx.newHandler(ch.alloc()));
        }
    } else {
        // Not an SSL call. Remove the SSL handler if it's there.
        if (registeredHandlerNames.contains(SSL_HANDLER_NAME))
            p.remove(SSL_HANDLER_NAME);
    }
    // The HttpClientCodec handler deals with HTTP codec stuff so you don't have to. Set it up if it hasn't already
    // been set up, and inspect it to make sure it's in a "ready to handle a new request" state. Some rare
    // and currently unknown edge cases can cause us to hit this point with the HttpClientCodec in an unclean
    // state, and if we barrel forward without cleaning this up the call will fail.
    boolean pipelineContainsHttpClientCodec = registeredHandlerNames.contains(HTTP_CLIENT_CODEC_HANDLER_NAME);
    boolean existingHttpClientCodecIsInBadState = false;
    if (pipelineContainsHttpClientCodec) {
        HttpClientCodec currentCodec = (HttpClientCodec) p.get(HTTP_CLIENT_CODEC_HANDLER_NAME);
        int currentHttpClientCodecInboundState = determineHttpClientCodecInboundState(currentCodec);
        if (currentHttpClientCodecInboundState != 0) {
            runnableWithTracingAndMdc(() -> logger.warn("HttpClientCodec inbound state was not 0. It will be replaced with a fresh HttpClientCodec. " + "bad_httpclientcodec_inbound_state={}", currentHttpClientCodecInboundState), distributedSpanStackToUse, mdcContextToUse).run();
            existingHttpClientCodecIsInBadState = true;
        } else {
            int currentHttpClientCodecOutboundState = determineHttpClientCodecOutboundState(currentCodec);
            if (currentHttpClientCodecOutboundState != 0) {
                runnableWithTracingAndMdc(() -> logger.warn("HttpClientCodec outbound state was not 0. It will be replaced with a fresh HttpClientCodec. " + "bad_httpclientcodec_outbound_state={}", currentHttpClientCodecOutboundState), distributedSpanStackToUse, mdcContextToUse).run();
                existingHttpClientCodecIsInBadState = true;
            }
        }
    }
    // Add the HttpClientCodec if the pipeline doesn't contain one yet, or replace it if it was in a bad state.
    if (!pipelineContainsHttpClientCodec || existingHttpClientCodecIsInBadState) {
        addOrReplacePipelineHandler(new HttpClientCodec(4096, 8192, 8192, true), HTTP_CLIENT_CODEC_HANDLER_NAME, p, registeredHandlerNames);
    }
    // Update the chunk sender handler and error handler to the newly created versions that know about the correct
    // callback, dtrace info, etc to use for this request.
    addOrReplacePipelineHandler(chunkSenderHandler, CHUNK_SENDER_HANDLER_NAME, p, registeredHandlerNames);
    addOrReplacePipelineHandler(errorHandler, ERROR_HANDLER_NAME, p, registeredHandlerNames);
}
Also used : AttributeKey(io.netty.util.AttributeKey) Span(com.nike.wingtips.Span) HttpHeaders(io.netty.handler.codec.http.HttpHeaders) DefaultThreadFactory(io.netty.util.concurrent.DefaultThreadFactory) ChannelInboundHandlerAdapter(io.netty.channel.ChannelInboundHandlerAdapter) LoggerFactory(org.slf4j.LoggerFactory) Random(java.util.Random) KeyStoreException(java.security.KeyStoreException) AsyncNettyHelper.unlinkTracingAndMdcFromCurrentThread(com.nike.riposte.util.AsyncNettyHelper.unlinkTracingAndMdcFromCurrentThread) HttpObject(io.netty.handler.codec.http.HttpObject) HttpClientCodec(io.netty.handler.codec.http.HttpClientCodec) InetAddress(java.net.InetAddress) ChannelPromise(io.netty.channel.ChannelPromise) Map(java.util.Map) ThreadFactory(java.util.concurrent.ThreadFactory) SocketChannel(io.netty.channel.socket.SocketChannel) HttpObjectDecoder(io.netty.handler.codec.http.HttpObjectDecoder) HttpRequest(io.netty.handler.codec.http.HttpRequest) TrustManagerFactory(javax.net.ssl.TrustManagerFactory) DOWNSTREAM_CALL_CONNECTION_SETUP_TIME_NANOS_REQUEST_ATTR_KEY(com.nike.riposte.server.handler.ProxyRouterEndpointExecutionHandler.DOWNSTREAM_CALL_CONNECTION_SETUP_TIME_NANOS_REQUEST_ATTR_KEY) DownstreamIdleChannelTimeoutException(com.nike.riposte.server.error.exception.DownstreamIdleChannelTimeoutException) ChannelHealthChecker(io.netty.channel.pool.ChannelHealthChecker) DownstreamChannelClosedUnexpectedlyException(com.nike.riposte.server.error.exception.DownstreamChannelClosedUnexpectedlyException) KeyStore(java.security.KeyStore) ChannelPipeline(io.netty.channel.ChannelPipeline) InetSocketAddress(java.net.InetSocketAddress) NioEventLoopGroup(io.netty.channel.nio.NioEventLoopGroup) List(java.util.List) SSLException(javax.net.ssl.SSLException) AbstractChannelPoolHandler(io.netty.channel.pool.AbstractChannelPoolHandler) LogLevel(io.netty.handler.logging.LogLevel) ChannelAttributes(com.nike.riposte.server.channelpipeline.ChannelAttributes) DefaultHttpResponse(io.netty.handler.codec.http.DefaultHttpResponse) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) HttpObjectEncoder(io.netty.handler.codec.http.HttpObjectEncoder) DefaultFullHttpResponse(io.netty.handler.codec.http.DefaultFullHttpResponse) HttpResponse(io.netty.handler.codec.http.HttpResponse) ChannelPoolMap(io.netty.channel.pool.ChannelPoolMap) HttpProcessingState(com.nike.riposte.server.http.HttpProcessingState) NioSocketChannel(io.netty.channel.socket.nio.NioSocketChannel) HttpRequestEncoder(io.netty.handler.codec.http.HttpRequestEncoder) DownstreamIdleChannelTimeoutHandler(com.nike.riposte.client.asynchttp.netty.downstreampipeline.DownstreamIdleChannelTimeoutHandler) RequestInfo(com.nike.riposte.server.http.RequestInfo) ChannelOption(io.netty.channel.ChannelOption) LoggingHandler(io.netty.handler.logging.LoggingHandler) Tracer(com.nike.wingtips.Tracer) CompletableFuture(java.util.concurrent.CompletableFuture) Errors(io.netty.channel.unix.Errors) Deque(java.util.Deque) LastHttpContent(io.netty.handler.codec.http.LastHttpContent) EpollSocketChannel(io.netty.channel.epoll.EpollSocketChannel) AsyncNettyHelper.linkTracingAndMdcToCurrentThread(com.nike.riposte.util.AsyncNettyHelper.linkTracingAndMdcToCurrentThread) ChannelHandlerContext(io.netty.channel.ChannelHandlerContext) InsecureTrustManagerFactory(io.netty.handler.ssl.util.InsecureTrustManagerFactory) HttpRequestTracingUtils(com.nike.wingtips.http.HttpRequestTracingUtils) BiConsumer(java.util.function.BiConsumer) EpollEventLoopGroup(io.netty.channel.epoll.EpollEventLoopGroup) HttpContent(io.netty.handler.codec.http.HttpContent) Attribute(io.netty.util.Attribute) Logger(org.slf4j.Logger) EventLoopGroup(io.netty.channel.EventLoopGroup) CombinedChannelDuplexHandler(io.netty.channel.CombinedChannelDuplexHandler) SslContext(io.netty.handler.ssl.SslContext) Promise(io.netty.util.concurrent.Promise) HostnameResolutionException(com.nike.riposte.server.error.exception.HostnameResolutionException) Field(java.lang.reflect.Field) UnknownHostException(java.net.UnknownHostException) ChannelFuture(io.netty.channel.ChannelFuture) Epoll(io.netty.channel.epoll.Epoll) Consumer(java.util.function.Consumer) Channel(io.netty.channel.Channel) SimpleChannelPool(io.netty.channel.pool.SimpleChannelPool) Bootstrap(io.netty.bootstrap.Bootstrap) FullHttpResponse(io.netty.handler.codec.http.FullHttpResponse) WrapperException(com.nike.backstopper.exception.WrapperException) MDC(org.slf4j.MDC) SimpleChannelInboundHandler(io.netty.channel.SimpleChannelInboundHandler) NativeIoExceptionWrapper(com.nike.riposte.server.error.exception.NativeIoExceptionWrapper) AsyncNettyHelper.runnableWithTracingAndMdc(com.nike.riposte.util.AsyncNettyHelper.runnableWithTracingAndMdc) ChannelPool(io.netty.channel.pool.ChannelPool) SslContextBuilder(io.netty.handler.ssl.SslContextBuilder) ChannelHandler(io.netty.channel.ChannelHandler) Pair(com.nike.internal.util.Pair) AbstractChannelPoolMap(io.netty.channel.pool.AbstractChannelPoolMap) Future(io.netty.util.concurrent.Future)
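
Most of the pipeline surgery above funnels through the riposte helper addOrReplacePipelineHandler(...), whose implementation isn't shown. A plausible minimal stand-in built only on the public ChannelPipeline API (illustrative; the real helper takes extra arguments such as registeredHandlerNames and may behave differently):

// Replace the handler registered under `name` if one exists, otherwise append it.
static void addOrReplaceHandler(ChannelPipeline p, String name, ChannelHandler handler) {
    if (p.context(name) != null) {
        p.replace(name, name, handler);
    } else {
        p.addLast(name, handler);
    }
}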

Example 4 with SimpleChannelInboundHandler

Use of io.netty.channel.SimpleChannelInboundHandler in project alibaba-mom by younfor.

The class Broker, method bind.

/**
 * Netty server setup performed before the broker starts.
 * @param port the port to listen on
 * @throws Exception
 */
public void bind(int port) throws Exception {
    // Start the scheduled task that tracks consumption progress
    // storeSubscribe(true, 5000);
    // scanSendInfotMap();
    EventLoopGroup bossGroup = new NioEventLoopGroup();
    EventLoopGroup workerGroup = new NioEventLoopGroup(Runtime.getRuntime().availableProcessors() * 3);
    try {
        ServerBootstrap serverBootstrap = new ServerBootstrap();
        serverBootstrap.group(bossGroup, workerGroup)
                .channel(NioServerSocketChannel.class)
                .option(ChannelOption.SO_REUSEADDR, true)
                .childOption(ChannelOption.TCP_NODELAY, true)
                .option(ChannelOption.SO_BACKLOG, 1024 * 1024)
                .childOption(ChannelOption.SO_KEEPALIVE, true)
                .childHandler(new ChannelInitializer<Channel>() {

            @Override
            protected void initChannel(Channel ch) throws Exception {
                ch.pipeline().addLast(new RpcDecoder()).addLast(new RpcEncoder()).addLast(new SimpleChannelInboundHandler<Object>() {

                    @Override
                    protected void channelRead0(ChannelHandlerContext ctx, Object info) throws Exception {
                        if (InfoBodyConsumer.class.isInstance(info)) {
                            // Handle a consumer subscription
                            processConsumer((InfoBodyConsumer) info, ctx);
                        } else if (ConsumeResult.class.isInstance(info)) {
                            // Received a ConsumeResult from a consumer
                            logger.debug("Received a ConsumeResult from a consumer");
                            ConsumerGroup.confirmConsumer((ConsumeResult) info, ctx);
                        // confirmConsumer((ConsumeResult) info, ctx);
                        } else if (MessageSend.class.isInstance(info)) {
                            // System.out.println("Received a message");
                            MessageManager.recieveMsg((MessageSend) info, ctx);
                        // MessageManager.recieveMsg((LinkedBlockingQueue)info,ctx);
                        }
                    }

                    @Override
                    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                        logger.error("broker 异常:");
                        // logger.error(cause.getMessage());
                        cause.printStackTrace();
                        ctx.close();
                    }
                });
            }
        });
        ChannelFuture future = serverBootstrap.bind(port).sync();
        logger.debug("mom服务启动成功...... 绑定端口" + port);
        // Wait until the server's listening port is closed
        future.channel().closeFuture().sync();
    } catch (InterruptedException e) {
        logger.error("smom服务抛出异常  " + e.getMessage());
    } finally {
        // Graceful shutdown: release the thread pool resources
        bossGroup.shutdownGracefully();
        workerGroup.shutdownGracefully();
        logger.debug("mom服务优雅的释放了线程资源...");
    }
}
Also used : SimpleChannelInboundHandler(io.netty.channel.SimpleChannelInboundHandler) ChannelFuture(io.netty.channel.ChannelFuture) RpcEncoder(com.alibaba.middleware.race.mom.serializer.RpcEncoder) NioServerSocketChannel(io.netty.channel.socket.nio.NioServerSocketChannel) Channel(io.netty.channel.Channel) ChannelHandlerContext(io.netty.channel.ChannelHandlerContext) RpcDecoder(com.alibaba.middleware.race.mom.serializer.RpcDecoder) ServerBootstrap(io.netty.bootstrap.ServerBootstrap) EventLoopGroup(io.netty.channel.EventLoopGroup) NioEventLoopGroup(io.netty.channel.nio.NioEventLoopGroup)
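
A side note on the broker handler above: because it extends SimpleChannelInboundHandler<Object>, it sees every decoded message and dispatches manually with isInstance checks. When only one message type matters, the type parameter can do the filtering instead; a sketch reusing the project's own MessageSend and MessageManager.recieveMsg names for illustration:

ch.pipeline().addLast(new SimpleChannelInboundHandler<MessageSend>() {
    @Override
    protected void channelRead0(ChannelHandlerContext ctx, MessageSend msg) {
        // Non-MessageSend messages are forwarded to the next handler automatically;
        // matching ones are released after this method returns.
        MessageManager.recieveMsg(msg, ctx);
    }
});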

Example 5 with SimpleChannelInboundHandler

Use of io.netty.channel.SimpleChannelInboundHandler in project pinpoint by naver.

The class NettyIT, method writeTest.

@Test
public void writeTest() throws Exception {
    final CountDownLatch awaitLatch = new CountDownLatch(1);
    EventLoopGroup workerGroup = new NioEventLoopGroup(2);
    Bootstrap bootstrap = client(workerGroup);
    final ChannelFuture connect = bootstrap.connect(webServer.getHostname(), webServer.getListeningPort());
    connect.addListener(new ChannelFutureListener() {

        @Override
        public void operationComplete(ChannelFuture future) throws Exception {
            if (future.isSuccess()) {
                Channel channel = future.channel();
                channel.pipeline().addLast(new SimpleChannelInboundHandler<Object>() {

                    @Override
                    protected void channelRead0(ChannelHandlerContext ctx, Object msg) {
                        awaitLatch.countDown();
                    }
                });
                HttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
                future.channel().writeAndFlush(request);
            }
        }
    });
    boolean await = awaitLatch.await(3000, TimeUnit.MILLISECONDS);
    Assert.assertTrue(await);
    final Channel channel = connect.channel();
    try {
        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        verifier.printCache();
        verifier.verifyTrace(event("NETTY", Bootstrap.class.getMethod("connect", SocketAddress.class), annotation("netty.address", webServer.getHostAndPort())));
        verifier.verifyTrace(event("NETTY", "io.netty.channel.DefaultChannelPromise.addListener(io.netty.util.concurrent.GenericFutureListener)"));
        verifier.verifyTrace(event("ASYNC", "Asynchronous Invocation"));
        verifier.verifyTrace(event("NETTY_INTERNAL", "io.netty.util.concurrent.DefaultPromise.notifyListenersNow()"));
        verifier.verifyTrace(event("NETTY_INTERNAL", "io.netty.util.concurrent.DefaultPromise.notifyListener0(io.netty.util.concurrent.Future, io.netty.util.concurrent.GenericFutureListener)"));
        verifier.verifyTrace(event("NETTY", "io.netty.channel.DefaultChannelPipeline.writeAndFlush(java.lang.Object)"));
        verifier.verifyTrace(event("NETTY_HTTP", "io.netty.handler.codec.http.HttpObjectEncoder.encode(io.netty.channel.ChannelHandlerContext, java.lang.Object, java.util.List)", annotation("http.url", "/")));
    } finally {
        channel.close().sync();
        workerGroup.shutdownGracefully();
    }
}
Also used : ChannelFuture(io.netty.channel.ChannelFuture) SimpleChannelInboundHandler(io.netty.channel.SimpleChannelInboundHandler) DefaultFullHttpRequest(io.netty.handler.codec.http.DefaultFullHttpRequest) HttpRequest(io.netty.handler.codec.http.HttpRequest) NioSocketChannel(io.netty.channel.socket.nio.NioSocketChannel) SocketChannel(io.netty.channel.socket.SocketChannel) Channel(io.netty.channel.Channel) ChannelHandlerContext(io.netty.channel.ChannelHandlerContext) CountDownLatch(java.util.concurrent.CountDownLatch) ChannelFutureListener(io.netty.channel.ChannelFutureListener) PluginTestVerifier(com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier) EventLoopGroup(io.netty.channel.EventLoopGroup) NioEventLoopGroup(io.netty.channel.nio.NioEventLoopGroup) Bootstrap(io.netty.bootstrap.Bootstrap) Test(org.junit.Test)
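
The client(workerGroup) helper used at the top of the test isn't shown in this snippet. A plausible minimal version (illustrative, not the actual pinpoint code) is just a Bootstrap with an HTTP client codec, so that the DefaultFullHttpRequest written later gets encoded on the wire:

private static Bootstrap client(EventLoopGroup workerGroup) {
    Bootstrap bootstrap = new Bootstrap();
    bootstrap.group(workerGroup)
            .channel(NioSocketChannel.class)
            .handler(new ChannelInitializer<SocketChannel>() {
                @Override
                protected void initChannel(SocketChannel ch) {
                    // Encodes outbound HttpRequests and decodes inbound HttpResponses.
                    ch.pipeline().addLast(new HttpClientCodec());
                }
            });
    return bootstrap;
}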

Aggregations

ChannelHandlerContext (io.netty.channel.ChannelHandlerContext): 33
SimpleChannelInboundHandler (io.netty.channel.SimpleChannelInboundHandler): 33
Channel (io.netty.channel.Channel): 26
Bootstrap (io.netty.bootstrap.Bootstrap): 22
NioEventLoopGroup (io.netty.channel.nio.NioEventLoopGroup): 21
SocketChannel (io.netty.channel.socket.SocketChannel): 16
NioSocketChannel (io.netty.channel.socket.nio.NioSocketChannel): 16
EventLoopGroup (io.netty.channel.EventLoopGroup): 15
ServerBootstrap (io.netty.bootstrap.ServerBootstrap): 11
ByteBuf (io.netty.buffer.ByteBuf): 11
ChannelPipeline (io.netty.channel.ChannelPipeline): 11
ChannelFuture (io.netty.channel.ChannelFuture): 9
CountDownLatch (java.util.concurrent.CountDownLatch): 9
NioServerSocketChannel (io.netty.channel.socket.nio.NioServerSocketChannel): 8
InetSocketAddress (java.net.InetSocketAddress): 8
ChannelFutureListener (io.netty.channel.ChannelFutureListener): 7
FullHttpResponse (io.netty.handler.codec.http.FullHttpResponse): 6
HttpRequest (io.netty.handler.codec.http.HttpRequest): 6
IOException (java.io.IOException): 6
DatagramPacket (io.netty.channel.socket.DatagramPacket): 5