
Example 16 with EventLoopGroup

Use of io.netty.channel.EventLoopGroup in project pulsar by yahoo.

From the class PerformanceProducer, method main.

public static void main(String[] args) throws Exception {
    final Arguments arguments = new Arguments();
    JCommander jc = new JCommander(arguments);
    jc.setProgramName("pulsar-perf-producer");
    try {
        jc.parse(args);
    } catch (ParameterException e) {
        System.out.println(e.getMessage());
        jc.usage();
        System.exit(-1);
    }
    if (arguments.help) {
        jc.usage();
        System.exit(-1);
    }
    if (arguments.destinations.size() != 1) {
        System.out.println("Only one topic name is allowed");
        jc.usage();
        System.exit(-1);
    }
    if (arguments.confFile != null) {
        Properties prop = new Properties(System.getProperties());
        prop.load(new FileInputStream(arguments.confFile));
        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("brokerServiceUrl");
        }
        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("webServiceUrl");
        }
        // fallback to previous-version serviceUrl property to maintain backward-compatibility
        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("serviceUrl", "http://localhost:8080/");
        }
        if (arguments.authPluginClassName == null) {
            arguments.authPluginClassName = prop.getProperty("authPlugin", null);
        }
        if (arguments.authParams == null) {
            arguments.authParams = prop.getProperty("authParams", null);
        }
    }
    arguments.testTime = TimeUnit.SECONDS.toMillis(arguments.testTime);
    // Dump config variables
    ObjectMapper m = new ObjectMapper();
    ObjectWriter w = m.writerWithDefaultPrettyPrinter();
    log.info("Starting Pulsar perf producer with config: {}", w.writeValueAsString(arguments));
    // Read payload data from file if needed
    byte[] payloadData;
    if (arguments.payloadFilename != null) {
        payloadData = Files.readAllBytes(Paths.get(arguments.payloadFilename));
    } else {
        payloadData = new byte[arguments.msgSize];
    }
    // Now processing command line arguments
    String prefixTopicName = arguments.destinations.get(0);
    List<Future<Producer>> futures = Lists.newArrayList();
    EventLoopGroup eventLoopGroup;
    if (SystemUtils.IS_OS_LINUX) {
        eventLoopGroup = new EpollEventLoopGroup(Runtime.getRuntime().availableProcessors(), new DefaultThreadFactory("pulsar-perf-producer"));
    } else {
        eventLoopGroup = new NioEventLoopGroup(Runtime.getRuntime().availableProcessors(), new DefaultThreadFactory("pulsar-perf-producer"));
    }
    ClientConfiguration clientConf = new ClientConfiguration();
    clientConf.setConnectionsPerBroker(arguments.maxConnections);
    clientConf.setStatsInterval(arguments.statsIntervalSeconds, TimeUnit.SECONDS);
    if (isNotBlank(arguments.authPluginClassName)) {
        clientConf.setAuthentication(arguments.authPluginClassName, arguments.authParams);
    }
    PulsarClient client = new PulsarClientImpl(arguments.serviceURL, clientConf, eventLoopGroup);
    ProducerConfiguration producerConf = new ProducerConfiguration();
    producerConf.setSendTimeout(0, TimeUnit.SECONDS);
    producerConf.setCompressionType(arguments.compression);
    // enable round robin message routing if it is a partitioned topic
    producerConf.setMessageRoutingMode(MessageRoutingMode.RoundRobinPartition);
    if (arguments.batchTime > 0) {
        producerConf.setBatchingMaxPublishDelay(arguments.batchTime, TimeUnit.MILLISECONDS);
        producerConf.setBatchingEnabled(true);
        producerConf.setMaxPendingMessages(arguments.msgRate);
    }
    for (int i = 0; i < arguments.numTopics; i++) {
        String topic = (arguments.numTopics == 1) ? prefixTopicName : String.format("%s-%d", prefixTopicName, i);
        log.info("Adding {} publishers on destination {}", arguments.numProducers, topic);
        for (int j = 0; j < arguments.numProducers; j++) {
            futures.add(client.createProducerAsync(topic, producerConf));
        }
    }
    final List<Producer> producers = Lists.newArrayListWithCapacity(futures.size());
    for (Future<Producer> future : futures) {
        producers.add(future.get());
    }
    log.info("Created {} producers", producers.size());
    Runtime.getRuntime().addShutdownHook(new Thread() {

        public void run() {
            printAggregatedStats();
        }
    });
    Collections.shuffle(producers);
    AtomicBoolean isDone = new AtomicBoolean();
    executor.submit(() -> {
        try {
            RateLimiter rateLimiter = RateLimiter.create(arguments.msgRate);
            long startTime = System.currentTimeMillis();
            // Send messages on all topics/producers
            long totalSent = 0;
            while (true) {
                for (Producer producer : producers) {
                    if (arguments.testTime > 0) {
                        if (System.currentTimeMillis() - startTime > arguments.testTime) {
                            log.info("------------------- DONE -----------------------");
                            printAggregatedStats();
                            isDone.set(true);
                            Thread.sleep(5000);
                            System.exit(0);
                        }
                    }
                    if (arguments.numMessages > 0) {
                        if (totalSent++ >= arguments.numMessages) {
                            log.info("------------------- DONE -----------------------");
                            printAggregatedStats();
                            isDone.set(true);
                            Thread.sleep(5000);
                            System.exit(0);
                        }
                    }
                    rateLimiter.acquire();
                    final long sendTime = System.nanoTime();
                    producer.sendAsync(payloadData).thenRun(() -> {
                        messagesSent.increment();
                        bytesSent.add(payloadData.length);
                        long latencyMicros = NANOSECONDS.toMicros(System.nanoTime() - sendTime);
                        recorder.recordValue(latencyMicros);
                        cumulativeRecorder.recordValue(latencyMicros);
                    }).exceptionally(ex -> {
                        log.warn("Write error on message", ex);
                        System.exit(-1);
                        return null;
                    });
                }
            }
        } catch (Throwable t) {
            log.error("Got error", t);
        }
    });
    // Print report stats
    long oldTime = System.nanoTime();
    Histogram reportHistogram = null;
    String statsFileName = "perf-producer-" + System.currentTimeMillis() + ".hgrm";
    log.info("Dumping latency stats to {}", statsFileName);
    PrintStream histogramLog = new PrintStream(new FileOutputStream(statsFileName), false);
    HistogramLogWriter histogramLogWriter = new HistogramLogWriter(histogramLog);
    // Some log header bits
    histogramLogWriter.outputLogFormatVersion();
    histogramLogWriter.outputLegend();
    while (true) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            break;
        }
        if (isDone.get()) {
            break;
        }
        long now = System.nanoTime();
        double elapsed = (now - oldTime) / 1e9;
        double rate = messagesSent.sumThenReset() / elapsed;
        double throughput = bytesSent.sumThenReset() / elapsed / 1024 / 1024 * 8;
        reportHistogram = recorder.getIntervalHistogram(reportHistogram);
        log.info("Throughput produced: {}  msg/s --- {} Mbit/s --- Latency: mean: {} ms - med: {} - 95pct: {} - 99pct: {} - 99.9pct: {} - 99.99pct: {} - Max: {}", throughputFormat.format(rate), throughputFormat.format(throughput), dec.format(reportHistogram.getMean() / 1000.0), dec.format(reportHistogram.getValueAtPercentile(50) / 1000.0), dec.format(reportHistogram.getValueAtPercentile(95) / 1000.0), dec.format(reportHistogram.getValueAtPercentile(99) / 1000.0), dec.format(reportHistogram.getValueAtPercentile(99.9) / 1000.0), dec.format(reportHistogram.getValueAtPercentile(99.99) / 1000.0), dec.format(reportHistogram.getMaxValue() / 1000.0));
        histogramLogWriter.outputIntervalHistogram(reportHistogram);
        reportHistogram.reset();
        oldTime = now;
    }
    client.close();
}
Also used: Histogram(org.HdrHistogram.Histogram) ProducerConfiguration(com.yahoo.pulsar.client.api.ProducerConfiguration) Properties(java.util.Properties) DefaultThreadFactory(io.netty.util.concurrent.DefaultThreadFactory) JCommander(com.beust.jcommander.JCommander) ParameterException(com.beust.jcommander.ParameterException) PulsarClient(com.yahoo.pulsar.client.api.PulsarClient) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) NioEventLoopGroup(io.netty.channel.nio.NioEventLoopGroup) HistogramLogWriter(org.HdrHistogram.HistogramLogWriter) PrintStream(java.io.PrintStream) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) FileInputStream(java.io.FileInputStream) RateLimiter(com.google.common.util.concurrent.RateLimiter) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) EpollEventLoopGroup(io.netty.channel.epoll.EpollEventLoopGroup) EventLoopGroup(io.netty.channel.EventLoopGroup) Producer(com.yahoo.pulsar.client.api.Producer) FileOutputStream(java.io.FileOutputStream) Future(java.util.concurrent.Future) PulsarClientImpl(com.yahoo.pulsar.client.impl.PulsarClientImpl) ClientConfiguration(com.yahoo.pulsar.client.api.ClientConfiguration)
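
The interesting EventLoopGroup detail above is the transport selection: epoll on Linux, NIO elsewhere, both sized to the number of available processors and given a named thread factory. A minimal standalone sketch of that selection follows; the helper class name is an assumption, and SystemUtils is taken to be the Apache Commons Lang class used in the example.

import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.concurrent.DefaultThreadFactory;
import org.apache.commons.lang3.SystemUtils;

public final class EventLoopGroups {

    // Prefer the native epoll transport on Linux (requires netty-transport-native-epoll
    // on the classpath); fall back to the portable NIO transport everywhere else.
    public static EventLoopGroup create(String threadPrefix) {
        int threads = Runtime.getRuntime().availableProcessors();
        DefaultThreadFactory threadFactory = new DefaultThreadFactory(threadPrefix);
        if (SystemUtils.IS_OS_LINUX) {
            return new EpollEventLoopGroup(threads, threadFactory);
        }
        return new NioEventLoopGroup(threads, threadFactory);
    }

    private EventLoopGroups() {
    }
}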

Example 17 with EventLoopGroup

Use of io.netty.channel.EventLoopGroup in project jersey by jersey.

From the class NettyHttpContainerProvider, method createServer.

/**
     * Create and start Netty server.
     *
     * @param baseUri       base uri.
     * @param configuration Jersey configuration.
     * @param sslContext    Netty SSL context (can be null).
     * @param block         when {@code true}, this method will block until the server is stopped. When {@code false},
     *                      the execution will end immediately after the server is started.
     * @return Netty channel instance.
     * @throws ProcessingException when there is an issue with creating new container.
     */
public static Channel createServer(final URI baseUri, final ResourceConfig configuration, SslContext sslContext, final boolean block) throws ProcessingException {
    // Configure the server.
    final EventLoopGroup bossGroup = new NioEventLoopGroup(1);
    final EventLoopGroup workerGroup = new NioEventLoopGroup();
    final NettyHttpContainer container = new NettyHttpContainer(configuration);
    try {
        ServerBootstrap b = new ServerBootstrap();
        b.option(ChannelOption.SO_BACKLOG, 1024);
        b.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).childHandler(new JerseyServerInitializer(baseUri, sslContext, container));
        int port = getPort(baseUri);
        Channel ch = b.bind(port).sync().channel();
        ch.closeFuture().addListener(new GenericFutureListener<Future<? super Void>>() {

            @Override
            public void operationComplete(Future<? super Void> future) throws Exception {
                container.getApplicationHandler().onShutdown(container);
                bossGroup.shutdownGracefully();
                workerGroup.shutdownGracefully();
            }
        });
        if (block) {
            ch.closeFuture().sync();
            return ch;
        } else {
            return ch;
        }
    } catch (InterruptedException e) {
        throw new ProcessingException(e);
    }
}
Also used: NioServerSocketChannel(io.netty.channel.socket.nio.NioServerSocketChannel) Channel(io.netty.channel.Channel) ServerBootstrap(io.netty.bootstrap.ServerBootstrap) ProcessingException(javax.ws.rs.ProcessingException) EventLoopGroup(io.netty.channel.EventLoopGroup) NioEventLoopGroup(io.netty.channel.nio.NioEventLoopGroup) Future(io.netty.util.concurrent.Future)
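
A short usage sketch of the createServer helper above; the resource class, port and shutdown wiring are placeholders for illustration and are not part of the Jersey example.

import java.net.URI;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import io.netty.channel.Channel;
import org.glassfish.jersey.netty.httpserver.NettyHttpContainerProvider;
import org.glassfish.jersey.server.ResourceConfig;

public class NettyJerseyMain {

    // Hypothetical JAX-RS resource registered with Jersey for this sketch.
    @Path("/hello")
    public static class HelloResource {

        @GET
        public String hello() {
            return "hello";
        }
    }

    public static void main(String[] args) {
        ResourceConfig config = new ResourceConfig(HelloResource.class);

        // block = false: createServer returns immediately with the bound server channel.
        Channel server = NettyHttpContainerProvider.createServer(
                URI.create("http://localhost:8080/"), config, null, false);

        // Closing the channel fires the close-future listener shown above, which shuts
        // down the container and both EventLoopGroups gracefully.
        Runtime.getRuntime().addShutdownHook(
                new Thread(() -> server.close().syncUninterruptibly()));
    }
}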

Example 18 with EventLoopGroup

Use of io.netty.channel.EventLoopGroup in project netty by netty.

From the class Http2StreamChannelBootstrap, method connect.

/**
     * Used by the {@link Http2MultiplexCodec} to instantiate incoming/remotely-created streams.
     */
ChannelFuture connect(int streamId) {
    validateState();
    ParentChannelAndMultiplexCodec channelAndCodec0 = channelAndCodec;
    Channel parentChannel = channelAndCodec0.parentChannel;
    Http2MultiplexCodec multiplexCodec = channelAndCodec0.multiplexCodec;
    EventLoopGroup group0 = group;
    group0 = group0 == null ? parentChannel.eventLoop() : group0;
    return multiplexCodec.createStreamChannel(parentChannel, group0, handler, options, attributes, streamId);
}
Also used: EventLoopGroup(io.netty.channel.EventLoopGroup) Channel(io.netty.channel.Channel)
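
The only EventLoopGroup logic here is the fallback: when no group was configured on the bootstrap, the new stream channel reuses its parent channel's event loop. A generic sketch of that pattern, not the Http2StreamChannelBootstrap API itself:

import io.netty.channel.Channel;
import io.netty.channel.EventLoopGroup;

final class EventLoopSelection {

    // Prefer an explicitly configured group; otherwise stay on the parent channel's
    // event loop so the child channel shares the parent's thread and no cross-thread
    // handoff is needed. Channel.eventLoop() returns an EventLoop, which is itself an
    // EventLoopGroup of size one.
    static EventLoopGroup resolve(EventLoopGroup configured, Channel parent) {
        return configured != null ? configured : parent.eventLoop();
    }

    private EventLoopSelection() {
    }
}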

Example 19 with EventLoopGroup

Use of io.netty.channel.EventLoopGroup in project netty by netty.

From the class SslHandlerTest, method testCloseNotify.

private static void testCloseNotify(SslProvider provider, final long closeNotifyReadTimeout, final boolean timeout) throws Exception {
    SelfSignedCertificate ssc = new SelfSignedCertificate();
    final SslContext sslServerCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()).sslProvider(provider).build();
    final SslContext sslClientCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).sslProvider(provider).build();
    EventLoopGroup group = new NioEventLoopGroup();
    Channel sc = null;
    Channel cc = null;
    try {
        final Promise<Channel> clientPromise = group.next().newPromise();
        final Promise<Channel> serverPromise = group.next().newPromise();
        sc = new ServerBootstrap().group(group).channel(NioServerSocketChannel.class).childHandler(new ChannelInitializer<Channel>() {

            @Override
            protected void initChannel(Channel ch) throws Exception {
                SslHandler handler = sslServerCtx.newHandler(ch.alloc());
                handler.setCloseNotifyReadTimeoutMillis(closeNotifyReadTimeout);
                handler.sslCloseFuture().addListener(new PromiseNotifier<Channel, Future<Channel>>(serverPromise));
                handler.handshakeFuture().addListener(new FutureListener<Channel>() {

                    @Override
                    public void operationComplete(Future<Channel> future) {
                        if (!future.isSuccess()) {
                            // Something bad happened during the handshake; fail the promise.
                            serverPromise.tryFailure(future.cause());
                        }
                    }
                });
                ch.pipeline().addLast(handler);
            }
        }).bind(new InetSocketAddress(0)).syncUninterruptibly().channel();
        cc = new Bootstrap().group(group).channel(NioSocketChannel.class).handler(new ChannelInitializer<Channel>() {

            @Override
            protected void initChannel(Channel ch) throws Exception {
                final AtomicBoolean closeSent = new AtomicBoolean();
                if (timeout) {
                    ch.pipeline().addFirst(new ChannelInboundHandlerAdapter() {

                        @Override
                        public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
                            if (closeSent.get()) {
                                // Drop data on the floor so we will get a timeout while waiting for the
                                // close_notify.
                                ReferenceCountUtil.release(msg);
                            } else {
                                super.channelRead(ctx, msg);
                            }
                        }
                    });
                }
                SslHandler handler = sslClientCtx.newHandler(ch.alloc());
                handler.setCloseNotifyReadTimeoutMillis(closeNotifyReadTimeout);
                handler.sslCloseFuture().addListener(new PromiseNotifier<Channel, Future<Channel>>(clientPromise));
                handler.handshakeFuture().addListener(new FutureListener<Channel>() {

                    @Override
                    public void operationComplete(Future<Channel> future) {
                        if (future.isSuccess()) {
                            closeSent.compareAndSet(false, true);
                            future.getNow().close();
                        } else {
                            // Something bad happened during the handshake; fail the promise.
                            clientPromise.tryFailure(future.cause());
                        }
                    }
                });
                ch.pipeline().addLast(handler);
            }
        }).connect(sc.localAddress()).syncUninterruptibly().channel();
        serverPromise.awaitUninterruptibly();
        clientPromise.awaitUninterruptibly();
        // The server always receives the close_notify, as the client triggers the close sequence.
        assertTrue(serverPromise.isSuccess());
        // Depending on if we wait for the response or not the promise will be failed or not.
        if (closeNotifyReadTimeout > 0 && !timeout) {
            assertTrue(clientPromise.isSuccess());
        } else {
            assertFalse(clientPromise.isSuccess());
        }
    } finally {
        if (cc != null) {
            cc.close().syncUninterruptibly();
        }
        if (sc != null) {
            sc.close().syncUninterruptibly();
        }
        group.shutdownGracefully();
        ReferenceCountUtil.release(sslServerCtx);
        ReferenceCountUtil.release(sslClientCtx);
    }
}
Also used: FutureListener(io.netty.util.concurrent.FutureListener) ChannelFutureListener(io.netty.channel.ChannelFutureListener) SelfSignedCertificate(io.netty.handler.ssl.util.SelfSignedCertificate) NioServerSocketChannel(io.netty.channel.socket.nio.NioServerSocketChannel) InetSocketAddress(java.net.InetSocketAddress) NioSocketChannel(io.netty.channel.socket.nio.NioSocketChannel) EmbeddedChannel(io.netty.channel.embedded.EmbeddedChannel) Channel(io.netty.channel.Channel) ChannelHandlerContext(io.netty.channel.ChannelHandlerContext) PromiseNotifier(io.netty.util.concurrent.PromiseNotifier) ServerBootstrap(io.netty.bootstrap.ServerBootstrap) IllegalReferenceCountException(io.netty.util.IllegalReferenceCountException) CodecException(io.netty.handler.codec.CodecException) SSLProtocolException(javax.net.ssl.SSLProtocolException) DecoderException(io.netty.handler.codec.DecoderException) SSLException(javax.net.ssl.SSLException) ClosedChannelException(java.nio.channels.ClosedChannelException) CertificateException(java.security.cert.CertificateException) ExecutionException(java.util.concurrent.ExecutionException) UnsupportedMessageTypeException(io.netty.handler.codec.UnsupportedMessageTypeException) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) NioEventLoopGroup(io.netty.channel.nio.NioEventLoopGroup) EventLoopGroup(io.netty.channel.EventLoopGroup) ChannelFuture(io.netty.channel.ChannelFuture) Future(io.netty.util.concurrent.Future) Bootstrap(io.netty.bootstrap.Bootstrap) ChannelInboundHandlerAdapter(io.netty.channel.ChannelInboundHandlerAdapter)
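
The helper is parameterized by SSL provider, close_notify read timeout and whether inbound data is dropped after the handshake. The @Test wrappers that drive it are not part of this excerpt, so the calls below are only an illustration of how the parameters map to the asserted promise outcomes.

// Hypothetical wrappers inside SslHandlerTest; names and provider choice are assumptions.
@Test
public void closeNotifyWithoutReadTimeout() throws Exception {
    // Read timeout of 0: the test expects the client promise to fail.
    testCloseNotify(SslProvider.JDK, 0, false);
}

@Test
public void closeNotifyReceivedInTime() throws Exception {
    // Positive timeout and no dropped data: close_notify arrives, client promise succeeds.
    testCloseNotify(SslProvider.JDK, 5000, false);
}

@Test
public void closeNotifyReadTimesOut() throws Exception {
    // Positive timeout, but the extra inbound handler drops data after the handshake,
    // so the read times out and the client promise is expected to fail.
    testCloseNotify(SslProvider.JDK, 1000, true);
}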

Example 20 with EventLoopGroup

Use of io.netty.channel.EventLoopGroup in project netty by netty.

From the class SniHandlerTest, method testSniWithApnHandler.

@Test
public void testSniWithApnHandler() throws Exception {
    SslContext nettyContext = makeSslContext(provider, true);
    SslContext sniContext = makeSslContext(provider, true);
    final SslContext clientContext = makeSslClientContext(provider, true);
    try {
        final CountDownLatch serverApnDoneLatch = new CountDownLatch(1);
        final CountDownLatch clientApnDoneLatch = new CountDownLatch(1);
        final DomainNameMapping<SslContext> mapping = new DomainNameMappingBuilder<SslContext>(nettyContext).add("*.netty.io", nettyContext).add("sni.fake.site", sniContext).build();
        final SniHandler handler = new SniHandler(mapping);
        EventLoopGroup group = new NioEventLoopGroup(2);
        Channel serverChannel = null;
        Channel clientChannel = null;
        try {
            ServerBootstrap sb = new ServerBootstrap();
            sb.group(group);
            sb.channel(NioServerSocketChannel.class);
            sb.childHandler(new ChannelInitializer<Channel>() {

                @Override
                protected void initChannel(Channel ch) throws Exception {
                    ChannelPipeline p = ch.pipeline();
                    // Server side SNI.
                    p.addLast(handler);
                    // Catch the notification event that APN has completed successfully.
                    p.addLast(new ApplicationProtocolNegotiationHandler("foo") {

                        @Override
                        protected void configurePipeline(ChannelHandlerContext ctx, String protocol) {
                            serverApnDoneLatch.countDown();
                        }
                    });
                }
            });
            Bootstrap cb = new Bootstrap();
            cb.group(group);
            cb.channel(NioSocketChannel.class);
            cb.handler(new ChannelInitializer<Channel>() {

                @Override
                protected void initChannel(Channel ch) throws Exception {
                    ch.pipeline().addLast(new SslHandler(clientContext.newEngine(ch.alloc(), "sni.fake.site", -1)));
                    // Catch the notification event that APN has completed successfully.
                    ch.pipeline().addLast(new ApplicationProtocolNegotiationHandler("foo") {

                        @Override
                        protected void configurePipeline(ChannelHandlerContext ctx, String protocol) {
                            clientApnDoneLatch.countDown();
                        }
                    });
                }
            });
            serverChannel = sb.bind(new InetSocketAddress(0)).sync().channel();
            ChannelFuture ccf = cb.connect(serverChannel.localAddress());
            assertTrue(ccf.awaitUninterruptibly().isSuccess());
            clientChannel = ccf.channel();
            assertTrue(serverApnDoneLatch.await(5, TimeUnit.SECONDS));
            assertTrue(clientApnDoneLatch.await(5, TimeUnit.SECONDS));
            assertThat(handler.hostname(), is("sni.fake.site"));
            assertThat(handler.sslContext(), is(sniContext));
        } finally {
            if (serverChannel != null) {
                serverChannel.close().sync();
            }
            if (clientChannel != null) {
                clientChannel.close().sync();
            }
            group.shutdownGracefully(0, 0, TimeUnit.MICROSECONDS);
        }
    } finally {
        releaseAll(clientContext, nettyContext, sniContext);
    }
}
Also used: ChannelFuture(io.netty.channel.ChannelFuture) InetSocketAddress(java.net.InetSocketAddress) NioSocketChannel(io.netty.channel.socket.nio.NioSocketChannel) LocalServerChannel(io.netty.channel.local.LocalServerChannel) LocalChannel(io.netty.channel.local.LocalChannel) EmbeddedChannel(io.netty.channel.embedded.EmbeddedChannel) NioServerSocketChannel(io.netty.channel.socket.nio.NioServerSocketChannel) Channel(io.netty.channel.Channel) ChannelHandlerContext(io.netty.channel.ChannelHandlerContext) CountDownLatch(java.util.concurrent.CountDownLatch) ServerBootstrap(io.netty.bootstrap.ServerBootstrap) DecoderException(io.netty.handler.codec.DecoderException) ChannelPipeline(io.netty.channel.ChannelPipeline) EventLoopGroup(io.netty.channel.EventLoopGroup) DefaultEventLoopGroup(io.netty.channel.DefaultEventLoopGroup) NioEventLoopGroup(io.netty.channel.nio.NioEventLoopGroup) Bootstrap(io.netty.bootstrap.Bootstrap) Test(org.junit.Test)
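
The shared NioEventLoopGroup drives both the ServerBootstrap and the client Bootstrap; the SNI-specific piece is the hostname-to-SslContext mapping handed to the SniHandler. A minimal sketch of that wiring follows; the three SslContext parameters stand in for contexts built elsewhere (for example with SslContextBuilder.forServer(...)).

import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.ssl.SniHandler;
import io.netty.handler.ssl.SslContext;
import io.netty.util.DomainNameMapping;
import io.netty.util.DomainNameMappingBuilder;

final class SniPipelineSetup {

    static ChannelInitializer<SocketChannel> sniInitializer(
            SslContext defaultCtx, SslContext nettyIoCtx, SslContext fakeSiteCtx) {
        DomainNameMapping<SslContext> mapping = new DomainNameMappingBuilder<SslContext>(defaultCtx)
                // Wildcard entry: any subdomain of netty.io is served with nettyIoCtx.
                .add("*.netty.io", nettyIoCtx)
                // Exact entry: the SNI host name used by the test client above.
                .add("sni.fake.site", fakeSiteCtx)
                .build();
        return new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) {
                // SniHandler inspects the ClientHello, selects the matching SslContext and
                // replaces itself with the corresponding SslHandler.
                ch.pipeline().addFirst(new SniHandler(mapping));
            }
        };
    }

    private SniPipelineSetup() {
    }
}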

Aggregations

EventLoopGroup (io.netty.channel.EventLoopGroup): 352
NioEventLoopGroup (io.netty.channel.nio.NioEventLoopGroup): 259
ServerBootstrap (io.netty.bootstrap.ServerBootstrap): 155
Bootstrap (io.netty.bootstrap.Bootstrap): 131
Channel (io.netty.channel.Channel): 127
ChannelFuture (io.netty.channel.ChannelFuture): 117
NioServerSocketChannel (io.netty.channel.socket.nio.NioServerSocketChannel): 103
NioSocketChannel (io.netty.channel.socket.nio.NioSocketChannel): 88
SocketChannel (io.netty.channel.socket.SocketChannel): 82
InetSocketAddress (java.net.InetSocketAddress): 69
Test (org.junit.jupiter.api.Test): 67
DefaultEventLoopGroup (io.netty.channel.DefaultEventLoopGroup): 60
LoggingHandler (io.netty.handler.logging.LoggingHandler): 53
ChannelInboundHandlerAdapter (io.netty.channel.ChannelInboundHandlerAdapter): 50
ChannelHandlerContext (io.netty.channel.ChannelHandlerContext): 49
SslContext (io.netty.handler.ssl.SslContext): 48
ChannelPipeline (io.netty.channel.ChannelPipeline): 47
SelfSignedCertificate (io.netty.handler.ssl.util.SelfSignedCertificate): 45
LocalServerChannel (io.netty.channel.local.LocalServerChannel): 42
LocalChannel (io.netty.channel.local.LocalChannel): 40
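
The counts above reflect the most common pairing in these examples: one or two NioEventLoopGroups driving a ServerBootstrap or Bootstrap, a logging or channel-initializer handler, and a graceful shutdown at the end. A minimal generic sketch of that recurring pattern; the port and the empty child initializer are placeholders.

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;

public final class ServerSkeleton {

    public static void main(String[] args) throws InterruptedException {
        // One group accepts connections, the other handles I/O on the accepted channels.
        EventLoopGroup bossGroup = new NioEventLoopGroup(1);
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            ServerBootstrap b = new ServerBootstrap()
                    .group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class)
                    .handler(new LoggingHandler(LogLevel.INFO))
                    .childHandler(new ChannelInitializer<SocketChannel>() {
                        @Override
                        protected void initChannel(SocketChannel ch) {
                            // Placeholder: real servers add codecs and business handlers here.
                        }
                    });
            ChannelFuture f = b.bind(8080).sync();  // port is a placeholder
            f.channel().closeFuture().sync();       // block until the server channel closes
        } finally {
            // Release the threads and sockets held by both event loops.
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
        }
    }
}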