Use of io.netty.channel.local.LocalAddress in project grpc-java by grpc.
Class Http2NettyLocalChannelTest, method createChannelBuilder:
@Override
protected NettyChannelBuilder createChannelBuilder() {
NettyChannelBuilder builder = NettyChannelBuilder.forAddress(new LocalAddress("in-process-1"))
    .negotiationType(NegotiationType.PLAINTEXT)
    .channelType(LocalChannel.class)
    .eventLoopGroup(eventLoopGroup)
    .flowControlWindow(AbstractInteropTest.TEST_FLOW_CONTROL_WINDOW)
    .maxInboundMessageSize(AbstractInteropTest.MAX_MESSAGE_SIZE);
// Disable the default census stats interceptor, use testing interceptor instead.
InternalNettyChannelBuilder.setStatsEnabled(builder, false);
return builder.intercept(createCensusStatsClientInterceptor());
}
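This client-side builder only connects if a server is bound to the same LocalAddress with a LocalServerChannel. A rough sketch of the matching server-side configuration, assuming the same shared eventLoopGroup field and the constants used above (not taken verbatim from the test class):

// Hypothetical server-side counterpart for the LocalAddress-based channel above.
NettyServerBuilder serverBuilder = NettyServerBuilder.forAddress(
    new LocalAddress("in-process-1"), InsecureServerCredentials.create());
// LocalChannel clients need a LocalServerChannel-based server on the same address.
serverBuilder.channelType(LocalServerChannel.class);
serverBuilder.bossEventLoopGroup(eventLoopGroup);
serverBuilder.workerEventLoopGroup(eventLoopGroup);
serverBuilder.flowControlWindow(AbstractInteropTest.TEST_FLOW_CONTROL_WINDOW);
serverBuilder.maxInboundMessageSize(AbstractInteropTest.MAX_MESSAGE_SIZE);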
Use of io.netty.channel.local.LocalAddress in project grpc-java by grpc.
Class AbstractBenchmark, method setup:
/**
* Initialize the environment for the executor.
*/
public void setup(ExecutorType clientExecutor, ExecutorType serverExecutor,
    MessageSize requestSize, MessageSize responseSize, FlowWindowSize windowSize,
    ChannelType channelType, int maxConcurrentStreams, int channelCount) throws Exception {
ServerCredentials serverCreds = InsecureServerCredentials.create();
NettyServerBuilder serverBuilder;
NettyChannelBuilder channelBuilder;
if (channelType == ChannelType.LOCAL) {
LocalAddress address = new LocalAddress("netty-e2e-benchmark");
serverBuilder = NettyServerBuilder.forAddress(address, serverCreds);
serverBuilder.channelType(LocalServerChannel.class);
channelBuilder = NettyChannelBuilder.forAddress(address);
channelBuilder.channelType(LocalChannel.class);
} else {
ServerSocket sock = new ServerSocket();
// Pick a port using an ephemeral socket.
sock.bind(new InetSocketAddress(BENCHMARK_ADDR, 0));
SocketAddress address = sock.getLocalSocketAddress();
sock.close();
serverBuilder = NettyServerBuilder.forAddress(address, serverCreds)
    .channelType(NioServerSocketChannel.class);
channelBuilder = NettyChannelBuilder.forAddress(address)
    .channelType(NioSocketChannel.class);
}
if (serverExecutor == ExecutorType.DIRECT) {
serverBuilder.directExecutor();
}
if (clientExecutor == ExecutorType.DIRECT) {
channelBuilder.directExecutor();
}
// Always use a different worker group from the client.
ThreadFactory serverThreadFactory = new DefaultThreadFactory("STF pool", true);
serverBuilder.workerEventLoopGroup(new NioEventLoopGroup(0, serverThreadFactory));
serverBuilder.bossEventLoopGroup(new NioEventLoopGroup(1, serverThreadFactory));
// Always set the connection and stream window sizes to the same value
serverBuilder.flowControlWindow(windowSize.bytes());
channelBuilder.flowControlWindow(windowSize.bytes());
channelBuilder.negotiationType(NegotiationType.PLAINTEXT);
serverBuilder.maxConcurrentCallsPerConnection(maxConcurrentStreams);
// Create buffers of the desired size for requests and responses.
PooledByteBufAllocator alloc = PooledByteBufAllocator.DEFAULT;
// Use a heap buffer for now, since MessageFramer doesn't know how to directly convert this
// into a WritableBuffer
// TODO(carl-mastrangelo): convert this into a regular buffer() call. See
// https://github.com/grpc/grpc-java/issues/2062#issuecomment-234646216
request = alloc.heapBuffer(requestSize.bytes());
request.writerIndex(request.capacity() - 1);
response = alloc.heapBuffer(responseSize.bytes());
response.writerIndex(response.capacity() - 1);
// Simple method that sends and receives a Netty ByteBuf
unaryMethod = MethodDescriptor.<ByteBuf, ByteBuf>newBuilder()
    .setType(MethodType.UNARY)
    .setFullMethodName("benchmark/unary")
    .setRequestMarshaller(new ByteBufOutputMarshaller())
    .setResponseMarshaller(new ByteBufOutputMarshaller())
    .build();
pingPongMethod = unaryMethod.toBuilder()
    .setType(MethodType.BIDI_STREAMING).setFullMethodName("benchmark/pingPong").build();
flowControlledStreaming = pingPongMethod.toBuilder()
    .setFullMethodName("benchmark/flowControlledStreaming").build();
// Server implementation of unary & streaming methods
serverBuilder.addService(ServerServiceDefinition.builder(
    new ServiceDescriptor("benchmark", unaryMethod, pingPongMethod, flowControlledStreaming))
    .addMethod(unaryMethod, new ServerCallHandler<ByteBuf, ByteBuf>() {
@Override
public ServerCall.Listener<ByteBuf> startCall(final ServerCall<ByteBuf, ByteBuf> call, Metadata headers) {
call.sendHeaders(new Metadata());
call.request(1);
return new ServerCall.Listener<ByteBuf>() {
@Override
public void onMessage(ByteBuf message) {
// Ignore the request payload: release it and reply with the canned response.
message.release();
call.sendMessage(response.slice());
}
@Override
public void onHalfClose() {
call.close(Status.OK, new Metadata());
}
@Override
public void onCancel() {
}
@Override
public void onComplete() {
}
};
}
}).addMethod(pingPongMethod, new ServerCallHandler<ByteBuf, ByteBuf>() {
@Override
public ServerCall.Listener<ByteBuf> startCall(final ServerCall<ByteBuf, ByteBuf> call, Metadata headers) {
call.sendHeaders(new Metadata());
call.request(1);
return new ServerCall.Listener<ByteBuf>() {
@Override
public void onMessage(ByteBuf message) {
message.release();
call.sendMessage(response.slice());
// Request next message
call.request(1);
}
@Override
public void onHalfClose() {
call.close(Status.OK, new Metadata());
}
@Override
public void onCancel() {
}
@Override
public void onComplete() {
}
};
}
}).addMethod(flowControlledStreaming, new ServerCallHandler<ByteBuf, ByteBuf>() {
@Override
public ServerCall.Listener<ByteBuf> startCall(final ServerCall<ByteBuf, ByteBuf> call, Metadata headers) {
call.sendHeaders(new Metadata());
call.request(1);
return new ServerCall.Listener<ByteBuf>() {
@Override
public void onMessage(ByteBuf message) {
message.release();
while (call.isReady()) {
call.sendMessage(response.slice());
}
// Request next message
call.request(1);
}
@Override
public void onHalfClose() {
call.close(Status.OK, new Metadata());
}
@Override
public void onCancel() {
}
@Override
public void onComplete() {
}
@Override
public void onReady() {
while (call.isReady()) {
call.sendMessage(response.slice());
}
}
};
}
}).build());
// Build and start the clients and servers
server = serverBuilder.build();
server.start();
channels = new ManagedChannel[channelCount];
ThreadFactory clientThreadFactory = new DefaultThreadFactory("CTF pool", true);
for (int i = 0; i < channelCount; i++) {
// Use a dedicated event-loop for each channel
channels[i] = channelBuilder.eventLoopGroup(new NioEventLoopGroup(1, clientThreadFactory)).build();
}
}
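Once setup() has run, the registered benchmark methods can be exercised directly with a ClientCall over one of the channels. A minimal usage sketch, assuming the fields populated above (this driver code is illustrative, not part of the class):

// Send one request on the unary method and release the canned response when it arrives.
ClientCall<ByteBuf, ByteBuf> call = channels[0].newCall(unaryMethod, CallOptions.DEFAULT);
call.start(new ClientCall.Listener<ByteBuf>() {
  @Override
  public void onMessage(ByteBuf message) {
    message.release();
  }
}, new Metadata());
call.request(1);
call.sendMessage(request.slice());
call.halfClose();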
Use of io.netty.channel.local.LocalAddress in project grpc-java by grpc.
Class ProtocolNegotiatorsTest, method httpProxy_completes:
@Test
public void httpProxy_completes() throws Exception {
DefaultEventLoopGroup elg = new DefaultEventLoopGroup(1);
// ProxyHandler is incompatible with EmbeddedChannel because when channelRegistered() is called
// the channel is already active.
LocalAddress proxy = new LocalAddress("httpProxy_completes");
SocketAddress host = InetSocketAddress.createUnresolved("specialHost", 314);
ChannelInboundHandler mockHandler = mock(ChannelInboundHandler.class);
Channel serverChannel = new ServerBootstrap().group(elg).channel(LocalServerChannel.class)
    .childHandler(mockHandler).bind(proxy).sync().channel();
ProtocolNegotiator nego = ProtocolNegotiators.httpProxy(proxy, null, null, ProtocolNegotiators.plaintext());
// Normally NettyClientTransport adds a WBAEH (WriteBufferingAndExceptionHandler), which
// kick-starts the protocol negotiation; KickStartHandler mocks that behavior here.
ChannelHandler handler = new KickStartHandler(nego.newHandler(FakeGrpcHttp2ConnectionHandler.noopHandler()));
Channel channel = new Bootstrap().group(elg).channel(LocalChannel.class)
    .handler(handler).register().sync().channel();
pipeline = channel.pipeline();
// Wait for initialization to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
channel.connect(host).sync();
serverChannel.close();
ArgumentCaptor<ChannelHandlerContext> contextCaptor = ArgumentCaptor.forClass(ChannelHandlerContext.class);
Mockito.verify(mockHandler).channelActive(contextCaptor.capture());
ChannelHandlerContext serverContext = contextCaptor.getValue();
final String golden = "isThisThingOn?";
ChannelFuture negotiationFuture = channel.writeAndFlush(bb(golden, channel));
// Wait for sending initial request to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
ArgumentCaptor<Object> objectCaptor = ArgumentCaptor.forClass(Object.class);
Mockito.verify(mockHandler).channelRead(ArgumentMatchers.<ChannelHandlerContext>any(), objectCaptor.capture());
ByteBuf b = (ByteBuf) objectCaptor.getValue();
String request = b.toString(UTF_8);
b.release();
assertTrue("No trailing newline: " + request, request.endsWith("\r\n\r\n"));
assertTrue("No CONNECT: " + request, request.startsWith("CONNECT specialHost:314 "));
assertTrue("No host header: " + request, request.contains("host: specialHost:314"));
assertFalse(negotiationFuture.isDone());
serverContext.writeAndFlush(bb("HTTP/1.1 200 OK\r\n\r\n", serverContext.channel())).sync();
negotiationFuture.sync();
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
objectCaptor = ArgumentCaptor.forClass(Object.class);
Mockito.verify(mockHandler, times(2)).channelRead(ArgumentMatchers.<ChannelHandlerContext>any(), objectCaptor.capture());
b = (ByteBuf) objectCaptor.getAllValues().get(1);
// If we were using the real grpcHandler, this would have been the HTTP/2 preface
String preface = b.toString(UTF_8);
b.release();
assertEquals(golden, preface);
channel.close();
}
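The bb(...) helper and NOOP_RUNNABLE used above are defined elsewhere in ProtocolNegotiatorsTest; plausible definitions (assumed here, not copied from the source) would be:

// Assumed helpers: encode a String into a ByteBuf from the channel's allocator, and a
// do-nothing task submitted only to wait for earlier event-loop work to finish.
private static ByteBuf bb(String s, Channel c) {
  return ByteBufUtil.writeUtf8(c.alloc(), s);
}

private static final Runnable NOOP_RUNNABLE = new Runnable() {
  @Override
  public void run() {}
};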
Use of io.netty.channel.local.LocalAddress in project grpc-java by grpc.
Class ProtocolNegotiatorsTest, method waitUntilActiveHandler_channelActive:
@Test
public void waitUntilActiveHandler_channelActive() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
WaitUntilActiveHandler handler = new WaitUntilActiveHandler(new ChannelHandlerAdapter() {
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
assertTrue(ctx.channel().isActive());
latch.countDown();
super.handlerAdded(ctx);
}
}, noopLogger);
LocalAddress addr = new LocalAddress("local");
ChannelFuture cf = new Bootstrap().channel(LocalChannel.class).handler(handler).group(group).register();
chan = cf.channel();
ChannelFuture sf = new ServerBootstrap().channel(LocalServerChannel.class)
    .childHandler(new ChannelHandlerAdapter() {}).group(group).bind(addr);
server = sf.channel();
sf.sync();
assertEquals(1, latch.getCount());
chan.connect(addr).sync();
chan.pipeline().fireUserEventTriggered(ProtocolNegotiationEvent.DEFAULT);
assertTrue(latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS));
assertNull(chan.pipeline().context(WaitUntilActiveHandler.class));
}
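This test leans on group, chan, server, and noopLogger fields declared elsewhere in the class; a minimal sketch of such fixtures, with assumed bodies, could look like:

// Assumed fixtures: a local event loop group, channels closed after each test,
// and a ChannelLogger that discards everything.
private final DefaultEventLoopGroup group = new DefaultEventLoopGroup(1);
private Channel chan;
private Channel server;
private final ChannelLogger noopLogger = new ChannelLogger() {
  @Override
  public void log(ChannelLogLevel level, String message) {}

  @Override
  public void log(ChannelLogLevel level, String messageFormat, Object... args) {}
};

@After
public void tearDown() {
  if (chan != null) {
    chan.close();
  }
  if (server != null) {
    server.close();
  }
  group.shutdownGracefully();
}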
Use of io.netty.channel.local.LocalAddress in project grpc-java by grpc.
Class ProtocolNegotiatorsTest, method httpProxy_500:
@Test
public void httpProxy_500() throws Exception {
DefaultEventLoopGroup elg = new DefaultEventLoopGroup(1);
// ProxyHandler is incompatible with EmbeddedChannel because when channelRegistered() is called
// the channel is already active.
LocalAddress proxy = new LocalAddress("httpProxy_500");
SocketAddress host = InetSocketAddress.createUnresolved("specialHost", 314);
ChannelInboundHandler mockHandler = mock(ChannelInboundHandler.class);
Channel serverChannel = new ServerBootstrap().group(elg).channel(LocalServerChannel.class)
    .childHandler(mockHandler).bind(proxy).sync().channel();
ProtocolNegotiator nego = ProtocolNegotiators.httpProxy(proxy, null, null, ProtocolNegotiators.plaintext());
// Normally NettyClientTransport adds a WBAEH (WriteBufferingAndExceptionHandler), which
// kick-starts the protocol negotiation; KickStartHandler mocks that behavior here.
ChannelHandler handler = new KickStartHandler(nego.newHandler(FakeGrpcHttp2ConnectionHandler.noopHandler()));
Channel channel = new Bootstrap().group(elg).channel(LocalChannel.class)
    .handler(handler).register().sync().channel();
pipeline = channel.pipeline();
// Wait for initialization to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
channel.connect(host).sync();
serverChannel.close();
ArgumentCaptor<ChannelHandlerContext> contextCaptor = ArgumentCaptor.forClass(ChannelHandlerContext.class);
Mockito.verify(mockHandler).channelActive(contextCaptor.capture());
ChannelHandlerContext serverContext = contextCaptor.getValue();
final String golden = "isThisThingOn?";
ChannelFuture negotiationFuture = channel.writeAndFlush(bb(golden, channel));
// Wait for sending initial request to complete
channel.eventLoop().submit(NOOP_RUNNABLE).sync();
ArgumentCaptor<Object> objectCaptor = ArgumentCaptor.forClass(Object.class);
Mockito.verify(mockHandler).channelRead(any(ChannelHandlerContext.class), objectCaptor.capture());
ByteBuf request = (ByteBuf) objectCaptor.getValue();
request.release();
assertFalse(negotiationFuture.isDone());
String response = "HTTP/1.1 500 OMG\r\nContent-Length: 4\r\n\r\noops";
serverContext.writeAndFlush(bb(response, serverContext.channel())).sync();
thrown.expect(ProxyConnectException.class);
try {
negotiationFuture.sync();
} finally {
channel.close();
}
}
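The thrown field used to expect the ProxyConnectException is a standard JUnit 4 rule; its declaration elsewhere in the test class presumably looks like:

@Rule
public final ExpectedException thrown = ExpectedException.none();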