Use of org.apache.hadoop.ipc.TestRpcBase.TestRpcService in project hadoop by apache.
From the class TestNMAuditLogger, method testNMAuditLoggerWithIP.
/**
 * Test {@link NMAuditLogger} with IP set.
 */
@Test
public void testNMAuditLoggerWithIP() throws Exception {
  Configuration conf = new Configuration();
  RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class);
  // Create server side implementation
  MyTestRPCServer serverImpl = new MyTestRPCServer();
  BlockingService service =
      TestRpcServiceProtos.TestProtobufRpcProto.newReflectiveBlockingService(serverImpl);
  // start the IPC server
  Server server = new RPC.Builder(conf)
      .setProtocol(TestRpcBase.TestRpcService.class)
      .setInstance(service)
      .setBindAddress("0.0.0.0")
      .setPort(0)
      .setNumHandlers(5)
      .setVerbose(true)
      .build();
  server.start();
  InetSocketAddress addr = NetUtils.getConnectAddress(server);
  // Make a client connection and test the audit log
  TestRpcService proxy =
      RPC.getProxy(TestRpcService.class, TestProtocol.versionID, addr, conf);
  // Start the testcase
  TestProtos.EmptyRequestProto pingRequest =
      TestProtos.EmptyRequestProto.newBuilder().build();
  proxy.ping(null, pingRequest);
  server.stop();
  RPC.stopProxy(proxy);
}
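The MyTestRPCServer referenced above is not part of this listing. The sketch below shows one way such a handler could look; it is not the project's actual inner class (the real one also verifies the success and failure audit log formats with the IP included). It assumes TestRpcBase.PBServerImpl, the stock TestRpcService implementation in hadoop-common's test jar, plus the usual imports (org.apache.hadoop.ipc.Server, TestProtos, JUnit's Assert, and protobuf's RpcController and ServiceException).

  // Sketch only: override ping() so the check runs inside the IPC handler
  // thread, where Server.getRemoteIp() returns the caller's address. That is
  // the address NMAuditLogger records when an audit entry is written with an IP.
  private static class MyTestRPCServer extends TestRpcBase.PBServerImpl {
    @Override
    public TestProtos.EmptyResponseProto ping(RpcController controller,
        TestProtos.EmptyRequestProto request) throws ServiceException {
      // Hypothetical check; the real test validates the full audit log format.
      Assert.assertNotNull("remote IP should be visible to the audit logger",
          Server.getRemoteIp());
      return TestProtos.EmptyResponseProto.newBuilder().build();
    }
  }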
Use of org.apache.hadoop.ipc.TestRpcBase.TestRpcService in project hadoop by apache.
From the class TestReuseRpcConnections, method verifyRetryPolicyReuseConnections.
private void verifyRetryPolicyReuseConnections(
    final RetryPolicy retryPolicy1,
    final RetryPolicy retryPolicy2,
    final RetryPolicy anotherRetryPolicy) throws Exception {
  final Server server = setupTestServer(conf, 2);
  final Configuration newConf = new Configuration(conf);
  newConf.set(
      CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
      "");
  Client client = null;
  TestRpcService proxy1 = null;
  TestRpcService proxy2 = null;
  TestRpcService proxy3 = null;
  try {
    proxy1 = getClient(addr, newConf, retryPolicy1);
    proxy1.ping(null, newEmptyRequest());
    client = ProtobufRpcEngine.getClient(newConf);
    final Set<ConnectionId> conns = client.getConnectionIds();
    assertEquals("number of connections in cache is wrong", 1, conns.size());
    /*
     * another equivalent retry policy, reuse connection
     */
    proxy2 = getClient(addr, newConf, retryPolicy2);
    proxy2.ping(null, newEmptyRequest());
    assertEquals("number of connections in cache is wrong", 1, conns.size());
    /*
     * different retry policy, create a new connection
     */
    proxy3 = getClient(addr, newConf, anotherRetryPolicy);
    proxy3.ping(null, newEmptyRequest());
    assertEquals("number of connections in cache is wrong", 2, conns.size());
  } finally {
    server.stop();
    // this is dirty, but clear out connection cache for next run
    if (client != null) {
      client.getConnectionIds().clear();
    }
    if (proxy1 != null) {
      RPC.stopProxy(proxy1);
    }
    if (proxy2 != null) {
      RPC.stopProxy(proxy2);
    }
    if (proxy3 != null) {
      RPC.stopProxy(proxy3);
    }
  }
}
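For context, a caller of this helper could look like the sketch below. It assumes the method lives in the same test class (so it can reach the private helper and the addr/conf fields), and that org.apache.hadoop.io.retry.RetryPolicies, java.util.concurrent.TimeUnit, and JUnit 4 are imported. The concrete policies are illustrative only: a connection is reused only when the retry policy class defines equals/hashCode so that equivalent policies produce equal ConnectionIds, which is exactly what this helper asserts.

  // Hypothetical test method, not taken from the project: two separately
  // constructed but equivalent fixed-sleep policies should share one cached
  // connection, while RETRY_FOREVER differs and should force a second one.
  @Test(timeout = 60000)
  public void testEquivalentRetryPoliciesReuseConnection() throws Exception {
    final RetryPolicy rp1 = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
        5, 10, TimeUnit.MILLISECONDS);
    final RetryPolicy rp2 = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
        5, 10, TimeUnit.MILLISECONDS);
    verifyRetryPolicyReuseConnections(rp1, rp2, RetryPolicies.RETRY_FOREVER);
  }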
Use of org.apache.hadoop.ipc.TestRpcBase.TestRpcService in project hadoop by apache.
From the class TestRMAuditLogger, method testRMAuditLoggerWithIP.
/**
 * Test {@link RMAuditLogger} with IP set.
 */
@Test
public void testRMAuditLoggerWithIP() throws Exception {
  Configuration conf = new Configuration();
  RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class);
  // Create server side implementation
  MyTestRPCServer serverImpl = new MyTestRPCServer();
  BlockingService service =
      TestRpcServiceProtos.TestProtobufRpcProto.newReflectiveBlockingService(serverImpl);
  // start the IPC server
  Server server = new RPC.Builder(conf)
      .setProtocol(TestRpcService.class)
      .setInstance(service)
      .setBindAddress("0.0.0.0")
      .setPort(0)
      .setNumHandlers(5)
      .setVerbose(true)
      .build();
  server.start();
  InetSocketAddress addr = NetUtils.getConnectAddress(server);
  // Make a client connection and test the audit log
  TestRpcService proxy =
      RPC.getProxy(TestRpcService.class, TestProtocol.versionID, addr, conf);
  // Start the testcase
  TestProtos.EmptyRequestProto pingRequest =
      TestProtos.EmptyRequestProto.newBuilder().build();
  proxy.ping(null, pingRequest);
  server.stop();
  RPC.stopProxy(proxy);
}