Search in sources:

Example 1 with ConnectionId

Use of org.apache.hadoop.ipc.Client.ConnectionId in the Apache Hadoop project.

Source: class TestReuseRpcConnections, method verifyRetryPolicyReuseConnections.

/**
 * Exercises connection reuse across proxies created with different retry policies:
 * two equivalent policies must share a single cached connection, while a
 * non-equivalent policy must open a second connection.
 *
 * @param retryPolicy1 policy used for the first proxy
 * @param retryPolicy2 policy equivalent to {@code retryPolicy1}; its proxy must reuse the connection
 * @param anotherRetryPolicy non-equivalent policy; its proxy must create a new connection
 * @throws Exception if server setup or any RPC call fails
 */
private void verifyRetryPolicyReuseConnections(final RetryPolicy retryPolicy1, final RetryPolicy retryPolicy2, final RetryPolicy anotherRetryPolicy) throws Exception {
    final Server server = setupTestServer(conf, 2);
    final Configuration connConf = new Configuration(conf);
    // Clear the socket factory so connection caching is keyed purely by ConnectionId.
    connConf.set(CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY, "");
    Client rpcClient = null;
    TestRpcService firstProxy = null;
    TestRpcService secondProxy = null;
    TestRpcService thirdProxy = null;
    try {
        firstProxy = getClient(addr, connConf, retryPolicy1);
        firstProxy.ping(null, newEmptyRequest());
        rpcClient = ProtobufRpcEngine.getClient(connConf);
        final Set<ConnectionId> cachedConns = rpcClient.getConnectionIds();
        assertEquals("number of connections in cache is wrong", 1, cachedConns.size());
        // An equivalent retry policy must reuse the existing connection.
        secondProxy = getClient(addr, connConf, retryPolicy2);
        secondProxy.ping(null, newEmptyRequest());
        assertEquals("number of connections in cache is wrong", 1, cachedConns.size());
        // A different retry policy must force a brand-new connection.
        thirdProxy = getClient(addr, connConf, anotherRetryPolicy);
        thirdProxy.ping(null, newEmptyRequest());
        assertEquals("number of connections in cache is wrong", 2, cachedConns.size());
    } finally {
        server.stop();
        // Dirty but necessary: empty the connection cache so later runs start clean.
        if (rpcClient != null) {
            rpcClient.getConnectionIds().clear();
        }
        if (firstProxy != null) {
            RPC.stopProxy(firstProxy);
        }
        if (secondProxy != null) {
            RPC.stopProxy(secondProxy);
        }
        if (thirdProxy != null) {
            RPC.stopProxy(thirdProxy);
        }
    }
}
Also used : ConnectionId(org.apache.hadoop.ipc.Client.ConnectionId) Configuration(org.apache.hadoop.conf.Configuration) TestRpcService(org.apache.hadoop.ipc.TestRpcBase.TestRpcService)

Example 2 with ConnectionId

Use of org.apache.hadoop.ipc.Client.ConnectionId in the Apache Hadoop project.

Source: class TestIPC, method call.

/**
 * Issues a single built-in RPC carrying a random long payload to the given address.
 *
 * @param client the IPC client used to place the call
 * @param addr the remote server address
 * @param serviceClass service class tag passed through to the call
 * @param conf configuration used to build the ConnectionId
 * @return the server's response as a {@link Writable}
 * @throws IOException if the call fails
 */
static Writable call(Client client, InetSocketAddress addr, int serviceClass, Configuration conf) throws IOException {
    final ConnectionId connectionId = getConnectionId(addr, MIN_SLEEP_TIME, conf);
    final LongWritable payload = new LongWritable(RANDOM.nextLong());
    return client.call(RPC.RpcKind.RPC_BUILTIN, payload, connectionId, serviceClass, null);
}
Also used : ConnectionId(org.apache.hadoop.ipc.Client.ConnectionId) LongWritable(org.apache.hadoop.io.LongWritable)

Example 3 with ConnectionId

Use of org.apache.hadoop.ipc.Client.ConnectionId in the Apache Hadoop project.

Source: class TestSaslRPC, method testPerConnectionConf.

/**
 * Verifies per-configuration connection caching: proxies built with an identical
 * configuration share one cached connection, while changing the client idle
 * timeout forces a new connection that carries the updated setting.
 *
 * @throws Exception if server setup, token handling, or any RPC call fails
 */
@Test
public void testPerConnectionConf() throws Exception {
    TestTokenSecretManager sm = new TestTokenSecretManager();
    final Server server = setupTestServer(conf, 5, sm);
    final UserGroupInformation current = UserGroupInformation.getCurrentUser();
    final InetSocketAddress addr = NetUtils.getConnectAddress(server);
    TestTokenIdentifier tokenId = new TestTokenIdentifier(new Text(current.getUserName()));
    Token<TestTokenIdentifier> token = new Token<>(tokenId, sm);
    SecurityUtil.setTokenService(token, addr);
    current.addToken(token);
    Configuration newConf = new Configuration(conf);
    // Clear the socket factory so connection caching is keyed purely by ConnectionId.
    newConf.set(CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY, "");
    Client client = null;
    TestRpcService proxy1 = null;
    TestRpcService proxy2 = null;
    TestRpcService proxy3 = null;
    int[] timeouts = { 111222, 3333333 };
    try {
        newConf.setInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, timeouts[0]);
        proxy1 = getClient(addr, newConf);
        proxy1.getAuthMethod(null, newEmptyRequest());
        client = ProtobufRpcEngine.getClient(newConf);
        Set<ConnectionId> conns = client.getConnectionIds();
        assertEquals("number of connections in cache is wrong", 1, conns.size());
        // same conf, connection should be re-used
        proxy2 = getClient(addr, newConf);
        proxy2.getAuthMethod(null, newEmptyRequest());
        assertEquals("number of connections in cache is wrong", 1, conns.size());
        // different conf, new connection should be set up
        newConf.setInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, timeouts[1]);
        proxy3 = getClient(addr, newConf);
        proxy3.getAuthMethod(null, newEmptyRequest());
        assertEquals("number of connections in cache is wrong", 2, conns.size());
        // now verify the proxies have the correct connection ids and timeouts
        ConnectionId[] connsArray = { RPC.getConnectionIdForProxy(proxy1), RPC.getConnectionIdForProxy(proxy2), RPC.getConnectionIdForProxy(proxy3) };
        assertEquals(connsArray[0], connsArray[1]);
        // expected value goes first; the first two proxies share the initial idle timeout
        assertEquals(timeouts[0], connsArray[0].getMaxIdleTime());
        assertFalse(connsArray[0].equals(connsArray[2]));
        // FIX: the previous assertNotSame compared autoboxed Integers by reference,
        // which passes vacuously for values outside the Integer cache. The intent
        // (per the comment above) is that proxy3's connection carries the updated timeout.
        assertEquals(timeouts[1], connsArray[2].getMaxIdleTime());
    } finally {
        server.stop();
        // this is dirty, but clear out connection cache for next run
        if (client != null) {
            client.getConnectionIds().clear();
        }
        if (proxy1 != null) {
            RPC.stopProxy(proxy1);
        }
        if (proxy2 != null) {
            RPC.stopProxy(proxy2);
        }
        if (proxy3 != null) {
            RPC.stopProxy(proxy3);
        }
    }
}
Also used : SaslServer(javax.security.sasl.SaslServer) Configuration(org.apache.hadoop.conf.Configuration) InetSocketAddress(java.net.InetSocketAddress) Text(org.apache.hadoop.io.Text) InvalidToken(org.apache.hadoop.security.token.SecretManager.InvalidToken) ConnectionId(org.apache.hadoop.ipc.Client.ConnectionId) SaslClient(javax.security.sasl.SaslClient) Test(org.junit.Test)

Example 4 with ConnectionId

Use of org.apache.hadoop.ipc.Client.ConnectionId in the Apache Hadoop project.

Source: class TestSaslRPC, method testPingInterval.

/**
 * Verifies that a ConnectionId picks up the configured ping interval: the
 * default interval when pings are enabled, and zero when pings are disabled.
 *
 * @throws Exception if building a ConnectionId from the configuration fails
 */
@Test
public void testPingInterval() throws Exception {
    Configuration newConf = new Configuration(conf);
    newConf.set(SERVER_PRINCIPAL_KEY, SERVER_PRINCIPAL_1);
    // FIX: previously this mutated the shared 'conf' field, which leaked state into
    // other tests and had no effect on the 'newConf' copy actually used below
    // (newConf was snapshotted from conf before the set).
    newConf.setInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY, CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT);
    // set doPing to true
    newConf.setBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, true);
    ConnectionId remoteId = ConnectionId.getConnectionId(new InetSocketAddress(0), TestRpcService.class, null, 0, null, newConf);
    assertEquals(CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT, remoteId.getPingInterval());
    // set doPing to false
    newConf.setBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, false);
    remoteId = ConnectionId.getConnectionId(new InetSocketAddress(0), TestRpcService.class, null, 0, null, newConf);
    assertEquals(0, remoteId.getPingInterval());
}
Also used : ConnectionId(org.apache.hadoop.ipc.Client.ConnectionId) Configuration(org.apache.hadoop.conf.Configuration) InetSocketAddress(java.net.InetSocketAddress) Test(org.junit.Test)

Aggregations

ConnectionId (org.apache.hadoop.ipc.Client.ConnectionId)4 Configuration (org.apache.hadoop.conf.Configuration)3 InetSocketAddress (java.net.InetSocketAddress)2 Test (org.junit.Test)2 SaslClient (javax.security.sasl.SaslClient)1 SaslServer (javax.security.sasl.SaslServer)1 LongWritable (org.apache.hadoop.io.LongWritable)1 Text (org.apache.hadoop.io.Text)1 TestRpcService (org.apache.hadoop.ipc.TestRpcBase.TestRpcService)1 InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken)1