Example 1 with Connection

Use of org.apache.hadoop.ipc.Server.Connection in project hadoop by apache.

From the class TestRPC, method testReaderExceptions:

@Test(timeout = 30000)
public void testReaderExceptions() throws Exception {
    Server server = null;
    TestRpcService proxy = null;
    // will attempt to return this exception from a reader with and w/o
    // the connection closing.
    IOException expectedIOE = new TestReaderException("testing123");
    @SuppressWarnings("serial") IOException rseError = new RpcServerException("keepalive", expectedIOE) {

        @Override
        public RpcStatusProto getRpcStatusProto() {
            return RpcStatusProto.ERROR;
        }
    };
    @SuppressWarnings("serial") IOException rseFatal = new RpcServerException("disconnect", expectedIOE) {

        @Override
        public RpcStatusProto getRpcStatusProto() {
            return RpcStatusProto.FATAL;
        }
    };
    try {
        RPC.Builder builder = newServerBuilder(conf).setQueueSizePerHandler(1).setNumHandlers(1).setVerbose(true);
        server = setupTestServer(builder);
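        // Swap in a test request class whose staged exception (FakeRequestClass.exception,
        // set per iteration below) is thrown by the reader while decoding the request.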
        Whitebox.setInternalState(server, "rpcRequestClass", FakeRequestClass.class);
        MutableCounterLong authMetric = (MutableCounterLong) Whitebox.getInternalState(server.getRpcMetrics(), "rpcAuthorizationSuccesses");
        proxy = getClient(addr, conf);
        boolean isDisconnected = true;
        Connection lastConn = null;
        long expectedAuths = 0;
        // fuzz the client.
        for (int i = 0; i < 128; i++) {
            String reqName = "request[" + i + "]";
            int r = ThreadLocalRandom.current().nextInt();
            final boolean doDisconnect = r % 4 == 0;
            LOG.info("TestDisconnect request[" + i + "] " + " shouldConnect=" + isDisconnected + " willDisconnect=" + doDisconnect);
            if (isDisconnected) {
                expectedAuths++;
            }
            try {
                FakeRequestClass.exception = doDisconnect ? rseFatal : rseError;
                proxy.ping(null, newEmptyRequest());
                fail(reqName + " didn't fail");
            } catch (ServiceException e) {
                RemoteException re = (RemoteException) e.getCause();
                assertEquals(reqName, expectedIOE, re.unwrapRemoteException());
            }
            // check authorizations to ensure new connection when expected,
            // then conclusively determine if connections are disconnected
            // correctly.
            assertEquals(reqName, expectedAuths, authMetric.value());
            if (!doDisconnect) {
                // if it wasn't fatal, verify there's only one open connection.
                Connection[] conns = server.getConnections();
                assertEquals(reqName, 1, conns.length);
                // verify whether the connection should have been reused.
                if (isDisconnected) {
                    assertNotSame(reqName, lastConn, conns[0]);
                } else {
                    assertSame(reqName, lastConn, conns[0]);
                }
                lastConn = conns[0];
            } else if (lastConn != null) {
                // avoid race condition in server where connection may not be
                // fully removed yet.  just make sure it's marked for being closed.
                // the open connection checks above ensure correct behavior.
                assertTrue(reqName, lastConn.shouldClose());
            }
            isDisconnected = doDisconnect;
        }
    } finally {
        stop(server, proxy);
    }
}
Also used : MutableCounterLong(org.apache.hadoop.metrics2.lib.MutableCounterLong) Connection(org.apache.hadoop.ipc.Server.Connection) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) ServiceException(com.google.protobuf.ServiceException) Test(org.junit.Test)
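
For reference, a minimal sketch of the Connection-inspection pattern the test exercises. It assumes code living in the org.apache.hadoop.ipc package (like the test above), a started Server named server, and a LOG field; none of it is taken verbatim from the Hadoop sources.

// Minimal sketch: iterate the server's currently open connections and report
// which ones have been marked for closing versus which are still serving calls.
Connection[] conns = server.getConnections();
for (Connection conn : conns) {
    if (conn.shouldClose()) {
        // the server's connection manager will tear this connection down shortly
        LOG.info("connection marked for close: " + conn);
    } else {
        LOG.info("open connection, service class " + conn.getServiceClass());
    }
}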

Example 2 with Connection

Use of org.apache.hadoop.ipc.Server.Connection in project hadoop by apache.

From the class TestIPC, method callAndVerify:

/**
 * Make a call from a client and verify whether the header info was changed on the
 * server side.
 */
private static void callAndVerify(Server server, InetSocketAddress addr, int serviceClass, boolean noChanged) throws IOException {
    Client client = new Client(LongWritable.class, conf);
    call(client, addr, serviceClass, conf);
    Connection connection = server.getConnections()[0];
    int serviceClass2 = connection.getServiceClass();
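    // XOR assert: with noChanged the recorded service class must equal the one the
    // client sent; without it, the two must differ.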
    assertFalse(noChanged ^ serviceClass == serviceClass2);
    client.stop();
}
Also used : Connection(org.apache.hadoop.ipc.Server.Connection)
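
A hedged usage sketch of the helper above; the server/addr pair comes from the surrounding test, while the service-class values and expectations below are illustrative assumptions, not taken from a specific Hadoop test.

// Assert that the server recorded exactly the service class the client sent.
callAndVerify(server, addr, 0, true);
// Assert that the server-side value differs from the one passed here (i.e. the
// header info was changed on the server side).
callAndVerify(server, addr, 7, false);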

Example 3 with Connection

Use of org.apache.hadoop.ipc.Server.Connection in project hadoop by apache.

From the class TestSaslRPC, method doDigestRpc:

private void doDigestRpc(Server server, TestTokenSecretManager sm) throws Exception {
    final UserGroupInformation current = UserGroupInformation.getCurrentUser();
    addr = NetUtils.getConnectAddress(server);
    TestTokenIdentifier tokenId = new TestTokenIdentifier(new Text(current.getUserName()));
    Token<TestTokenIdentifier> token = new Token<>(tokenId, sm);
    SecurityUtil.setTokenService(token, addr);
    current.addToken(token);
    TestRpcService proxy = null;
    try {
        proxy = getClient(addr, conf);
        AuthMethod authMethod = convert(proxy.getAuthMethod(null, newEmptyRequest()));
        assertEquals(TOKEN, authMethod);
        //QOP must be auth
        assertEquals(expectedQop.saslQop, RPC.getConnectionIdForProxy(proxy).getSaslQop());
        int n = 0;
        for (Connection connection : server.getConnections()) {
            // only qop auth should dispose of the sasl server
            boolean hasServer = (connection.saslServer != null);
            assertTrue("qop:" + expectedQop + " hasServer:" + hasServer, (expectedQop == QualityOfProtection.AUTHENTICATION) ^ hasServer);
            n++;
        }
        assertTrue(n > 0);
        proxy.ping(null, newEmptyRequest());
    } finally {
        stop(server, proxy);
    }
}
Also used : Connection(org.apache.hadoop.ipc.Server.Connection) Text(org.apache.hadoop.io.Text) InvalidToken(org.apache.hadoop.security.token.SecretManager.InvalidToken) AuthMethod(org.apache.hadoop.security.SaslRpcServer.AuthMethod)
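
The QOP that the loop above asserts against is normally selected through the hadoop.rpc.protection configuration key before the server and client are created. A minimal sketch, with the chosen value being illustrative:

import org.apache.hadoop.conf.Configuration;

// Minimal sketch: select the SASL quality of protection. "authentication" is the
// only QOP for which the test above expects the per-connection saslServer to have
// been disposed of; "integrity" and "privacy" keep it to wrap and unwrap payloads.
Configuration conf = new Configuration();
conf.set("hadoop.rpc.protection", "privacy"); // or "authentication", "integrity"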

Aggregations

Connection (org.apache.hadoop.ipc.Server.Connection) 3
ServiceException (com.google.protobuf.ServiceException) 1
IOException (java.io.IOException) 1
InterruptedIOException (java.io.InterruptedIOException) 1
Text (org.apache.hadoop.io.Text) 1
MutableCounterLong (org.apache.hadoop.metrics2.lib.MutableCounterLong) 1
AuthMethod (org.apache.hadoop.security.SaslRpcServer.AuthMethod) 1
InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken) 1
Test (org.junit.Test) 1