Search in sources :

Example 86 with Connection

Use of org.apache.hadoop.hbase.client.Connection in the Apache HBase project.

From class TestDelegationTokenWithEncryption, method testPutGetWithDelegationToken.

@Test
public void testPutGetWithDelegationToken() throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
        // Obtain a delegation token for this connection and attach it to the
        // current UGI so subsequent RPCs authenticate with TOKEN credentials.
        Token<? extends TokenIdentifier> token = TokenUtil.obtainToken(conn);
        UserGroupInformation.getCurrentUser().addToken(token);
        // Create the table used by the per-RPC-client checks below.
        // Admin is Closeable: try-with-resources releases it even on failure
        // (the original leaked it). Also dropped the redundant copy
        // constructor — new HTableDescriptor(new HTableDescriptor(...)) — one
        // constructor call is sufficient.
        try (Admin admin = conn.getAdmin()) {
            HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf("testtable"));
            tableDescriptor.addFamily(new HColumnDescriptor("family"));
            admin.createTable(tableDescriptor);
        }
        // Exercise both RPC client implementations against the same table.
        testPutGetWithDelegationToken(BlockingRpcClient.class);
        testPutGetWithDelegationToken(NettyRpcClient.class);
    }
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) Admin(org.apache.hadoop.hbase.client.Admin) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 87 with Connection

Use of org.apache.hadoop.hbase.client.Connection in the Apache HBase project.

From class TestGenerateDelegationToken, method testTokenAuth.

/**
 * Connects with the given RPC client implementation using a delegation token
 * and verifies that (a) the server sees the TOKEN authentication method for
 * the expected user and (b) token generation is refused for non-Kerberos
 * (token-authenticated) clients.
 *
 * @param rpcImplClass the RpcClient implementation to install for this run
 * @throws IOException if the connection or table cannot be opened
 * @throws ServiceException if an unexpected RPC failure occurs
 */
private void testTokenAuth(Class<? extends RpcClient> rpcImplClass) throws IOException, ServiceException {
    TEST_UTIL.getConfiguration().set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, rpcImplClass.getName());
    try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
        Table table = conn.getTable(TableName.META_TABLE_NAME)) {
        CoprocessorRpcChannel rpcChannel = table.coprocessorService(HConstants.EMPTY_START_ROW);
        AuthenticationProtos.AuthenticationService.BlockingInterface service = AuthenticationProtos.AuthenticationService.newBlockingStub(rpcChannel);
        WhoAmIResponse response = service.whoAmI(null, WhoAmIRequest.getDefaultInstance());
        assertEquals(USERNAME, response.getUsername());
        assertEquals(AuthenticationMethod.TOKEN.name(), response.getAuthMethod());
        try {
            service.getAuthenticationToken(null, GetAuthenticationTokenRequest.getDefaultInstance());
            // BUG FIX: the original fell through silently if no exception was
            // thrown, so the access-denied path was never actually verified.
            // AssertionError is not a ServiceException, so the catch below
            // cannot swallow it.
            throw new AssertionError("Expected token generation to be denied for a token-authenticated client");
        } catch (ServiceException e) {
            AccessDeniedException exc = (AccessDeniedException) ProtobufUtil.handleRemoteException(e);
            assertTrue(exc.getMessage().contains("Token generation only allowed for Kerberos authenticated clients"));
        }
    }
}
Also used : AccessDeniedException(org.apache.hadoop.hbase.security.AccessDeniedException) Table(org.apache.hadoop.hbase.client.Table) ServiceException(com.google.protobuf.ServiceException) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) Connection(org.apache.hadoop.hbase.client.Connection) WhoAmIResponse(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse)

Example 88 with Connection

Use of org.apache.hadoop.hbase.client.Connection in the Apache HBase project.

From class TestTokenAuthentication, method testUseExistingToken.

// This won't work any more now RpcServer takes Shaded Service. It depends on RPCServer being able to provide a
// non-shaded service. TODO: FIX. Tried to make RPC generic but then it ripples; have to make Connection generic.
// And Call generic, etc.
//
//  @Test
//  public void testTokenAuthentication() throws Exception {
//    UserGroupInformation testuser =
//        UserGroupInformation.createUserForTesting("testuser", new String[]{"testgroup"});
//    testuser.setAuthenticationMethod(
//        UserGroupInformation.AuthenticationMethod.TOKEN);
//    final Configuration conf = TEST_UTIL.getConfiguration();
//    UserGroupInformation.setConfiguration(conf);
//    Token<AuthenticationTokenIdentifier> token = secretManager.generateToken("testuser");
//    LOG.debug("Got token: " + token.toString());
//    testuser.addToken(token);
//    // Verify the server authenticates us as this token user
//    testuser.doAs(new PrivilegedExceptionAction<Object>() {
//      public Object run() throws Exception {
//        Configuration c = server.getConfiguration();
//        final RpcClient rpcClient = RpcClientFactory.createClient(c, clusterId.toString());
//        ServerName sn =
//            ServerName.valueOf(server.getAddress().getHostName(), server.getAddress().getPort(),
//                System.currentTimeMillis());
//        try {
//          // Make a proxy to go between the shaded RpcController that rpc expects and the
//          // non-shaded controller this CPEP is providing. This is because this test does a neat
//          // little trick of testing the CPEP Service by inserting it as RpcServer Service. This
//          // worked fine before we shaded PB. Now we need these proxies.
//          final org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel =
//              rpcClient.createBlockingRpcChannel(sn, User.getCurrent(), HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
//          AuthenticationProtos.AuthenticationService.BlockingInterface stub =
//              AuthenticationProtos.AuthenticationService.newBlockingStub(channel);
//          AuthenticationProtos.WhoAmIResponse response =
//              stub.whoAmI(null, AuthenticationProtos.WhoAmIRequest.getDefaultInstance());
//          String myname = response.getUsername();
//          assertEquals("testuser", myname);
//          String authMethod = response.getAuthMethod();
//          assertEquals("TOKEN", authMethod);
//        } finally {
//          rpcClient.close();
//        }
//        return null;
//      }
//    });
//  }
@Test
public void testUseExistingToken() throws Exception {
    User user = User.createUserForTesting(TEST_UTIL.getConfiguration(), "testuser2", new String[] { "testgroup" });
    Token<AuthenticationTokenIdentifier> token = secretManager.generateToken(user.getName());
    assertNotNull(token);
    user.addToken(token);
    // make sure we got a token
    Token<AuthenticationTokenIdentifier> firstToken =
        new AuthenticationTokenSelector().selectToken(token.getService(), user.getTokens());
    assertNotNull(firstToken);
    assertEquals(token, firstToken);
    // Connection is AutoCloseable: try-with-resources replaces the original
    // manual try/finally + conn.close() with identical cleanup semantics.
    try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
        // A token for this service is already present, so addTokenIfMissing
        // must report that it did not add another one.
        assertFalse(TokenUtil.addTokenIfMissing(conn, user));
        // make sure we still have the same token
        Token<AuthenticationTokenIdentifier> secondToken =
            new AuthenticationTokenSelector().selectToken(token.getService(), user.getTokens());
        assertEquals(firstToken, secondToken);
    }
}
Also used : User(org.apache.hadoop.hbase.security.User) ClusterConnection(org.apache.hadoop.hbase.client.ClusterConnection) Connection(org.apache.hadoop.hbase.client.Connection) Test(org.junit.Test)

Example 89 with Connection

Use of org.apache.hadoop.hbase.client.Connection in the Apache HBase project.

From class TestCoprocessorWhitelistMasterObserver, method positiveTestCase.

/**
 * Tests a table modification that adds a coprocessor path which is not
 * whitelisted. An IOException is expected (and caught) when modifying the
 * table, showing the whitelist coprocessor rejects the change, and the
 * table descriptor is verified to still have no coprocessors afterwards.
 *
 * @param whitelistedPaths paths to register in the whitelisting configuration
 * @param coprocessorPath path to use for the mock coprocessor
 * @throws Exception on unexpected cluster or client failure
 */
private static void positiveTestCase(String[] whitelistedPaths, String coprocessorPath) throws Exception {
    Configuration conf = UTIL.getConfiguration();
    // load coprocessor under test
    conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, CoprocessorWhitelistMasterObserver.class.getName());
    conf.setStrings(CoprocessorWhitelistMasterObserver.CP_COPROCESSOR_WHITELIST_PATHS_KEY, whitelistedPaths);
    // set retries low to raise exception quickly
    conf.setInt("hbase.client.retries.number", 1);
    UTIL.startMiniCluster();
    UTIL.createTable(TEST_TABLE, new byte[][] { TEST_FAMILY });
    UTIL.waitUntilAllRegionsAssigned(TEST_TABLE);
    // Connection, Table and Admin are all Closeable; the original leaked all
    // three. Try-with-resources guarantees cleanup even when the test fails.
    try (Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE);
        Admin admin = connection.getAdmin()) {
        HTableDescriptor htd = t.getTableDescriptor();
        htd.addCoprocessor("net.clayb.hbase.coprocessor.NotWhitelisted", new Path(coprocessorPath), Coprocessor.PRIORITY_USER, null);
        LOG.info("Modifying Table");
        try {
            admin.modifyTable(TEST_TABLE, htd);
            fail("Expected coprocessor to raise IOException");
        } catch (IOException e) {
        // expected: the whitelist observer rejects the non-whitelisted path
        }
        LOG.info("Done Modifying Table");
        // The rejected modification must not have attached any coprocessor.
        assertEquals(0, t.getTableDescriptor().getCoprocessors().size());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Example 90 with Connection

Use of org.apache.hadoop.hbase.client.Connection in the Apache HBase project.

From class TestCoprocessorWhitelistMasterObserver, method testCreationClasspathCoprocessor.

/**
 * Tests a table creation that includes a coprocessor loaded from the
 * classpath (no path to whitelist). The table is expected to be created
 * successfully with the coprocessor attached.
 */
@Test
public void testCreationClasspathCoprocessor() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    // load coprocessor under test
    conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, CoprocessorWhitelistMasterObserver.class.getName());
    conf.setStrings(CoprocessorWhitelistMasterObserver.CP_COPROCESSOR_WHITELIST_PATHS_KEY, new String[] {});
    // set retries low to raise exception quickly
    conf.setInt("hbase.client.retries.number", 1);
    UTIL.startMiniCluster();
    HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
    HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY);
    htd.addFamily(hcd);
    // Classpath-based coprocessor: no jar path, so nothing for the
    // whitelist observer to reject.
    htd.addCoprocessor(TestRegionObserver.class.getName());
    // Connection, Admin and Table are Closeable; the original leaked all
    // three. Try-with-resources guarantees cleanup even on assertion failure.
    try (Connection connection = ConnectionFactory.createConnection(conf);
        Admin admin = connection.getAdmin()) {
        LOG.info("Creating Table");
        admin.createTable(htd);
        // ensure table was created and coprocessor is added to table
        LOG.info("Done Creating Table");
        try (Table t = connection.getTable(TEST_TABLE)) {
            assertEquals(1, t.getTableDescriptor().getCoprocessors().size());
        }
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) Admin(org.apache.hadoop.hbase.client.Admin) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Aggregations

Connection (org.apache.hadoop.hbase.client.Connection)307 Table (org.apache.hadoop.hbase.client.Table)194 Test (org.junit.Test)174 IOException (java.io.IOException)117 TableName (org.apache.hadoop.hbase.TableName)103 Result (org.apache.hadoop.hbase.client.Result)102 Admin (org.apache.hadoop.hbase.client.Admin)90 Scan (org.apache.hadoop.hbase.client.Scan)81 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)77 PrivilegedExceptionAction (java.security.PrivilegedExceptionAction)71 Put (org.apache.hadoop.hbase.client.Put)68 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)58 Delete (org.apache.hadoop.hbase.client.Delete)55 Configuration (org.apache.hadoop.conf.Configuration)54 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)52 Get (org.apache.hadoop.hbase.client.Get)48 InterruptedIOException (java.io.InterruptedIOException)45 Cell (org.apache.hadoop.hbase.Cell)41 CellScanner (org.apache.hadoop.hbase.CellScanner)34 ArrayList (java.util.ArrayList)26