Search in sources:

Example 66 with ServiceException

Use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in the Apache HBase project.

From the class TestHMasterRPCException, method testRPCException.

@Test
public void testRPCException() throws IOException, InterruptedException, KeeperException {
    ServerName sm = master.getServerName();
    boolean fakeZNodeDelete = false;
    // Retry for up to ~20 seconds: the master RPC endpoint may answer with
    // ServerNotRunningYetException while the master is still initializing.
    for (int i = 0; i < 20; i++) {
        try {
            BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(sm, User.getCurrent(), 0);
            MasterProtos.MasterService.BlockingInterface stub = MasterProtos.MasterService.newBlockingStub(channel);
            // Success path: master reports itself running; test is done.
            assertTrue(stub.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance()).getIsMasterRunning());
            return;
        } catch (ServiceException ex) {
            IOException ie = ProtobufUtil.handleRemoteException(ex);
            // No SocketTimeoutException here. RpcServer is already started after the construction of
            // HMaster.
            assertTrue(ie.getMessage().startsWith("org.apache.hadoop.hbase.ipc.ServerNotRunningYetException: Server is not running yet"));
            LOG.info("Expected exception: ", ie);
            if (!fakeZNodeDelete) {
                // Delete the master's address znode once to exercise the recovery path.
                testUtil.getZooKeeperWatcher().getRecoverableZooKeeper().delete(testUtil.getZooKeeperWatcher().getZNodePaths().masterAddressZNode, -1);
                fakeZNodeDelete = true;
            }
        }
        Thread.sleep(1000);
    }
    // BUG FIX: previously the test fell off the end of the loop and silently
    // passed when the master never became reachable. Fail explicitly instead.
    fail("Master did not report itself running within the retry budget");
}
Also used : ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) ServerName(org.apache.hadoop.hbase.ServerName) BlockingRpcChannel(org.apache.hbase.thirdparty.com.google.protobuf.BlockingRpcChannel) IOException(java.io.IOException) Test(org.junit.Test)

Example 67 with ServiceException

Use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in the Apache HBase project.

From the class TestCoprocessorEndpoint, method testCoprocessorError.

@Test
public void testCoprocessorError() throws Exception {
    Configuration configuration = new Configuration(util.getConfiguration());
    // Make it not retry forever
    configuration.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
    // try-with-resources replaces the manual try/finally + close(): the table is
    // closed on every path, including an assertion failure from fail().
    try (Table table = util.getConnection().getTable(TEST_TABLE)) {
        CoprocessorRpcChannel protocol = table.coprocessorService(ROWS[0]);
        TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface service = TestRpcServiceProtos.TestProtobufRpcProto.newBlockingStub(protocol);
        // The endpoint's error() call is expected to throw.
        service.error(null, TestProtos.EmptyRequestProto.getDefaultInstance());
        fail("Should have thrown an exception");
    } catch (ServiceException e) {
        // Expected: the coprocessor endpoint's failure surfaces as ServiceException.
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) Test(org.junit.Test)

Example 68 with ServiceException

Use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in the Apache HBase project.

From the class TestCoprocessorEndpoint, method testMasterCoprocessorError.

@Test
public void testMasterCoprocessorError() throws Throwable {
    // Build a blocking stub against the master's coprocessor service channel.
    Admin admin = util.getAdmin();
    TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface stub =
        TestRpcServiceProtos.TestProtobufRpcProto.newBlockingStub(admin.coprocessorService());
    try {
        // The endpoint's error() call must not complete normally.
        stub.error(null, TestProtos.EmptyRequestProto.getDefaultInstance());
        fail("Should have thrown an exception");
    } catch (ServiceException e) {
        // Expected path: the master-side endpoint failure arrives as ServiceException.
    }
}
Also used : ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) Admin(org.apache.hadoop.hbase.client.Admin) Test(org.junit.Test)

Example 69 with ServiceException

Use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in the Apache HBase project.

From the class TestSecureExport, method testAccessCase.

/**
 * Test the ExportEndpoint's access levels. The {@link Export} test is ignored
 * since the access exceptions cannot be collected from the mappers.
 */
@Test
public void testAccessCase() throws Throwable {
    final String exportTable = name.getMethodName();
    // Create a table (single family FAMILYA) owned by USER_OWNER, pre-split at "s".
    TableDescriptor exportHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(exportTable)).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)).build();
    User owner = User.createUserForTesting(UTIL.getConfiguration(), USER_OWNER, new String[0]);
    SecureTestUtil.createTable(UTIL, owner, exportHtd, new byte[][] { Bytes.toBytes("s") });
    // Grant distinct permission mixes so each user exercises a different access path:
    // RO = READ only, RX = READ+EXEC, XO = EXEC only.
    SecureTestUtil.grantOnTable(UTIL, USER_RO, TableName.valueOf(exportTable), null, null, Permission.Action.READ);
    SecureTestUtil.grantOnTable(UTIL, USER_RX, TableName.valueOf(exportTable), null, null, Permission.Action.READ, Permission.Action.EXEC);
    SecureTestUtil.grantOnTable(UTIL, USER_XO, TableName.valueOf(exportTable), null, null, Permission.Action.EXEC);
    // 4 entries expected: owner grant plus the three explicit grants above
    // — TODO confirm the owner grant is what accounts for the fourth entry.
    assertEquals(4, PermissionStorage.getTablePermissions(UTIL.getConfiguration(), TableName.valueOf(exportTable)).size());
    // A write of two cells to one row; used both to seed data and to probe write access.
    AccessTestAction putAction = () -> {
        Put p = new Put(ROW1);
        p.addColumn(FAMILYA, Bytes.toBytes("qual_0"), NOW, QUAL);
        p.addColumn(FAMILYA, Bytes.toBytes("qual_1"), NOW, QUAL);
        try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
            Table t = conn.getTable(TableName.valueOf(exportTable))) {
            t.put(p);
        }
        return null;
    };
    // no hdfs access.
    SecureTestUtil.verifyAllowed(putAction, getUserByLogin(USER_ADMIN), getUserByLogin(USER_OWNER));
    SecureTestUtil.verifyDenied(putAction, getUserByLogin(USER_RO), getUserByLogin(USER_XO), getUserByLogin(USER_RX), getUserByLogin(USER_NONE));
    // World-writable staging dir so every test user can create the export output under it.
    final FileSystem fs = UTIL.getDFSCluster().getFileSystem();
    final Path openDir = fs.makeQualified(new Path("testAccessCase"));
    fs.mkdirs(openDir);
    fs.setPermission(openDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
    final Path output = fs.makeQualified(new Path(openDir, "output"));
    // Runs the Export endpoint and checks row/cell counts; the finally block also
    // verifies output-file ownership and always clears the output directory so the
    // action can be re-run for the next user.
    AccessTestAction exportAction = () -> {
        try {
            String[] args = new String[] { exportTable, output.toString() };
            Map<byte[], Export.Response> result = Export.run(new Configuration(UTIL.getConfiguration()), args);
            long rowCount = 0;
            long cellCount = 0;
            for (Export.Response r : result.values()) {
                rowCount += r.getRowCount();
                cellCount += r.getCellCount();
            }
            // One row with two cells was written by putAction above.
            assertEquals(1, rowCount);
            assertEquals(2, cellCount);
            return null;
        } catch (ServiceException | IOException ex) {
            // Access failures propagate unchanged so verifyDenied can recognize them.
            throw ex;
        } catch (Throwable ex) {
            LOG.error(ex.toString(), ex);
            throw new Exception(ex);
        } finally {
            if (fs.exists(new Path(openDir, "output"))) {
                // if export completes successfully, every file under the output directory should be
                // owned by the current user, not the hbase service user.
                FileStatus outputDirFileStatus = fs.getFileStatus(new Path(openDir, "output"));
                String currentUserName = User.getCurrent().getShortName();
                assertEquals("Unexpected file owner", currentUserName, outputDirFileStatus.getOwner());
                FileStatus[] outputFileStatus = fs.listStatus(new Path(openDir, "output"));
                for (FileStatus fileStatus : outputFileStatus) {
                    assertEquals("Unexpected file owner", currentUserName, fileStatus.getOwner());
                }
            } else {
                LOG.info("output directory doesn't exist. Skip check");
            }
            clearOutput(output);
        }
    };
    // Export requires at least EXEC+READ: RO (no EXEC), XO (no READ) and NONE are denied.
    SecureTestUtil.verifyDenied(exportAction, getUserByLogin(USER_RO), getUserByLogin(USER_XO), getUserByLogin(USER_NONE));
    SecureTestUtil.verifyAllowed(exportAction, getUserByLogin(USER_ADMIN), getUserByLogin(USER_OWNER), getUserByLogin(USER_RX));
    // Owner may drop the table; cleanup.
    AccessTestAction deleteAction = () -> {
        UTIL.deleteTable(TableName.valueOf(exportTable));
        return null;
    };
    SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
    fs.delete(openDir, true);
}
Also used : Path(org.apache.hadoop.fs.Path) User(org.apache.hadoop.hbase.security.User) Table(org.apache.hadoop.hbase.client.Table) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) AccessTestAction(org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction) Connection(org.apache.hadoop.hbase.client.Connection) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Put(org.apache.hadoop.hbase.client.Put) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) IOException(java.io.IOException) FileSystem(org.apache.hadoop.fs.FileSystem) FsPermission(org.apache.hadoop.fs.permission.FsPermission) Map(java.util.Map) Test(org.junit.Test)

Example 70 with ServiceException

Use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in the Apache HBase project.

From the class HBaseRpcServicesBase, method clearSlowLogsResponses.

@Override
@QosPriority(priority = HConstants.ADMIN_QOS)
public ClearSlowLogResponses clearSlowLogsResponses(final RpcController controller, final ClearSlowLogResponseRequest request) throws ServiceException {
    // Caller must hold ADMIN permission; access-check failures are rethrown
    // as ServiceException per the RPC contract.
    try {
        requirePermission("clearSlowLogsResponses", Permission.Action.ADMIN);
    } catch (IOException e) {
        throw new ServiceException(e);
    }
    // Clear the SLOW_LOG named queue when a recorder is configured; with no
    // recorder the response simply reports that nothing was cleaned.
    final NamedQueueRecorder recorder = this.server.getNamedQueueRecorder();
    final boolean cleaned =
        recorder != null && recorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG);
    return ClearSlowLogResponses.newBuilder().setIsCleaned(cleaned).build();
}
Also used : NamedQueueRecorder(org.apache.hadoop.hbase.namequeues.NamedQueueRecorder) GetMastersRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetMastersRequest) SlowLogPayload(org.apache.hadoop.hbase.shaded.protobuf.generated.TooSlowLog.SlowLogPayload) RequestHeader(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader) LoggerFactory(org.slf4j.LoggerFactory) RpcSchedulerFactory(org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory) Message(org.apache.hbase.thirdparty.com.google.protobuf.Message) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) NamedQueueGetResponse(org.apache.hadoop.hbase.namequeues.response.NamedQueueGetResponse) NamedQueueRecorder(org.apache.hadoop.hbase.namequeues.NamedQueueRecorder) Configuration(org.apache.hadoop.conf.Configuration) PriorityFunction(org.apache.hadoop.hbase.ipc.PriorityFunction) Method(java.lang.reflect.Method) Address(org.apache.hadoop.hbase.net.Address) NoopAccessChecker(org.apache.hadoop.hbase.security.access.NoopAccessChecker) ProtobufUtil(org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil) RpcLogDetails(org.apache.hadoop.hbase.namequeues.RpcLogDetails) ZKPermissionWatcher(org.apache.hadoop.hbase.security.access.ZKPermissionWatcher) GetBootstrapNodesRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetBootstrapNodesRequest) ZKWatcher(org.apache.hadoop.hbase.zookeeper.ZKWatcher) SlowLogResponseRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SlowLogResponseRequest) GetMetaRegionLocationsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetMetaRegionLocationsRequest) InetSocketAddress(java.net.InetSocketAddress) GetMetaRegionLocationsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetMetaRegionLocationsResponse) InvocationTargetException(java.lang.reflect.InvocationTargetException) List(java.util.List) 
ConfigurationObserver(org.apache.hadoop.hbase.conf.ConfigurationObserver) RpcServerInterface(org.apache.hadoop.hbase.ipc.RpcServerInterface) UpdateConfigurationResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse) Optional(java.util.Optional) ClearSlowLogResponseRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearSlowLogResponseRequest) GetActiveMasterResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetActiveMasterResponse) HBaseRPCErrorHandler(org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler) Permission(org.apache.hadoop.hbase.security.access.Permission) DNS(org.apache.hadoop.hbase.util.DNS) ByteBuffAllocator(org.apache.hadoop.hbase.io.ByteBuffAllocator) ReservoirSample(org.apache.hadoop.hbase.util.ReservoirSample) NamedQueueGetRequest(org.apache.hadoop.hbase.namequeues.request.NamedQueueGetRequest) ConnectionUtils(org.apache.hadoop.hbase.client.ConnectionUtils) RpcController(org.apache.hbase.thirdparty.com.google.protobuf.RpcController) GetMastersResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetMastersResponse) BindException(java.net.BindException) User(org.apache.hadoop.hbase.security.User) ClearSlowLogResponses(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearSlowLogResponses) UpdateConfigurationRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest) GetBootstrapNodesResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetBootstrapNodesResponse) GetClusterIdResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetClusterIdResponse) NamedQueuePayload(org.apache.hadoop.hbase.namequeues.NamedQueuePayload) GetMastersResponseEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetMastersResponseEntry) RpcServer(org.apache.hadoop.hbase.ipc.RpcServer) 
SlowLogResponses(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SlowLogResponses) Logger(org.slf4j.Logger) HBaseProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos) KeeperException(org.apache.zookeeper.KeeperException) GetClusterIdRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetClusterIdRequest) AdminService(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService) GetActiveMasterRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.GetActiveMasterRequest) IOException(java.io.IOException) ClientMetaService(org.apache.hadoop.hbase.shaded.protobuf.generated.RegistryProtos.ClientMetaService) RpcServerFactory(org.apache.hadoop.hbase.ipc.RpcServerFactory) QosPriority(org.apache.hadoop.hbase.ipc.QosPriority) OOMEChecker(org.apache.hadoop.hbase.util.OOMEChecker) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) BlockingServiceAndInterface(org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface) AccessChecker(org.apache.hadoop.hbase.security.access.AccessChecker) RpcScheduler(org.apache.hadoop.hbase.ipc.RpcScheduler) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) Collections(java.util.Collections) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) IOException(java.io.IOException) ClearSlowLogResponses(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearSlowLogResponses) QosPriority(org.apache.hadoop.hbase.ipc.QosPriority)

Aggregations

ServiceException (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException)130 IOException (java.io.IOException)112 DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)100 ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString)39 HBaseIOException (org.apache.hadoop.hbase.HBaseIOException)28 UncheckedIOException (java.io.UncheckedIOException)27 TableName (org.apache.hadoop.hbase.TableName)22 QosPriority (org.apache.hadoop.hbase.ipc.QosPriority)22 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)19 UnknownRegionException (org.apache.hadoop.hbase.UnknownRegionException)16 UnknownProtocolException (org.apache.hadoop.hbase.exceptions.UnknownProtocolException)16 Test (org.junit.Test)16 InvocationTargetException (java.lang.reflect.InvocationTargetException)15 ArrayList (java.util.ArrayList)15 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)15 ForeignException (org.apache.hadoop.hbase.errorhandling.ForeignException)15 ServerNotRunningYetException (org.apache.hadoop.hbase.ipc.ServerNotRunningYetException)15 KeeperException (org.apache.zookeeper.KeeperException)14 Table (org.apache.hadoop.hbase.client.Table)13 User (org.apache.hadoop.hbase.security.User)13