
Example 1 with ServiceException

use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in project hbase by apache.

the class TestSecureExport method testVisibilityLabels.

@Test
// See HBASE-23990
@org.junit.Ignore
public void testVisibilityLabels() throws IOException, Throwable {
    final String exportTable = name.getMethodName() + "_export";
    final String importTable = name.getMethodName() + "_import";
    final TableDescriptor exportHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(exportTable)).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)).build();
    User owner = User.createUserForTesting(UTIL.getConfiguration(), USER_OWNER, new String[0]);
    SecureTestUtil.createTable(UTIL, owner, exportHtd, new byte[][] { Bytes.toBytes("s") });
    AccessTestAction putAction = () -> {
        Put p1 = new Put(ROW1);
        p1.addColumn(FAMILYA, QUAL, NOW, QUAL);
        p1.setCellVisibility(new CellVisibility(SECRET));
        Put p2 = new Put(ROW2);
        p2.addColumn(FAMILYA, QUAL, NOW, QUAL);
        p2.setCellVisibility(new CellVisibility(PRIVATE + " & " + CONFIDENTIAL));
        Put p3 = new Put(ROW3);
        p3.addColumn(FAMILYA, QUAL, NOW, QUAL);
        p3.setCellVisibility(new CellVisibility("!" + CONFIDENTIAL + " & " + TOPSECRET));
        try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
            Table t = conn.getTable(TableName.valueOf(exportTable))) {
            t.put(p1);
            t.put(p2);
            t.put(p3);
        }
        return null;
    };
    SecureTestUtil.verifyAllowed(putAction, getUserByLogin(USER_OWNER));
    List<Pair<List<String>, Integer>> labelsAndRowCounts = new LinkedList<>();
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(SECRET), 1));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(PRIVATE, CONFIDENTIAL), 1));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET), 1));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL), 0));
    labelsAndRowCounts.add(new Pair<>(Arrays.asList(TOPSECRET, CONFIDENTIAL, PRIVATE, SECRET), 2));
    for (final Pair<List<String>, Integer> labelsAndRowCount : labelsAndRowCounts) {
        final List<String> labels = labelsAndRowCount.getFirst();
        final int rowCount = labelsAndRowCount.getSecond();
        // create an open-permission directory.
        final Path openDir = new Path("testAccessCase");
        final FileSystem fs = openDir.getFileSystem(UTIL.getConfiguration());
        fs.mkdirs(openDir);
        fs.setPermission(openDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
        final Path output = fs.makeQualified(new Path(openDir, "output"));
        AccessTestAction exportAction = () -> {
            StringBuilder buf = new StringBuilder();
            labels.forEach(v -> buf.append(v).append(","));
            buf.deleteCharAt(buf.length() - 1);
            try {
                String[] args = new String[] { "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + buf.toString(), exportTable, output.toString() };
                Export.run(new Configuration(UTIL.getConfiguration()), args);
                return null;
            } catch (ServiceException | IOException ex) {
                throw ex;
            } catch (Throwable ex) {
                throw new Exception(ex);
            }
        };
        SecureTestUtil.verifyAllowed(exportAction, getUserByLogin(USER_OWNER));
        final TableDescriptor importHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(importTable)).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYB)).build();
        SecureTestUtil.createTable(UTIL, owner, importHtd, new byte[][] { Bytes.toBytes("s") });
        AccessTestAction importAction = () -> {
            String[] args = new String[] { "-D" + Import.CF_RENAME_PROP + "=" + FAMILYA_STRING + ":" + FAMILYB_STRING, importTable, output.toString() };
            assertEquals(0, ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args));
            return null;
        };
        SecureTestUtil.verifyAllowed(importAction, getUserByLogin(USER_OWNER));
        AccessTestAction scanAction = () -> {
            Scan scan = new Scan();
            scan.setAuthorizations(new Authorizations(labels));
            try (Connection conn = ConnectionFactory.createConnection(UTIL.getConfiguration());
                Table table = conn.getTable(importHtd.getTableName());
                ResultScanner scanner = table.getScanner(scan)) {
                int count = 0;
                for (Result r : scanner) {
                    ++count;
                }
                assertEquals(rowCount, count);
            }
            return null;
        };
        SecureTestUtil.verifyAllowed(scanAction, getUserByLogin(USER_OWNER));
        AccessTestAction deleteAction = () -> {
            UTIL.deleteTable(importHtd.getTableName());
            return null;
        };
        SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
        clearOutput(output);
    }
    AccessTestAction deleteAction = () -> {
        UTIL.deleteTable(exportHtd.getTableName());
        return null;
    };
    SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
}
Also used : Arrays(java.util.Arrays) UserProvider(org.apache.hadoop.hbase.security.UserProvider) VisibilityConstants(org.apache.hadoop.hbase.security.visibility.VisibilityConstants) Result(org.apache.hadoop.hbase.client.Result) FileSystem(org.apache.hadoop.fs.FileSystem) LoggerFactory(org.slf4j.LoggerFactory) PermissionStorage(org.apache.hadoop.hbase.security.access.PermissionStorage) FileStatus(org.apache.hadoop.fs.FileStatus) FsPermission(org.apache.hadoop.fs.permission.FsPermission) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) AccessControlConstants(org.apache.hadoop.hbase.security.access.AccessControlConstants) VisibilityLabelsProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos) Map(java.util.Map) Configuration(org.apache.hadoop.conf.Configuration) After(org.junit.After) Path(org.apache.hadoop.fs.Path) HadoopSecurityEnabledUserProviderForTesting(org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting) ClassRule(org.junit.ClassRule) Pair(org.apache.hadoop.hbase.util.Pair) AfterClass(org.junit.AfterClass) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) HBaseClassTestRule(org.apache.hadoop.hbase.HBaseClassTestRule) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) HBaseKerberosUtils(org.apache.hadoop.hbase.security.HBaseKerberosUtils) Category(org.junit.experimental.categories.Category) List(java.util.List) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) VisibilityClient(org.apache.hadoop.hbase.security.visibility.VisibilityClient) EnvironmentEdgeManager(org.apache.hadoop.hbase.util.EnvironmentEdgeManager) ExportUtils(org.apache.hadoop.hbase.mapreduce.ExportUtils) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Permission(org.apache.hadoop.hbase.security.access.Permission) AccessTestAction(org.apache.hadoop.hbase.security.access.SecureTestUtil.AccessTestAction) BeforeClass(org.junit.BeforeClass) FsAction(org.apache.hadoop.fs.permission.FsAction) User(org.apache.hadoop.hbase.security.User) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) TestName(org.junit.rules.TestName) LinkedList(java.util.LinkedList) Bytes(org.apache.hadoop.hbase.util.Bytes) Before(org.junit.Before) TableName(org.apache.hadoop.hbase.TableName) Logger(org.slf4j.Logger) MediumTests(org.apache.hadoop.hbase.testclassification.MediumTests) Put(org.apache.hadoop.hbase.client.Put) Import(org.apache.hadoop.hbase.mapreduce.Import) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) ToolRunner(org.apache.hadoop.util.ToolRunner) IOException(java.io.IOException) Test(org.junit.Test) File(java.io.File) ConnectionFactory(org.apache.hadoop.hbase.client.ConnectionFactory) Scan(org.apache.hadoop.hbase.client.Scan) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) Rule(org.junit.Rule) SecureTestUtil(org.apache.hadoop.hbase.security.access.SecureTestUtil) VisibilityTestUtil(org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil) CellVisibility(org.apache.hadoop.hbase.security.visibility.CellVisibility) Connection(org.apache.hadoop.hbase.client.Connection) Table(org.apache.hadoop.hbase.client.Table) Assert.assertEquals(org.junit.Assert.assertEquals) User(org.apache.hadoop.hbase.security.User) Import(org.apache.hadoop.hbase.mapreduce.Import) 
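
A side note on the export action above: the comma-separated visibility-label expression passed via -D is assembled with a StringBuilder plus deleteCharAt. A minimal, equivalent sketch using String.join (the literal labels here are illustrative, standing in for the test's List<String> labels):

import java.util.Arrays;
import java.util.List;

// Equivalent to the StringBuilder/deleteCharAt idiom in the export action above.
List<String> labels = Arrays.asList("SECRET", "PRIVATE");
String labelExpression = String.join(",", labels); // "SECRET,PRIVATE"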

Example 2 with ServiceException

use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in project hbase by apache.

the class RSRpcServices method checkBatchSizeAndLogLargeSize.

private void checkBatchSizeAndLogLargeSize(MultiRequest request) throws ServiceException {
    int sum = 0;
    String firstRegionName = null;
    for (RegionAction regionAction : request.getRegionActionList()) {
        if (sum == 0) {
            firstRegionName = Bytes.toStringBinary(regionAction.getRegion().getValue().toByteArray());
        }
        sum += regionAction.getActionCount();
    }
    if (sum > rowSizeWarnThreshold) {
        LOG.warn("Large batch operation detected (greater than " + rowSizeWarnThreshold + ") (HBASE-18023)." + " Requested Number of Rows: " + sum + " Client: " + RpcServer.getRequestUserName().orElse(null) + "/" + RpcServer.getRemoteAddress().orElse(null) + " first region in multi=" + firstRegionName);
        if (rejectRowsWithSizeOverThreshold) {
            throw new ServiceException("Rejecting large batch operation for current batch with firstRegionName: " + firstRegionName + " , Requested Number of Rows: " + sum + " , Size Threshold: " + rowSizeWarnThreshold);
        }
    }
}
Also used : ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) RegionAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString)
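
The warning and rejection behavior in checkBatchSizeAndLogLargeSize is driven by the server-side values behind rowSizeWarnThreshold and rejectRowsWithSizeOverThreshold. Below is a hedged sketch of setting them programmatically; the property names are assumptions based on HBASE-18023-era configuration and should be verified against your HBase version's hbase-default.xml:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class BatchThresholdConfigSketch {
    public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Assumed property names; check your HBase version before relying on them.
        conf.setInt("hbase.rpc.rows.warning.threshold", 5000);        // warn above this many rows per multi
        conf.setBoolean("hbase.rpc.rows.size.threshold.reject", true); // also reject with ServiceException
        System.out.println("warn threshold = " + conf.getInt("hbase.rpc.rows.warning.threshold", -1));
    }
}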

Example 3 with ServiceException

use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in project hbase by apache.

the class RSRpcServices method getStoreFile.

@Override
@QosPriority(priority = HConstants.ADMIN_QOS)
public GetStoreFileResponse getStoreFile(final RpcController controller, final GetStoreFileRequest request) throws ServiceException {
    try {
        checkOpen();
        HRegion region = getRegion(request.getRegion());
        requestCount.increment();
        Set<byte[]> columnFamilies;
        if (request.getFamilyCount() == 0) {
            columnFamilies = region.getTableDescriptor().getColumnFamilyNames();
        } else {
            columnFamilies = new TreeSet<>(Bytes.BYTES_RAWCOMPARATOR);
            for (ByteString cf : request.getFamilyList()) {
                columnFamilies.add(cf.toByteArray());
            }
        }
        int nCF = columnFamilies.size();
        List<String> fileList = region.getStoreFileList(columnFamilies.toArray(new byte[nCF][]));
        GetStoreFileResponse.Builder builder = GetStoreFileResponse.newBuilder();
        builder.addAllStoreFile(fileList);
        return builder.build();
    } catch (IOException ie) {
        throw new ServiceException(ie);
    }
}
Also used : ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) UncheckedIOException(java.io.UncheckedIOException) GetStoreFileResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse) QosPriority(org.apache.hadoop.hbase.ipc.QosPriority)
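
On the caller side, the IOException wrapped into a ServiceException by getStoreFile has to be recovered from the cause chain. HBase itself provides unwrapping utilities for this; the standalone helper below is a hypothetical illustration of the same idea:

import java.io.IOException;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;

public final class ServiceExceptionUnwrapSketch {
    // Hypothetical helper: recover the original IOException from a ServiceException.
    public static IOException toIOException(ServiceException se) {
        Throwable cause = se.getCause();
        if (cause instanceof IOException) {
            return (IOException) cause;
        }
        return new IOException(se);
    }
}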

Example 4 with ServiceException

use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in project hbase by apache.

the class RSRpcServices method rpcPreCheck.

/**
 * Checks for the following pre-checks in order:
 * <ol>
 *   <li>RegionServer is running</li>
 *   <li>If authorization is enabled, then RPC caller has ADMIN permissions</li>
 * </ol>
 * @param requestName name of rpc request. Used in reporting failures to provide context.
 * @throws ServiceException If any of the above-listed pre-checks fails.
 */
private void rpcPreCheck(String requestName) throws ServiceException {
    try {
        checkOpen();
        requirePermission(requestName, Permission.Action.ADMIN);
    } catch (IOException ioe) {
        throw new ServiceException(ioe);
    }
}
Also used : ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) UncheckedIOException(java.io.UncheckedIOException)
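
For context, rpcPreCheck is intended to be the first call in admin-only RPC endpoints. The self-contained sketch below reproduces the same translation pattern with stand-in pre-checks so it can be run outside a region server; only ServiceException is the real class here:

import java.io.IOException;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;

public class RpcPreCheckPatternSketch {
    // Stand-in for checkOpen()/requirePermission(); purely illustrative.
    static void checkOpen() throws IOException {
        throw new IOException("server is stopping");
    }

    // Same shape as rpcPreCheck above: any IOException from the pre-checks is surfaced
    // to the protobuf RPC layer as a ServiceException.
    static void rpcPreCheck(String requestName) throws ServiceException {
        try {
            checkOpen();
        } catch (IOException ioe) {
            throw new ServiceException(ioe);
        }
    }

    public static void main(String[] args) {
        try {
            rpcPreCheck("clearRegionBlockCache");
        } catch (ServiceException se) {
            System.out.println("RPC rejected: " + se.getCause().getMessage());
        }
    }
}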

Example 5 with ServiceException

use of org.apache.hbase.thirdparty.com.google.protobuf.ServiceException in project hbase by apache.

the class RSRpcServices method scan.

/**
 * Scan data in a table.
 *
 * @param controller the RPC controller
 * @param request the scan request
 * @throws ServiceException
 */
@Override
public ScanResponse scan(final RpcController controller, final ScanRequest request) throws ServiceException {
    if (controller != null && !(controller instanceof HBaseRpcController)) {
        throw new UnsupportedOperationException("We only do " + "HBaseRpcControllers! FIX IF A PROBLEM: " + controller);
    }
    if (!request.hasScannerId() && !request.hasScan()) {
        throw new ServiceException(new DoNotRetryIOException("Missing required input: scannerId or scan"));
    }
    try {
        checkOpen();
    } catch (IOException e) {
        if (request.hasScannerId()) {
            String scannerName = toScannerName(request.getScannerId());
            if (LOG.isDebugEnabled()) {
                LOG.debug("Server shutting down and client tried to access missing scanner " + scannerName);
            }
            final LeaseManager leaseManager = server.getLeaseManager();
            if (leaseManager != null) {
                try {
                    leaseManager.cancelLease(scannerName);
                } catch (LeaseException le) {
                    // No problem, ignore
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Un-able to cancel lease of scanner. It could already be closed.");
                    }
                }
            }
        }
        throw new ServiceException(e);
    }
    requestCount.increment();
    rpcScanRequestCount.increment();
    RegionScannerHolder rsh;
    ScanResponse.Builder builder = ScanResponse.newBuilder();
    String scannerName;
    try {
        if (request.hasScannerId()) {
            // The downstream projects such as AsyncHBase in OpenTSDB need this value. See HBASE-18000
            // for more details.
            long scannerId = request.getScannerId();
            builder.setScannerId(scannerId);
            scannerName = toScannerName(scannerId);
            rsh = getRegionScanner(request);
        } else {
            Pair<String, RegionScannerHolder> scannerNameAndRSH = newRegionScanner(request, builder);
            scannerName = scannerNameAndRSH.getFirst();
            rsh = scannerNameAndRSH.getSecond();
        }
    } catch (IOException e) {
        if (e == SCANNER_ALREADY_CLOSED) {
            // Now we close the scanner automatically when there are no more results for a region, but
            // the old client will still send a close request to us. Just ignore it and return.
            return builder.build();
        }
        throw new ServiceException(e);
    }
    if (rsh.fullRegionScan) {
        rpcFullScanRequestCount.increment();
    }
    HRegion region = rsh.r;
    LeaseManager.Lease lease;
    try {
        // Remove the lease while it's being processed in the server; this protects against the
        // case where processing of the request takes longer than the lease expiration time.
        // removeLease returns the lease, or null if none was found.
        lease = server.getLeaseManager().removeLease(scannerName);
    } catch (LeaseException e) {
        throw new ServiceException(e);
    }
    if (request.hasRenew() && request.getRenew()) {
        // add back and return
        addScannerLeaseBack(lease);
        try {
            checkScanNextCallSeq(request, rsh);
        } catch (OutOfOrderScannerNextException e) {
            throw new ServiceException(e);
        }
        return builder.build();
    }
    OperationQuota quota;
    try {
        quota = getRpcQuotaManager().checkQuota(region, OperationQuota.OperationType.SCAN);
    } catch (IOException e) {
        addScannerLeaseBack(lease);
        throw new ServiceException(e);
    }
    try {
        checkScanNextCallSeq(request, rsh);
    } catch (OutOfOrderScannerNextException e) {
        addScannerLeaseBack(lease);
        throw new ServiceException(e);
    }
    // Now we have increased the next call sequence. If we give client an error, the retry will
    // never succeed. So we'd better close the scanner and return a DoNotRetryIOException to client
    // and then client will try to open a new scanner.
    boolean closeScanner = request.hasCloseScanner() ? request.getCloseScanner() : false;
    // this is scan.getCaching
    int rows;
    if (request.hasNumberOfRows()) {
        rows = request.getNumberOfRows();
    } else {
        rows = closeScanner ? 0 : 1;
    }
    RpcCallContext context = RpcServer.getCurrentCall().orElse(null);
    // now let's do the real scan.
    long maxQuotaResultSize = Math.min(maxScannerResultSize, quota.getReadAvailable());
    RegionScanner scanner = rsh.s;
    // this is the limit of rows for this scan; if the number of rows reaches this value, we will
    // close the scanner.
    int limitOfRows;
    if (request.hasLimitOfRows()) {
        limitOfRows = request.getLimitOfRows();
    } else {
        limitOfRows = -1;
    }
    MutableObject<Object> lastBlock = new MutableObject<>();
    boolean scannerClosed = false;
    try {
        List<Result> results = new ArrayList<>(Math.min(rows, 512));
        if (rows > 0) {
            boolean done = false;
            // Call coprocessor. Get region info from scanner.
            if (region.getCoprocessorHost() != null) {
                Boolean bypass = region.getCoprocessorHost().preScannerNext(scanner, results, rows);
                if (!results.isEmpty()) {
                    for (Result r : results) {
                        lastBlock.setValue(addSize(context, r, lastBlock.getValue()));
                    }
                }
                if (bypass != null && bypass.booleanValue()) {
                    done = true;
                }
            }
            if (!done) {
                scan((HBaseRpcController) controller, request, rsh, maxQuotaResultSize, rows, limitOfRows, results, builder, lastBlock, context);
            } else {
                builder.setMoreResultsInRegion(!results.isEmpty());
            }
        } else {
            // This is an open scanner call with numberOfRows = 0, so set moreResultsInRegion to true.
            builder.setMoreResultsInRegion(true);
        }
        quota.addScanResult(results);
        addResults(builder, results, (HBaseRpcController) controller, RegionReplicaUtil.isDefaultReplica(region.getRegionInfo()), isClientCellBlockSupport(context));
        if (scanner.isFilterDone() && results.isEmpty()) {
            // If the scanner's filter - if any - is done with the scan
            // only set moreResults to false if the results are empty. This is used to stay compatible
            // with the old scan implementation where we just ignore the returned results if moreResults
            // is false. Can remove the isEmpty check after we get rid of the old implementation.
            builder.setMoreResults(false);
        }
        // Later we may close the scanner depending on this flag, so here we need to make sure that
        // we have already set this flag.
        assert builder.hasMoreResultsInRegion();
        // moreResults is only set to false above, so default it to true here if it has not been set
        // yet.
        if (!builder.hasMoreResults()) {
            builder.setMoreResults(true);
        }
        if (builder.getMoreResults() && builder.getMoreResultsInRegion() && !results.isEmpty()) {
            // Record the last cell of the last result if it is a partial result
            // We need this to calculate the complete rows we have returned to client as the
            // mayHaveMoreCellsInRow being true does not mean that there will be extra cells for the
            // current row. We may filter out all the remaining cells for the current row and just
            // return the cells of the nextRow when calling RegionScanner.nextRaw. So here we need to
            // check for row change.
            Result lastResult = results.get(results.size() - 1);
            if (lastResult.mayHaveMoreCellsInRow()) {
                rsh.rowOfLastPartialResult = lastResult.getRow();
            } else {
                rsh.rowOfLastPartialResult = null;
            }
        }
        if (!builder.getMoreResults() || !builder.getMoreResultsInRegion() || closeScanner) {
            scannerClosed = true;
            closeScanner(region, scanner, scannerName, context);
        }
        return builder.build();
    } catch (IOException e) {
        try {
            // scanner is closed here
            scannerClosed = true;
            // The scanner state might be left in a dirty state, so we will tell the Client to
            // fail this RPC and close the scanner while opening up another one from the start of
            // row that the client has last seen.
            closeScanner(region, scanner, scannerName, context);
            // If it is a DoNotRetryIOException already, rethrow it as-is so it propagates to
            // the client. See ClientScanner code to see how it deals with these special exceptions.
            if (e instanceof DoNotRetryIOException) {
                throw e;
            }
            // If it is a FileNotFoundException, wrap it in a
            // DoNotRetryIOException. This can avoid the retry in ClientScanner.
            if (e instanceof FileNotFoundException) {
                throw new DoNotRetryIOException(e);
            }
            // The scanner is already closed, so rather than throwing the raw IOException and letting
            // the client retry with the same scanner id, throw
            // a special exception to save an RPC.
            if (VersionInfoUtil.hasMinimumVersion(context.getClientVersionInfo(), 1, 4)) {
                // 1.4.0+ clients know how to handle
                throw new ScannerResetException("Scanner is closed on the server-side", e);
            } else {
                // older clients do not know about SRE. Just throw USE, which they will handle
                throw new UnknownScannerException("Throwing UnknownScannerException to reset the client" + " scanner state for clients older than 1.3.", e);
            }
        } catch (IOException ioe) {
            throw new ServiceException(ioe);
        }
    } finally {
        if (!scannerClosed) {
            // the closeCallBack will be set in closeScanner so here we only care about shippedCallback
            if (context != null) {
                context.setCallBack(rsh.shippedCallback);
            } else {
                // When context != null, adding back the lease is done by the callback set above;
                // with no context we must add it back here.
                addScannerLeaseBack(lease);
            }
        }
        quota.close();
    }
}
Also used : DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ScannerResetException(org.apache.hadoop.hbase.exceptions.ScannerResetException) ArrayList(java.util.ArrayList) FileNotFoundException(java.io.FileNotFoundException) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) RegionActionResult(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult) Result(org.apache.hadoop.hbase.client.Result) CheckAndMutateResult(org.apache.hadoop.hbase.client.CheckAndMutateResult) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) OutOfOrderScannerNextException(org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException) MutableObject(org.apache.commons.lang3.mutable.MutableObject) RpcCallContext(org.apache.hadoop.hbase.ipc.RpcCallContext) Lease(org.apache.hadoop.hbase.regionserver.LeaseManager.Lease) ScanResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse) OperationQuota(org.apache.hadoop.hbase.quotas.OperationQuota) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) UncheckedIOException(java.io.UncheckedIOException) UnknownScannerException(org.apache.hadoop.hbase.UnknownScannerException) HBaseRpcController(org.apache.hadoop.hbase.ipc.HBaseRpcController) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) MutableObject(org.apache.commons.lang3.mutable.MutableObject)
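
The catch block near the end of scan is the interesting part for ServiceException: once the server-side scanner has been closed, the IOException is translated into whatever the client can handle best before being wrapped. The helper below is a hypothetical condensation of that policy (not an actual HBase API); the constructor signatures match the ones used in the method above:

import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.exceptions.ScannerResetException;

public final class ScanErrorTranslationSketch {
    // Hypothetical condensation of the catch block in scan() above.
    public static IOException translate(IOException e, boolean clientIsAtLeast14) {
        if (e instanceof DoNotRetryIOException) {
            return e; // already non-retriable, propagate as-is
        }
        if (e instanceof FileNotFoundException) {
            return new DoNotRetryIOException(e); // avoid pointless retries in ClientScanner
        }
        if (clientIsAtLeast14) {
            // 1.4.0+ clients understand ScannerResetException
            return new ScannerResetException("Scanner is closed on the server-side", e);
        }
        return new UnknownScannerException("Resetting scanner state for a pre-1.4 client", e);
    }
}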

Aggregations

ServiceException (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException): 130
IOException (java.io.IOException): 112
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 100
ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString): 39
HBaseIOException (org.apache.hadoop.hbase.HBaseIOException): 28
UncheckedIOException (java.io.UncheckedIOException): 27
TableName (org.apache.hadoop.hbase.TableName): 22
QosPriority (org.apache.hadoop.hbase.ipc.QosPriority): 22
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 19
UnknownRegionException (org.apache.hadoop.hbase.UnknownRegionException): 16
UnknownProtocolException (org.apache.hadoop.hbase.exceptions.UnknownProtocolException): 16
Test (org.junit.Test): 16
InvocationTargetException (java.lang.reflect.InvocationTargetException): 15
ArrayList (java.util.ArrayList): 15
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 15
ForeignException (org.apache.hadoop.hbase.errorhandling.ForeignException): 15
ServerNotRunningYetException (org.apache.hadoop.hbase.ipc.ServerNotRunningYetException): 15
KeeperException (org.apache.zookeeper.KeeperException): 14
Table (org.apache.hadoop.hbase.client.Table): 13
User (org.apache.hadoop.hbase.security.User): 13