
Example 91 with ServiceException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.

the class RSRpcServices method warmupRegion.

/**
   * Warm up a region on this server.
   *
   * This method should only be called by the Master. It synchronously opens and then
   * closes the region, bringing its most important pages into the cache.
   * <p>
   *
   * @param controller the RPC controller
   * @param request the request
   * @throws ServiceException
   */
@Override
public WarmupRegionResponse warmupRegion(final RpcController controller, final WarmupRegionRequest request) throws ServiceException {
    RegionInfo regionInfo = request.getRegionInfo();
    final HRegionInfo region = HRegionInfo.convert(regionInfo);
    HTableDescriptor htd;
    WarmupRegionResponse response = WarmupRegionResponse.getDefaultInstance();
    try {
        checkOpen();
        String encodedName = region.getEncodedName();
        byte[] encodedNameBytes = region.getEncodedNameAsBytes();
        final Region onlineRegion = regionServer.getFromOnlineRegions(encodedName);
        if (onlineRegion != null) {
            LOG.info("Region already online. Skipping warming up " + region);
            return response;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Warming up Region " + region.getRegionNameAsString());
        }
        htd = regionServer.tableDescriptors.get(region.getTable());
        if (regionServer.getRegionsInTransitionInRS().containsKey(encodedNameBytes)) {
            LOG.info("Region is in transition. Skipping warmup " + region);
            return response;
        }
        HRegion.warmupHRegion(region, htd, regionServer.getWAL(region), regionServer.getConfiguration(), regionServer, null);
    } catch (IOException ie) {
        LOG.error("Failed warming up region " + region.getRegionNameAsString(), ie);
        throw new ServiceException(ie);
    }
    return response;
}
Also used : HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) WarmupRegionResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse) ServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) RegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) ByteString(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
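The method above is the server half of the RPC: any IOException raised while warming the region is wrapped in a ServiceException at the protobuf boundary. A minimal caller-side sketch of the matching unwrap, assuming a blocking AdminService stub named admin and a region descriptor named hri (both names are illustrative, not the Master's actual code):

WarmupRegionRequest request = WarmupRegionRequest.newBuilder()
    .setRegionInfo(HRegionInfo.convert(hri))    // same protobuf conversion the server reverses above
    .build();
try {
    admin.warmupRegion(null, request);          // blocking stub call; the response carries no payload
} catch (ServiceException se) {
    // Recover the IOException that warmupRegion wrapped on the server side.
    IOException ioe = ProtobufUtil.getRemoteException(se);
    // handle or rethrow ioe
}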

Example 92 with ServiceException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.

the class HRegionServer method execRegionServerService.

public CoprocessorServiceResponse execRegionServerService(@SuppressWarnings("UnusedParameters") final RpcController controller, final CoprocessorServiceRequest serviceRequest) throws ServiceException {
    try {
        ServerRpcController serviceController = new ServerRpcController();
        CoprocessorServiceCall call = serviceRequest.getCall();
        String serviceName = call.getServiceName();
        com.google.protobuf.Service service = coprocessorServiceHandlers.get(serviceName);
        if (service == null) {
            throw new UnknownProtocolException(null, "No registered coprocessor service found for " + serviceName);
        }
        com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();
        String methodName = call.getMethodName();
        com.google.protobuf.Descriptors.MethodDescriptor methodDesc = serviceDesc.findMethodByName(methodName);
        if (methodDesc == null) {
            throw new UnknownProtocolException(service.getClass(), "Unknown method " + methodName + " called on service " + serviceName);
        }
        com.google.protobuf.Message request = CoprocessorRpcUtils.getRequest(service, methodDesc, call.getRequest());
        final com.google.protobuf.Message.Builder responseBuilder = service.getResponsePrototype(methodDesc).newBuilderForType();
        service.callMethod(methodDesc, serviceController, request, new com.google.protobuf.RpcCallback<com.google.protobuf.Message>() {

            @Override
            public void run(com.google.protobuf.Message message) {
                if (message != null) {
                    responseBuilder.mergeFrom(message);
                }
            }
        });
        IOException exception = CoprocessorRpcUtils.getControllerException(serviceController);
        if (exception != null) {
            throw exception;
        }
        return CoprocessorRpcUtils.getResponse(responseBuilder.build(), HConstants.EMPTY_BYTE_ARRAY);
    } catch (IOException ie) {
        throw new ServiceException(ie);
    }
}
Also used : CoprocessorServiceCall(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) ServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) UnknownProtocolException(org.apache.hadoop.hbase.exceptions.UnknownProtocolException) TableDescriptors(org.apache.hadoop.hbase.TableDescriptors) FSTableDescriptors(org.apache.hadoop.hbase.util.FSTableDescriptors)
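The dispatcher above expects endpoint failures to be reported through the ServerRpcController, which it then converts with CoprocessorRpcUtils.getControllerException and rethrows wrapped in a ServiceException. A hedged sketch of an endpoint method written to that convention; MyRequest, MyResponse, and compute are placeholder names, and setControllerException is assumed to be the counterpart of the getControllerException call shown above:

@Override
public void myMethod(com.google.protobuf.RpcController controller, MyRequest request,
    com.google.protobuf.RpcCallback<MyResponse> done) {
    MyResponse response = null;
    try {
        response = MyResponse.newBuilder().setResult(compute(request)).build();  // placeholder work
    } catch (IOException ioe) {
        // Stored on the controller; execRegionServerService pulls it back out
        // and surfaces it to the client as a ServiceException.
        CoprocessorRpcUtils.setControllerException(controller, ioe);
    }
    done.run(response);
}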

Example 93 with ServiceException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.

the class HRegionServer method reportRegionStateTransition.

@Override
public boolean reportRegionStateTransition(final RegionStateTransitionContext context) {
    TransitionCode code = context.getCode();
    long openSeqNum = context.getOpenSeqNum();
    long masterSystemTime = context.getMasterSystemTime();
    HRegionInfo[] hris = context.getHris();
    if (TEST_SKIP_REPORTING_TRANSITION) {
        // This is for testing only, in case there is no master: skip
        // the region transition report entirely.
        if (code == TransitionCode.OPENED) {
            Preconditions.checkArgument(hris != null && hris.length == 1);
            if (hris[0].isMetaRegion()) {
                try {
                    MetaTableLocator.setMetaLocation(getZooKeeper(), serverName, hris[0].getReplicaId(), State.OPEN);
                } catch (KeeperException e) {
                    LOG.info("Failed to update meta location", e);
                    return false;
                }
            } else {
                try {
                    MetaTableAccessor.updateRegionLocation(clusterConnection, hris[0], serverName, openSeqNum, masterSystemTime);
                } catch (IOException e) {
                    LOG.info("Failed to update meta", e);
                    return false;
                }
            }
        }
        return true;
    }
    ReportRegionStateTransitionRequest.Builder builder = ReportRegionStateTransitionRequest.newBuilder();
    builder.setServer(ProtobufUtil.toServerName(serverName));
    RegionStateTransition.Builder transition = builder.addTransitionBuilder();
    transition.setTransitionCode(code);
    if (code == TransitionCode.OPENED && openSeqNum >= 0) {
        transition.setOpenSeqNum(openSeqNum);
    }
    for (HRegionInfo hri : hris) {
        transition.addRegionInfo(HRegionInfo.convert(hri));
    }
    ReportRegionStateTransitionRequest request = builder.build();
    while (keepLooping()) {
        RegionServerStatusService.BlockingInterface rss = rssStub;
        try {
            if (rss == null) {
                createRegionServerStatusStub();
                continue;
            }
            ReportRegionStateTransitionResponse response = rss.reportRegionStateTransition(null, request);
            if (response.hasErrorMessage()) {
                LOG.info("Failed to transition " + hris[0] + " to " + code + ": " + response.getErrorMessage());
                return false;
            }
            return true;
        } catch (ServiceException se) {
            IOException ioe = ProtobufUtil.getRemoteException(se);
            LOG.info("Failed to report region transition, will retry", ioe);
            if (rssStub == rss) {
                rssStub = null;
            }
        }
    }
    return false;
}
Also used : InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) RegionStateTransition(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition) ReportRegionStateTransitionResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) TransitionCode(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode) ServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) ReportRegionStateTransitionRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest) RegionServerStatusService(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService) KeeperException(org.apache.zookeeper.KeeperException)
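This method and the two that follow recover the server-side error with ProtobufUtil.getRemoteException before deciding whether to retry. A hedged sketch of what that helper is expected to do here, not a copy of the real implementation: hand back the IOException that the remote side wrapped into the ServiceException, or synthesize one when the cause is something else.

public static IOException getRemoteException(ServiceException se) {
    Throwable cause = se.getCause();
    if (cause == null) {
        return new IOException(se);
    }
    return cause instanceof IOException ? (IOException) cause : new IOException(cause);
}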

Example 94 with ServiceException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.

the class HRegionServer method requestRegionSplit.

@Override
public long requestRegionSplit(final HRegionInfo regionInfo, final byte[] splitRow) {
    NonceGenerator ng = clusterConnection.getNonceGenerator();
    final long nonceGroup = ng.getNonceGroup();
    final long nonce = ng.newNonce();
    long procId = -1;
    SplitTableRegionRequest request = RequestConverter.buildSplitTableRegionRequest(regionInfo, splitRow, nonceGroup, nonce);
    while (keepLooping()) {
        RegionServerStatusService.BlockingInterface rss = rssStub;
        try {
            if (rss == null) {
                createRegionServerStatusStub();
                continue;
            }
            SplitTableRegionResponse response = rss.splitRegion(null, request);
            //TODO: should we limit the retry number before quitting?
            if (response == null || (procId = response.getProcId()) == -1) {
                LOG.warn("Failed to split " + regionInfo + " retrying...");
                continue;
            }
            break;
        } catch (ServiceException se) {
            // TODO: retry or just fail
            IOException ioe = ProtobufUtil.getRemoteException(se);
            LOG.info("Failed to split region, will retry", ioe);
            if (rssStub == rss) {
                rssStub = null;
            }
        }
    }
    return procId;
}
Also used : ServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) RegionServerStatusService(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService) SplitTableRegionRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.SplitTableRegionRequest) NonceGenerator(org.apache.hadoop.hbase.client.NonceGenerator) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) SplitTableRegionResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.SplitTableRegionResponse)
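Note that procId starts at -1 and is only overwritten by a successful splitRegion response, so -1 doubles as the failure sentinel once keepLooping() turns false (for example while the server is stopping). A hypothetical caller would check it along these lines:

long procId = requestRegionSplit(regionInfo, splitRow);
if (procId == -1) {
    LOG.warn("Split of " + regionInfo + " was not accepted by the master");
} else {
    LOG.info("Split of " + regionInfo + " submitted as procedure " + procId);
}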

Example 95 with ServiceException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException in project hbase by apache.

the class HRegionServer method getLastSequenceId.

@Override
public RegionStoreSequenceIds getLastSequenceId(byte[] encodedRegionName) {
    try {
        GetLastFlushedSequenceIdRequest req = RequestConverter.buildGetLastFlushedSequenceIdRequest(encodedRegionName);
        RegionServerStatusService.BlockingInterface rss = rssStub;
        if (rss == null) {
            // Try to connect one more time
            createRegionServerStatusStub();
            rss = rssStub;
            if (rss == null) {
                // Still no luck, we tried
                LOG.warn("Unable to connect to the master to check " + "the last flushed sequence id");
                return RegionStoreSequenceIds.newBuilder().setLastFlushedSequenceId(HConstants.NO_SEQNUM).build();
            }
        }
        GetLastFlushedSequenceIdResponse resp = rss.getLastFlushedSequenceId(null, req);
        return RegionStoreSequenceIds.newBuilder().setLastFlushedSequenceId(resp.getLastFlushedSequenceId()).addAllStoreSequenceId(resp.getStoreLastFlushedSequenceIdList()).build();
    } catch (ServiceException e) {
        LOG.warn("Unable to connect to the master to check the last flushed sequence id", e);
        return RegionStoreSequenceIds.newBuilder().setLastFlushedSequenceId(HConstants.NO_SEQNUM).build();
    }
}
Also used : ServiceException(org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) GetLastFlushedSequenceIdResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse) RegionServerStatusService(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService) GetLastFlushedSequenceIdRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest)
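Here the ServiceException is not rethrown at all: on any failure the method answers with HConstants.NO_SEQNUM, which callers must read as "unknown" rather than as a real sequence id. A hypothetical consumer, for example during log replay, would therefore look like:

RegionStoreSequenceIds ids = getLastSequenceId(encodedRegionName);
if (ids.getLastFlushedSequenceId() == HConstants.NO_SEQNUM) {
    // Master unreachable: nothing is known to be flushed, so replay conservatively.
} else {
    long lastFlushed = ids.getLastFlushedSequenceId();
    // edits at or below lastFlushed are already durable and can be skipped
}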

Aggregations

ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException) 95
IOException (java.io.IOException) 76
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException) 65
InterruptedIOException (java.io.InterruptedIOException) 28
HBaseIOException (org.apache.hadoop.hbase.HBaseIOException) 24
ServerName (org.apache.hadoop.hbase.ServerName) 16
QosPriority (org.apache.hadoop.hbase.ipc.QosPriority) 15
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo) 14
ArrayList (java.util.ArrayList) 12
HBaseRpcController (org.apache.hadoop.hbase.ipc.HBaseRpcController) 12
TableName (org.apache.hadoop.hbase.TableName) 10
ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) 10
Test (org.junit.Test) 9
CellScanner (org.apache.hadoop.hbase.CellScanner) 5
UnknownRegionException (org.apache.hadoop.hbase.UnknownRegionException) 4
ClusterConnection (org.apache.hadoop.hbase.client.ClusterConnection) 4
Result (org.apache.hadoop.hbase.client.Result) 4
ForeignException (org.apache.hadoop.hbase.errorhandling.ForeignException) 4
RpcCallContext (org.apache.hadoop.hbase.ipc.RpcCallContext) 4
OperationQuota (org.apache.hadoop.hbase.quotas.OperationQuota) 4