
Example 6 with TimeoutIOException

Use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

From the class HRegion, method replayWALEntry.

/**
 * Replay a remote WAL entry sent by the primary replica.
 * <p/>
 * Should only be called on secondary replicas.
 */
void replayWALEntry(WALEntry entry, CellScanner cells) throws IOException {
    long timeout = -1L;
    Optional<RpcCall> call = RpcServer.getCurrentCall();
    if (call.isPresent()) {
        long deadline = call.get().getDeadline();
        if (deadline < Long.MAX_VALUE) {
            timeout = deadline - EnvironmentEdgeManager.currentTime();
            if (timeout <= 0) {
                throw new TimeoutIOException("Timeout while replaying edits for " + getRegionInfo());
            }
        }
    }
    if (timeout > 0) {
        try {
            if (!replayLock.tryLock(timeout, TimeUnit.MILLISECONDS)) {
                throw new TimeoutIOException("Timeout while waiting for lock when replaying edits for " + getRegionInfo());
            }
        } catch (InterruptedException e) {
            throw throwOnInterrupt(e);
        }
    } else {
        replayLock.lock();
    }
    try {
        int count = entry.getAssociatedCellCount();
        long sequenceId = entry.getKey().getLogSequenceNumber();
        if (lastReplayedSequenceId >= sequenceId) {
            // this entry has already been replayed; still consume its cells so the
            // scanner stays aligned for later WALEntries
            for (int i = 0; i < count; i++) {
                // Throw index out of bounds if our cell count is off
                if (!cells.advance()) {
                    throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i);
                }
            }
            return;
        }
        Map<byte[], List<Cell>> family2Cells = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        for (int i = 0; i < count; i++) {
            // Throw index out of bounds if our cell count is off
            if (!cells.advance()) {
                throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i);
            }
            Cell cell = cells.current();
            if (WALEdit.isMetaEditFamily(cell)) {
                // flush any buffered data cells before applying the meta edit, so
                // ordering is preserved even in the worst case
                if (!family2Cells.isEmpty()) {
                    replayWALBatchMutate(family2Cells);
                    family2Cells.clear();
                }
                replayWALMetaEdit(cell);
            } else {
                family2Cells.computeIfAbsent(CellUtil.cloneFamily(cell), k -> new ArrayList<>()).add(cell);
            }
        }
        // do not forget to apply the remaining cells
        if (!family2Cells.isEmpty()) {
            replayWALBatchMutate(family2Cells);
        }
        mvcc.advanceTo(sequenceId);
        lastReplayedSequenceId = sequenceId;
    } finally {
        replayLock.unlock();
    }
}
Also used : WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry), CellScanner (org.apache.hadoop.hbase.CellScanner), RpcCall (org.apache.hadoop.hbase.ipc.RpcCall), RpcServer (org.apache.hadoop.hbase.ipc.RpcServer), TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException), EnvironmentEdgeManager (org.apache.hadoop.hbase.util.EnvironmentEdgeManager), WALEdit (org.apache.hadoop.hbase.wal.WALEdit), Cell (org.apache.hadoop.hbase.Cell), CellUtil (org.apache.hadoop.hbase.CellUtil), Bytes (org.apache.hadoop.hbase.util.Bytes), Optional (java.util.Optional), TreeMap (java.util.TreeMap), ArrayList (java.util.ArrayList), List (java.util.List), Map (java.util.Map), TimeUnit (java.util.concurrent.TimeUnit), IOException (java.io.IOException)
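
The fiddly part of replayWALEntry is turning the call's absolute deadline into a remaining budget for tryLock. Below is a minimal sketch of that arithmetic in isolation, assuming hbase-common is on the classpath; DeadlineBudget and remainingMillis are hypothetical names, not HBase API.

import java.io.IOException;
import java.util.OptionalLong;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;

final class DeadlineBudget {

    private DeadlineBudget() {
    }

    /**
     * Converts an absolute deadline (epoch millis; Long.MAX_VALUE means "no deadline")
     * into the remaining time budget, failing fast once the deadline has passed.
     */
    static OptionalLong remainingMillis(long deadlineMillis, long nowMillis) throws IOException {
        if (deadlineMillis == Long.MAX_VALUE) {
            // No deadline: the caller should block without a timeout, i.e. replayLock.lock().
            return OptionalLong.empty();
        }
        long remaining = deadlineMillis - nowMillis;
        if (remaining <= 0) {
            // Mirrors the snippet above: do not even try to acquire the lock.
            throw new TimeoutIOException("Deadline passed " + (-remaining) + "ms ago");
        }
        // The caller maps this to replayLock.tryLock(remaining, TimeUnit.MILLISECONDS).
        return OptionalLong.of(remaining);
    }
}

An empty result corresponds to the unconditional lock() branch in the snippet above; a present value feeds tryLock(remaining, TimeUnit.MILLISECONDS), whose failure is again reported as TimeoutIOException so the client sees a consistent timeout error.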

Example 7 with TimeoutIOException

Use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

From the class CallRunner, method run.

public void run() {
    try {
        if (call.disconnectSince() >= 0) {
            if (RpcServer.LOG.isDebugEnabled()) {
                RpcServer.LOG.debug(Thread.currentThread().getName() + ": skipped " + call);
            }
            return;
        }
        call.setStartTime(EnvironmentEdgeManager.currentTime());
        if (call.getStartTime() > call.getDeadline()) {
            RpcServer.LOG.warn("Dropping timed out call: " + call);
            this.rpcServer.getMetrics().callTimedOut();
            return;
        }
        this.status.setStatus("Setting up call");
        this.status.setConnection(call.getRemoteAddress().getHostAddress(), call.getRemotePort());
        if (RpcServer.LOG.isTraceEnabled()) {
            Optional<User> remoteUser = call.getRequestUser();
            RpcServer.LOG.trace(call.toShortString() + " executing as " + (remoteUser.isPresent() ? remoteUser.get().getName() : "NULL principal"));
        }
        Throwable errorThrowable = null;
        String error = null;
        Pair<Message, CellScanner> resultPair = null;
        RpcServer.CurCall.set(call);
        Span span = new IpcServerSpanBuilder(call).build();
        try (Scope traceScope = span.makeCurrent()) {
            if (!this.rpcServer.isStarted()) {
                InetSocketAddress address = rpcServer.getListenerAddress();
                throw new ServerNotRunningYetException("Server " + (address != null ? address : "(channel closed)") + " is not running yet");
            }
            // make the call
            resultPair = this.rpcServer.call(call, this.status);
        } catch (TimeoutIOException e) {
            RpcServer.LOG.warn("Can not complete this request in time, drop it: " + call);
            TraceUtil.setError(span, e);
            return;
        } catch (Throwable e) {
            TraceUtil.setError(span, e);
            if (e instanceof ServerNotRunningYetException) {
                // If ServerNotRunningYetException, don't spew stack trace.
                if (RpcServer.LOG.isTraceEnabled()) {
                    RpcServer.LOG.trace(call.toShortString(), e);
                }
            } else {
                // Don't dump the full exception; just log the String version
                RpcServer.LOG.debug(call.toShortString() + ", exception=" + e);
            }
            errorThrowable = e;
            error = StringUtils.stringifyException(e);
            if (e instanceof Error) {
                throw (Error) e;
            }
        } finally {
            RpcServer.CurCall.set(null);
            if (resultPair != null) {
                this.rpcServer.addCallSize(call.getSize() * -1);
                span.setStatus(StatusCode.OK);
                successful = true;
            }
            span.end();
        }
        this.status.markComplete("To send response");
        // Release the RPC request's read ByteBuffer here; the request has been fully read by now.
        call.cleanup();
        // Set the response
        Message param = resultPair != null ? resultPair.getFirst() : null;
        CellScanner cells = resultPair != null ? resultPair.getSecond() : null;
        call.setResponse(param, cells, errorThrowable, error);
        call.sendResponseIfReady();
    } catch (OutOfMemoryError e) {
        if (this.rpcServer.getErrorHandler() != null) {
            if (this.rpcServer.getErrorHandler().checkOOME(e)) {
                RpcServer.LOG.info(Thread.currentThread().getName() + ": exiting on OutOfMemoryError");
                return;
            }
        } else {
            // rethrow if no handler
            throw e;
        }
    } catch (ClosedChannelException cce) {
        InetSocketAddress address = rpcServer.getListenerAddress();
        RpcServer.LOG.warn(Thread.currentThread().getName() + ": caught a ClosedChannelException, " + "this means that the server " + (address != null ? address : "(channel closed)") + " was processing a request but the client went away. The error message was: " + cce.getMessage());
    } catch (Exception e) {
        RpcServer.LOG.warn(Thread.currentThread().getName() + ": caught: " + StringUtils.stringifyException(e));
    } finally {
        if (!successful) {
            this.rpcServer.addCallSize(call.getSize() * -1);
        }
        if (this.status.isRPCRunning()) {
            this.status.markComplete("Call error");
        }
        this.status.pause("Waiting for a call");
        cleanup();
    }
}
Also used : ClosedChannelException (java.nio.channels.ClosedChannelException), User (org.apache.hadoop.hbase.security.User), Message (org.apache.hbase.thirdparty.com.google.protobuf.Message), InetSocketAddress (java.net.InetSocketAddress), TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException), CellScanner (org.apache.hadoop.hbase.CellScanner), Span (io.opentelemetry.api.trace.Span), Scope (io.opentelemetry.context.Scope), CallDroppedException (org.apache.hadoop.hbase.CallDroppedException), IpcServerSpanBuilder (org.apache.hadoop.hbase.server.trace.IpcServerSpanBuilder)
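
Note how CallRunner treats TimeoutIOException unlike every other failure: a timed-out call is simply dropped, because the client has stopped waiting and any response would be wasted work. A stripped-down sketch of that policy, assuming hbase-common is on the classpath; DeadlineAwareRunner is a hypothetical helper, not HBase API.

import java.util.concurrent.Callable;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;

final class DeadlineAwareRunner {

    private DeadlineAwareRunner() {
    }

    /**
     * Runs the work only if its deadline has not already passed; a TimeoutIOException
     * thrown by the work itself means "too late to answer", not a server error.
     * Returns null when the call was dropped.
     */
    static <T> T runIfNotExpired(long deadlineMillis, Callable<T> work) throws Exception {
        if (System.currentTimeMillis() > deadlineMillis) {
            // Like the timed-out branch at the top of run(): drop before executing.
            return null;
        }
        try {
            return work.call();
        } catch (TimeoutIOException e) {
            // Like the catch block around rpcServer.call(): drop, do not respond.
            return null;
        }
    }
}

Other exceptions still propagate here; in CallRunner they are instead stringified and sent back to the client in the response.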

Example 8 with TimeoutIOException

Use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

From the class AbstractFSWAL, method shutdown.

@Override
public void shutdown() throws IOException {
    if (!shutdown.compareAndSet(false, true)) {
        return;
    }
    closed = true;
    // Tell our listeners that the log is closing
    if (!this.listeners.isEmpty()) {
        for (WALActionsListener i : this.listeners) {
            i.logCloseRequested();
        }
    }
    Future<Void> future = logArchiveOrShutdownExecutor.submit(new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            // walShutdownTimeout is in milliseconds (WAL_SHUTDOWN_WAIT_TIMEOUT_MS), so wait in
            // MILLISECONDS; waiting in SECONDS would outlast the bounded future.get() below.
            if (rollWriterLock.tryLock(walShutdownTimeout, TimeUnit.MILLISECONDS)) {
                try {
                    doShutdown();
                    if (syncFutureCache != null) {
                        syncFutureCache.clear();
                    }
                } finally {
                    rollWriterLock.unlock();
                }
            } else {
                throw new IOException("Waiting for rollWriterLock timeout");
            }
            return null;
        }
    });
    logArchiveOrShutdownExecutor.shutdown();
    try {
        future.get(walShutdownTimeout, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        // restore the interrupt flag before converting to an IOException
        Thread.currentThread().interrupt();
        throw new InterruptedIOException("Interrupted while waiting for WAL shutdown");
    } catch (TimeoutException e) {
        throw new TimeoutIOException("We have waited " + walShutdownTimeout + "ms, but" + " the shutdown of WAL doesn't complete! Please check the status of underlying " + "filesystem or increase the wait time by the config \"" + WAL_SHUTDOWN_WAIT_TIMEOUT_MS + "\"", e);
    } catch (ExecutionException e) {
        if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
        } else {
            throw new IOException(e.getCause());
        }
    }
}
Also used : InterruptedIOException (java.io.InterruptedIOException), TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException), IOException (java.io.IOException), ExecutionException (java.util.concurrent.ExecutionException), TimeoutException (java.util.concurrent.TimeoutException), FileNotFoundException (java.io.FileNotFoundException)
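
The shutdown above follows a reusable shape: submit the risky work to an executor, bound the wait with future.get, and translate each java.util.concurrent exception into an appropriate IOException subtype. A sketch of that translation as a standalone helper, assuming hbase-common is on the classpath; BoundedShutdown and runWithDeadline are hypothetical names.

import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;

final class BoundedShutdown {

    private BoundedShutdown() {
    }

    /** Runs the task on the executor and waits at most timeoutMs for it to finish. */
    static void runWithDeadline(ExecutorService executor, Callable<Void> task, long timeoutMs)
        throws IOException {
        Future<Void> future = executor.submit(task);
        // Stop accepting new tasks; the one already submitted still runs.
        executor.shutdown();
        try {
            future.get(timeoutMs, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new InterruptedIOException("Interrupted while waiting for the task");
        } catch (TimeoutException e) {
            // Same translation as AbstractFSWAL.shutdown: checked timeout -> IOException subtype.
            throw new TimeoutIOException("Task did not finish within " + timeoutMs + "ms", e);
        } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            throw cause instanceof IOException ? (IOException) cause : new IOException(cause);
        }
    }
}

The TimeoutException branch is the reason TimeoutIOException exists: callers that only understand IOException still receive a typed, timeout-specific signal they can match on.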

Example 9 with TimeoutIOException

Use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

From the class AsyncRegionLocator, method withTimeout.

private <T> CompletableFuture<T> withTimeout(CompletableFuture<T> future, long timeoutNs, Supplier<String> timeoutMsg) {
    if (future.isDone() || timeoutNs <= 0) {
        return future;
    }
    Timeout timeoutTask = retryTimer.newTimeout(t -> {
        if (future.isDone()) {
            return;
        }
        future.completeExceptionally(new TimeoutIOException(timeoutMsg.get()));
    }, timeoutNs, TimeUnit.NANOSECONDS);
    FutureUtils.addListener(future, (loc, error) -> {
        if (error != null && error.getClass() != TimeoutIOException.class) {
            // cancel the timeout task if the future was completed by something other than it.
            timeoutTask.cancel();
        }
    });
    return future;
}
Also used : Timeout (org.apache.hbase.thirdparty.io.netty.util.Timeout), TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException)
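
The same decoration works without netty's HashedWheelTimer; a ScheduledExecutorService can play the timer's role. A JDK-flavored sketch of withTimeout, assuming hbase-common is on the classpath; TimeoutFutures is a hypothetical class name.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;

final class TimeoutFutures {

    private static final ScheduledExecutorService TIMER =
        Executors.newSingleThreadScheduledExecutor(r -> {
            Thread t = new Thread(r, "future-timeout");
            t.setDaemon(true);
            return t;
        });

    private TimeoutFutures() {
    }

    static <T> CompletableFuture<T> withTimeout(CompletableFuture<T> future, long timeoutNs,
        Supplier<String> timeoutMsg) {
        if (future.isDone() || timeoutNs <= 0) {
            return future;
        }
        // completeExceptionally is a no-op if the future has already finished by then.
        ScheduledFuture<?> timeoutTask = TIMER.schedule(
            () -> future.completeExceptionally(new TimeoutIOException(timeoutMsg.get())),
            timeoutNs, TimeUnit.NANOSECONDS);
        future.whenComplete((result, error) -> {
            if (!(error instanceof TimeoutIOException)) {
                // Completed by something other than the timeout: the timer task is now useless.
                timeoutTask.cancel(false);
            }
        });
        return future;
    }
}

Unlike the original, this sketch also cancels the timer task on successful completion; the original's error != null check skips that, which is harmless only because completing an already-done future is a no-op. On Java 9+, CompletableFuture.orTimeout wires up similarly but fails with a plain TimeoutException rather than the TimeoutIOException that HBase callers match on.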

Aggregations

TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException): 9 usages
InterruptedIOException (java.io.InterruptedIOException): 6 usages
IOException (java.io.IOException): 4 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 3 usages
RpcCall (org.apache.hadoop.hbase.ipc.RpcCall): 3 usages
HashedBytes (org.apache.hadoop.hbase.util.HashedBytes): 3 usages
Span (io.opentelemetry.api.trace.Span): 2 usages
FileNotFoundException (java.io.FileNotFoundException): 2 usages
ExecutionException (java.util.concurrent.ExecutionException): 2 usages
CellScanner (org.apache.hadoop.hbase.CellScanner): 2 usages
User (org.apache.hadoop.hbase.security.User): 2 usages
Message (org.apache.hbase.thirdparty.com.google.protobuf.Message): 2 usages
Nullable (edu.umd.cs.findbugs.annotations.Nullable): 1 usage
Scope (io.opentelemetry.context.Scope): 1 usage
EOFException (java.io.EOFException): 1 usage
Constructor (java.lang.reflect.Constructor): 1 usage
InetSocketAddress (java.net.InetSocketAddress): 1 usage
ByteBuffer (java.nio.ByteBuffer): 1 usage
ClosedChannelException (java.nio.channels.ClosedChannelException): 1 usage
StandardCharsets (java.nio.charset.StandardCharsets): 1 usage