
Example 1 with TimeoutIOException

use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project phoenix by apache.

the class LockManager method lockRow.

/**
 * Lock the row or throw otherwise
 * @param row the row key
 * @return RowLock used to eventually release the lock
 * @throws TimeoutIOException if the lock could not be acquired within the
 * allowed rowLockWaitDuration
 * @throws InterruptedIOException if interrupted while waiting to acquire the lock
 */
public RowLock lockRow(byte[] row, int waitDuration) throws IOException {
    // create an object to use as a key in the row lock map
    ImmutableBytesPtr rowKey = new ImmutableBytesPtr(row);
    RowLockContext rowLockContext = null;
    RowLockImpl result = null;
    TraceScope traceScope = null;
    // If we're tracing start a span to show how long this took.
    if (Trace.isTracing()) {
        traceScope = Trace.startSpan("LockManager.getRowLock");
        traceScope.getSpan().addTimelineAnnotation("Getting a lock");
    }
    boolean success = false;
    try {
        // TODO: do we need to add a time component here?
        while (result == null) {
            // Try adding a RowLockContext to the lockedRows.
            // If we can add it then there are no other transactions currently running.
            rowLockContext = new RowLockContext(rowKey);
            RowLockContext existingContext = lockedRows.putIfAbsent(rowKey, rowLockContext);
            // if there was a running transaction then there's already a context.
            if (existingContext != null) {
                rowLockContext = existingContext;
            }
            result = rowLockContext.newRowLock();
        }
        if (!result.getLock().tryLock(waitDuration, TimeUnit.MILLISECONDS)) {
            if (traceScope != null) {
                traceScope.getSpan().addTimelineAnnotation("Failed to get row lock");
            }
            throw new TimeoutIOException("Timed out waiting for lock for row: " + rowKey);
        }
        rowLockContext.setThreadName(Thread.currentThread().getName());
        success = true;
        return result;
    } catch (InterruptedException ie) {
        LOG.warn("Thread interrupted waiting for lock on row: " + rowKey);
        InterruptedIOException iie = new InterruptedIOException();
        iie.initCause(ie);
        if (traceScope != null) {
            traceScope.getSpan().addTimelineAnnotation("Interrupted exception getting row lock");
        }
        Thread.currentThread().interrupt();
        throw iie;
    } finally {
        // On failure, clean up the counts just in case this was the thing keeping the context alive.
        if (!success && rowLockContext != null)
            rowLockContext.cleanUp();
        if (traceScope != null) {
            traceScope.close();
        }
    }
}
Also used : InterruptedIOException(java.io.InterruptedIOException) ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) TraceScope(org.apache.htrace.TraceScope) TimeoutIOException(org.apache.hadoop.hbase.exceptions.TimeoutIOException)
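
To make the usage contract concrete, here is a minimal caller sketch for lockRow. It is not from the Phoenix source: the withRowLock helper, the 30 second wait, and the assumption that the returned RowLock exposes a release() method (as the javadoc above implies) are all illustrative.

// Hypothetical helper: acquire the row lock, do the work, and always release it.
void withRowLock(LockManager lockManager, byte[] row) throws IOException {
    // Throws TimeoutIOException if the lock is not acquired within 30 seconds,
    // or InterruptedIOException if the thread is interrupted while waiting.
    RowLock rowLock = lockManager.lockRow(row, 30000);
    try {
        // ... perform the row-level work while holding the lock ...
    } finally {
        // Assumed release method, mirroring HBase's Region.RowLock#release()
        rowLock.release();
    }
}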

Example 2 with TimeoutIOException

use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

the class Procedure method setTimeoutFailure.

/**
 * Called by the ProcedureExecutor when the timeout set by setTimeout() has expired.
 * <p/>
 * This method can also be used to implement retrying. A procedure can set its state to
 * {@code WAITING_TIMEOUT} by calling the {@code setState} method, throw a
 * {@link ProcedureSuspendedException} to halt its execution, and call the
 * {@link #setTimeout(int)} method to schedule the wake-up. It should then override this
 * method to wake the procedure up and return false, telling the ProcedureExecutor that the
 * timeout event has already been handled.
 * @return true to let the framework handle the timeout as an abort, false if the procedure
 *         handled the timeout itself.
 */
protected synchronized boolean setTimeoutFailure(TEnvironment env) {
    if (state == ProcedureState.WAITING_TIMEOUT) {
        long timeDiff = EnvironmentEdgeManager.currentTime() - lastUpdate;
        setFailure("ProcedureExecutor", new TimeoutIOException("Operation timed out after " + StringUtils.humanTimeDiff(timeDiff)));
        return true;
    }
    return false;
}
Also used : TimeoutIOException(org.apache.hadoop.hbase.exceptions.TimeoutIOException)
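
The retry pattern that the javadoc describes looks roughly like the sketch below. This is an illustration, not code from the HBase source: the resourceIsReady check and the 1000 ms backoff are hypothetical, and env.getProcedureScheduler().addFront(this) is one common way master procedures reschedule themselves; exact class names may differ across HBase versions.

// Illustrative sketch of the suspend-and-retry pattern; not from the HBase source.
// resourceIsReady and the 1000 ms backoff are hypothetical.
@Override
protected Procedure<MasterProcedureEnv>[] execute(MasterProcedureEnv env)
        throws ProcedureSuspendedException {
    if (!resourceIsReady(env)) {
        // Suspend and ask to be woken up in roughly one second.
        setTimeout(1000);
        setState(ProcedureProtos.ProcedureState.WAITING_TIMEOUT);
        throw new ProcedureSuspendedException();
    }
    // ... resource is ready, do the real work; no child procedures, so return null ...
    return null;
}

@Override
protected synchronized boolean setTimeoutFailure(MasterProcedureEnv env) {
    // Wake the procedure up and reschedule it; returning false tells the
    // ProcedureExecutor that the timeout event has already been handled (no abort).
    setState(ProcedureProtos.ProcedureState.RUNNABLE);
    env.getProcedureScheduler().addFront(this);
    return false;
}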

Example 3 with TimeoutIOException

use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

the class HRegion method getRowLockInternal.

// will be overridden in tests
protected RowLock getRowLockInternal(byte[] row, boolean readLock, RowLock prevRowLock) throws IOException {
    // create an object to use as a key in the row lock map
    HashedBytes rowKey = new HashedBytes(row);
    RowLockContext rowLockContext = null;
    RowLockImpl result = null;
    boolean success = false;
    try {
        // TODO: do we need to add a time component here?
        while (result == null) {
            rowLockContext = computeIfAbsent(lockedRows, rowKey, () -> new RowLockContext(rowKey));
            // Now try to acquire the lock. This can fail (newReadLock/newWriteLock returns
            // null) if the context has already been cleaned up by another thread, in which
            // case the while loop retries with a fresh context.
            if (readLock) {
                // For read lock, if the caller has locked the same row previously, it will not try
                // to acquire the same read lock. It simply returns the previous row lock.
                RowLockImpl prevRowLockImpl = (RowLockImpl) prevRowLock;
                if ((prevRowLockImpl != null) && (prevRowLockImpl.getLock() == rowLockContext.readWriteLock.readLock())) {
                    success = true;
                    return prevRowLock;
                }
                result = rowLockContext.newReadLock();
            } else {
                result = rowLockContext.newWriteLock();
            }
        }
        int timeout = rowLockWaitDuration;
        boolean reachDeadlineFirst = false;
        Optional<RpcCall> call = RpcServer.getCurrentCall();
        if (call.isPresent()) {
            long deadline = call.get().getDeadline();
            if (deadline < Long.MAX_VALUE) {
                int timeToDeadline = (int) (deadline - EnvironmentEdgeManager.currentTime());
                if (timeToDeadline <= this.rowLockWaitDuration) {
                    reachDeadlineFirst = true;
                    timeout = timeToDeadline;
                }
            }
        }
        if (timeout <= 0 || !result.getLock().tryLock(timeout, TimeUnit.MILLISECONDS)) {
            String message = "Timed out waiting for lock for row: " + rowKey + " in region " + getRegionInfo().getEncodedName();
            if (reachDeadlineFirst) {
                throw new TimeoutIOException(message);
            } else {
                // timeToDeadline is larger than rowLockWaitDuration, so the client is still
                // waiting and we cannot simply drop the request.
                throw new IOException(message);
            }
        }
        rowLockContext.setThreadName(Thread.currentThread().getName());
        success = true;
        return result;
    } catch (InterruptedException ie) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Thread interrupted waiting for lock on row: {}, in region {}", rowKey, getRegionInfo().getRegionNameAsString());
        }
        throw throwOnInterrupt(ie);
    } catch (Error error) {
        // The maximum hold count for a read lock is 64K (hardcoded); when this maximum
        // count is reached, an Error is thrown. Catch it here and wrap it in an IOException
        // so the caller can go ahead and process the minibatch with the locks already acquired.
        LOG.warn("Error to get row lock for {}, in region {}, cause: {}", Bytes.toStringBinary(row), getRegionInfo().getRegionNameAsString(), error);
        IOException ioe = new IOException(error);
        throw ioe;
    } finally {
        // Clean up the counts just in case this was the thing keeping the context alive.
        if (!success && rowLockContext != null) {
            rowLockContext.cleanUp();
        }
    }
}
Also used : IOException (java.io.IOException) DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException) InterruptedIOException (java.io.InterruptedIOException) TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException) HashedBytes (org.apache.hadoop.hbase.util.HashedBytes) RpcCall (org.apache.hadoop.hbase.ipc.RpcCall)

Example 4 with TimeoutIOException

use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

the class ProcedureSyncWait method waitFor.

public static <T> T waitFor(MasterProcedureEnv env, long waitTime, long waitingTimeForEvents, String purpose, Predicate<T> predicate) throws IOException {
    long done = EnvironmentEdgeManager.currentTime() + waitTime;
    if (done <= 0) {
        // long overflow, usually this means we pass Long.MAX_VALUE as waitTime
        done = Long.MAX_VALUE;
    }
    boolean logged = false;
    do {
        T result = predicate.evaluate();
        if (result != null && !result.equals(Boolean.FALSE)) {
            return result;
        }
        try {
            Thread.sleep(waitingTimeForEvents);
        } catch (InterruptedException e) {
            LOG.warn("Interrupted while sleeping, waiting on " + purpose);
            throw (InterruptedIOException) new InterruptedIOException().initCause(e);
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("waitFor " + purpose);
        } else {
            if (!logged)
                LOG.debug("waitFor " + purpose);
        }
        logged = true;
    } while (EnvironmentEdgeManager.currentTime() < done && env.isRunning());
    throw new TimeoutIOException("Timed out while waiting on " + purpose);
}
Also used : InterruptedIOException(java.io.InterruptedIOException) TimeoutIOException(org.apache.hadoop.hbase.exceptions.TimeoutIOException)
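
As a hedged illustration of how this polling helper is meant to be called: the predicate below is written as a lambda (Predicate here is a single-method interface with evaluate()), and isTableEnabled is a hypothetical stand-in for whatever condition the caller is waiting on.

// Hypothetical call site, not from the HBase source: poll every 250 ms, for at most
// 30 seconds, until the (hypothetical) isTableEnabled check returns TRUE; waitFor
// throws TimeoutIOException if the condition never holds within the wait time.
Boolean enabled = ProcedureSyncWait.waitFor(env, 30000, 250,
    "table to become enabled",
    () -> isTableEnabled(env));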

Example 5 with TimeoutIOException

use of org.apache.hadoop.hbase.exceptions.TimeoutIOException in project hbase by apache.

the class HRegion method getRowLockInternal.

protected RowLock getRowLockInternal(byte[] row, boolean readLock) throws IOException {
    // create an object to use as a key in the row lock map
    HashedBytes rowKey = new HashedBytes(row);
    RowLockContext rowLockContext = null;
    RowLockImpl result = null;
    TraceScope traceScope = null;
    // If we're tracing start a span to show how long this took.
    if (Trace.isTracing()) {
        traceScope = Trace.startSpan("HRegion.getRowLock");
        traceScope.getSpan().addTimelineAnnotation("Getting a " + (readLock ? "readLock" : "writeLock"));
    }
    try {
        // TODO: do we need to add a time component here?
        while (result == null) {
            rowLockContext = computeIfAbsent(lockedRows, rowKey, () -> new RowLockContext(rowKey));
            // Now try to acquire the lock. This can fail (newReadLock/newWriteLock returns
            // null) if the context has already been cleaned up by another thread, in which
            // case the while loop retries with a fresh context.
            if (readLock) {
                result = rowLockContext.newReadLock();
            } else {
                result = rowLockContext.newWriteLock();
            }
        }
        int timeout = rowLockWaitDuration;
        boolean reachDeadlineFirst = false;
        RpcCall call = RpcServer.getCurrentCall();
        if (call != null && call.getDeadline() < Long.MAX_VALUE) {
            int timeToDeadline = (int) (call.getDeadline() - System.currentTimeMillis());
            if (timeToDeadline <= this.rowLockWaitDuration) {
                reachDeadlineFirst = true;
                timeout = timeToDeadline;
            }
        }
        if (timeout <= 0 || !result.getLock().tryLock(timeout, TimeUnit.MILLISECONDS)) {
            if (traceScope != null) {
                traceScope.getSpan().addTimelineAnnotation("Failed to get row lock");
            }
            result = null;
            // Clean up the counts just in case this was the thing keeping the context alive.
            rowLockContext.cleanUp();
            String message = "Timed out waiting for lock for row: " + rowKey + " in region " + getRegionInfo().getEncodedName();
            if (reachDeadlineFirst) {
                throw new TimeoutIOException(message);
            } else {
                // timeToDeadline is larger than rowLockWaitDuration, so the client is still
                // waiting and we cannot simply drop the request.
                throw new IOException(message);
            }
        }
        rowLockContext.setThreadName(Thread.currentThread().getName());
        return result;
    } catch (InterruptedException ie) {
        LOG.warn("Thread interrupted waiting for lock on row: " + rowKey);
        InterruptedIOException iie = new InterruptedIOException();
        iie.initCause(ie);
        if (traceScope != null) {
            traceScope.getSpan().addTimelineAnnotation("Interrupted exception getting row lock");
        }
        Thread.currentThread().interrupt();
        throw iie;
    } finally {
        if (traceScope != null) {
            traceScope.close();
        }
    }
}
Also used : InterruptedIOException (java.io.InterruptedIOException) RpcCall (org.apache.hadoop.hbase.ipc.RpcCall) TraceScope (org.apache.htrace.TraceScope) IOException (java.io.IOException) MultipleIOException (org.apache.hadoop.io.MultipleIOException) DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException) TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException) HashedBytes (org.apache.hadoop.hbase.util.HashedBytes)

Aggregations

TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException): 9
InterruptedIOException (java.io.InterruptedIOException): 6
IOException (java.io.IOException): 4
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 3
RpcCall (org.apache.hadoop.hbase.ipc.RpcCall): 3
HashedBytes (org.apache.hadoop.hbase.util.HashedBytes): 3
Span (io.opentelemetry.api.trace.Span): 2
FileNotFoundException (java.io.FileNotFoundException): 2
ExecutionException (java.util.concurrent.ExecutionException): 2
CellScanner (org.apache.hadoop.hbase.CellScanner): 2
User (org.apache.hadoop.hbase.security.User): 2
Message (org.apache.hbase.thirdparty.com.google.protobuf.Message): 2
Nullable (edu.umd.cs.findbugs.annotations.Nullable): 1
Scope (io.opentelemetry.context.Scope): 1
EOFException (java.io.EOFException): 1
Constructor (java.lang.reflect.Constructor): 1
InetSocketAddress (java.net.InetSocketAddress): 1
ByteBuffer (java.nio.ByteBuffer): 1
ClosedChannelException (java.nio.channels.ClosedChannelException): 1
StandardCharsets (java.nio.charset.StandardCharsets): 1