
Example 6 with RpcCall

Use of org.apache.hadoop.hbase.ipc.RpcCall in project hbase by apache.

From the class HRegion, method replayWALEntry.

/**
 * Replay a remote WAL entry sent by the primary replica.
 * <p/>
 * Should only be called on secondary replicas.
 */
void replayWALEntry(WALEntry entry, CellScanner cells) throws IOException {
    long timeout = -1L;
    Optional<RpcCall> call = RpcServer.getCurrentCall();
    if (call.isPresent()) {
        long deadline = call.get().getDeadline();
        if (deadline < Long.MAX_VALUE) {
            timeout = deadline - EnvironmentEdgeManager.currentTime();
            if (timeout <= 0) {
                throw new TimeoutIOException("Timeout while replaying edits for " + getRegionInfo());
            }
        }
    }
    if (timeout > 0) {
        try {
            if (!replayLock.tryLock(timeout, TimeUnit.MILLISECONDS)) {
                throw new TimeoutIOException("Timeout while waiting for lock when replaying edits for " + getRegionInfo());
            }
        } catch (InterruptedException e) {
            throw throwOnInterrupt(e);
        }
    } else {
        replayLock.lock();
    }
    try {
        int count = entry.getAssociatedCellCount();
        long sequenceId = entry.getKey().getLogSequenceNumber();
        if (lastReplayedSequenceId >= sequenceId) {
            // This edit has already been replayed (we only need to apply later WALEntries),
            // but still advance past its cells so the CellScanner stays in sync.
            for (int i = 0; i < count; i++) {
                // Throw index out of bounds if our cell count is off
                if (!cells.advance()) {
                    throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i);
                }
            }
            return;
        }
        Map<byte[], List<Cell>> family2Cells = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        for (int i = 0; i < count; i++) {
            // Throw index out of bounds if our cell count is off
            if (!cells.advance()) {
                throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i);
            }
            Cell cell = cells.current();
            if (WALEdit.isMetaEditFamily(cell)) {
                // A meta edit is usually alone in a WALEdit; flushing any pending cells first
                // is guard logic to make sure we do not break the ordering in the worst case.
                if (!family2Cells.isEmpty()) {
                    replayWALBatchMutate(family2Cells);
                    family2Cells.clear();
                }
                replayWALMetaEdit(cell);
            } else {
                family2Cells.computeIfAbsent(CellUtil.cloneFamily(cell), k -> new ArrayList<>()).add(cell);
            }
        }
        // do not forget to apply the remaining cells
        if (!family2Cells.isEmpty()) {
            replayWALBatchMutate(family2Cells);
        }
        mvcc.advanceTo(sequenceId);
        lastReplayedSequenceId = sequenceId;
    } finally {
        replayLock.unlock();
    }
}
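
The deadline handling at the top of replayWALEntry is a reusable pattern: derive a lock-wait budget from the current RPC call's deadline so a secondary replica never blocks past the point where the client has already given up. Below is a minimal sketch of that pattern pulled out on its own; the class RpcDeadlineLock and the method acquireWithinRpcDeadline are hypothetical names (not part of HBase), and the InterruptedException handling is simplified compared to HRegion's own throwOnInterrupt helper.

import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.ipc.RpcCall;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

// Hypothetical helper, not part of HBase: extracts the lock-with-RPC-deadline pattern above.
public final class RpcDeadlineLock {

    private RpcDeadlineLock() {
    }

    /**
     * Acquire the given lock, but give up once the current RPC call's deadline has passed.
     * Falls back to an unbounded lock() when there is no current call or no deadline.
     */
    static void acquireWithinRpcDeadline(ReentrantLock lock, String what) throws TimeoutIOException {
        long timeout = -1L;
        Optional<RpcCall> call = RpcServer.getCurrentCall();
        if (call.isPresent()) {
            long deadline = call.get().getDeadline();
            if (deadline < Long.MAX_VALUE) {
                // Remaining budget before the client-side deadline expires.
                timeout = deadline - EnvironmentEdgeManager.currentTime();
                if (timeout <= 0) {
                    throw new TimeoutIOException("Deadline already passed while " + what);
                }
            }
        }
        if (timeout > 0) {
            try {
                if (!lock.tryLock(timeout, TimeUnit.MILLISECONDS)) {
                    throw new TimeoutIOException("Timeout while waiting for lock when " + what);
                }
            } catch (InterruptedException e) {
                // Simplified; HRegion routes this through its own throwOnInterrupt helper.
                Thread.currentThread().interrupt();
                throw new TimeoutIOException("Interrupted while waiting for lock when " + what);
            }
        } else {
            // No deadline known: block without a bound, as replayWALEntry does.
            lock.lock();
        }
    }
}

The key design point is the three-way split: a deadline that has already passed fails fast, a known future deadline bounds tryLock, and no deadline at all falls back to an unbounded lock().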

Example 7 with RpcCall

Use of org.apache.hadoop.hbase.ipc.RpcCall in project hbase by apache.

From the class RSRpcServices, method getRemoteClientIpAndPort.

/**
 * @return the remote client's IP and port, or HConstants.EMPTY_STRING if they cannot be determined.
 */
@RestrictedApi(explanation = "Should only be called in TestRSRpcServices and RSRpcServices", link = "", allowedOnPath = ".*(TestRSRpcServices|RSRpcServices).java")
static String getRemoteClientIpAndPort() {
    RpcCall rpcCall = RpcServer.getCurrentCall().orElse(null);
    if (rpcCall == null) {
        return HConstants.EMPTY_STRING;
    }
    InetAddress address = rpcCall.getRemoteAddress();
    if (address == null) {
        return HConstants.EMPTY_STRING;
    }
    // Prefer the raw IP address; it is more useful when scanning than a hostname anyways.
    return Address.fromParts(address.getHostAddress(), rpcCall.getRemotePort()).toString();
}
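
Both examples reach per-call metadata through the same entry point, RpcServer.getCurrentCall(). The sketch below, a hypothetical helper that is not part of HBase, combines the two pieces of information used in Examples 6 and 7 (remote address and deadline) into a single log-friendly string, using only the calls shown above; Address is assumed to be org.apache.hadoop.hbase.net.Address, the class used at the end of Example 7.

import java.net.InetAddress;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ipc.RpcCall;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

// Hypothetical helper, not part of HBase: summarizes the current RPC call for log messages.
public final class CurrentCallInfo {

    private CurrentCallInfo() {
    }

    /** Returns e.g. "client=10.0.0.5:51234, remainingMs=2874", or "no current call". */
    static String describeCurrentCall() {
        RpcCall call = RpcServer.getCurrentCall().orElse(null);
        if (call == null) {
            // Not running on an RPC handler thread.
            return "no current call";
        }
        String client = HConstants.EMPTY_STRING;
        InetAddress address = call.getRemoteAddress();
        if (address != null) {
            client = Address.fromParts(address.getHostAddress(), call.getRemotePort()).toString();
        }
        long remainingMs = -1L;
        long deadline = call.getDeadline();
        if (deadline < Long.MAX_VALUE) {
            remainingMs = deadline - EnvironmentEdgeManager.currentTime();
        }
        return "client=" + client + ", remainingMs=" + remainingMs;
    }
}

Because getCurrentCall() resolves the call from the current RPC handler thread, such a helper returns the fallback string when invoked from background threads such as chores or compactions.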
