Example 21 with ReentrantReadWriteLock

use of java.util.concurrent.locks.ReentrantReadWriteLock in project hbase by apache.

the class BucketCache method getBlock.

/**
   * Get the buffer of the block with the specified key.
   * @param key block's cache key
   * @param caching true if the caller caches blocks on cache misses
   * @param repeat Whether this is a repeat lookup for the same block
   * @param updateCacheMetrics Whether we should update cache metrics or not
   * @return buffer of specified cache key, or null if not in cache
   */
@Override
public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat, boolean updateCacheMetrics) {
    if (!cacheEnabled) {
        return null;
    }
    RAMQueueEntry re = ramCache.get(key);
    if (re != null) {
        if (updateCacheMetrics) {
            cacheStats.hit(caching, key.isPrimary(), key.getBlockType());
        }
        re.access(accessCount.incrementAndGet());
        return re.getData();
    }
    BucketEntry bucketEntry = backingMap.get(key);
    if (bucketEntry != null) {
        long start = System.nanoTime();
        ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());
        try {
            lock.readLock().lock();
            // Re-check under the read lock that this key still maps to the same entry;
            // the offset may have been reused by a concurrent eviction, so only read if
            // the entry we locked on is still the one in the backing map.
            if (bucketEntry.equals(backingMap.get(key))) {
                // TODO : change this area - should be removed after server cells and
                // 12295 are available
                int len = bucketEntry.getLength();
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Read offset=" + bucketEntry.offset() + ", len=" + len);
                }
                Cacheable cachedBlock = ioEngine.read(bucketEntry.offset(), len, bucketEntry.deserializerReference(this.deserialiserMap));
                long timeTaken = System.nanoTime() - start;
                if (updateCacheMetrics) {
                    cacheStats.hit(caching, key.isPrimary(), key.getBlockType());
                    cacheStats.ioHit(timeTaken);
                }
                if (cachedBlock.getMemoryType() == MemoryType.SHARED) {
                    bucketEntry.refCount.incrementAndGet();
                }
                bucketEntry.access(accessCount.incrementAndGet());
                if (this.ioErrorStartTime > 0) {
                    ioErrorStartTime = -1;
                }
                return cachedBlock;
            }
        } catch (IOException ioex) {
            LOG.error("Failed reading block " + key + " from bucket cache", ioex);
            checkIOErrorIsTolerated();
        } finally {
            lock.readLock().unlock();
        }
    }
    if (!repeat && updateCacheMetrics) {
        cacheStats.miss(caching, key.isPrimary(), key.getBlockType());
    }
    return null;
}
Also used : Cacheable(org.apache.hadoop.hbase.io.hfile.Cacheable) IOException(java.io.IOException) ReentrantReadWriteLock(java.util.concurrent.locks.ReentrantReadWriteLock)
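
The read lock above comes from offsetLock, which hands out one ReentrantReadWriteLock per bucket offset so that reads of different offsets never block each other. As a minimal sketch of that pattern (the class and field names below are invented for illustration and are not the actual HBase class backing offsetLock), such a registry can be as small as a ConcurrentHashMap keyed by offset:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Hypothetical per-offset lock registry; illustrative only, not the HBase implementation.
public final class OffsetLockRegistry {

    // One lock per bucket offset, created lazily on first use.
    private final ConcurrentMap<Long, ReentrantReadWriteLock> locks = new ConcurrentHashMap<>();

    /** Returns the lock guarding the given offset, creating it if absent. */
    public ReentrantReadWriteLock getLock(long offset) {
        return locks.computeIfAbsent(offset, k -> new ReentrantReadWriteLock());
    }
}

Readers such as getBlock take the read lock of the returned instance, while eviction takes the write lock, which is what serializes readers and evictors per offset.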

Example 22 with ReentrantReadWriteLock

use of java.util.concurrent.locks.ReentrantReadWriteLock in project hbase by apache.

the class BucketCache method evictBlock.

public boolean evictBlock(BlockCacheKey cacheKey, boolean deletedBlock) {
    if (!cacheEnabled) {
        return false;
    }
    RAMQueueEntry removedBlock = checkRamCache(cacheKey);
    BucketEntry bucketEntry = backingMap.get(cacheKey);
    if (bucketEntry == null) {
        if (removedBlock != null) {
            cacheStats.evicted(0, cacheKey.isPrimary());
            return true;
        } else {
            return false;
        }
    }
    ReentrantReadWriteLock lock = offsetLock.getLock(bucketEntry.offset());
    try {
        lock.writeLock().lock();
        int refCount = bucketEntry.refCount.get();
        if (refCount == 0) {
            if (backingMap.remove(cacheKey, bucketEntry)) {
                blockEvicted(cacheKey, bucketEntry, removedBlock == null);
            } else {
                return false;
            }
        } else {
            if (!deletedBlock) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("This block " + cacheKey + " is still referred by " + refCount + " readers. Can not be freed now");
                }
                return false;
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("This block " + cacheKey + " is still referred by " + refCount + " readers. Can not be freed now. Hence will mark this" + " for evicting at a later point");
                }
                bucketEntry.markedForEvict = true;
            }
        }
    } finally {
        lock.writeLock().unlock();
    }
    cacheStats.evicted(bucketEntry.getCachedTime(), cacheKey.isPrimary());
    return true;
}
Also used : ReentrantReadWriteLock(java.util.concurrent.locks.ReentrantReadWriteLock)
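
Read together with Example 21, these two methods show the bucket cache's locking protocol: both look up the ReentrantReadWriteLock for the entry's bucket offset, getBlock under the read lock and evictBlock under the write lock, so an eviction waits for in-flight reads of that offset while concurrent readers never block one another. Because getBlock bumps refCount for SHARED blocks, evictBlock refuses to free an entry that readers still hold, either failing the eviction or, for deleted blocks, setting markedForEvict so the entry is freed at a later point.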

Example 23 with ReentrantReadWriteLock

use of java.util.concurrent.locks.ReentrantReadWriteLock in project hadoop by apache.

the class TestProportionalCapacityPreemptionPolicy method mockParentQueue.

ParentQueue mockParentQueue(ParentQueue p, int subqueues, Deque<ParentQueue> pqs) {
    ParentQueue pq = mock(ParentQueue.class);
    List<CSQueue> cqs = new ArrayList<CSQueue>();
    when(pq.getChildQueues()).thenReturn(cqs);
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    when(pq.getReadLock()).thenReturn(lock.readLock());
    // Ordering policy
    QueueOrderingPolicy policy = mock(QueueOrderingPolicy.class);
    when(policy.getConfigName()).thenReturn(CapacitySchedulerConfiguration.QUEUE_PRIORITY_UTILIZATION_ORDERING_POLICY);
    when(pq.getQueueOrderingPolicy()).thenReturn(policy);
    when(pq.getPriority()).thenReturn(Priority.newInstance(0));
    for (int i = 0; i < subqueues; ++i) {
        pqs.add(pq);
    }
    if (p != null) {
        p.getChildQueues().add(pq);
    }
    return pq;
}
Also used : ParentQueue(org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.ParentQueue) ArrayList(java.util.ArrayList) ReentrantReadWriteLock(java.util.concurrent.locks.ReentrantReadWriteLock) QueueOrderingPolicy(org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.policy.QueueOrderingPolicy) CSQueue(org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue)
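
The notable detail in this mock is that getReadLock() is stubbed to return the read lock of a real ReentrantReadWriteLock: without the stub, Mockito's default answer would be null, and any code under test that calls pq.getReadLock().lock() would fail with a NullPointerException. Handing out a real lock keeps the locking path functional while everything else on the ParentQueue stays mocked.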

Example 24 with ReentrantReadWriteLock

use of java.util.concurrent.locks.ReentrantReadWriteLock in project hive by apache.

the class QueryTracker method getDagLock.

private ReadWriteLock getDagLock(QueryIdentifier queryIdentifier) {
    lock.lock();
    try {
        ReadWriteLock dagLock = dagSpecificLocks.get(queryIdentifier);
        if (dagLock == null) {
            dagLock = new ReentrantReadWriteLock();
            dagSpecificLocks.put(queryIdentifier, dagLock);
        }
        return dagLock;
    } finally {
        lock.unlock();
    }
}
Also used : ReentrantReadWriteLock(java.util.concurrent.locks.ReentrantReadWriteLock) ReadWriteLock(java.util.concurrent.locks.ReadWriteLock)
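
The get-or-create above is guarded by a separate mutex (lock) around a plain map lookup and put. A minimal alternative sketch, assuming dagSpecificLocks can be declared as a ConcurrentMap (an illustration, not the actual Hive implementation), pushes the atomicity into computeIfAbsent and drops the explicit locking:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Assumption: the field is a ConcurrentMap rather than whatever map QueryTracker really uses.
private final ConcurrentMap<QueryIdentifier, ReadWriteLock> dagSpecificLocks = new ConcurrentHashMap<>();

private ReadWriteLock getDagLock(QueryIdentifier queryIdentifier) {
    // computeIfAbsent creates the ReentrantReadWriteLock atomically on first access,
    // so two threads asking for the same query identifier always receive the same instance.
    return dagSpecificLocks.computeIfAbsent(queryIdentifier, k -> new ReentrantReadWriteLock());
}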

Example 25 with ReentrantReadWriteLock

use of java.util.concurrent.locks.ReentrantReadWriteLock in project gocd by gocd.

the class DynamicReadWriteLock method getLock.

private ReadWriteLock getLock(String key) {
    synchronized (key.intern()) {
        if (locks.containsKey(key)) {
            return locks.get(key);
        }
        ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
        locks.put(key, lock);
        return lock;
    }
}
Also used : ReentrantReadWriteLock(java.util.concurrent.locks.ReentrantReadWriteLock)
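
A design note on this last example: synchronizing on key.intern() works because interned strings are canonical, but the monitor is then a JVM-wide shared object, so any unrelated code that synchronizes on an interned string with the same value contends with this lock map, and the interned keys become effectively global state. The ConcurrentHashMap.computeIfAbsent pattern sketched after Example 24 gives the same one-lock-per-key behavior without relying on string interning or an external synchronized block.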

Aggregations

ReentrantReadWriteLock (java.util.concurrent.locks.ReentrantReadWriteLock): 52
ReadWriteLock (java.util.concurrent.locks.ReadWriteLock): 17
Test (org.junit.Test): 15
ArrayList (java.util.ArrayList): 6
Lock (java.util.concurrent.locks.Lock): 5
DatasetGraph (org.apache.jena.sparql.core.DatasetGraph): 5
HashMap (java.util.HashMap): 4
IgniteInternalFuture (org.apache.ignite.internal.IgniteInternalFuture): 4
Nullable (org.jetbrains.annotations.Nullable): 4
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 3
InternalErrorException (cz.metacentrum.perun.core.api.exceptions.InternalErrorException): 2
IOException (java.io.IOException): 2
List (java.util.List): 2
TreeSet (java.util.TreeSet): 2
ExecutorService (java.util.concurrent.ExecutorService): 2
AtomicLong (java.util.concurrent.atomic.AtomicLong): 2
ReentrantLock (java.util.concurrent.locks.ReentrantLock): 2
PostLoad (javax.persistence.PostLoad): 2
Configuration (org.apache.hadoop.conf.Configuration): 2
Path (org.apache.hadoop.fs.Path): 2