Example 81 with CountDownLatch

use of java.util.concurrent.CountDownLatch in project hbase by apache.

the class TestCounter method execute.

private static void execute(final Operation op, int threadCount) throws InterruptedException {
    final CountDownLatch prepareLatch = new CountDownLatch(threadCount);
    final CountDownLatch startLatch = new CountDownLatch(1);
    final CountDownLatch endLatch = new CountDownLatch(threadCount);
    class OperationThread extends Thread {

        @Override
        public void run() {
            try {
                prepareLatch.countDown();
                startLatch.await();
                for (int i = 0; i < DATA_COUNT; i++) {
                    op.execute();
                }
                endLatch.countDown();
            } catch (Exception e) {
                // ignored here; note endLatch is not counted down if op.execute() throws
            }
        }
    }
    for (int j = 0; j < threadCount; j++) {
        new OperationThread().start();
    }
    prepareLatch.await();
    startLatch.countDown();
    endLatch.await();
}
Also used : CountDownLatch(java.util.concurrent.CountDownLatch)
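
The method above fans out threadCount worker threads and coordinates them with three latches: a ready gate (prepareLatch) that each worker counts down once it is running, a start gate (startLatch) that releases all workers at the same instant, and a finish gate (endLatch) the caller awaits. A minimal self-contained sketch of the same ready/start/finish pattern, using illustrative names and constants that are not part of the HBase test:

import java.util.concurrent.CountDownLatch;

public class StartGateDemo {
    private static final int THREAD_COUNT = 4;    // illustrative value
    private static final int ITERATIONS = 1_000;  // illustrative value

    public static void main(String[] args) throws InterruptedException {
        final CountDownLatch ready = new CountDownLatch(THREAD_COUNT);
        final CountDownLatch start = new CountDownLatch(1);
        final CountDownLatch done = new CountDownLatch(THREAD_COUNT);

        for (int i = 0; i < THREAD_COUNT; i++) {
            new Thread(() -> {
                ready.countDown();            // this worker is ready
                try {
                    start.await();            // block until the gate opens
                    for (int j = 0; j < ITERATIONS; j++) {
                        // concurrent work under test goes here
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } finally {
                    done.countDown();         // always report completion, even on failure
                }
            }).start();
        }

        ready.await();      // wait until every worker has checked in
        start.countDown();  // open the gate: all workers start together
        done.await();       // wait for all workers to finish
    }
}

Counting done down in finally is the one deliberate difference from the HBase snippet, where a worker that throws would leave endLatch unreleased.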

Example 82 with CountDownLatch

use of java.util.concurrent.CountDownLatch in project hbase by apache.

the class TestProcedureNonce method testConcurrentNonceRegistration.

private void testConcurrentNonceRegistration(final boolean submitProcedure, final long nonceGroup, final long nonce) throws IOException {
    // register the nonce
    final NonceKey nonceKey = procExecutor.createNonceKey(nonceGroup, nonce);
    final AtomicReference<Throwable> t1Exception = new AtomicReference<>();
    final AtomicReference<Throwable> t2Exception = new AtomicReference<>();
    final CountDownLatch t1NonceRegisteredLatch = new CountDownLatch(1);
    final CountDownLatch t2BeforeNonceRegisteredLatch = new CountDownLatch(1);
    final Thread[] threads = new Thread[2];
    threads[0] = new Thread() {

        @Override
        public void run() {
            try {
                // register the nonce and wake t2
                assertFalse("unexpected already registered nonce", procExecutor.registerNonce(nonceKey) >= 0);
                t1NonceRegisteredLatch.countDown();
                // hold the submission until t2 is registering the nonce
                t2BeforeNonceRegisteredLatch.await();
                Threads.sleep(1000);
                if (submitProcedure) {
                    CountDownLatch latch = new CountDownLatch(1);
                    TestSingleStepProcedure proc = new TestSingleStepProcedure();
                    procEnv.setWaitLatch(latch);
                    procExecutor.submitProcedure(proc, nonceKey);
                    Threads.sleep(100);
                    // complete the procedure
                    latch.countDown();
                } else {
                    procExecutor.unregisterNonceIfProcedureWasNotSubmitted(nonceKey);
                }
            } catch (Throwable e) {
                t1Exception.set(e);
            } finally {
                t1NonceRegisteredLatch.countDown();
                t2BeforeNonceRegisteredLatch.countDown();
            }
        }
    };
    threads[1] = new Thread() {

        @Override
        public void run() {
            try {
                // wait until t1 has registered the nonce
                t1NonceRegisteredLatch.await();
                // register the nonce
                t2BeforeNonceRegisteredLatch.countDown();
                assertFalse("unexpected non registered nonce", procExecutor.registerNonce(nonceKey) < 0);
            } catch (Throwable e) {
                t2Exception.set(e);
            } finally {
                t1NonceRegisteredLatch.countDown();
                t2BeforeNonceRegisteredLatch.countDown();
            }
        }
    };
    for (int i = 0; i < threads.length; ++i) threads[i].start();
    for (int i = 0; i < threads.length; ++i) Threads.shutdown(threads[i]);
    ProcedureTestingUtility.waitNoProcedureRunning(procExecutor);
    assertEquals(null, t1Exception.get());
    assertEquals(null, t2Exception.get());
}
Also used : NonceKey(org.apache.hadoop.hbase.util.NonceKey) AtomicReference(java.util.concurrent.atomic.AtomicReference) CountDownLatch(java.util.concurrent.CountDownLatch)
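
The two worker threads above are forced into a fixed interleaving: t2 parks on t1NonceRegisteredLatch until t1 has registered the nonce, t1 then parks on t2BeforeNonceRegisteredLatch until t2 is about to register, and both finally blocks count down both latches so a failure in one thread cannot leave the other waiting forever. A small stand-alone sketch of that handshake, with illustrative names and the HBase-specific calls replaced by comments:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

public class InterleavingDemo {
    public static void main(String[] args) throws InterruptedException {
        final CountDownLatch step1Done = new CountDownLatch(1);
        final CountDownLatch step2Done = new CountDownLatch(1);
        final AtomicReference<Throwable> failure = new AtomicReference<>();

        Thread t1 = new Thread(() -> {
            try {
                // step 1: e.g. register the nonce, then let t2 proceed
                step1Done.countDown();
                // do not continue until t2 has reached its checkpoint
                step2Done.await();
                // step 3: e.g. submit or unregister, strictly after step 2
            } catch (Throwable e) {
                failure.set(e);
            } finally {
                // release both latches so neither thread can hang on failure
                step1Done.countDown();
                step2Done.countDown();
            }
        });

        Thread t2 = new Thread(() -> {
            try {
                step1Done.await();      // step 2 runs strictly after step 1
                step2Done.countDown();  // unblock t1
                // e.g. register the same nonce and expect the existing entry
            } catch (Throwable e) {
                failure.set(e);
            } finally {
                step1Done.countDown();
                step2Done.countDown();
            }
        });

        t1.start();
        t2.start();
        t1.join();
        t2.join();
        if (failure.get() != null) {
            throw new AssertionError(failure.get());
        }
    }
}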

Example 83 with CountDownLatch

use of java.util.concurrent.CountDownLatch in project hbase by apache.

the class TestProcedureNonce method testRunningProcWithSameNonce.

@Test(timeout = 30000)
public void testRunningProcWithSameNonce() throws Exception {
    final long nonceGroup = 456;
    final long nonce = 33333;
    // register the nonce
    final NonceKey nonceKey = procExecutor.createNonceKey(nonceGroup, nonce);
    assertFalse(procExecutor.registerNonce(nonceKey) >= 0);
    // Submit a proc and use a latch to prevent the step execution until we submitted proc2
    CountDownLatch latch = new CountDownLatch(1);
    TestSingleStepProcedure proc = new TestSingleStepProcedure();
    procEnv.setWaitLatch(latch);
    long procId = procExecutor.submitProcedure(proc, nonceKey);
    while (proc.step != 1) Threads.sleep(25);
    // try to register a procedure with the same nonce
    // we should get back the old procId
    assertEquals(procId, procExecutor.registerNonce(nonceKey));
    // complete the procedure
    latch.countDown();
    // Restart, the procedure is not completed yet
    ProcedureTestingUtility.restart(procExecutor);
    ProcedureTestingUtility.waitProcedure(procExecutor, procId);
    // try to register a procedure with the same nonce
    // we should get back the old procId
    assertEquals(procId, procExecutor.registerNonce(nonceKey));
    ProcedureInfo result = procExecutor.getResult(procId);
    ProcedureTestingUtility.assertProcNotFailed(result);
}
Also used : NonceKey(org.apache.hadoop.hbase.util.NonceKey) ProcedureInfo(org.apache.hadoop.hbase.ProcedureInfo) CountDownLatch(java.util.concurrent.CountDownLatch) Test(org.junit.Test)
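
The latch passed to procEnv.setWaitLatch(latch) keeps TestSingleStepProcedure parked mid-step, so the test can register the same nonce again while the first procedure is still in flight and verify it gets back the original procId; latch.countDown() then lets the procedure finish. A generic sketch of holding a task in flight this way, using a plain ExecutorService rather than the procedure executor (all names are illustrative):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class HoldTaskInFlightDemo {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        CountDownLatch proceed = new CountDownLatch(1);

        // submit a task that blocks part-way through until the test releases it
        Future<String> result = pool.submit(() -> {
            proceed.await();  // task is now "running but not complete"
            return "done";
        });

        // while the task is held here, assertions about in-flight state can run
        // (in the HBase test this is where the duplicate nonce is registered)

        proceed.countDown();  // let the task complete
        System.out.println(result.get(5, TimeUnit.SECONDS));
        pool.shutdown();
    }
}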

Example 84 with CountDownLatch

use of java.util.concurrent.CountDownLatch in project hbase by apache.

the class TestAvoidCellReferencesIntoShippedBlocks method setUpBeforeClass.

/**
   * @throws java.lang.Exception
   */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    ROWS[0] = ROW;
    ROWS[1] = ROW1;
    Configuration conf = TEST_UTIL.getConfiguration();
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, MultiRowMutationEndpoint.class.getName());
    // enable for below tests
    conf.setBoolean("hbase.table.sanity.checks", true);
    conf.setInt("hbase.regionserver.handler.count", 20);
    conf.setInt("hbase.bucketcache.size", 400);
    conf.setStrings("hbase.bucketcache.ioengine", "offheap");
    conf.setInt("hbase.hstore.compactionThreshold", 7);
    conf.setFloat("hfile.block.cache.size", 0.2f);
    conf.setFloat("hbase.regionserver.global.memstore.size", 0.1f);
    // do not retry
    conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 0);
    conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 500000);
    FAMILIES_1[0] = FAMILY;
    TEST_UTIL.startMiniCluster(SLAVES);
    compactReadLatch = new CountDownLatch(1);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) CountDownLatch(java.util.concurrent.CountDownLatch) MultiRowMutationEndpoint(org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint) BeforeClass(org.junit.BeforeClass)
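
The only latch work in this setup is creating compactReadLatch with a count of one; another component (a coprocessor in this test class) is expected to count it down later and the tests await it. A minimal JUnit 4 sketch of that shape, assuming a static latch shared between @BeforeClass and a test, with a plain background thread standing in for the real callback (class and method names are illustrative):

import static org.junit.Assert.assertTrue;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.junit.BeforeClass;
import org.junit.Test;

public class SharedLatchTest {

    // recreated in @BeforeClass so every run starts with a fresh, un-counted latch
    private static CountDownLatch eventLatch;

    @BeforeClass
    public static void setUpBeforeClass() {
        eventLatch = new CountDownLatch(1);
    }

    @Test
    public void testWaitsForEvent() throws InterruptedException {
        // stand-in for the coprocessor/observer that signals the latch
        new Thread(eventLatch::countDown).start();
        // await with a timeout so a missing signal fails the test instead of hanging it
        assertTrue("event did not arrive in time",
            eventLatch.await(10, TimeUnit.SECONDS));
    }
}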

Example 85 with CountDownLatch

use of java.util.concurrent.CountDownLatch in project hbase by apache.

the class TestBlockEvictionFromClient method testMultiGets.

@Test
public void testMultiGets() throws IOException, InterruptedException {
    Table table = null;
    try {
        latch = new CountDownLatch(2);
        // Check if get() returns blocks on its close() itself
        getLatch = new CountDownLatch(1);
        final TableName tableName = TableName.valueOf(name.getMethodName());
        // Create KV that will give you two blocks
        // Create a table with block size as 1024
        table = TEST_UTIL.createTable(tableName, FAMILIES_1, 1, 1024, CustomInnerRegionObserver.class.getName());
        // get the block cache and region
        RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName);
        String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
        Region region = TEST_UTIL.getRSForFirstRegionInTable(tableName).getFromOnlineRegions(regionName);
        Store store = region.getStores().iterator().next();
        CacheConfig cacheConf = store.getCacheConfig();
        cacheConf.setCacheDataOnWrite(true);
        cacheConf.setEvictOnClose(true);
        BlockCache cache = cacheConf.getBlockCache();
        Put put = new Put(ROW);
        put.addColumn(FAMILY, QUALIFIER, data);
        table.put(put);
        region.flush(true);
        put = new Put(ROW1);
        put.addColumn(FAMILY, QUALIFIER, data);
        table.put(put);
        region.flush(true);
        byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
        put = new Put(ROW);
        put.addColumn(FAMILY, QUALIFIER2, data2);
        table.put(put);
        region.flush(true);
        // flush the data
        System.out.println("Flushing cache");
        // Should create one Hfile with 2 blocks
        CustomInnerRegionObserver.waitForGets.set(true);
        // Create three sets of gets
        MultiGetThread[] getThreads = initiateMultiGet(table);
        Thread.sleep(200);
        int refCount;
        Iterator<CachedBlock> iterator = cache.iterator();
        boolean foundNonZeroBlock = false;
        while (iterator.hasNext()) {
            CachedBlock next = iterator.next();
            BlockCacheKey cacheKey = new BlockCacheKey(next.getFilename(), next.getOffset());
            if (cache instanceof BucketCache) {
                refCount = ((BucketCache) cache).getRefCount(cacheKey);
            } else if (cache instanceof CombinedBlockCache) {
                refCount = ((CombinedBlockCache) cache).getRefCount(cacheKey);
            } else {
                continue;
            }
            if (refCount != 0) {
                assertEquals(NO_OF_THREADS, refCount);
                foundNonZeroBlock = true;
            }
        }
        assertTrue("Should have found nonzero ref count block", foundNonZeroBlock);
        CustomInnerRegionObserver.getCdl().get().countDown();
        CustomInnerRegionObserver.getCdl().get().countDown();
        for (MultiGetThread thread : getThreads) {
            thread.join();
        }
        // Verify whether the gets have returned the blocks that it had
        CustomInnerRegionObserver.waitForGets.set(true);
        // giving some time for the block to be decremented
        iterateBlockCache(cache, iterator);
        getLatch.countDown();
        System.out.println("Gets should have returned the bloks");
    } finally {
        if (table != null) {
            table.close();
        }
    }
}
Also used : CachedBlock(org.apache.hadoop.hbase.io.hfile.CachedBlock) Store(org.apache.hadoop.hbase.regionserver.Store) CountDownLatch(java.util.concurrent.CountDownLatch) BlockCacheKey(org.apache.hadoop.hbase.io.hfile.BlockCacheKey) MultiRowMutationEndpoint(org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint) TableName(org.apache.hadoop.hbase.TableName) CombinedBlockCache(org.apache.hadoop.hbase.io.hfile.CombinedBlockCache) BlockCache(org.apache.hadoop.hbase.io.hfile.BlockCache) CombinedBlockCache(org.apache.hadoop.hbase.io.hfile.CombinedBlockCache) BucketCache(org.apache.hadoop.hbase.io.hfile.bucket.BucketCache) Region(org.apache.hadoop.hbase.regionserver.Region) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Test(org.junit.Test)
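
CustomInnerRegionObserver.getCdl() in this test returns an AtomicReference holding the current CountDownLatch, which lets the observer block region-side work on a gate that the test can swap out and open per phase. A rough stand-alone sketch of such a resettable gate, assuming only java.util.concurrent and replacing the HBase observer plumbing with a plain worker thread:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

public class ResettableGateDemo {

    // callbacks block on the current latch; the test swaps in a fresh one per phase
    private static final AtomicReference<CountDownLatch> gate =
        new AtomicReference<>(new CountDownLatch(0));  // count 0: initially open

    // called from the code under test (the observer/coprocessor in the HBase test)
    static void awaitGate() throws InterruptedException {
        gate.get().await();
    }

    public static void main(String[] args) throws InterruptedException {
        gate.set(new CountDownLatch(1));  // close the gate for this phase

        Thread worker = new Thread(() -> {
            try {
                awaitGate();  // worker parks here, holding its resources
                System.out.println("worker released");
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        worker.start();

        // while the worker is parked, shared state can be inspected
        // (ref counts on cached blocks, in the HBase test)

        gate.get().countDown();  // open the gate
        worker.join();
    }
}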

Aggregations

CountDownLatch (java.util.concurrent.CountDownLatch): 5355
Test (org.junit.Test): 2594
IOException (java.io.IOException): 631
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 550
AtomicReference (java.util.concurrent.atomic.AtomicReference): 501
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 475
ArrayList (java.util.ArrayList): 471
QuickTest (com.hazelcast.test.annotation.QuickTest): 375
ParallelTest (com.hazelcast.test.annotation.ParallelTest): 355
ExecutorService (java.util.concurrent.ExecutorService): 322
Test (org.testng.annotations.Test): 310
HazelcastInstance (com.hazelcast.core.HazelcastInstance): 251
List (java.util.List): 212
HashMap (java.util.HashMap): 207
HttpServletResponse (javax.servlet.http.HttpServletResponse): 207
ExecutionException (java.util.concurrent.ExecutionException): 203
HttpServletRequest (javax.servlet.http.HttpServletRequest): 189
Ignite (org.apache.ignite.Ignite): 188
ServletException (javax.servlet.ServletException): 183
TimeoutException (java.util.concurrent.TimeoutException): 168