
Example 1 with LightWeightCache

Use of org.apache.hadoop.util.LightWeightCache in project hadoop by apache.

From the class TestRetryCacheWithHA, method testRetryCacheOnStandbyNN.

/**
   * 1. Run a set of operations
   * 2. Trigger the NN failover
   * 3. Check the retry cache on the original standby NN
   */
@Test(timeout = 60000)
public void testRetryCacheOnStandbyNN() throws Exception {
    // 1. run operations
    DFSTestUtil.runOperations(cluster, dfs, conf, BlockSize, 0);
    // check retry cache in NN1
    FSNamesystem fsn0 = cluster.getNamesystem(0);
    LightWeightCache<CacheEntry, CacheEntry> cacheSet = (LightWeightCache<CacheEntry, CacheEntry>) fsn0.getRetryCache().getCacheSet();
    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
    Map<CacheEntry, CacheEntry> oldEntries = new HashMap<CacheEntry, CacheEntry>();
    Iterator<CacheEntry> iter = cacheSet.iterator();
    while (iter.hasNext()) {
        CacheEntry entry = iter.next();
        oldEntries.put(entry, entry);
    }
    // 2. Failover the current standby to active.
    cluster.getNameNode(0).getRpcServer().rollEditLog();
    cluster.getNameNode(1).getNamesystem().getEditLogTailer().doTailEdits();
    cluster.shutdownNameNode(0);
    cluster.transitionToActive(1);
    // 3. check the retry cache on the new active NN
    FSNamesystem fsn1 = cluster.getNamesystem(1);
    cacheSet = (LightWeightCache<CacheEntry, CacheEntry>) fsn1.getRetryCache().getCacheSet();
    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
    iter = cacheSet.iterator();
    while (iter.hasNext()) {
        CacheEntry entry = iter.next();
        assertTrue(oldEntries.containsKey(entry));
    }
}
Also used: LightWeightCache (org.apache.hadoop.util.LightWeightCache), HashMap (java.util.HashMap), CacheEntry (org.apache.hadoop.ipc.RetryCache.CacheEntry), FSNamesystem (org.apache.hadoop.hdfs.server.namenode.FSNamesystem), Test (org.junit.Test)
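
The test above relies on fields initialized elsewhere in TestRetryCacheWithHA (conf, cluster, dfs, BlockSize). A minimal setup sketch following the usual MiniDFSCluster HA test pattern is shown below; the class name, the number of DataNodes, and the BlockSize value are illustrative assumptions, not copied from the source.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
import org.junit.After;
import org.junit.Before;

public class RetryCacheHASetupSketch {
    // Illustrative values; the real test class defines its own.
    private static final int BlockSize = 1024;
    private Configuration conf;
    private MiniDFSCluster cluster;
    private DistributedFileSystem dfs;

    @Before
    public void setup() throws Exception {
        conf = new Configuration();
        // Two NameNodes (nn0, nn1) sharing edits so the standby can tail them.
        cluster = new MiniDFSCluster.Builder(conf)
            .nnTopology(MiniDFSNNTopology.simpleHATopology())
            .numDataNodes(3)
            .build();
        cluster.waitActive();
        cluster.transitionToActive(0);
        // A client configured to fail over between the two NameNodes.
        dfs = (DistributedFileSystem) HATestUtil.configureFailoverFs(cluster, conf);
    }

    @After
    public void cleanup() throws Exception {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}

With this topology, step 2 of the test forces nn1 to replay nn0's edits (rollEditLog followed by doTailEdits) before the failover, which is what rebuilds the retry cache that step 3 then verifies.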

Example 2 with LightWeightCache

Use of org.apache.hadoop.util.LightWeightCache in project hadoop by apache.

From the class TestNamenodeRetryCache, method testRetryCacheRebuild.

/**
   * After running a set of operations, restart the NN and check if the retry
   * cache has been rebuilt based on the editlog.
   */
@Test
public void testRetryCacheRebuild() throws Exception {
    DFSTestUtil.runOperations(cluster, filesystem, conf, BlockSize, 0);
    FSNamesystem namesystem = cluster.getNamesystem();
    LightWeightCache<CacheEntry, CacheEntry> cacheSet = (LightWeightCache<CacheEntry, CacheEntry>) namesystem.getRetryCache().getCacheSet();
    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
    Map<CacheEntry, CacheEntry> oldEntries = new HashMap<CacheEntry, CacheEntry>();
    Iterator<CacheEntry> iter = cacheSet.iterator();
    while (iter.hasNext()) {
        CacheEntry entry = iter.next();
        oldEntries.put(entry, entry);
    }
    // restart NameNode
    cluster.restartNameNode();
    cluster.waitActive();
    namesystem = cluster.getNamesystem();
    // check retry cache
    assertTrue(namesystem.hasRetryCache());
    cacheSet = (LightWeightCache<CacheEntry, CacheEntry>) namesystem.getRetryCache().getCacheSet();
    assertEquals("Retry cache size is wrong", 26, cacheSet.size());
    iter = cacheSet.iterator();
    while (iter.hasNext()) {
        CacheEntry entry = iter.next();
        assertTrue(oldEntries.containsKey(entry));
    }
}
Also used: LightWeightCache (org.apache.hadoop.util.LightWeightCache), HashMap (java.util.HashMap), CacheEntry (org.apache.hadoop.ipc.RetryCache.CacheEntry), Test (org.junit.Test)
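
As in Example 1, conf, cluster, filesystem, and BlockSize are fields set up elsewhere in TestNamenodeRetryCache. A minimal, hypothetical @Before/@After sketch, assuming a single-NameNode MiniDFSCluster (the class name and the BlockSize value are placeholders, not taken from the source):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.Before;

public class RetryCacheSetupSketch {
    // Placeholder value; the real test class defines its own.
    private static final int BlockSize = 512;
    private Configuration conf;
    private MiniDFSCluster cluster;
    private DistributedFileSystem filesystem;

    @Before
    public void setup() throws Exception {
        conf = new HdfsConfiguration();
        conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BlockSize);
        cluster = new MiniDFSCluster.Builder(conf).build();
        cluster.waitActive();
        filesystem = cluster.getFileSystem();
    }

    @After
    public void cleanup() throws Exception {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}

Restarting the NameNode discards the in-memory retry cache, so the assertion after cluster.restartNameNode() passes only because the cache entries are re-created while the NameNode replays its edit log on startup.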

Aggregations

HashMap (java.util.HashMap): 2 uses
CacheEntry (org.apache.hadoop.ipc.RetryCache.CacheEntry): 2 uses
LightWeightCache (org.apache.hadoop.util.LightWeightCache): 2 uses
Test (org.junit.Test): 2 uses
FSNamesystem (org.apache.hadoop.hdfs.server.namenode.FSNamesystem): 1 use
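
Outside of FSNamesystem, a LightWeightCache can also be constructed directly. The sketch below is illustrative only: StringEntry is a hypothetical entry type, and it assumes that LightWeightCache.Entry extends LightWeightGSet.LinkedElement and that the constructor's expiration periods are given in nanoseconds.

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.util.LightWeightCache;
import org.apache.hadoop.util.LightWeightGSet;

public class LightWeightCacheSketch {

    /** Hypothetical entry type; entries put into the cache must implement
     *  LightWeightCache.Entry. */
    static class StringEntry implements LightWeightCache.Entry {
        private final String key;
        private LightWeightGSet.LinkedElement next;
        private long expirationTime;

        StringEntry(String key) {
            this.key = key;
        }

        @Override
        public void setNext(LightWeightGSet.LinkedElement next) {
            this.next = next;
        }

        @Override
        public LightWeightGSet.LinkedElement getNext() {
            return next;
        }

        @Override
        public void setExpirationTime(long timeNano) {
            this.expirationTime = timeNano;
        }

        @Override
        public long getExpirationTime() {
            return expirationTime;
        }

        @Override
        public int hashCode() {
            return key.hashCode();
        }

        @Override
        public boolean equals(Object obj) {
            return obj instanceof StringEntry && key.equals(((StringEntry) obj).key);
        }
    }

    public static void main(String[] args) {
        // Recommended length 16, at most 64 entries, entries expire 10 minutes
        // after creation (assumed to be nanoseconds), no deletion expiry.
        LightWeightCache<StringEntry, StringEntry> cache =
            new LightWeightCache<StringEntry, StringEntry>(
                16, 64, TimeUnit.MINUTES.toNanos(10), 0);
        cache.put(new StringEntry("op-1"));
        System.out.println("cached: " + (cache.get(new StringEntry("op-1")) != null));
        System.out.println("size:   " + cache.size());
    }
}

Entries carry their own next pointer and expiration time, which is how the cache avoids allocating a wrapper node per entry; RetryCache.CacheEntry, used in the two tests above, plays the same role as the hypothetical StringEntry here.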