Search in sources :

Example 11 with ManualEnvironmentEdge

use of org.apache.hadoop.hbase.util.ManualEnvironmentEdge in project hbase by apache.

In class TestStripeCompactionPolicy, method testMergeExpiredFiles:

@SuppressWarnings("unchecked")
@Test
public void testMergeExpiredFiles() throws Exception {
    // Pin the clock just past one TTL so expiry checks are deterministic:
    // a file is expired iff its max timestamp is older than (now - defaultTtl).
    ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
    long now = defaultTtl + 2;
    edge.setValue(now);
    EnvironmentEdgeManager.injectEdge(edge);
    try {
        // Mocked store files: one whose newest cell is past TTL, one still within it.
        StoreFile expiredFile = createFile(), notExpiredFile = createFile();
        when(expiredFile.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl - 1);
        when(notExpiredFile.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl + 1);
        List<StoreFile> expired = Lists.newArrayList(expiredFile, expiredFile);
        List<StoreFile> notExpired = Lists.newArrayList(notExpiredFile, notExpiredFile);
        // A stripe holding both an expired and a non-expired file.
        List<StoreFile> mixed = Lists.newArrayList(expiredFile, notExpiredFile);
        StripeCompactionPolicy policy = createPolicy(HBaseConfiguration.create(), defaultSplitSize, defaultSplitCount, defaultInitialCount, true);
        // Merge expired if there are eligible stripes.
        StripeCompactionPolicy.StripeInformationProvider si = createStripesWithFiles(expired, expired, expired);
        verifyWholeStripesCompaction(policy, si, 0, 2, null, 1, Long.MAX_VALUE, false);
        // Don't merge if nothing expired.
        si = createStripesWithFiles(notExpired, notExpired, notExpired);
        assertNull(policy.selectCompaction(si, al(), false));
        // Merge one expired stripe with next.
        si = createStripesWithFiles(notExpired, expired, notExpired);
        verifyWholeStripesCompaction(policy, si, 1, 2, null, 1, Long.MAX_VALUE, false);
        // Merge the biggest run (stripes 3-4) out of multiple options.
        si = createStripesWithFiles(notExpired, expired, notExpired, expired, expired, notExpired);
        verifyWholeStripesCompaction(policy, si, 3, 4, null, 1, Long.MAX_VALUE, false);
        // Stripe with a subset of expired files is not merged.
        si = createStripesWithFiles(expired, expired, notExpired, expired, mixed);
        verifyWholeStripesCompaction(policy, si, 0, 1, null, 1, Long.MAX_VALUE, false);
    } finally {
        // Always restore the real clock so later tests are unaffected.
        EnvironmentEdgeManager.reset();
    }
}
Also used : StoreFile(org.apache.hadoop.hbase.regionserver.StoreFile) ManualEnvironmentEdge(org.apache.hadoop.hbase.util.ManualEnvironmentEdge) StripeInformationProvider(org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider) Test(org.junit.Test)

Example 12 with ManualEnvironmentEdge

use of org.apache.hadoop.hbase.util.ManualEnvironmentEdge in project hbase by apache.

In class TestStripeCompactionPolicy, method testMergeExpiredStripes:

@SuppressWarnings("unchecked")
@Test
public void testMergeExpiredStripes() throws Exception {
    // HBASE-11397
    ManualEnvironmentEdge timeMachine = new ManualEnvironmentEdge();
    long now = defaultTtl + 2;
    timeMachine.setValue(now);
    EnvironmentEdgeManager.injectEdge(timeMachine);
    try {
        // One mock file whose newest cell is beyond TTL, one still inside it.
        StoreFile fileBeyondTtl = createFile();
        StoreFile fileWithinTtl = createFile();
        when(fileBeyondTtl.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl - 1);
        when(fileWithinTtl.getReader().getMaxTimestamp()).thenReturn(now - defaultTtl + 1);
        List<StoreFile> expired = Lists.newArrayList(fileBeyondTtl, fileBeyondTtl);
        List<StoreFile> notExpired = Lists.newArrayList(fileWithinTtl, fileWithinTtl);
        StripeCompactionPolicy policy = createPolicy(HBaseConfiguration.create(), defaultSplitSize, defaultSplitCount, defaultInitialCount, true);
        // All three stripes expired: expect one merge spanning stripes 0..2.
        StripeCompactionPolicy.StripeInformationProvider si = createStripesWithFiles(expired, expired, expired);
        verifyMergeCompatcion(policy, si, 0, 2);
        // Only the adjacent expired stripes (3 and 4) should merge into one.
        si = createStripesWithFiles(notExpired, expired, notExpired, expired, expired, notExpired);
        verifyMergeCompatcion(policy, si, 3, 4);
    } finally {
        // Restore the real clock for subsequent tests.
        EnvironmentEdgeManager.reset();
    }
}
Also used : StoreFile(org.apache.hadoop.hbase.regionserver.StoreFile) ManualEnvironmentEdge(org.apache.hadoop.hbase.util.ManualEnvironmentEdge) StripeInformationProvider(org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider) Test(org.junit.Test)

Example 13 with ManualEnvironmentEdge

use of org.apache.hadoop.hbase.util.ManualEnvironmentEdge in project hbase by apache.

In class AbstractTestDateTieredCompactionPolicy, method compactEquals:

/**
 * Runs the store's date-tiered compaction policy with the clock fixed at
 * {@code now} and asserts that the selected files and boundaries match
 * the expected values (compared in string form for readable failures).
 */
protected void compactEquals(long now, ArrayList<StoreFile> candidates, long[] expectedFileSizes, long[] expectedBoundaries, boolean isMajor, boolean toCompact) throws IOException {
    ManualEnvironmentEdge clock = new ManualEnvironmentEdge();
    EnvironmentEdgeManager.injectEdge(clock);
    clock.setValue(now);
    DateTieredCompactionPolicy policy = (DateTieredCompactionPolicy) store.storeEngine.getCompactionPolicy();
    DateTieredCompactionRequest request;
    if (isMajor) {
        // Major path: flag every candidate as major-compacted before selection.
        for (StoreFile candidate : candidates) {
            ((MockStoreFile) candidate).setIsMajor(true);
        }
        assertEquals(toCompact, policy.shouldPerformMajorCompaction(candidates));
        request = (DateTieredCompactionRequest) policy.selectMajorCompaction(candidates);
    } else {
        assertEquals(toCompact, policy.needsCompaction(candidates, ImmutableList.<StoreFile>of()));
        request = (DateTieredCompactionRequest) policy.selectMinorCompaction(candidates, false, false);
    }
    List<StoreFile> selected = Lists.newArrayList(request.getFiles());
    assertEquals(Arrays.toString(expectedFileSizes), Arrays.toString(getSizes(selected)));
    assertEquals(Arrays.toString(expectedBoundaries), Arrays.toString(request.getBoundaries().toArray()));
}
Also used : DateTieredCompactionRequest(org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionRequest) DateTieredCompactionPolicy(org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionPolicy) ManualEnvironmentEdge(org.apache.hadoop.hbase.util.ManualEnvironmentEdge)

Example 14 with ManualEnvironmentEdge

use of org.apache.hadoop.hbase.util.ManualEnvironmentEdge in project hbase by apache.

In class TestHRegion, method testAppendTimestampsAreMonotonic:

@Test
public void testAppendTimestampsAreMonotonic() throws IOException {
    HRegion region = initHRegion(tableName, method, CONF, fam1);
    // Pin the clock at t=10 so the first append receives a known timestamp.
    ManualEnvironmentEdge clock = new ManualEnvironmentEdge();
    EnvironmentEdgeManager.injectEdge(clock);
    clock.setValue(10);
    Append append = new Append(row);
    append.setDurability(Durability.SKIP_WAL);
    append.add(fam1, qual1, qual1);
    region.append(append);
    Cell cell = region.get(new Get(row)).getColumnLatestCell(fam1, qual1);
    assertNotNull(cell);
    assertEquals(cell.getTimestamp(), 10L);
    // Rewind the clock; the second append must still get a later timestamp (11).
    clock.setValue(1);
    region.append(append);
    cell = region.get(new Get(row)).getColumnLatestCell(fam1, qual1);
    assertEquals(cell.getTimestamp(), 11L);
    // After two appends the stored value is qual1 concatenated with itself.
    byte[] expected = new byte[qual1.length * 2];
    System.arraycopy(qual1, 0, expected, 0, qual1.length);
    System.arraycopy(qual1, 0, expected, qual1.length, qual1.length);
    assertTrue(Bytes.equals(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), expected, 0, expected.length));
}
Also used : Append(org.apache.hadoop.hbase.client.Append) Get(org.apache.hadoop.hbase.client.Get) Cell(org.apache.hadoop.hbase.Cell) ManualEnvironmentEdge(org.apache.hadoop.hbase.util.ManualEnvironmentEdge) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)

Example 15 with ManualEnvironmentEdge

use of org.apache.hadoop.hbase.util.ManualEnvironmentEdge in project hbase by apache.

In class TestHRegion, method testCheckAndMutateTimestampsAreMonotonic:

@Test
public void testCheckAndMutateTimestampsAreMonotonic() throws IOException {
    HRegion region = initHRegion(tableName, method, CONF, fam1);
    ManualEnvironmentEdge clock = new ManualEnvironmentEdge();
    EnvironmentEdgeManager.injectEdge(clock);
    clock.setValue(10);
    // Seed the row with qual1 at t=10.
    Put seed = new Put(row);
    seed.setDurability(Durability.SKIP_WAL);
    seed.addColumn(fam1, qual1, qual1);
    region.put(seed);
    Result result = region.get(new Get(row));
    Cell cell = result.getColumnLatestCell(fam1, qual1);
    assertNotNull(cell);
    assertEquals(cell.getTimestamp(), 10L);
    // Move the clock backwards before the conditional update.
    clock.setValue(1);
    Put update = new Put(row);
    update.setDurability(Durability.SKIP_WAL);
    update.addColumn(fam1, qual1, qual2);
    region.checkAndMutate(row, fam1, qual1, CompareOp.EQUAL, new BinaryComparator(qual1), update, false);
    result = region.get(new Get(row));
    cell = result.getColumnLatestCell(fam1, qual1);
    // Value changed to qual2, and the timestamp did not go backwards (still 10).
    assertEquals(cell.getTimestamp(), 10L);
    assertTrue(Bytes.equals(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), qual2, 0, qual2.length));
}
Also used : Get(org.apache.hadoop.hbase.client.Get) Cell(org.apache.hadoop.hbase.Cell) ManualEnvironmentEdge(org.apache.hadoop.hbase.util.ManualEnvironmentEdge) Put(org.apache.hadoop.hbase.client.Put) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)

Aggregations

ManualEnvironmentEdge (org.apache.hadoop.hbase.util.ManualEnvironmentEdge)18 Test (org.junit.Test)14 Cell (org.apache.hadoop.hbase.Cell)4 Get (org.apache.hadoop.hbase.client.Get)4 Result (org.apache.hadoop.hbase.client.Result)4 Configuration (org.apache.hadoop.conf.Configuration)2 ScheduledChore (org.apache.hadoop.hbase.ScheduledChore)2 Stoppable (org.apache.hadoop.hbase.Stoppable)2 Put (org.apache.hadoop.hbase.client.Put)2 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)2 StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile)2 StripeInformationProvider (org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider)2 BeforeClass (org.junit.BeforeClass)2 InetSocketAddress (java.net.InetSocketAddress)1 ArrayList (java.util.ArrayList)1 ServerName (org.apache.hadoop.hbase.ServerName)1 TableName (org.apache.hadoop.hbase.TableName)1 Append (org.apache.hadoop.hbase.client.Append)1 Increment (org.apache.hadoop.hbase.client.Increment)1 RowMutations (org.apache.hadoop.hbase.client.RowMutations)1