
Example 6 with IncrementingEnvironmentEdge

Use of org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge in project hbase by apache.

From the class TestRegionObserverBypass, method testMulti:

/**
 * Test various multiput operations.
 * @throws Exception
 */
@Test
public void testMulti() throws Exception {
    // ensure that server time increments every time we do an operation, otherwise
    // previous deletes will eclipse successive puts having the same timestamp
    EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
    Table t = util.getConnection().getTable(tableName);
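    // This test registers a RegionObserver that bypasses puts touching the 'test' family,
    // which is why (per the expected counts below) rows written only through that family
    // are expected to hold 0 cells, while puts to the 'dummy' family land normally.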
    List<Put> puts = new ArrayList<>();
    Put p = new Put(row1);
    p.addColumn(dummy, dummy, dummy);
    puts.add(p);
    p = new Put(row2);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    p = new Put(row3);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    // before HBASE-4331, this would throw an exception
    t.put(puts);
    checkRowAndDelete(t, row1, 1);
    checkRowAndDelete(t, row2, 0);
    checkRowAndDelete(t, row3, 0);
    puts.clear();
    p = new Put(row1);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    p = new Put(row2);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    p = new Put(row3);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    // before HBASE-4331, this would throw an exception
    t.put(puts);
    checkRowAndDelete(t, row1, 0);
    checkRowAndDelete(t, row2, 0);
    checkRowAndDelete(t, row3, 0);
    puts.clear();
    p = new Put(row1);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    p = new Put(row2);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    p = new Put(row3);
    p.addColumn(dummy, dummy, dummy);
    puts.add(p);
    // this worked fine even before HBASE-4331
    t.put(puts);
    checkRowAndDelete(t, row1, 0);
    checkRowAndDelete(t, row2, 0);
    checkRowAndDelete(t, row3, 1);
    puts.clear();
    p = new Put(row1);
    p.addColumn(dummy, dummy, dummy);
    puts.add(p);
    p = new Put(row2);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    p = new Put(row3);
    p.addColumn(dummy, dummy, dummy);
    puts.add(p);
    // this worked fine even before HBASE-4331
    t.put(puts);
    checkRowAndDelete(t, row1, 1);
    checkRowAndDelete(t, row2, 0);
    checkRowAndDelete(t, row3, 1);
    puts.clear();
    p = new Put(row1);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    p = new Put(row2);
    p.addColumn(dummy, dummy, dummy);
    puts.add(p);
    p = new Put(row3);
    p.addColumn(test, dummy, dummy);
    puts.add(p);
    // before HBASE-4331, this would throw an exception
    t.put(puts);
    checkRowAndDelete(t, row1, 0);
    checkRowAndDelete(t, row2, 1);
    checkRowAndDelete(t, row3, 0);
    t.close();
    EnvironmentEdgeManager.reset();
}
Also used: Table (org.apache.hadoop.hbase.client.Table), ArrayList (java.util.ArrayList), IncrementingEnvironmentEdge (org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge), Put (org.apache.hadoop.hbase.client.Put), Test (org.junit.Test)
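
The checkRowAndDelete helper is not reproduced on this page. Judging from how it is called above (table, row, expected cell count), a minimal sketch of its likely shape is given below; the exact body in TestRegionObserverBypass may differ.

// Hypothetical sketch: fetch the row, assert how many cells survived the
// coprocessor's bypass logic, then delete the row so the next batch of puts
// starts from a clean slate.
private void checkRowAndDelete(Table t, byte[] row, int count) throws IOException {
    Get g = new Get(row);
    Result r = t.get(g);
    assertEquals(count, r.size());
    Delete d = new Delete(row);
    t.delete(d);
}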

Example 7 with IncrementingEnvironmentEdge

Use of org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge in project hbase by apache.

From the class TestHRegion, method doTestDelete_AndPostInsert:

public void doTestDelete_AndPostInsert(Delete delete) throws IOException, InterruptedException {
    this.region = initHRegion(tableName, method, CONF, fam1);
    try {
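        // As in testMulti above, inject an incrementing clock so the Delete issued below
        // cannot share a timestamp with, and thereby mask, the Put that follows it.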
        EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
        Put put = new Put(row);
        put.addColumn(fam1, qual1, value1);
        region.put(put);
        // now delete the value:
        region.delete(delete);
        // ok put data:
        put = new Put(row);
        put.addColumn(fam1, qual1, value2);
        region.put(put);
        // ok get:
        Get get = new Get(row);
        get.addColumn(fam1, qual1);
        Result r = region.get(get);
        assertEquals(1, r.size());
        assertArrayEquals(value2, r.getValue(fam1, qual1));
        // next:
        Scan scan = new Scan(row);
        scan.addColumn(fam1, qual1);
        InternalScanner s = region.getScanner(scan);
        List<Cell> results = new ArrayList<>();
        assertEquals(false, s.next(results));
        assertEquals(1, results.size());
        Cell kv = results.get(0);
        assertArrayEquals(value2, CellUtil.cloneValue(kv));
        assertArrayEquals(fam1, CellUtil.cloneFamily(kv));
        assertArrayEquals(qual1, CellUtil.cloneQualifier(kv));
        assertArrayEquals(row, CellUtil.cloneRow(kv));
    } finally {
        HBaseTestingUtility.closeRegionAndWAL(this.region);
        this.region = null;
    }
}
Also used: Get (org.apache.hadoop.hbase.client.Get), ArrayList (java.util.ArrayList), Scan (org.apache.hadoop.hbase.client.Scan), Cell (org.apache.hadoop.hbase.Cell), IncrementingEnvironmentEdge (org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge), Put (org.apache.hadoop.hbase.client.Put), Result (org.apache.hadoop.hbase.client.Result)
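
Both examples follow the same pattern: install an IncrementingEnvironmentEdge before timestamp-sensitive operations and restore the default edge afterwards (testMulti resets explicitly; the TestHRegion example presumably leaves that to the test class's teardown). A minimal sketch of the idiom, with the test body elided:

// Sketch of the injection/reset idiom used in the examples above.
EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
try {
    // ... operations whose cell timestamps must be strictly increasing ...
} finally {
    // Restore the default clock so later tests observe real wall-clock time.
    EnvironmentEdgeManager.reset();
}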

Aggregations

IncrementingEnvironmentEdge (org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge): 7 usages
Put (org.apache.hadoop.hbase.client.Put): 4 usages
Get (org.apache.hadoop.hbase.client.Get): 3 usages
Result (org.apache.hadoop.hbase.client.Result): 3 usages
Test (org.junit.Test): 3 usages
ArrayList (java.util.ArrayList): 2 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 2 usages
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 2 usages
KeyValue (org.apache.hadoop.hbase.KeyValue): 2 usages
BeforeClass (org.junit.BeforeClass): 2 usages
ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag): 1 usage
Cell (org.apache.hadoop.hbase.Cell): 1 usage
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 1 usage
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 1 usage
Delete (org.apache.hadoop.hbase.client.Delete): 1 usage
Increment (org.apache.hadoop.hbase.client.Increment): 1 usage
Scan (org.apache.hadoop.hbase.client.Scan): 1 usage
Table (org.apache.hadoop.hbase.client.Table): 1 usage
CompactionConfiguration (org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration): 1 usage