Example 26 with Increment

Use of org.apache.hadoop.hbase.client.Increment in project hbase by apache.

From class TestResettingCounters, method testResettingCounters:

@Test
public void testResettingCounters() throws Exception {
    HBaseTestingUtil htu = new HBaseTestingUtil();
    Configuration conf = htu.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    byte[] table = Bytes.toBytes(name.getMethodName());
    byte[][] families = new byte[][] { Bytes.toBytes("family1"), Bytes.toBytes("family2"), Bytes.toBytes("family3") };
    int numQualifiers = 10;
    byte[][] qualifiers = new byte[numQualifiers][];
    for (int i = 0; i < numQualifiers; i++) qualifiers[i] = Bytes.toBytes("qf" + i);
    int numRows = 10;
    byte[][] rows = new byte[numRows][];
    for (int i = 0; i < numRows; i++) rows[i] = Bytes.toBytes("r" + i);
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(table));
    for (byte[] family : families) {
        builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    TableDescriptor tableDescriptor = builder.build();
    RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
    String testDir = htu.getDataTestDir() + "/TestResettingCounters/";
    Path path = new Path(testDir);
    if (fs.exists(path)) {
        if (!fs.delete(path, true)) {
            throw new IOException("Failed delete of " + path);
        }
    }
    HRegion region = HBaseTestingUtil.createRegionAndWAL(hri, path, conf, tableDescriptor);
    try {
        Increment odd = new Increment(rows[0]);
        odd.setDurability(Durability.SKIP_WAL);
        Increment even = new Increment(rows[0]);
        even.setDurability(Durability.SKIP_WAL);
        Increment all = new Increment(rows[0]);
        all.setDurability(Durability.SKIP_WAL);
        for (int i = 0; i < numQualifiers; i++) {
            if (i % 2 == 0)
                even.addColumn(families[0], qualifiers[i], 1);
            else
                odd.addColumn(families[0], qualifiers[i], 1);
            all.addColumn(families[0], qualifiers[i], 1);
        }
        // increment odd qualifiers 5 times and flush
        for (int i = 0; i < 5; i++) region.increment(odd, HConstants.NO_NONCE, HConstants.NO_NONCE);
        region.flush(true);
        // increment even qualifiers 5 times
        for (int i = 0; i < 5; i++) region.increment(even, HConstants.NO_NONCE, HConstants.NO_NONCE);
        // increment all qualifiers, should have value=6 for all
        Result result = region.increment(all, HConstants.NO_NONCE, HConstants.NO_NONCE);
        assertEquals(numQualifiers, result.size());
        Cell[] kvs = result.rawCells();
        for (int i = 0; i < kvs.length; i++) {
            System.out.println(kvs[i].toString());
            assertTrue(CellUtil.matchingQualifier(kvs[i], qualifiers[i]));
            assertEquals(6, Bytes.toLong(CellUtil.cloneValue(kvs[i])));
        }
    } finally {
        HBaseTestingUtil.closeRegionAndWAL(region);
    }
}
Also used: Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) IOException(java.io.IOException) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Result(org.apache.hadoop.hbase.client.Result) FileSystem(org.apache.hadoop.fs.FileSystem) Increment(org.apache.hadoop.hbase.client.Increment) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
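
The region-level test above drives HRegion.increment and a flush directly. As a point of comparison, here is a minimal client-side sketch of the same idea, not taken from the HBase sources: it assumes an open Connection named connection, an Admin named admin, and an existing table "counters" with column family "family1" (imports as in the example above). The flush between the two increments must not reset the stored counter.

@Test
public void testCounterSurvivesFlush() throws Exception {
    // Hypothetical names: the "counters" table and "family1" family are assumptions, not from the test above.
    TableName tn = TableName.valueOf("counters");
    byte[] family = Bytes.toBytes("family1");
    byte[] qualifier = Bytes.toBytes("qf0");
    byte[] row = Bytes.toBytes("r0");
    try (Table table = connection.getTable(tn)) {
        // First increment: the counter becomes 1.
        table.increment(new Increment(row).addColumn(family, qualifier, 1));
        // Flush the memstore to an HFile; this should not reset the counter.
        admin.flush(tn);
        // Second increment: the flushed value is still read, so the counter becomes 2.
        Result result = table.increment(new Increment(row).addColumn(family, qualifier, 1));
        assertEquals(2, Bytes.toLong(result.getValue(family, qualifier)));
    }
}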

Example 27 with Increment

Use of org.apache.hadoop.hbase.client.Increment in project hbase by apache.

From class TestRegionObserverInterface, method testPreWALAppendHook:

// called from testPreWALAppendIsWrittenToWAL
private void testPreWALAppendHook(Table table, TableName tableName) throws IOException {
    int expectedCalls = 0;
    String[] methodArray = new String[1];
    methodArray[0] = "getCtPreWALAppend";
    Object[] resultArray = new Object[1];
    Put p = new Put(ROW);
    p.addColumn(A, A, A);
    table.put(p);
    resultArray[0] = ++expectedCalls;
    verifyMethodResult(SimpleRegionObserver.class, methodArray, tableName, resultArray);
    Append a = new Append(ROW);
    a.addColumn(B, B, B);
    table.append(a);
    resultArray[0] = ++expectedCalls;
    verifyMethodResult(SimpleRegionObserver.class, methodArray, tableName, resultArray);
    Increment i = new Increment(ROW);
    i.addColumn(C, C, 1);
    table.increment(i);
    resultArray[0] = ++expectedCalls;
    verifyMethodResult(SimpleRegionObserver.class, methodArray, tableName, resultArray);
    Delete d = new Delete(ROW);
    table.delete(d);
    resultArray[0] = ++expectedCalls;
    verifyMethodResult(SimpleRegionObserver.class, methodArray, tableName, resultArray);
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) Append(org.apache.hadoop.hbase.client.Append) Increment(org.apache.hadoop.hbase.client.Increment) Put(org.apache.hadoop.hbase.client.Put)

Example 28 with Increment

Use of org.apache.hadoop.hbase.client.Increment in project hbase by apache.

From class TestPostIncrementAndAppendBeforeWAL, method testChangeCellWithNotExistColumnFamily:

@Test
public void testChangeCellWithNotExistColumnFamily() throws Exception {
    TableName tableName = TableName.valueOf(name.getMethodName());
    createTableWithCoprocessor(tableName, ChangeCellWithNotExistColumnFamilyObserver.class.getName());
    try (Table table = connection.getTable(tableName)) {
        try {
            Increment increment = new Increment(ROW).addColumn(CF1_BYTES, CQ1, 1);
            table.increment(increment);
            fail("should throw NoSuchColumnFamilyException");
        } catch (Exception e) {
            assertTrue(e instanceof NoSuchColumnFamilyException);
        }
        try {
            Append append = new Append(ROW).addColumn(CF1_BYTES, CQ2, VALUE);
            table.append(append);
            fail("should throw NoSuchColumnFamilyException");
        } catch (Exception e) {
            assertTrue(e instanceof NoSuchColumnFamilyException);
        }
    }
}
Also used: TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) Append(org.apache.hadoop.hbase.client.Append) Increment(org.apache.hadoop.hbase.client.Increment) NoSuchColumnFamilyException(org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException) Test(org.junit.Test)
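
The observer in this test rewrites the incremented cell to a family the table does not declare, and the test asserts that the failure surfaces as a NoSuchColumnFamilyException. The same exception is expected when a plain client targets an undeclared family with no coprocessor involved; the following is a hedged sketch, not from the HBase sources, assuming a Connection named connection and a table "my_table" whose only declared family is "cf1":

// Hypothetical sketch: incrementing a column family the table does not have.
try (Table table = connection.getTable(TableName.valueOf("my_table"))) {
    Increment increment = new Increment(Bytes.toBytes("row1"))
        .addColumn(Bytes.toBytes("no_such_family"), Bytes.toBytes("q"), 1);
    try {
        table.increment(increment);
        fail("should throw NoSuchColumnFamilyException");
    } catch (Exception e) {
        // Mirrors the assertion style of the test above.
        assertTrue(e instanceof NoSuchColumnFamilyException);
    }
}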

Example 29 with Increment

Use of org.apache.hadoop.hbase.client.Increment in project hbase by apache.

From class TestIncrementAndAppendWithNullResult, method testIncrement:

@Test
public void testIncrement() throws Exception {
    testAppend(new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 10L));
    testAppend(new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 10L).setReturnResults(false));
}
Also used: Increment(org.apache.hadoop.hbase.client.Increment) Test(org.junit.Test)
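
The second call in this test chains setReturnResults(false), which tells the server to apply the increment without shipping the resulting cells back. A minimal sketch of the observable difference, not from the HBase sources, assuming a Connection named connection and a table "t" with family "f" (imports as in the examples above); the returned Result is expected to be empty in that case:

try (Table table = connection.getTable(TableName.valueOf("t"))) {
    byte[] row = Bytes.toBytes("row");
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");
    // Default behaviour: the Result carries the new counter value.
    Result withValue = table.increment(new Increment(row).addColumn(family, qualifier, 1));
    assertEquals(1, Bytes.toLong(withValue.getValue(family, qualifier)));
    // setReturnResults(false): the increment is applied, but no cells are shipped back.
    Result noValue = table.increment(
        new Increment(row).addColumn(family, qualifier, 1).setReturnResults(false));
    assertTrue(noValue.isEmpty());
    // The stored counter still advanced; read it back with a Get.
    Result stored = table.get(new Get(row).addColumn(family, qualifier));
    assertEquals(2, Bytes.toLong(stored.getValue(family, qualifier)));
}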

Example 30 with Increment

Use of org.apache.hadoop.hbase.client.Increment in project hbase by apache.

From class TestRegionObserverInterface, method testIncrementHook:

@Test
public void testIncrementHook() throws IOException {
    final TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + "." + name.getMethodName());
    Table table = util.createTable(tableName, new byte[][] { A, B, C });
    try {
        Increment inc = new Increment(Bytes.toBytes(0));
        inc.addColumn(A, A, 1);
        verifyMethodResult(SimpleRegionObserver.class, new String[] { "hadPreIncrement", "hadPostIncrement", "hadPreIncrementAfterRowLock", "hadPreBatchMutate", "hadPostBatchMutate", "hadPostBatchMutateIndispensably" }, tableName, new Boolean[] { false, false, false, false, false, false });
        table.increment(inc);
        verifyMethodResult(SimpleRegionObserver.class, new String[] { "hadPreIncrement", "hadPostIncrement", "hadPreIncrementAfterRowLock", "hadPreBatchMutate", "hadPostBatchMutate", "hadPostBatchMutateIndispensably" }, tableName, new Boolean[] { true, true, true, true, true, true });
    } finally {
        util.deleteTable(tableName);
        table.close();
    }
}
Also used: TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) Increment(org.apache.hadoop.hbase.client.Increment) Test(org.junit.Test)

Aggregations

Increment (org.apache.hadoop.hbase.client.Increment): 81 usages
Test (org.junit.Test): 42 usages
Put (org.apache.hadoop.hbase.client.Put): 31 usages
Append (org.apache.hadoop.hbase.client.Append): 25 usages
Result (org.apache.hadoop.hbase.client.Result): 25 usages
Delete (org.apache.hadoop.hbase.client.Delete): 21 usages
Get (org.apache.hadoop.hbase.client.Get): 19 usages
IOException (java.io.IOException): 16 usages
TableName (org.apache.hadoop.hbase.TableName): 15 usages
Table (org.apache.hadoop.hbase.client.Table): 15 usages
ArrayList (java.util.ArrayList): 14 usages
Cell (org.apache.hadoop.hbase.Cell): 11 usages
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 11 usages
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 9 usages
Mutation (org.apache.hadoop.hbase.client.Mutation): 9 usages
RowMutations (org.apache.hadoop.hbase.client.RowMutations): 9 usages
List (java.util.List): 8 usages
Map (java.util.Map): 8 usages
Scan (org.apache.hadoop.hbase.client.Scan): 7 usages
KeyValue (org.apache.hadoop.hbase.KeyValue): 5 usages
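
Across these examples the recurring pattern is the same: build an Increment for a row, add one or more columns with a delta, send it through Table.increment, and decode the returned cells with Bytes.toLong. A minimal summary sketch of that pattern, not tied to any test above, assuming a Connection named connection and a table "metrics" with family "m" (table, family, and column names are placeholders):

try (Table table = connection.getTable(TableName.valueOf("metrics"))) {
    byte[] row = Bytes.toBytes("page:/index.html");
    byte[] family = Bytes.toBytes("m");
    // Bump two counters on the same row with a single, per-row-atomic Increment.
    Increment increment = new Increment(row)
        .addColumn(family, Bytes.toBytes("views"), 1)
        .addColumn(family, Bytes.toBytes("bytes_served"), 2048);
    Result result = table.increment(increment);
    // Counter values come back as 8-byte longs.
    long views = Bytes.toLong(result.getValue(family, Bytes.toBytes("views")));
    // For a single column there is also a shortcut that returns the new value directly.
    long next = table.incrementColumnValue(row, family, Bytes.toBytes("views"), 1);
    System.out.println("views=" + views + ", after shortcut=" + next);
}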