Example usage of org.apache.hadoop.hbase.client.Increment in the Apache HBase project.
From the class TestResettingCounters, method testResettingCounters.
@Test
public void testResettingCounters() throws Exception {
  HBaseTestingUtil htu = new HBaseTestingUtil();
  Configuration conf = htu.getConfiguration();
  FileSystem fs = FileSystem.get(conf);
  byte[] table = Bytes.toBytes(name.getMethodName());
  byte[][] families = new byte[][] { Bytes.toBytes("family1"), Bytes.toBytes("family2"), Bytes.toBytes("family3") };
  int numQualifiers = 10;
  byte[][] qualifiers = new byte[numQualifiers][];
  for (int i = 0; i < numQualifiers; i++) {
    qualifiers[i] = Bytes.toBytes("qf" + i);
  }
  int numRows = 10;
  byte[][] rows = new byte[numRows][];
  for (int i = 0; i < numRows; i++) {
    rows[i] = Bytes.toBytes("r" + i);
  }
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(table));
  for (byte[] family : families) {
    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
  }
  TableDescriptor tableDescriptor = builder.build();
  RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName()).build();
  // Start from a clean test directory so leftovers from a prior run cannot interfere.
  String testDir = htu.getDataTestDir() + "/TestResettingCounters/";
  Path path = new Path(testDir);
  if (fs.exists(path)) {
    if (!fs.delete(path, true)) {
      throw new IOException("Failed delete of " + path);
    }
  }
  HRegion region = HBaseTestingUtil.createRegionAndWAL(hri, path, conf, tableDescriptor);
  try {
    Increment odd = new Increment(rows[0]);
    odd.setDurability(Durability.SKIP_WAL);
    Increment even = new Increment(rows[0]);
    even.setDurability(Durability.SKIP_WAL);
    Increment all = new Increment(rows[0]);
    all.setDurability(Durability.SKIP_WAL);
    for (int i = 0; i < numQualifiers; i++) {
      if (i % 2 == 0) {
        even.addColumn(families[0], qualifiers[i], 1);
      } else {
        odd.addColumn(families[0], qualifiers[i], 1);
      }
      all.addColumn(families[0], qualifiers[i], 1);
    }
    // increment odd qualifiers 5 times and flush
    for (int i = 0; i < 5; i++) {
      region.increment(odd, HConstants.NO_NONCE, HConstants.NO_NONCE);
    }
    region.flush(true);
    // increment even qualifiers 5 times
    for (int i = 0; i < 5; i++) {
      region.increment(even, HConstants.NO_NONCE, HConstants.NO_NONCE);
    }
    // increment all qualifiers; counters must survive the flush, so every
    // qualifier should now read 6 (5 prior increments + this one).
    Result result = region.increment(all, HConstants.NO_NONCE, HConstants.NO_NONCE);
    assertEquals(numQualifiers, result.size());
    Cell[] kvs = result.rawCells();
    for (int i = 0; i < kvs.length; i++) {
      System.out.println(kvs[i].toString());
      assertTrue(CellUtil.matchingQualifier(kvs[i], qualifiers[i]));
      assertEquals(6, Bytes.toLong(CellUtil.cloneValue(kvs[i])));
    }
  } finally {
    // Close exactly once; the original code repeated this call after the
    // finally block, double-closing the region and its WAL.
    HBaseTestingUtil.closeRegionAndWAL(region);
  }
}
Example usage of org.apache.hadoop.hbase.client.Increment in the Apache HBase project.
From the class TestRegionObserverInterface, method testPreWALAppendHook.
// called from testPreWALAppendIsWrittenToWAL
private void testPreWALAppendHook(Table table, TableName tableName) throws IOException {
  // The observer exposes a single counter; verify it advances by exactly one
  // for each mutation type (put, append, increment, delete).
  String[] counters = new String[] { "getCtPreWALAppend" };
  int calls = 0;

  Put put = new Put(ROW);
  put.addColumn(A, A, A);
  table.put(put);
  calls++;
  verifyMethodResult(SimpleRegionObserver.class, counters, tableName, new Object[] { calls });

  Append append = new Append(ROW);
  append.addColumn(B, B, B);
  table.append(append);
  calls++;
  verifyMethodResult(SimpleRegionObserver.class, counters, tableName, new Object[] { calls });

  Increment increment = new Increment(ROW);
  increment.addColumn(C, C, 1);
  table.increment(increment);
  calls++;
  verifyMethodResult(SimpleRegionObserver.class, counters, tableName, new Object[] { calls });

  Delete delete = new Delete(ROW);
  table.delete(delete);
  calls++;
  verifyMethodResult(SimpleRegionObserver.class, counters, tableName, new Object[] { calls });
}
Example usage of org.apache.hadoop.hbase.client.Increment in the Apache HBase project.
From the class TestPostIncrementAndAppendBeforeWAL, method testChangeCellWithNotExistColumnFamily.
@Test
public void testChangeCellWithNotExistColumnFamily() throws Exception {
  TableName tableName = TableName.valueOf(name.getMethodName());
  // The coprocessor rewrites cells to a column family the table does not have,
  // so both mutations below must be rejected by the server.
  createTableWithCoprocessor(tableName, ChangeCellWithNotExistColumnFamilyObserver.class.getName());
  try (Table table = connection.getTable(tableName)) {
    try {
      table.increment(new Increment(ROW).addColumn(CF1_BYTES, CQ1, 1));
      fail("should throw NoSuchColumnFamilyException");
    } catch (Exception e) {
      assertTrue(e instanceof NoSuchColumnFamilyException);
    }
    try {
      table.append(new Append(ROW).addColumn(CF1_BYTES, CQ2, VALUE));
      fail("should throw NoSuchColumnFamilyException");
    } catch (Exception e) {
      assertTrue(e instanceof NoSuchColumnFamilyException);
    }
  }
}
Example usage of org.apache.hadoop.hbase.client.Increment in the Apache HBase project.
From the class TestIncrementAndAppendWithNullResult, method testIncrement.
@Test
public void testIncrement() throws Exception {
  // Same increment exercised twice: once with results returned (the default)
  // and once with result returning suppressed.
  Increment withResult = new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 10L);
  testAppend(withResult);

  Increment withoutResult = new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 10L);
  withoutResult.setReturnResults(false);
  testAppend(withoutResult);
}
Example usage of org.apache.hadoop.hbase.client.Increment in the Apache HBase project.
From the class TestRegionObserverInterface, method testIncrementHook.
@Test
public void testIncrementHook() throws IOException {
  final TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + "." + name.getMethodName());
  Table table = util.createTable(tableName, new byte[][] { A, B, C });
  // Observer flags checked before and after the increment; declared once so the
  // two verifications cannot drift apart (they were duplicated inline before).
  final String[] hooks = new String[] { "hadPreIncrement", "hadPostIncrement", "hadPreIncrementAfterRowLock",
    "hadPreBatchMutate", "hadPostBatchMutate", "hadPostBatchMutateIndispensably" };
  try {
    Increment inc = new Increment(Bytes.toBytes(0));
    inc.addColumn(A, A, 1);
    // No hooks should have fired yet.
    verifyMethodResult(SimpleRegionObserver.class, hooks, tableName,
      new Boolean[] { false, false, false, false, false, false });
    table.increment(inc);
    // A single increment must trip every hook.
    verifyMethodResult(SimpleRegionObserver.class, hooks, tableName,
      new Boolean[] { true, true, true, true, true, true });
  } finally {
    // Close the client handle before deleting the table it points at.
    table.close();
    util.deleteTable(tableName);
  }
}
Aggregations