Example usage of org.apache.hadoop.hbase.client.Append in the Apache HBase project: class TestTags, method testTagsWithAppendAndIncrement.
@Test
public void testTagsWithAppendAndIncrement() throws Exception {
  TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
  byte[] f = Bytes.toBytes("f");
  byte[] q = Bytes.toBytes("q");
  byte[] row1 = Bytes.toBytes("r1");
  byte[] row2 = Bytes.toBytes("r2");
  TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of(f)).build();
  TEST_UTIL.getAdmin().createTable(tableDescriptor);
  // try-with-resources guarantees the Table is closed; the finally block resets
  // the coprocessor's static state even if an assertion fails mid-test.
  try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
    // Put a cell with a "visibility" tag, then do a plain column Increment on it.
    Put put = new Put(row1);
    byte[] v = Bytes.toBytes(2L);
    put.addColumn(f, q, v);
    put.setAttribute("visibility", Bytes.toBytes("tag1"));
    table.put(put);
    Increment increment = new Increment(row1);
    increment.addColumn(f, q, 1L);
    table.increment(increment);
    TestCoprocessorForTags.checkTagPresence = true;
    ResultScanner scanner = table.getScanner(new Scan());
    Result result = scanner.next();
    scanner.close(); // close promptly; asserts below only use the fetched Result
    KeyValue kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
    List<Tag> tags = TestCoprocessorForTags.tags;
    // Increment must carry the existing tag forward: value 2 + 1 = 3, tag1 preserved.
    assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
    assertEquals(1, tags.size());
    assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0))));
    TestCoprocessorForTags.checkTagPresence = false;
    TestCoprocessorForTags.tags = null;
    // Increment the same cell again, this time attaching a second tag via attribute.
    increment = new Increment(row1);
    increment.add(new KeyValue(row1, f, q, 1234L, v));
    increment.setAttribute("visibility", Bytes.toBytes("tag2"));
    table.increment(increment);
    TestCoprocessorForTags.checkTagPresence = true;
    scanner = table.getScanner(new Scan());
    result = scanner.next();
    scanner.close();
    kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
    tags = TestCoprocessorForTags.tags;
    // 3 + 2 = 5, and both tag1 (carried over) and tag2 (newly attached) present.
    assertEquals(5L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
    assertEquals(2, tags.size());
    // We cannot assume the ordering of tags
    List<String> tagValues = new ArrayList<>();
    for (Tag tag : tags) {
      tagValues.add(Bytes.toString(Tag.cloneValue(tag)));
    }
    assertTrue(tagValues.contains("tag1"));
    assertTrue(tagValues.contains("tag2"));
    TestCoprocessorForTags.checkTagPresence = false;
    TestCoprocessorForTags.tags = null;
    // Fresh row without any tag on the Put: only the Increment's tag should appear.
    put = new Put(row2);
    v = Bytes.toBytes(2L);
    put.addColumn(f, q, v);
    table.put(put);
    increment = new Increment(row2);
    increment.add(new KeyValue(row2, f, q, 1234L, v));
    increment.setAttribute("visibility", Bytes.toBytes("tag2"));
    table.increment(increment);
    TestCoprocessorForTags.checkTagPresence = true;
    scanner = table.getScanner(new Scan().withStartRow(row2));
    result = scanner.next();
    scanner.close();
    kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
    tags = TestCoprocessorForTags.tags;
    assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
    assertEquals(1, tags.size());
    assertEquals("tag2", Bytes.toString(Tag.cloneValue(tags.get(0))));
    TestCoprocessorForTags.checkTagPresence = false;
    TestCoprocessorForTags.tags = null;
    // Test Append: same tag-propagation expectations as Increment above.
    byte[] row3 = Bytes.toBytes("r3");
    put = new Put(row3);
    put.addColumn(f, q, Bytes.toBytes("a"));
    put.setAttribute("visibility", Bytes.toBytes("tag1"));
    table.put(put);
    Append append = new Append(row3);
    append.addColumn(f, q, Bytes.toBytes("b"));
    table.append(append);
    TestCoprocessorForTags.checkTagPresence = true;
    scanner = table.getScanner(new Scan().withStartRow(row3));
    result = scanner.next();
    scanner.close();
    kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
    tags = TestCoprocessorForTags.tags;
    assertEquals(1, tags.size());
    assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0))));
    TestCoprocessorForTags.checkTagPresence = false;
    TestCoprocessorForTags.tags = null;
    // Append with a tag attribute on a cell that already carries tag1.
    append = new Append(row3);
    append.add(new KeyValue(row3, f, q, 1234L, v));
    append.setAttribute("visibility", Bytes.toBytes("tag2"));
    table.append(append);
    TestCoprocessorForTags.checkTagPresence = true;
    scanner = table.getScanner(new Scan().withStartRow(row3));
    result = scanner.next();
    scanner.close();
    kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
    tags = TestCoprocessorForTags.tags;
    assertEquals(2, tags.size());
    // We cannot assume the ordering of tags
    tagValues.clear();
    for (Tag tag : tags) {
      tagValues.add(Bytes.toString(Tag.cloneValue(tag)));
    }
    assertTrue(tagValues.contains("tag1"));
    assertTrue(tagValues.contains("tag2"));
    TestCoprocessorForTags.checkTagPresence = false;
    TestCoprocessorForTags.tags = null;
    // Append onto an untagged Put: only the Append's tag should be present.
    byte[] row4 = Bytes.toBytes("r4");
    put = new Put(row4);
    put.addColumn(f, q, Bytes.toBytes("a"));
    table.put(put);
    append = new Append(row4);
    append.add(new KeyValue(row4, f, q, 1234L, v));
    append.setAttribute("visibility", Bytes.toBytes("tag2"));
    table.append(append);
    TestCoprocessorForTags.checkTagPresence = true;
    scanner = table.getScanner(new Scan().withStartRow(row4));
    result = scanner.next();
    scanner.close();
    kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
    tags = TestCoprocessorForTags.tags;
    assertEquals(1, tags.size());
    assertEquals("tag2", Bytes.toString(Tag.cloneValue(tags.get(0))));
  } finally {
    // Reset coprocessor statics so a failure here cannot poison later tests.
    TestCoprocessorForTags.checkTagPresence = false;
    TestCoprocessorForTags.tags = null;
  }
}
Example usage of org.apache.hadoop.hbase.client.Append in the Apache HBase project: class MultiThreadedUpdater, method mutate.
/**
 * Applies a single mutation to the table, dispatching on the concrete Mutation subtype.
 * Increment/Append go through directly; Put/Delete are applied via checkAndMutate
 * guarded on the expected current value {@code v} of {@code cf:q}.
 * On IOException the key is recorded as failed and the error is logged; the
 * exception is not rethrown (best-effort load-tool semantics).
 */
public void mutate(Table table, Mutation m, long keyBase, byte[] row, byte[] cf, byte[] q, byte[] v) {
  long start = EnvironmentEdgeManager.currentTime();
  try {
    // Give the data generator a chance to transform/decorate the mutation first.
    m = dataGenerator.beforeMutate(keyBase, m);
    if (m instanceof Increment) {
      table.increment((Increment) m);
    } else if (m instanceof Append) {
      table.append((Append) m);
    } else if (m instanceof Put) {
      table.checkAndMutate(row, cf).qualifier(q).ifEquals(v).thenPut((Put) m);
    } else if (m instanceof Delete) {
      table.checkAndMutate(row, cf).qualifier(q).ifEquals(v).thenDelete((Delete) m);
    } else {
      throw new IllegalArgumentException("unsupported mutation " + m.getClass().getSimpleName());
    }
    totalOpTimeMs.addAndGet(EnvironmentEdgeManager.currentTime() - start);
  } catch (IOException e) {
    failedKeySet.add(keyBase);
    // Aggregate exceptions carry their own per-action detail; everything else
    // gets its full stack trace stringified.
    String exceptionInfo;
    if (e instanceof RetriesExhaustedWithDetailsException) {
      exceptionInfo = ((RetriesExhaustedWithDetailsException) e).getExhaustiveDescription();
    } else {
      exceptionInfo = StringUtils.stringifyException(e);
    }
    LOG.error("Failed to mutate: " + keyBase + " after "
      + (EnvironmentEdgeManager.currentTime() - start) + "ms; region information: "
      + getRegionDebugInfoSafe(table, m.getRow()) + "; errors: " + exceptionInfo);
  }
}
Example usage of org.apache.hadoop.hbase.client.Append in the Apache HBase project: class TestAtomicOperation, method testAppendWithNonExistingFamily.
@Test
public void testAppendWithNonExistingFamily() throws IOException {
  initHRegion(tableName, name.getMethodName(), fam1);
  final String v1 = "Value";
  // fam2 is deliberately NOT part of the region; the append must be rejected.
  final Append a = new Append(row);
  a.addColumn(fam1, qual1, Bytes.toBytes(v1));
  a.addColumn(fam2, qual2, Bytes.toBytes(v1));
  Result result = null;
  try {
    result = region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE);
    fail("Append operation should fail with NoSuchColumnFamilyException.");
  } catch (NoSuchColumnFamilyException e) {
    // Expected path: the append threw before assigning, so result stays null.
    assertEquals(null, result);
  } catch (Exception e) {
    // Include the actual exception so an unexpected failure is diagnosable.
    fail("Append operation should fail with NoSuchColumnFamilyException, but got: " + e);
  }
}
Example usage of org.apache.hadoop.hbase.client.Append in the Apache HBase project: class TestAtomicOperation, method testAppendWithMultipleFamilies.
@Test
public void testAppendWithMultipleFamilies() throws IOException {
  final byte[] fam3 = Bytes.toBytes("colfamily31");
  initHRegion(tableName, name.getMethodName(), fam1, fam2, fam3);
  String v1 = "Appended";
  String v2 = "Value";
  // First append with setReturnResults(false): must succeed but return an empty Result.
  Append a = new Append(row);
  a.setReturnResults(false);
  a.addColumn(fam1, qual1, Bytes.toBytes(v1));
  a.addColumn(fam2, qual2, Bytes.toBytes(v2));
  Result result = region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE);
  assertTrue("Expected an empty result but result contains " + result.size() + " keys",
    result.isEmpty());
  // Second append across three families; values concatenate onto the first append's.
  a = new Append(row);
  a.addColumn(fam2, qual2, Bytes.toBytes(v1));
  a.addColumn(fam1, qual1, Bytes.toBytes(v2));
  a.addColumn(fam3, qual3, Bytes.toBytes(v2));
  a.addColumn(fam1, qual2, Bytes.toBytes(v1));
  result = region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE);
  byte[] actualValue1 = result.getValue(fam1, qual1);
  byte[] actualValue2 = result.getValue(fam2, qual2);
  byte[] actualValue3 = result.getValue(fam3, qual3);
  byte[] actualValue4 = result.getValue(fam1, qual2);
  assertNotNull("Value1 should not be null", actualValue1);
  assertNotNull("Value2 should not be null", actualValue2);
  assertNotNull("Value3 should not be null", actualValue3);
  assertNotNull("Value4 should not be null", actualValue4);
  assertEquals(0, Bytes.compareTo(Bytes.toBytes(v1 + v2), actualValue1));
  assertEquals(0, Bytes.compareTo(Bytes.toBytes(v2 + v1), actualValue2));
  assertEquals(0, Bytes.compareTo(Bytes.toBytes(v2), actualValue3));
  assertEquals(0, Bytes.compareTo(Bytes.toBytes(v1), actualValue4));
}
Example usage of org.apache.hadoop.hbase.client.Append in the Apache HBase project: class TestRegionObserverInterface, method testPreWALAppendHook.
// Called from testPreWALAppendIsWrittenToWAL.
// Issues one mutation of each type (Put, Append, Increment, Delete) and checks
// after each that the coprocessor's preWALAppend counter advanced by exactly one.
private void testPreWALAppendHook(Table table, TableName tableName) throws IOException {
  int callCount = 0;
  String[] methods = { "getCtPreWALAppend" };
  Object[] expected = new Object[1];

  Put put = new Put(ROW);
  put.addColumn(A, A, A);
  table.put(put);
  expected[0] = ++callCount;
  verifyMethodResult(SimpleRegionObserver.class, methods, tableName, expected);

  Append append = new Append(ROW);
  append.addColumn(B, B, B);
  table.append(append);
  expected[0] = ++callCount;
  verifyMethodResult(SimpleRegionObserver.class, methods, tableName, expected);

  Increment increment = new Increment(ROW);
  increment.addColumn(C, C, 1);
  table.increment(increment);
  expected[0] = ++callCount;
  verifyMethodResult(SimpleRegionObserver.class, methods, tableName, expected);

  Delete delete = new Delete(ROW);
  table.delete(delete);
  expected[0] = ++callCount;
  verifyMethodResult(SimpleRegionObserver.class, methods, tableName, expected);
}
Aggregations