Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.
From class TestAsyncTable, method testCheckAndMutateWithFilterAndTimeRangeForOldApi — exercises the deprecated builder-style checkAndMutate on AsyncTable with a filter condition and a time range:
@Test
@Deprecated
public void testCheckAndMutateWithFilterAndTimeRangeForOldApi() throws Throwable {
  AsyncTable<?> table = getTable.get();

  // Put with specifying the timestamp
  table.put(new Put(row).addColumn(FAMILY, Bytes.toBytes("A"), 100, Bytes.toBytes("a"))).get();

  // Put with success: TimeRange.between(0, 101) includes the cell written at timestamp 100
  boolean ok = table
    .checkAndMutate(row, new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"),
      CompareOperator.EQUAL, Bytes.toBytes("a")))
    .timeRange(TimeRange.between(0, 101))
    .thenPut(new Put(row).addColumn(FAMILY, Bytes.toBytes("B"), Bytes.toBytes("b")))
    .get();
  assertTrue(ok);

  Result result = table.get(new Get(row).addColumn(FAMILY, Bytes.toBytes("B"))).get();
  assertEquals("b", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("B"))));

  // Put with failure: TimeRange.between(0, 100) excludes timestamp 100 (the upper bound is exclusive)
  ok = table
    .checkAndMutate(row, new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"),
      CompareOperator.EQUAL, Bytes.toBytes("a")))
    .timeRange(TimeRange.between(0, 100))
    .thenPut(new Put(row).addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("c")))
    .get();
  assertFalse(ok);
  assertFalse(table.exists(new Get(row).addColumn(FAMILY, Bytes.toBytes("C"))).get());
}
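The row/filter checkAndMutate builder used above is the deprecated API (hence the @Deprecated marker on the test). A minimal sketch of the same successful check expressed through a CheckAndMutate object, assuming an HBase release (2.4+) where CheckAndMutate and AsyncTable.checkAndMutate(CheckAndMutate) are available, could look like this:

// Hedged sketch: equivalent check via the non-deprecated CheckAndMutate API (HBase 2.4+ assumed)
CheckAndMutate checkAndMutate = CheckAndMutate.newBuilder(row)
  .ifMatches(new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"),
    CompareOperator.EQUAL, Bytes.toBytes("a")))
  .timeRange(TimeRange.between(0, 101))
  .build(new Put(row).addColumn(FAMILY, Bytes.toBytes("B"), Bytes.toBytes("b")));
CheckAndMutateResult camResult = table.checkAndMutate(checkAndMutate).get();
assertTrue(camResult.isSuccess());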
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.
From class TestAsyncTable, method testCheckAndMutateBatchWithFilterAndTimeRange — batches two filter-guarded CheckAndMutate operations with different time ranges through checkAndMutateAll:
@Test
public void testCheckAndMutateBatchWithFilterAndTimeRange() throws Throwable {
  AsyncTable<?> table = getTable.get();
  byte[] row2 = Bytes.toBytes(Bytes.toString(row) + "2");
  table.putAll(Arrays.asList(
    new Put(row).addColumn(FAMILY, Bytes.toBytes("A"), 100, Bytes.toBytes("a"))
      .addColumn(FAMILY, Bytes.toBytes("B"), 100, Bytes.toBytes("b"))
      .addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("c")),
    new Put(row2).addColumn(FAMILY, Bytes.toBytes("D"), 100, Bytes.toBytes("d"))
      .addColumn(FAMILY, Bytes.toBytes("E"), 100, Bytes.toBytes("e"))
      .addColumn(FAMILY, Bytes.toBytes("F"), Bytes.toBytes("f")))).get();
  // Time range [0, 101) includes the cells written at timestamp 100, so this condition matches
  CheckAndMutate checkAndMutate1 = CheckAndMutate.newBuilder(row)
    .ifMatches(new FilterList(
      new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")),
      new SingleColumnValueFilter(FAMILY, Bytes.toBytes("B"), CompareOperator.EQUAL, Bytes.toBytes("b"))))
    .timeRange(TimeRange.between(0, 101))
    .build(new Put(row).addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("g")));
  // Time range [0, 100) excludes timestamp 100 (the upper bound is exclusive), so this condition fails
  CheckAndMutate checkAndMutate2 = CheckAndMutate.newBuilder(row2)
    .ifMatches(new FilterList(
      new SingleColumnValueFilter(FAMILY, Bytes.toBytes("D"), CompareOperator.EQUAL, Bytes.toBytes("d")),
      new SingleColumnValueFilter(FAMILY, Bytes.toBytes("E"), CompareOperator.EQUAL, Bytes.toBytes("e"))))
    .timeRange(TimeRange.between(0, 100))
    .build(new Put(row2).addColumn(FAMILY, Bytes.toBytes("F"), Bytes.toBytes("h")));
  List<CheckAndMutateResult> results =
    table.checkAndMutateAll(Arrays.asList(checkAndMutate1, checkAndMutate2)).get();
  assertTrue(results.get(0).isSuccess());
  assertNull(results.get(0).getResult());
  assertFalse(results.get(1).isSuccess());
  assertNull(results.get(1).getResult());
  Result result = table.get(new Get(row).addColumn(FAMILY, Bytes.toBytes("C"))).get();
  assertEquals("g", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("C"))));
  result = table.get(new Get(row2).addColumn(FAMILY, Bytes.toBytes("F"))).get();
  assertEquals("f", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("F"))));
}
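getResult() is asserted null above because a conditional Put carries no return data. As a rough sketch under the same HBase 2.4+ assumption, wrapping an Increment instead of a Put should return the updated cell through CheckAndMutateResult.getResult(); the "counter" qualifier below is illustrative only:

// Hedged sketch: a conditional Increment returns the updated cell in getResult() (HBase 2.4+ assumed)
CheckAndMutate incrementIfMatch = CheckAndMutate.newBuilder(row)
  .ifMatches(new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"),
    CompareOperator.EQUAL, Bytes.toBytes("a")))
  .build(new Increment(row).addColumn(FAMILY, Bytes.toBytes("counter"), 1));
CheckAndMutateResult incrementResult = table.checkAndMutate(incrementIfMatch).get();
assertTrue(incrementResult.isSuccess());
assertNotNull(incrementResult.getResult()); // the Result holds the new counter value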
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.
From class TestServerSideScanMetricsFromClientSide, method testRowsSeenMetric — verifies that the rows-scanned metric counts every row a scan touches, even rows that a filter later excludes:
private void testRowsSeenMetric(Scan baseScan) throws Exception {
  Scan scan = new Scan(baseScan);
  testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, NUM_ROWS);

  for (int i = 0; i < ROWS.length - 1; i++) {
    scan = new Scan(baseScan);
    scan.withStartRow(ROWS[0]);
    scan.withStopRow(ROWS[i + 1]);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, i + 1);
  }

  for (int i = ROWS.length - 1; i > 0; i--) {
    scan = new Scan(baseScan);
    scan.withStartRow(ROWS[i - 1]);
    scan.withStopRow(ROWS[ROWS.length - 1]);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length - i);
  }

  // The filter should filter out all rows, but we still expect to see every row.
  Filter filter = new RowFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("xyz")));
  scan = new Scan(baseScan);
  scan.setFilter(filter);
  testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);

  // Filter should pass on all rows
  SingleColumnValueFilter singleColumnValueFilter =
    new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOperator.EQUAL, VALUE);
  scan = new Scan(baseScan);
  scan.setFilter(singleColumnValueFilter);
  testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);

  // Filter should filter out all rows
  singleColumnValueFilter =
    new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOperator.NOT_EQUAL, VALUE);
  scan = new Scan(baseScan);
  scan.setFilter(singleColumnValueFilter);
  testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
}
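testMetric is a private helper of the test class and is not shown in this snippet. A rough sketch of what such a helper typically looks like, assuming the client-side scan metrics API (Scan.setScanMetricsEnabled, ResultScanner.getScanMetrics, ServerSideScanMetrics.getMetricsMap) and a TABLE handle to the test table (both are assumptions here):

// Hedged sketch: enable scan metrics, drain the scanner, then compare the requested counter
private void testMetric(Scan scan, String metricKey, long expectedValue) throws Exception {
  scan.setScanMetricsEnabled(true);
  try (ResultScanner scanner = TABLE.getScanner(scan)) {
    for (Result r : scanner) {
      // consume every result so the server-side counters are fully accumulated
    }
    ScanMetrics metrics = scanner.getScanMetrics();
    assertEquals(expectedValue, metrics.getMetricsMap().get(metricKey).longValue());
  }
}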
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.
From class TestFromClientSide5, method testMultiRowMutationWithFilterConditionWhenConditionNotMatch — issues an atomic multi-row mutation through the MultiRowMutation coprocessor, guarded by a FilterList condition that does not match, so no mutation is applied:
@Test
public void testMultiRowMutationWithFilterConditionWhenConditionNotMatch() throws Exception {
  final TableName tableName = name.getTableName();
  final byte[] ROW1 = Bytes.toBytes("testRow1");
  final byte[] ROW2 = Bytes.toBytes("testRow2");
  final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
  final byte[] VALUE1 = Bytes.toBytes("testValue1");
  final byte[] VALUE2 = Bytes.toBytes("testValue2");
  final byte[] VALUE3 = Bytes.toBytes("testValue3");

  try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
    // Add initial data
    t.put(new Put(ROW2).addColumn(FAMILY, QUALIFIER, VALUE2)
      .addColumn(FAMILY, QUALIFIER2, VALUE3));

    // Execute MultiRowMutation with conditions
    Put put1 = new Put(ROW).addColumn(FAMILY, QUALIFIER, VALUE);
    MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
    Put put2 = new Put(ROW1).addColumn(FAMILY, QUALIFIER, VALUE1);
    MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
    Delete delete = new Delete(ROW2);
    MutationProto m3 = ProtobufUtil.toMutation(MutationType.DELETE, delete);

    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
    mrmBuilder.addMutationRequest(m1);
    mrmBuilder.addMutationRequest(m2);
    mrmBuilder.addMutationRequest(m3);
    // The second filter expects VALUE2 in QUALIFIER2, but the stored value is VALUE3, so the condition fails
    mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2, new FilterList(
      new SingleColumnValueFilter(FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2),
      new SingleColumnValueFilter(FAMILY, QUALIFIER2, CompareOperator.EQUAL, VALUE2)), null));

    CoprocessorRpcChannel channel = t.coprocessorService(ROW);
    MultiRowMutationService.BlockingInterface service =
      MultiRowMutationService.newBlockingStub(channel);
    MutateRowsResponse response = service.mutateRows(null, mrmBuilder.build());

    // Assert: nothing was mutated
    assertFalse(response.getProcessed());
    Result r = t.get(new Get(ROW));
    assertTrue(r.isEmpty());
    r = t.get(new Get(ROW1));
    assertTrue(r.isEmpty());
    r = t.get(new Get(ROW2));
    assertEquals(Bytes.toString(VALUE2), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
  }
}
Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.
From class TestFromClientSide5, method testMultiRowMutationWithFilterConditionWhenConditionMatches — the same multi-row mutation with a FilterList condition that does match, so every mutation is applied:
@Test
public void testMultiRowMutationWithFilterConditionWhenConditionMatches() throws Exception {
  final TableName tableName = name.getTableName();
  final byte[] ROW1 = Bytes.toBytes("testRow1");
  final byte[] ROW2 = Bytes.toBytes("testRow2");
  final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
  final byte[] VALUE1 = Bytes.toBytes("testValue1");
  final byte[] VALUE2 = Bytes.toBytes("testValue2");
  final byte[] VALUE3 = Bytes.toBytes("testValue3");

  try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
    // Add initial data
    t.put(new Put(ROW2).addColumn(FAMILY, QUALIFIER, VALUE2)
      .addColumn(FAMILY, QUALIFIER2, VALUE3));

    // Execute MultiRowMutation with conditions
    Put put1 = new Put(ROW).addColumn(FAMILY, QUALIFIER, VALUE);
    MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
    Put put2 = new Put(ROW1).addColumn(FAMILY, QUALIFIER, VALUE1);
    MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
    Delete delete = new Delete(ROW2);
    MutationProto m3 = ProtobufUtil.toMutation(MutationType.DELETE, delete);

    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
    mrmBuilder.addMutationRequest(m1);
    mrmBuilder.addMutationRequest(m2);
    mrmBuilder.addMutationRequest(m3);
    // Both filters match the stored values (QUALIFIER=VALUE2, QUALIFIER2=VALUE3), so all mutations are applied atomically
    mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2, new FilterList(
      new SingleColumnValueFilter(FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2),
      new SingleColumnValueFilter(FAMILY, QUALIFIER2, CompareOperator.EQUAL, VALUE3)), null));

    CoprocessorRpcChannel channel = t.coprocessorService(ROW);
    MultiRowMutationService.BlockingInterface service =
      MultiRowMutationService.newBlockingStub(channel);
    MutateRowsResponse response = service.mutateRows(null, mrmBuilder.build());

    // Assert: the puts landed and ROW2 was deleted
    assertTrue(response.getProcessed());
    Result r = t.get(new Get(ROW));
    assertEquals(Bytes.toString(VALUE), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
    r = t.get(new Get(ROW1));
    assertEquals(Bytes.toString(VALUE1), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
    r = t.get(new Get(ROW2));
    assertTrue(r.isEmpty());
  }
}
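In both MultiRowMutation tests the third argument to ProtobufUtil.toCondition is null; it appears to be the optional TimeRange for the condition check. A hedged sketch of restricting the condition to cells written before timestamp 101, assuming that overload exists with this signature:

// Hedged sketch: same condition, but only cells with timestamps in [0, 101) are considered
mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2, new FilterList(
  new SingleColumnValueFilter(FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2),
  new SingleColumnValueFilter(FAMILY, QUALIFIER2, CompareOperator.EQUAL, VALUE3)),
  TimeRange.between(0, 101)));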