
Example 51 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

the class TestAsyncTable method testCheckAndMutateWithFilterAndTimeRangeForOldApi.

@Test
@Deprecated
public void testCheckAndMutateWithFilterAndTimeRangeForOldApi() throws Throwable {
    AsyncTable<?> table = getTable.get();
    // Put a cell with an explicit timestamp of 100
    table.put(new Put(row).addColumn(FAMILY, Bytes.toBytes("A"), 100, Bytes.toBytes("a"))).get();
    // Put succeeds: TimeRange.between(0, 101) covers timestamp 100 (the upper bound is exclusive)
    boolean ok = table.checkAndMutate(row,
        new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")))
        .timeRange(TimeRange.between(0, 101))
        .thenPut(new Put(row).addColumn(FAMILY, Bytes.toBytes("B"), Bytes.toBytes("b")))
        .get();
    assertTrue(ok);
    Result result = table.get(new Get(row).addColumn(FAMILY, Bytes.toBytes("B"))).get();
    assertEquals("b", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("B"))));
    // Put fails: TimeRange.between(0, 100) excludes timestamp 100
    ok = table.checkAndMutate(row,
        new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")))
        .timeRange(TimeRange.between(0, 100))
        .thenPut(new Put(row).addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("c")))
        .get();
    assertFalse(ok);
    assertFalse(table.exists(new Get(row).addColumn(FAMILY, Bytes.toBytes("C"))).get());
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Test(org.junit.Test)
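
Example 51 goes through the deprecated checkAndMutate(row, filter) entry point. For comparison, here is a minimal sketch of the same check expressed with the CheckAndMutate request object that Example 52 below uses; row, FAMILY, and table are the fixtures from the test above, and this snippet is an illustration rather than code from the project.

CheckAndMutate checkAndMutate = CheckAndMutate.newBuilder(row)
    .ifMatches(new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"),
        CompareOperator.EQUAL, Bytes.toBytes("a")))
    .timeRange(TimeRange.between(0, 101))
    .build(new Put(row).addColumn(FAMILY, Bytes.toBytes("B"), Bytes.toBytes("b")));
// AsyncTable#checkAndMutate(CheckAndMutate) completes with a CheckAndMutateResult
CheckAndMutateResult result = table.checkAndMutate(checkAndMutate).get();
assertTrue(result.isSuccess());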

Example 52 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

the class TestAsyncTable method testCheckAndMutateBatchWithFilterAndTimeRange.

@Test
public void testCheckAndMutateBatchWithFilterAndTimeRange() throws Throwable {
    AsyncTable<?> table = getTable.get();
    byte[] row2 = Bytes.toBytes(Bytes.toString(row) + "2");
    table.putAll(Arrays.asList(
        new Put(row).addColumn(FAMILY, Bytes.toBytes("A"), 100, Bytes.toBytes("a"))
            .addColumn(FAMILY, Bytes.toBytes("B"), 100, Bytes.toBytes("b"))
            .addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("c")),
        new Put(row2).addColumn(FAMILY, Bytes.toBytes("D"), 100, Bytes.toBytes("d"))
            .addColumn(FAMILY, Bytes.toBytes("E"), 100, Bytes.toBytes("e"))
            .addColumn(FAMILY, Bytes.toBytes("F"), Bytes.toBytes("f")))).get();
    // Matches: both column checks pass and TimeRange.between(0, 101) covers timestamp 100
    CheckAndMutate checkAndMutate1 = CheckAndMutate.newBuilder(row)
        .ifMatches(new FilterList(
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")),
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("B"), CompareOperator.EQUAL, Bytes.toBytes("b"))))
        .timeRange(TimeRange.between(0, 101))
        .build(new Put(row).addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("g")));
    // Does not match: TimeRange.between(0, 100) excludes timestamp 100, so the put of "h" is rejected
    CheckAndMutate checkAndMutate2 = CheckAndMutate.newBuilder(row2)
        .ifMatches(new FilterList(
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("D"), CompareOperator.EQUAL, Bytes.toBytes("d")),
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("E"), CompareOperator.EQUAL, Bytes.toBytes("e"))))
        .timeRange(TimeRange.between(0, 100))
        .build(new Put(row2).addColumn(FAMILY, Bytes.toBytes("F"), Bytes.toBytes("h")));
    List<CheckAndMutateResult> results = table.checkAndMutateAll(Arrays.asList(checkAndMutate1, checkAndMutate2)).get();
    assertTrue(results.get(0).isSuccess());
    assertNull(results.get(0).getResult());
    assertFalse(results.get(1).isSuccess());
    assertNull(results.get(1).getResult());
    Result result = table.get(new Get(row).addColumn(FAMILY, Bytes.toBytes("C"))).get();
    assertEquals("g", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("C"))));
    result = table.get(new Get(row2).addColumn(FAMILY, Bytes.toBytes("F"))).get();
    assertEquals("f", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("F"))));
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) Test(org.junit.Test)

Example 53 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

the class TestServerSideScanMetricsFromClientSide method testRowsSeenMetric.

private void testRowsSeenMetric(Scan baseScan) throws Exception {
    Scan scan;
    scan = new Scan(baseScan);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, NUM_ROWS);
    for (int i = 0; i < ROWS.length - 1; i++) {
        scan = new Scan(baseScan);
        scan.withStartRow(ROWS[0]);
        scan.withStopRow(ROWS[i + 1]);
        testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, i + 1);
    }
    for (int i = ROWS.length - 1; i > 0; i--) {
        scan = new Scan(baseScan);
        scan.withStartRow(ROWS[i - 1]);
        scan.withStopRow(ROWS[ROWS.length - 1]);
        testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length - i);
    }
    // The filter should filter out all rows, but we still expect to see every row.
    Filter filter = new RowFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("xyz")));
    scan = new Scan(baseScan);
    scan.setFilter(filter);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
    // Filter should pass on all rows
    SingleColumnValueFilter singleColumnValueFilter = new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOperator.EQUAL, VALUE);
    scan = new Scan(baseScan);
    scan.setFilter(singleColumnValueFilter);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
    // Filter should filter out all rows
    singleColumnValueFilter = new SingleColumnValueFilter(FAMILIES[0], QUALIFIERS[0], CompareOperator.NOT_EQUAL, VALUE);
    scan = new Scan(baseScan);
    scan.setFilter(singleColumnValueFilter);
    testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) ColumnPrefixFilter(org.apache.hadoop.hbase.filter.ColumnPrefixFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) SingleColumnValueExcludeFilter(org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)
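
The testMetric helper is not shown in this excerpt. As a rough sketch of how the rows-scanned metric can be read on the client side, assuming the HBase 2.x scan-metrics API (Scan#setScanMetricsEnabled, ResultScanner#getScanMetrics) and placeholder names table (a Table handle) and expectedRowsScanned:

Scan scan = new Scan(baseScan);
// Ask the region servers to collect scan metrics for this scan
scan.setScanMetricsEnabled(true);
try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result r : scanner) {
        // Drain the scanner so every region reports its metrics
    }
    Map<String, Long> metrics = scanner.getScanMetrics().getMetricsMap();
    long rowsScanned = metrics.get(ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME);
    assertEquals(expectedRowsScanned, rowsScanned);
}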

Example 54 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

the class TestFromClientSide5 method testMultiRowMutationWithFilterConditionWhenConditionNotMatch.

@Test
public void testMultiRowMutationWithFilterConditionWhenConditionNotMatch() throws Exception {
    final TableName tableName = name.getTableName();
    final byte[] ROW1 = Bytes.toBytes("testRow1");
    final byte[] ROW2 = Bytes.toBytes("testRow2");
    final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
    final byte[] VALUE1 = Bytes.toBytes("testValue1");
    final byte[] VALUE2 = Bytes.toBytes("testValue2");
    final byte[] VALUE3 = Bytes.toBytes("testValue3");
    try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
        // Add initial data
        t.put(new Put(ROW2).addColumn(FAMILY, QUALIFIER, VALUE2).addColumn(FAMILY, QUALIFIER2, VALUE3));
        // Execute MultiRowMutation with conditions
        Put put1 = new Put(ROW).addColumn(FAMILY, QUALIFIER, VALUE);
        MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
        Put put2 = new Put(ROW1).addColumn(FAMILY, QUALIFIER, VALUE1);
        MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
        Delete delete = new Delete(ROW2);
        MutationProto m3 = ProtobufUtil.toMutation(MutationType.DELETE, delete);
        MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
        mrmBuilder.addMutationRequest(m1);
        mrmBuilder.addMutationRequest(m2);
        mrmBuilder.addMutationRequest(m3);
        // The condition does not match: QUALIFIER2 holds VALUE3, not VALUE2, so the FilterList fails
        mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2,
            new FilterList(
                new SingleColumnValueFilter(FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2),
                new SingleColumnValueFilter(FAMILY, QUALIFIER2, CompareOperator.EQUAL, VALUE2)),
            null));
        CoprocessorRpcChannel channel = t.coprocessorService(ROW);
        MultiRowMutationService.BlockingInterface service = MultiRowMutationService.newBlockingStub(channel);
        MutateRowsResponse response = service.mutateRows(null, mrmBuilder.build());
        // Assert
        assertFalse(response.getProcessed());
        Result r = t.get(new Get(ROW));
        assertTrue(r.isEmpty());
        r = t.get(new Get(ROW1));
        assertTrue(r.isEmpty());
        r = t.get(new Get(ROW2));
        assertEquals(Bytes.toString(VALUE2), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) FilterList(org.apache.hadoop.hbase.filter.FilterList) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) TableName(org.apache.hadoop.hbase.TableName) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) Test(org.junit.Test)

Example 55 with SingleColumnValueFilter

use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

the class TestFromClientSide5 method testMultiRowMutationWithFilterConditionWhenConditionMatches.

@Test
public void testMultiRowMutationWithFilterConditionWhenConditionMatches() throws Exception {
    final TableName tableName = name.getTableName();
    final byte[] ROW1 = Bytes.toBytes("testRow1");
    final byte[] ROW2 = Bytes.toBytes("testRow2");
    final byte[] QUALIFIER2 = Bytes.toBytes("testQualifier2");
    final byte[] VALUE1 = Bytes.toBytes("testValue1");
    final byte[] VALUE2 = Bytes.toBytes("testValue2");
    final byte[] VALUE3 = Bytes.toBytes("testValue3");
    try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
        // Add initial data
        t.put(new Put(ROW2).addColumn(FAMILY, QUALIFIER, VALUE2).addColumn(FAMILY, QUALIFIER2, VALUE3));
        // Execute MultiRowMutation with conditions
        Put put1 = new Put(ROW).addColumn(FAMILY, QUALIFIER, VALUE);
        MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, put1);
        Put put2 = new Put(ROW1).addColumn(FAMILY, QUALIFIER, VALUE1);
        MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, put2);
        Delete delete = new Delete(ROW2);
        MutationProto m3 = ProtobufUtil.toMutation(MutationType.DELETE, delete);
        MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
        mrmBuilder.addMutationRequest(m1);
        mrmBuilder.addMutationRequest(m2);
        mrmBuilder.addMutationRequest(m3);
        // The condition matches: QUALIFIER holds VALUE2 and QUALIFIER2 holds VALUE3
        mrmBuilder.addCondition(ProtobufUtil.toCondition(ROW2,
            new FilterList(
                new SingleColumnValueFilter(FAMILY, QUALIFIER, CompareOperator.EQUAL, VALUE2),
                new SingleColumnValueFilter(FAMILY, QUALIFIER2, CompareOperator.EQUAL, VALUE3)),
            null));
        CoprocessorRpcChannel channel = t.coprocessorService(ROW);
        MultiRowMutationService.BlockingInterface service = MultiRowMutationService.newBlockingStub(channel);
        MutateRowsResponse response = service.mutateRows(null, mrmBuilder.build());
        // Assert
        assertTrue(response.getProcessed());
        Result r = t.get(new Get(ROW));
        assertEquals(Bytes.toString(VALUE), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
        r = t.get(new Get(ROW1));
        assertEquals(Bytes.toString(VALUE1), Bytes.toString(r.getValue(FAMILY, QUALIFIER)));
        r = t.get(new Get(ROW2));
        assertTrue(r.isEmpty());
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) MultiRowMutationService(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService) FilterList(org.apache.hadoop.hbase.filter.FilterList) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) TableName(org.apache.hadoop.hbase.TableName) MutateRowsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse) MutateRowsRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest) Test(org.junit.Test)
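
Both MultiRowMutation tests assume the MultiRowMutationEndpoint coprocessor is loaded on the table's regions (the test utility presumably arranges this in its cluster setup). As a hedged sketch only, one way to enable it explicitly when creating a table, assuming an Admin handle named admin:

// Illustrative table creation; class and builder names are from the public HBase client API
TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY))
    .setCoprocessor(MultiRowMutationEndpoint.class.getName())
    .build();
admin.createTable(desc);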

Aggregations

SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 71 usages
Test (org.junit.Test): 39 usages
FilterList (org.apache.hadoop.hbase.filter.FilterList): 28 usages
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 16 usages
Scan (org.apache.hadoop.hbase.client.Scan): 15 usages
Put (org.apache.hadoop.hbase.client.Put): 13 usages
Result (org.apache.hadoop.hbase.client.Result): 13 usages
Filter (org.apache.hadoop.hbase.filter.Filter): 12 usages
Delete (org.apache.hadoop.hbase.client.Delete): 8 usages
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 8 usages
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 8 usages
TableName (org.apache.hadoop.hbase.TableName): 7 usages
Table (org.apache.hadoop.hbase.client.Table): 7 usages
BitComparator (org.apache.hadoop.hbase.filter.BitComparator): 7 usages
ArrayList (java.util.ArrayList): 6 usages
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 6 usages
Get (org.apache.hadoop.hbase.client.Get): 6 usages
Mutation (org.apache.hadoop.hbase.client.Mutation): 6 usages
ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable): 6 usages
CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp): 6 usages
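
Pulling the most frequently aggregated classes together, here is a minimal illustrative sketch of the pattern that dominates these examples: a Scan carrying a FilterList of SingleColumnValueFilters built on BinaryComparator. The family, qualifier, and value names are placeholders, and table is an already-obtained Table handle.

byte[] family = Bytes.toBytes("cf");
SingleColumnValueFilter activeFilter = new SingleColumnValueFilter(family, Bytes.toBytes("active"),
    CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("true")));
// Skip rows that do not contain the checked column at all
activeFilter.setFilterIfMissing(true);
SingleColumnValueFilter typeFilter = new SingleColumnValueFilter(family, Bytes.toBytes("type"),
    CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("user")));
typeFilter.setFilterIfMissing(true);

Scan scan = new Scan();
scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, activeFilter, typeFilter));
try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result result : scanner) {
        // Only rows where cf:active == "true" and cf:type == "user" reach this loop
    }
}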