
Example 61 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

The class TestCheckAndMutate, method testCheckAndMutateBatchWithFilterAndTimeRange.

@Test
public void testCheckAndMutateBatchWithFilterAndTimeRange() throws Throwable {
    try (Table table = createTable()) {
        // Columns A/B (row 1) and D/E (row 2) are written at timestamp 100; C and F use the server timestamp
        table.put(Arrays.asList(
            new Put(ROWKEY)
                .addColumn(FAMILY, Bytes.toBytes("A"), 100, Bytes.toBytes("a"))
                .addColumn(FAMILY, Bytes.toBytes("B"), 100, Bytes.toBytes("b"))
                .addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("c")),
            new Put(ROWKEY2)
                .addColumn(FAMILY, Bytes.toBytes("D"), 100, Bytes.toBytes("d"))
                .addColumn(FAMILY, Bytes.toBytes("E"), 100, Bytes.toBytes("e"))
                .addColumn(FAMILY, Bytes.toBytes("F"), Bytes.toBytes("f"))));
        // TimeRange.between(0, 101) is the interval [0, 101), which covers timestamp 100, so this check passes
        CheckAndMutate checkAndMutate1 = CheckAndMutate.newBuilder(ROWKEY)
            .ifMatches(new FilterList(
                new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")),
                new SingleColumnValueFilter(FAMILY, Bytes.toBytes("B"), CompareOperator.EQUAL, Bytes.toBytes("b"))))
            .timeRange(TimeRange.between(0, 101))
            .build(new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("g")));
        // TimeRange.between(0, 100) excludes timestamp 100 (the upper bound is exclusive), so this check fails
        CheckAndMutate checkAndMutate2 = CheckAndMutate.newBuilder(ROWKEY2)
            .ifMatches(new FilterList(
                new SingleColumnValueFilter(FAMILY, Bytes.toBytes("D"), CompareOperator.EQUAL, Bytes.toBytes("d")),
                new SingleColumnValueFilter(FAMILY, Bytes.toBytes("E"), CompareOperator.EQUAL, Bytes.toBytes("e"))))
            .timeRange(TimeRange.between(0, 100))
            .build(new Put(ROWKEY2).addColumn(FAMILY, Bytes.toBytes("F"), Bytes.toBytes("h")));
        List<CheckAndMutateResult> results = table.checkAndMutate(Arrays.asList(checkAndMutate1, checkAndMutate2));
        assertTrue(results.get(0).isSuccess());
        assertNull(results.get(0).getResult());
        assertFalse(results.get(1).isSuccess());
        assertNull(results.get(1).getResult());
        Result result = table.get(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("C")));
        assertEquals("g", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("C"))));
        result = table.get(new Get(ROWKEY2).addColumn(FAMILY, Bytes.toBytes("F")));
        assertEquals("f", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("F"))));
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) Test(org.junit.Test)
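
The FilterList used above relies on its default operator, MUST_PASS_ALL (logical AND). A minimal sketch, reusing the FAMILY constant and the column names from the test only for illustration, that spells the operator out and shows the MUST_PASS_ONE (logical OR) alternative:

// Equivalent to the implicit default above: both columns must match
FilterList andFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL,
    new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")),
    new SingleColumnValueFilter(FAMILY, Bytes.toBytes("B"), CompareOperator.EQUAL, Bytes.toBytes("b")));

// OR semantics: the check passes if either column matches
FilterList orFilter = new FilterList(FilterList.Operator.MUST_PASS_ONE,
    new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")),
    new SingleColumnValueFilter(FAMILY, Bytes.toBytes("B"), CompareOperator.EQUAL, Bytes.toBytes("b")));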

Example 62 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

The class TestCheckAndMutate, method testCheckAndMutateWithSingleFilterForOldApi.

@Test
@Deprecated
public void testCheckAndMutateWithSingleFilterForOldApi() throws Throwable {
    try (Table table = createTable()) {
        // put one row
        putOneRow(table);
        // get row back and assert the values
        getOneRowAndAssertAllExist(table);
        // Put with success
        boolean ok = table.checkAndMutate(ROWKEY,
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")))
            .thenPut(new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"), Bytes.toBytes("d")));
        assertTrue(ok);
        Result result = table.get(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D")));
        assertEquals("d", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("D"))));
        // Put with failure
        ok = table.checkAndMutate(ROWKEY,
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("b")))
            .thenPut(new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("E"), Bytes.toBytes("e")));
        assertFalse(ok);
        assertFalse(table.exists(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("E"))));
        // Delete with success
        ok = table.checkAndMutate(ROWKEY,
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")))
            .thenDelete(new Delete(ROWKEY).addColumns(FAMILY, Bytes.toBytes("D")));
        assertTrue(ok);
        assertFalse(table.exists(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"))));
        // Mutate with success
        ok = table.checkAndMutate(ROWKEY,
            new SingleColumnValueFilter(FAMILY, Bytes.toBytes("B"), CompareOperator.EQUAL, Bytes.toBytes("b")))
            .thenMutate(new RowMutations(ROWKEY)
                .add((Mutation) new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"), Bytes.toBytes("d")))
                .add((Mutation) new Delete(ROWKEY).addColumns(FAMILY, Bytes.toBytes("A"))));
        assertTrue(ok);
        result = table.get(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D")));
        assertEquals("d", Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("D"))));
        assertFalse(table.exists(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("A"))));
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Test(org.junit.Test)
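
The deprecated builder used above can also express the condition without constructing a filter, by naming the checked column directly. A hedged sketch, reusing ROWKEY and FAMILY from the test class; the column and values are illustrative only:

// Check that column A equals "a", then apply the Put; returns true on success
boolean ok = table.checkAndMutate(ROWKEY, FAMILY)
    .qualifier(Bytes.toBytes("A"))
    .ifEquals(Bytes.toBytes("a"))
    .thenPut(new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"), Bytes.toBytes("d")));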

Example 63 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

The class TestCheckAndMutate, method testCheckAndMutateWithSingleFilter.

@Test
public void testCheckAndMutateWithSingleFilter() throws Throwable {
    try (Table table = createTable()) {
        // put one row
        putOneRow(table);
        // get row back and assert the values
        getOneRowAndAssertAllExist(table);
        // Put with success
        CheckAndMutateResult result = table.checkAndMutate(CheckAndMutate.newBuilder(ROWKEY)
            .ifMatches(new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")))
            .build(new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"), Bytes.toBytes("d"))));
        assertTrue(result.isSuccess());
        assertNull(result.getResult());
        Result r = table.get(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D")));
        assertEquals("d", Bytes.toString(r.getValue(FAMILY, Bytes.toBytes("D"))));
        // Put with failure
        result = table.checkAndMutate(CheckAndMutate.newBuilder(ROWKEY)
            .ifMatches(new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("b")))
            .build(new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("E"), Bytes.toBytes("e"))));
        assertFalse(result.isSuccess());
        assertNull(result.getResult());
        assertFalse(table.exists(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("E"))));
        // Delete with success
        result = table.checkAndMutate(CheckAndMutate.newBuilder(ROWKEY)
            .ifMatches(new SingleColumnValueFilter(FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL, Bytes.toBytes("a")))
            .build(new Delete(ROWKEY).addColumns(FAMILY, Bytes.toBytes("D"))));
        assertTrue(result.isSuccess());
        assertNull(result.getResult());
        assertFalse(table.exists(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"))));
        // Mutate with success
        result = table.checkAndMutate(CheckAndMutate.newBuilder(ROWKEY)
            .ifMatches(new SingleColumnValueFilter(FAMILY, Bytes.toBytes("B"), CompareOperator.EQUAL, Bytes.toBytes("b")))
            .build(new RowMutations(ROWKEY)
                .add((Mutation) new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"), Bytes.toBytes("d")))
                .add((Mutation) new Delete(ROWKEY).addColumns(FAMILY, Bytes.toBytes("A")))));
        assertTrue(result.isSuccess());
        assertNull(result.getResult());
        r = table.get(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D")));
        assertEquals("d", Bytes.toString(r.getValue(FAMILY, Bytes.toBytes("D"))));
        assertFalse(table.exists(new Get(ROWKEY).addColumn(FAMILY, Bytes.toBytes("A"))));
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Test(org.junit.Test)
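
For a plain equality check, CheckAndMutate.newBuilder can skip the filter entirely. A minimal sketch, assuming the same table, ROWKEY, and FAMILY as the test above:

// Same effect as the first check in the test, expressed without a SingleColumnValueFilter
CheckAndMutateResult res = table.checkAndMutate(CheckAndMutate.newBuilder(ROWKEY)
    .ifEquals(FAMILY, Bytes.toBytes("A"), Bytes.toBytes("a"))
    .build(new Put(ROWKEY).addColumn(FAMILY, Bytes.toBytes("D"), Bytes.toBytes("d"))));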

Example 64 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project drill by apache.

The class MapRDBFilterBuilder, method createHBaseScanSpec.

private HBaseScanSpec createHBaseScanSpec(FunctionCall call, MaprDBCompareFunctionsProcessor processor) {
    String functionName = processor.getFunctionName();
    SchemaPath field = processor.getPath();
    byte[] fieldValue = processor.getValue();
    boolean sortOrderAscending = processor.isSortOrderAscending();
    boolean isRowKey = field.getRootSegmentPath().equals(ROW_KEY);
    if (!(isRowKey || (!field.getRootSegment().isLastPath() && field.getRootSegment().getChild().isLastPath() && field.getRootSegment().getChild().isNamed()))) {
        /*
       * if the field in this function is neither the row_key nor a qualified HBase column, return.
       */
        return null;
    }
    if (processor.isRowKeyPrefixComparison()) {
        return createRowKeyPrefixScanSpec(call, processor);
    }
    CompareOp compareOp = null;
    boolean isNullTest = false;
    ByteArrayComparable comparator = new BinaryComparator(fieldValue);
    byte[] startRow = HConstants.EMPTY_START_ROW;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    switch(functionName) {
        case FunctionNames.EQ:
            compareOp = CompareOp.EQUAL;
            if (isRowKey) {
                startRow = fieldValue;
                /* stopRow should be just greater than 'value'*/
                stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                compareOp = CompareOp.EQUAL;
            }
            break;
        case FunctionNames.NE:
            compareOp = CompareOp.NOT_EQUAL;
            break;
        case FunctionNames.GE:
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case FunctionNames.GT:
            if (sortOrderAscending) {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            }
            break;
        case FunctionNames.LE:
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS_OR_EQUAL;
                if (isRowKey) {
                    // stopRow should be just greater than 'value'
                    stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            } else {
                compareOp = CompareOp.GREATER_OR_EQUAL;
                if (isRowKey) {
                    startRow = fieldValue;
                }
            }
            break;
        case FunctionNames.LT:
            if (sortOrderAscending) {
                compareOp = CompareOp.LESS;
                if (isRowKey) {
                    stopRow = fieldValue;
                }
            } else {
                compareOp = CompareOp.GREATER;
                if (isRowKey) {
                    // startRow should be just greater than 'value'
                    startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
                }
            }
            break;
        case FunctionNames.IS_NULL:
        case "isNull":
        case "is null":
            if (isRowKey) {
                return null;
            }
            isNullTest = true;
            compareOp = CompareOp.EQUAL;
            comparator = new NullComparator();
            break;
        case FunctionNames.IS_NOT_NULL:
        case "isNotNull":
        case "is not null":
            if (isRowKey) {
                return null;
            }
            compareOp = CompareOp.NOT_EQUAL;
            comparator = new NullComparator();
            break;
        case "like":
            /*
       * Convert the LIKE operand to Regular Expression pattern so that we can
       * apply RegexStringComparator()
       */
            HBaseRegexParser parser = new HBaseRegexParser(call).parse();
            compareOp = CompareOp.EQUAL;
            comparator = new RegexStringComparator(parser.getRegexString());
            /*
       * We can possibly do better if the LIKE operator is on the row_key
       */
            if (isRowKey) {
                String prefix = parser.getPrefixString();
                if (prefix != null) {
                    /*
           * If there is a literal prefix, it can help us prune the scan to a sub range
           */
                    if (prefix.equals(parser.getLikeString())) {
                        /* The operand value is literal. This turns the LIKE operator to EQUAL operator */
                        startRow = stopRow = fieldValue;
                        compareOp = null;
                    } else {
                        startRow = prefix.getBytes(Charsets.UTF_8);
                        stopRow = startRow.clone();
                        boolean isMaxVal = true;
                        for (int i = stopRow.length - 1; i >= 0; --i) {
                            int nextByteValue = (0xff & stopRow[i]) + 1;
                            if (nextByteValue < 0xff) {
                                stopRow[i] = (byte) nextByteValue;
                                isMaxVal = false;
                                break;
                            } else {
                                stopRow[i] = 0;
                            }
                        }
                        if (isMaxVal) {
                            stopRow = HConstants.EMPTY_END_ROW;
                        }
                    }
                }
            }
            break;
    }
    if (compareOp != null || startRow != HConstants.EMPTY_START_ROW || stopRow != HConstants.EMPTY_END_ROW) {
        Filter filter = null;
        if (isRowKey) {
            if (compareOp != null) {
                filter = new RowFilter(compareOp, comparator);
            }
        } else {
            byte[] family = HBaseUtils.getBytes(field.getRootSegment().getPath());
            byte[] qualifier = HBaseUtils.getBytes(field.getRootSegment().getChild().getNameSegment().getPath());
            filter = new SingleColumnValueFilter(family, qualifier, compareOp, comparator);
            ((SingleColumnValueFilter) filter).setLatestVersionOnly(true);
            if (!isNullTest) {
                ((SingleColumnValueFilter) filter).setFilterIfMissing(true);
            }
        }
        return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter);
    }
    // else
    return null;
}
Also used : HBaseScanSpec(org.apache.drill.exec.store.hbase.HBaseScanSpec) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) HBaseRegexParser(org.apache.drill.exec.store.hbase.HBaseRegexParser) NullComparator(org.apache.hadoop.hbase.filter.NullComparator) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) SchemaPath(org.apache.drill.common.expression.SchemaPath) Filter(org.apache.hadoop.hbase.filter.Filter) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)
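
The LIKE branch above derives an exclusive stop row from a literal prefix by incrementing the last byte that is not 0xff and carrying a zero when a byte overflows. A standalone sketch of that idea (a hypothetical helper, not part of the Drill class):

// Returns a byte[] strictly greater than every row key that starts with 'prefix',
// or HConstants.EMPTY_END_ROW (no upper bound) when the prefix is all 0xff bytes.
// Requires java.util.Arrays and org.apache.hadoop.hbase.HConstants.
static byte[] stopRowForPrefix(byte[] prefix) {
    byte[] stopRow = Arrays.copyOf(prefix, prefix.length);
    for (int i = stopRow.length - 1; i >= 0; --i) {
        if ((stopRow[i] & 0xff) != 0xff) {
            // this byte can be incremented without overflow, so no carry is needed
            stopRow[i]++;
            return stopRow;
        }
        // 0xff overflows: zero it and carry into the byte to the left
        stopRow[i] = 0;
    }
    // the whole prefix was 0xff bytes, so scan to the end of the table
    return HConstants.EMPTY_END_ROW;
}

For an ASCII prefix such as "abc" this yields "abd", which is the same exclusive bound the branch above computes before falling back to EMPTY_END_ROW for an all-0xff prefix.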

Example 65 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

The class TestRegionServerReadRequestMetrics, method testReadRequestsCountWithFilter.

// HBASE-19785
@Ignore
@Test
public void testReadRequestsCountWithFilter() throws Exception {
    int resultCount;
    Scan scan;
    // test for scan
    scan = new Scan();
    scan.setFilter(new SingleColumnValueFilter(CF1, COL1, CompareOperator.EQUAL, VAL1));
    try (ResultScanner scanner = table.getScanner(scan)) {
        resultCount = 0;
        for (Result ignore : scanner) {
            resultCount++;
        }
        testReadRequests(resultCount, 2, 1);
    }
    // test for scan
    scan = new Scan();
    scan.setFilter(new RowFilter(CompareOperator.EQUAL, new BinaryComparator(ROW1)));
    try (ResultScanner scanner = table.getScanner(scan)) {
        resultCount = 0;
        for (Result ignore : scanner) {
            resultCount++;
        }
        testReadRequests(resultCount, 1, 2);
    }
    // test for scan
    scan = new Scan().withStartRow(ROW2).withStopRow(ROW3);
    scan.setFilter(new RowFilter(CompareOperator.EQUAL, new BinaryComparator(ROW1)));
    try (ResultScanner scanner = table.getScanner(scan)) {
        resultCount = 0;
        for (Result ignore : scanner) {
            resultCount++;
        }
        testReadRequests(resultCount, 0, 1);
    }
// fixme filtered get should not increase readRequestsCount
// Get get = new Get(ROW2);
// get.setFilter(new SingleColumnValueFilter(CF1, COL1, CompareFilter.CompareOp.EQUAL, VAL1));
// Result result = table.get(get);
// resultCount = result.isEmpty() ? 0 : 1;
// testReadRequests(resultCount, 0, 1);
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Scan(org.apache.hadoop.hbase.client.Scan) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) Result(org.apache.hadoop.hbase.client.Result) Ignore(org.junit.Ignore) Test(org.junit.Test)
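
The commented-out block at the end of the test uses the old CompareFilter.CompareOp enum. A sketch of the same filtered Get with the current CompareOperator API, assuming the CF1, COL1, VAL1, and ROW2 constants from the test class:

Get get = new Get(ROW2);
get.setFilter(new SingleColumnValueFilter(CF1, COL1, CompareOperator.EQUAL, VAL1));
Result result = table.get(get);
int resultCount = result.isEmpty() ? 0 : 1;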

Aggregations

SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 71
Test (org.junit.Test): 39
FilterList (org.apache.hadoop.hbase.filter.FilterList): 28
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 16
Scan (org.apache.hadoop.hbase.client.Scan): 15
Put (org.apache.hadoop.hbase.client.Put): 13
Result (org.apache.hadoop.hbase.client.Result): 13
Filter (org.apache.hadoop.hbase.filter.Filter): 12
Delete (org.apache.hadoop.hbase.client.Delete): 8
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 8
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 8
TableName (org.apache.hadoop.hbase.TableName): 7
Table (org.apache.hadoop.hbase.client.Table): 7
BitComparator (org.apache.hadoop.hbase.filter.BitComparator): 7
ArrayList (java.util.ArrayList): 6
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 6
Get (org.apache.hadoop.hbase.client.Get): 6
Mutation (org.apache.hadoop.hbase.client.Mutation): 6
ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable): 6
CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp): 6