Search in sources :

Example 16 with BinaryComparator

Use of org.apache.hadoop.hbase.filter.BinaryComparator in project hbase by apache.

The class TestScannersWithFilters, method testQualifierFilter.

// Exercises QualifierFilter combined with BinaryComparator across every CompareOp
// (EQUAL, LESS, LESS_OR_EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER), then fully
// validates returned cells for NOT_EQUAL using both a BinaryComparator and a
// RegexStringComparator. Relies on the enclosing class's table fixture: two row
// groups (ROWS_ONE / ROWS_TWO), two families (FAMILIES), and the QUALIFIERS_ONE /
// QUALIFIERS_TWO qualifier sets -- presumably populated in the class setUp; the
// expectedRows/expectedKeys values below encode that layout. TODO confirm fixture.
@Test
public void testQualifierFilter() throws Exception {
    // Match two keys (one from each family) in half the rows
    long expectedRows = numRows / 2;
    long expectedKeys = 2;
    Filter f = new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    Scan s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
    // Match keys less than same qualifier
    // Expect only two keys (one from each family) in half the rows
    expectedRows = numRows / 2;
    expectedKeys = 2;
    f = new QualifierFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
    // Match keys less than or equal
    // Expect four keys (two from each family) in half the rows
    expectedRows = numRows / 2;
    expectedKeys = 4;
    f = new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan();
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
    // Match keys not equal
    // Expect four keys (two from each family)
    // Only look in first group of rows
    expectedRows = numRows / 2;
    expectedKeys = 4;
    f = new QualifierFilter(CompareOp.NOT_EQUAL, new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
    // Match keys greater or equal
    // Expect four keys (two from each family)
    // Only look in first group of rows
    expectedRows = numRows / 2;
    expectedKeys = 4;
    f = new QualifierFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
    // Match keys greater
    // Expect two keys (one from each family)
    // Only look in first group of rows
    expectedRows = numRows / 2;
    expectedKeys = 2;
    f = new QualifierFilter(CompareOp.GREATER, new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
    s.setFilter(f);
    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
    // Match keys not equal to
    // Look across rows and fully validate the keys and ordering
    // Expect varied numbers of keys, 4 per row in group one, 6 per row in
    // group two
    f = new QualifierFilter(CompareOp.NOT_EQUAL, new BinaryComparator(QUALIFIERS_ONE[2]));
    s = new Scan();
    s.setFilter(f);
    // Expected exact result set: every cell except those whose qualifier equals
    // QUALIFIERS_ONE[2]; ordering must match HBase's row/family/qualifier sort.
    KeyValue[] kvs = { // testRowOne-0
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]), // testRowOne-2
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]), new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]), // testRowOne-3
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]), new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]), // testRowTwo-0
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), // testRowTwo-2
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), // testRowTwo-3
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]) };
    verifyScanFull(s, kvs);
    // Test across rows and groups with a regex
    // Filter out "test*-2"
    // Expect 4 keys per row across both groups
    f = new QualifierFilter(CompareOp.NOT_EQUAL, new RegexStringComparator("test.+-2"));
    s = new Scan();
    s.setFilter(f);
    // The regex drops both QUALIFIERS_ONE[2] and QUALIFIERS_TWO[2]-style
    // qualifiers, leaving 4 cells per row in both row groups.
    kvs = new KeyValue[] { // testRowOne-0
    new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]), // testRowOne-2
    new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]), new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]), // testRowOne-3
    new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]), new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]), // testRowTwo-0
    new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), // testRowTwo-2
    new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), // testRowTwo-3
    new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]) };
    verifyScanFull(s, kvs);
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) KeyValue(org.apache.hadoop.hbase.KeyValue) InclusiveStopFilter(org.apache.hadoop.hbase.filter.InclusiveStopFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) Filter(org.apache.hadoop.hbase.filter.Filter) ValueFilter(org.apache.hadoop.hbase.filter.ValueFilter) SkipFilter(org.apache.hadoop.hbase.filter.SkipFilter) Scan(org.apache.hadoop.hbase.client.Scan) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) Test(org.junit.Test)

Example 17 with BinaryComparator

Use of org.apache.hadoop.hbase.filter.BinaryComparator in project pinpoint by naver.

The class HbaseAgentEventDao, method getAgentEvents.

/**
 * Fetches the agent events recorded for {@code agentId} within {@code range},
 * optionally dropping events whose type appears in {@code excludeEventTypes}.
 *
 * @param agentId           id of the agent whose events are read; must not be null
 * @param range             time range to scan; must not be null
 * @param excludeEventTypes event types to filter out; may be null or empty
 * @return the events found (possibly empty)
 */
@Override
public List<AgentEventBo> getAgentEvents(String agentId, Range range, Set<AgentEventType> excludeEventTypes) {
    if (agentId == null) {
        throw new NullPointerException("agentId must not be null");
    }
    if (range == null) {
        throw new NullPointerException("range must not be null");
    }
    // Build the scan over the events column family. Note the start row uses
    // range.getTo() and the stop row range.getFrom() -- presumably createRowKey
    // encodes a reversed timestamp so newer events sort first; confirm there.
    final Scan scan = new Scan();
    scan.setMaxVersions(1);
    scan.setCaching(SCANNER_CACHE_SIZE);
    scan.setStartRow(createRowKey(agentId, range.getTo()));
    scan.setStopRow(createRowKey(agentId, range.getFrom()));
    scan.addFamily(HBaseTables.AGENT_EVENT_CF_EVENTS);
    // Every exclusion must hold at once, hence MUST_PASS_ALL over NOT_EQUAL filters.
    if (!CollectionUtils.isEmpty(excludeEventTypes)) {
        final FilterList exclusions = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        for (AgentEventType eventType : excludeEventTypes) {
            final byte[] qualifierToSkip = Bytes.toBytes(eventType.getCode());
            exclusions.addFilter(new QualifierFilter(CompareFilter.CompareOp.NOT_EQUAL, new BinaryComparator(qualifierToSkip)));
        }
        scan.setFilter(exclusions);
    }
    final List<AgentEventBo> events = this.hbaseOperations2.find(HBaseTables.AGENT_EVENT, scan, agentEventResultsExtractor);
    logger.debug("agentEvents found. {}", events);
    return events;
}
Also used : AgentEventType(com.navercorp.pinpoint.common.server.util.AgentEventType) AgentEventBo(com.navercorp.pinpoint.common.server.bo.AgentEventBo) Scan(org.apache.hadoop.hbase.client.Scan) FilterList(org.apache.hadoop.hbase.filter.FilterList) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)

Example 18 with BinaryComparator

Use of org.apache.hadoop.hbase.filter.BinaryComparator in project phoenix by apache.

The class ScannerBuilder, method getColumnFilters.

/**
 * Builds a filter that accepts only {@link KeyValue}s belonging to one of the
 * given column references: the family must match, and the qualifier must match
 * too unless the reference covers all qualifiers.
 *
 * @param columns columns to filter
 * @return filter that will skip any {@link KeyValue} that doesn't match one of the passed columns
 */
private Filter getColumnFilters(Collection<? extends ColumnReference> columns) {
    // The references are alternatives, so OR them together.
    FilterList anyColumn = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    for (ColumnReference ref : columns) {
        // Always require an exact family match.
        Filter matcher = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(ref.getFamily()));
        // Narrow to the specific qualifier, unless this reference targets every qualifier.
        if (!Bytes.equals(ColumnReference.ALL_QUALIFIERS, ref.getQualifier())) {
            matcher = new FilterList(matcher, new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(ref.getQualifier())));
        }
        anyColumn.addFilter(matcher);
    }
    return anyColumn;
}
Also used : ColumnTrackingNextLargestTimestampFilter(org.apache.phoenix.hbase.index.covered.filter.ColumnTrackingNextLargestTimestampFilter) FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) Filter(org.apache.hadoop.hbase.filter.Filter) ApplyAndFilterDeletesFilter(org.apache.phoenix.hbase.index.covered.filter.ApplyAndFilterDeletesFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList) FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) ColumnReference(org.apache.phoenix.hbase.index.covered.update.ColumnReference) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)

Example 19 with BinaryComparator

Use of org.apache.hadoop.hbase.filter.BinaryComparator in project phoenix by apache.

The class FirstLastValueBaseClientAggregator, method aggregate.

// Merges one server-side partial aggregate into this client-side aggregator.
// ptr carries either a single ordered value (ORDER BY path) or a serialized
// FirstLastNthValueDataContainer holding per-order-key value lists.
@Override
public void aggregate(Tuple tuple, ImmutableBytesWritable ptr) {
    // Called from an ORDER BY clause: the tuple is already a single value,
    // so just capture it as the current top value and skip container parsing.
    if (tuple instanceof SingleKeyValueTuple) {
        topValue = ptr.copyBytes();
        return;
    }
    // Deserialize the server aggregator's payload: a map from order-by key
    // bytes to the list of data values sharing that key.
    FirstLastNthValueDataContainer payload = new FirstLastNthValueDataContainer();
    payload.setPayload(ptr.copyBytes());
    isAscending = payload.getIsAscending();
    TreeMap<byte[], LinkedList<byte[]>> serverAggregatorResult = payload.getData();
    if (useOffset) {
        // OFFSET variant (e.g. NTH_VALUE): keep all candidates, merging each
        // server's per-key value lists into the accumulated topValues map.
        // NOTE(review): byte[] keys rely on topValues using a byte-array
        // comparator for containsKey to work -- confirm the map's comparator.
        //merge topValues
        for (Entry<byte[], LinkedList<byte[]>> entry : serverAggregatorResult.entrySet()) {
            byte[] itemKey = entry.getKey();
            LinkedList<byte[]> itemList = entry.getValue();
            if (topValues.containsKey(itemKey)) {
                topValues.get(itemKey).addAll(itemList);
            } else {
                topValues.put(itemKey, itemList);
            }
        }
    } else {
        // FIRST/LAST variant: only the best entry matters. The server result
        // is sorted, so its first entry is that server's best candidate.
        Entry<byte[], LinkedList<byte[]>> valueEntry = serverAggregatorResult.firstEntry();
        byte[] currentOrder = valueEntry.getKey();
        boolean isBetter;
        if (isAscending) {
            // Ascending: a smaller order key beats the current top.
            isBetter = topOrder.compareTo(currentOrder) > 0;
        } else {
            //desc: a larger order key beats the current top.
            isBetter = topOrder.compareTo(currentOrder) < 0;
        }
        // Empty topOrder (length < 1) means nothing accepted yet, so always take it.
        if (topOrder.getValue().length < 1 || isBetter) {
            topOrder = new BinaryComparator(currentOrder);
            topValue = valueEntry.getValue().getFirst();
        }
    }
}
Also used : FirstLastNthValueDataContainer(org.apache.phoenix.util.FirstLastNthValueDataContainer) SingleKeyValueTuple(org.apache.phoenix.schema.tuple.SingleKeyValueTuple) LinkedList(java.util.LinkedList) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)

Example 20 with BinaryComparator

Use of org.apache.hadoop.hbase.filter.BinaryComparator in project phoenix by apache.

The class FirstLastValueServerAggregator, method aggregate.

// Feeds one row into the server-side FIRST/LAST(_NTH)_VALUE aggregation.
// Evaluates the order-by column and the data column from the tuple, then either
// tracks the top `offset` candidates (useOffset) or just the single best value.
@Override
public void aggregate(Tuple tuple, ImmutableBytesWritable ptr) {
    //set pointer to ordering by field
    orderByColumn.evaluate(tuple, ptr);
    byte[] currentOrder = ptr.copyBytes();
    // ptr now points at the data column; bail out if it cannot be evaluated.
    if (!dataColumn.evaluate(tuple, ptr)) {
        return;
    }
    if (useOffset) {
        boolean addFlag = false;
        if (topValuesCount < offset) {
            // Still filling the window: accept unconditionally.
            // (Fix: removed a try/catch wrapping this plain assignment -- the
            // assignment cannot throw, so the catch/logging was dead code.)
            addFlag = true;
        } else {
            // Window is full: only accept if this row displaces the current
            // worst candidate (last key when ascending, first when descending).
            if (isAscending) {
                if (removeLastElement(currentOrder, topValues.lastKey(), -1)) {
                    addFlag = true;
                    topValuesCount--;
                }
            } else {
                if (removeLastElement(currentOrder, topValues.firstKey(), 1)) {
                    addFlag = true;
                    topValuesCount--;
                }
            }
        }
        if (addFlag) {
            topValuesCount++;
            if (!topValues.containsKey(currentOrder)) {
                topValues.put(currentOrder, new LinkedList<byte[]>());
            }
            //invert bytes if is SortOrder set
            if (hasValueDescSortOrder) {
                topValues.get(currentOrder).push(SortOrder.invert(ptr.get(), ptr.getOffset(), ptr.getLength()));
            } else {
                topValues.get(currentOrder).push(ptr.copyBytes());
            }
        }
    } else {
        // No offset: keep only the single best (order, value) pair.
        boolean isHigher;
        if (isAscending) {
            // Ascending: a smaller order key wins.
            isHigher = topOrder.compareTo(currentOrder) > 0;
        } else {
            //desc: a larger order key wins.
            isHigher = topOrder.compareTo(currentOrder) < 0;
        }
        // Empty topOrder (length < 1) means no value accepted yet.
        if (topOrder.getValue().length < 1 || isHigher) {
            if (hasValueDescSortOrder) {
                topValue = SortOrder.invert(ptr.get(), ptr.getOffset(), ptr.getLength());
            } else {
                topValue = ptr.copyBytes();
            }
            topOrder = new BinaryComparator(currentOrder);
        }
    }
}
Also used : IOException(java.io.IOException) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)

Aggregations

BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)42 Test (org.junit.Test)18 Filter (org.apache.hadoop.hbase.filter.Filter)15 RowFilter (org.apache.hadoop.hbase.filter.RowFilter)14 SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter)13 Put (org.apache.hadoop.hbase.client.Put)12 Scan (org.apache.hadoop.hbase.client.Scan)9 FilterList (org.apache.hadoop.hbase.filter.FilterList)9 QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter)9 FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter)7 PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter)7 ArrayList (java.util.ArrayList)6 RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator)6 Cell (org.apache.hadoop.hbase.Cell)5 KeyValue (org.apache.hadoop.hbase.KeyValue)5 Delete (org.apache.hadoop.hbase.client.Delete)5 FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter)5 InclusiveStopFilter (org.apache.hadoop.hbase.filter.InclusiveStopFilter)5 Get (org.apache.hadoop.hbase.client.Get)4 Result (org.apache.hadoop.hbase.client.Result)4