
Example 11 with QualifierFilter

Use of org.apache.hadoop.hbase.filter.QualifierFilter in project hadoop by apache.

The class FlowRunEntityReader, method constructFilterListBasedOnFields:

@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    // By default fetch everything in INFO column family.
    FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
    TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
    // Metrics are always returned if we are reading a single entity.
    if (!isSingleEntityRead() && !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
        FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
        infoColFamilyList.addFilter(infoColumnFamily);
        infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL, new BinaryPrefixComparator(FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""))));
        list.addFilter(infoColFamilyList);
    } else {
        // Check if metricsToRetrieve are specified and, if they are, create a
        // filter list for the info column family by adding the flow run table's
        // fixed columns and a list for the metrics to retrieve. Please note
        // that fieldsToRetrieve will have METRICS added to it if
        // metricsToRetrieve are specified (in augmentParams()).
        TimelineFilterList metricsToRetrieve = dataToRetrieve.getMetricsToRetrieve();
        if (metricsToRetrieve != null && !metricsToRetrieve.getFilterList().isEmpty()) {
            FilterList infoColFamilyList = new FilterList();
            infoColFamilyList.addFilter(infoColumnFamily);
            FilterList columnsList = updateFixedColumns();
            columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(FlowRunColumnPrefix.METRIC, metricsToRetrieve));
            infoColFamilyList.addFilter(columnsList);
            list.addFilter(infoColFamilyList);
        }
    }
    return list;
}
Also used : BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList) FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)
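
The noteworthy trick in this example is exclusion by prefix: a QualifierFilter with CompareOp.NOT_EQUAL and a BinaryPrefixComparator drops every cell whose qualifier starts with the metric prefix, while everything else passes. Here is a minimal, self-contained sketch of the same pattern; the class name and the "m:" prefix are hypothetical stand-ins for FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ExcludePrefixScan {
    // Returns a Scan that drops every cell whose qualifier starts with
    // the given prefix; all other cells pass through untouched.
    public static Scan scanWithoutPrefix(byte[] qualifierPrefix) {
        Scan scan = new Scan();
        scan.setFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
            new BinaryPrefixComparator(qualifierPrefix)));
        return scan;
    }

    public static void main(String[] args) {
        // "m:" is a hypothetical metric prefix, not the actual bytes
        // produced by FlowRunColumnPrefix.METRIC.
        Scan scan = scanWithoutPrefix(Bytes.toBytes("m:"));
        System.out.println(scan.getFilter());
    }
}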

Example 12 with QualifierFilter

Use of org.apache.hadoop.hbase.filter.QualifierFilter in project hadoop by apache.

The class TimelineEntityReader, method createFiltersFromColumnQualifiers:

/**
   * Create a filter list of qualifier filters based on passed set of columns.
   *
   * @param <T> Describes the type of column prefix.
   * @param colPrefix Column Prefix.
   * @param columns set of column qualifiers.
   * @return filter list.
   */
protected <T> FilterList createFiltersFromColumnQualifiers(ColumnPrefix<T> colPrefix, Set<String> columns) {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    for (String column : columns) {
        // For columns which have compound column qualifiers (e.g. events), we
        // need to include the required separator.
        byte[] compoundColQual = createColQualifierPrefix(colPrefix, column);
        list.addFilter(new QualifierFilter(CompareOp.EQUAL, new BinaryPrefixComparator(colPrefix.getColumnPrefixBytes(compoundColQual))));
    }
    return list;
}
Also used : BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) FilterList(org.apache.hadoop.hbase.filter.FilterList) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)
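
Because the list is built with Operator.MUST_PASS_ONE, the per-column filters are OR-ed together: a cell survives if its qualifier starts with any of the requested column names. A minimal sketch of the same OR-of-prefixes pattern, without the ColumnPrefix and compound-qualifier machinery (class and method names here are hypothetical):

import java.util.Collections;
import java.util.Set;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnSetFilter {
    // MUST_PASS_ONE makes this an OR: a cell passes if its qualifier
    // starts with ANY of the given column names.
    public static FilterList forColumns(Set<String> columns) {
        FilterList list = new FilterList(Operator.MUST_PASS_ONE);
        for (String column : columns) {
            list.addFilter(new QualifierFilter(CompareOp.EQUAL,
                new BinaryPrefixComparator(Bytes.toBytes(column))));
        }
        return list;
    }

    public static void main(String[] args) {
        System.out.println(forColumns(Collections.singleton("event")));
    }
}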

Example 13 with QualifierFilter

Use of org.apache.hadoop.hbase.filter.QualifierFilter in project hbase by apache.

The class AccessControlLists, method removeTablePermissions:

private static void removeTablePermissions(TableName tableName, byte[] column, Table table, boolean closeTable) throws IOException {
    Scan scan = new Scan();
    scan.addFamily(ACL_LIST_FAMILY);
    String columnName = Bytes.toString(column);
    scan.setFilter(new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator(String.format("(%s%s%s)|(%s%s)$", ACL_KEY_DELIMITER, columnName, ACL_KEY_DELIMITER, ACL_KEY_DELIMITER, columnName))));
    Set<byte[]> qualifierSet = new TreeSet<>(Bytes.BYTES_COMPARATOR);
    ResultScanner scanner = null;
    try {
        scanner = table.getScanner(scan);
        for (Result res : scanner) {
            for (byte[] q : res.getFamilyMap(ACL_LIST_FAMILY).navigableKeySet()) {
                qualifierSet.add(q);
            }
        }
        if (qualifierSet.size() > 0) {
            Delete d = new Delete(tableName.getName());
            for (byte[] qualifier : qualifierSet) {
                d.addColumns(ACL_LIST_FAMILY, qualifier);
            }
            table.delete(d);
        }
    } finally {
        if (scanner != null)
            scanner.close();
        if (closeTable)
            table.close();
    }
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) Delete(org.apache.hadoop.hbase.client.Delete) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) TreeSet(java.util.TreeSet) Scan(org.apache.hadoop.hbase.client.Scan) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) Result(org.apache.hadoop.hbase.client.Result)
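
The regex deserves a close look: the pattern matches qualifiers that either contain the column name wrapped in delimiters or end with delimiter-plus-column-name, and RegexStringComparator applies it with find() semantics rather than a full match, so a partial hit anywhere in the qualifier counts. A plain-JDK sketch of how that pattern behaves, assuming ACL_KEY_DELIMITER is ',' (the class name and sample qualifiers are made up for illustration):

import java.util.regex.Pattern;

public class AclQualifierRegexDemo {
    public static void main(String[] args) {
        // Same format string the scan above hands to RegexStringComparator,
        // with ',' standing in for ACL_KEY_DELIMITER.
        String columnName = "f1";
        String regex = String.format("(%s%s%s)|(%s%s)$",
            ",", columnName, ",", ",", columnName);
        Pattern p = Pattern.compile(regex);
        // find()-style matching: the qualifier only needs to CONTAIN
        // ",f1," or END WITH ",f1".
        System.out.println(p.matcher("user,f1,q1").find()); // true
        System.out.println(p.matcher("user,f1").find());    // true
        System.out.println(p.matcher("user,f10").find());   // false: "f10" != "f1"
    }
}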

Example 14 with QualifierFilter

Use of org.apache.hadoop.hbase.filter.QualifierFilter in project pinpoint by naver.

The class HbaseApplicationTraceIndexDao, method makeResponseTimeFilter:

/**
     * Builds the HBase filter that selects y-axis values (response times) when
     * picking transactions for the scatter chart. To use this filter, the
     * 4-byte elapsed time must be the prefix of the column qualifier.
     *
     * @param area selected scatter-chart area, carrying the response time range
     * @param offsetTransactionId transaction to resume scanning after; may be null
     * @param offsetTransactionElapsed elapsed time of the offset transaction
     * @return the composed HBase filter
     */
private Filter makeResponseTimeFilter(final SelectedScatterArea area, final TransactionId offsetTransactionId, int offsetTransactionElapsed) {
    // filter by response time
    ResponseTimeRange responseTimeRange = area.getResponseTimeRange();
    byte[] responseFrom = Bytes.toBytes(responseTimeRange.getFrom());
    byte[] responseTo = Bytes.toBytes(responseTimeRange.getTo());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ALL);
    filterList.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(responseFrom)));
    filterList.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryPrefixComparator(responseTo)));
    // add offset
    if (offsetTransactionId != null) {
        final Buffer buffer = new AutomaticBuffer(32);
        buffer.putInt(offsetTransactionElapsed);
        buffer.putPrefixedString(offsetTransactionId.getAgentId());
        buffer.putSVLong(offsetTransactionId.getAgentStartTime());
        buffer.putVLong(offsetTransactionId.getTransactionSequence());
        byte[] qualifierOffset = buffer.getBuffer();
        filterList.addFilter(new QualifierFilter(CompareOp.GREATER, new BinaryPrefixComparator(qualifierOffset)));
    }
    return filterList;
}
Also used : AutomaticBuffer(com.navercorp.pinpoint.common.buffer.AutomaticBuffer) Buffer(com.navercorp.pinpoint.common.buffer.Buffer) BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) ResponseTimeRange(com.navercorp.pinpoint.web.vo.ResponseTimeRange) FilterList(org.apache.hadoop.hbase.filter.FilterList) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)
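
The range filter works because big-endian integer encoding preserves numeric order under unsigned lexicographic byte comparison, so comparing only the qualifier's elapsed-time prefix yields a numeric range scan. A minimal sketch of just the range part, assuming the elapsed time is encoded as a non-negative 4-byte big-endian int prefix (the class and method names are hypothetical, not Pinpoint's API):

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ElapsedTimeRangeFilter {
    // Keeps cells whose qualifier starts with a 4-byte big-endian int in
    // [fromMillis, toMillis]. Bytes.toBytes(int) is big-endian, so byte
    // order matches numeric order for non-negative values; each
    // BinaryPrefixComparator compares only the first 4 bytes.
    public static Filter responseTimeRange(int fromMillis, int toMillis) {
        FilterList list = new FilterList(Operator.MUST_PASS_ALL);
        list.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL,
            new BinaryPrefixComparator(Bytes.toBytes(fromMillis))));
        list.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL,
            new BinaryPrefixComparator(Bytes.toBytes(toMillis))));
        return list;
    }
}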

Example 15 with QualifierFilter

Use of org.apache.hadoop.hbase.filter.QualifierFilter in project hbase by apache.

The class TestScannersWithFilters, method testSkipFilter:

@Test
public void testSkipFilter() throws Exception {
    // Skip any row containing the qualifier "testQualifierOne-2" (exact
    // match via BinaryComparator). Should only get rows from the second
    // group, with all keys.
    Filter f = new SkipFilter(new QualifierFilter(CompareOperator.NOT_EQUAL, new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
    Scan s = new Scan();
    s.setFilter(f);
    KeyValue[] kvs = {
        // testRowTwo-0
        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
        // testRowTwo-2
        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
        // testRowTwo-3
        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]) };
    verifyScanFull(s, kvs);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) InclusiveStopFilter(org.apache.hadoop.hbase.filter.InclusiveStopFilter) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) PageFilter(org.apache.hadoop.hbase.filter.PageFilter) Filter(org.apache.hadoop.hbase.filter.Filter) ValueFilter(org.apache.hadoop.hbase.filter.ValueFilter) SkipFilter(org.apache.hadoop.hbase.filter.SkipFilter) Scan(org.apache.hadoop.hbase.client.Scan) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) Test(org.junit.Test)
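
SkipFilter turns a per-cell test into a per-row one: as soon as the wrapped filter rejects a single cell, the entire row is suppressed, which is why rows containing testQualifierOne-2 vanish completely instead of merely losing that one column. A minimal sketch of the same wrapping (class name and qualifier are hypothetical), using the newer CompareOperator API as the test above does:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.SkipFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SkipRowsWithQualifier {
    // Drops every row that contains the given qualifier anywhere in it.
    // The inner QualifierFilter rejects the matching cell; SkipFilter
    // escalates that single rejection to the whole row.
    public static Scan scanSkippingQualifier(byte[] qualifier) {
        Scan scan = new Scan();
        scan.setFilter(new SkipFilter(new QualifierFilter(
            CompareOperator.NOT_EQUAL, new BinaryComparator(qualifier))));
        return scan;
    }

    public static void main(String[] args) {
        // "bad" is an illustrative qualifier, not one from the test data.
        System.out.println(scanSkippingQualifier(Bytes.toBytes("bad")).getFilter());
    }
}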

Aggregations

QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter): 21 uses
Test (org.junit.Test): 12
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 11
FilterList (org.apache.hadoop.hbase.filter.FilterList): 11
Filter (org.apache.hadoop.hbase.filter.Filter): 9
RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator): 8
Scan (org.apache.hadoop.hbase.client.Scan): 7
FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter): 6
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 6
TableName (org.apache.hadoop.hbase.TableName): 5
FirstKeyOnlyFilter (org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter): 5
InclusiveStopFilter (org.apache.hadoop.hbase.filter.InclusiveStopFilter): 5
KeyValue (org.apache.hadoop.hbase.KeyValue): 4
BinaryPrefixComparator (org.apache.hadoop.hbase.filter.BinaryPrefixComparator): 4
PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 4
TimestampsFilter (org.apache.hadoop.hbase.filter.TimestampsFilter): 4
ValueFilter (org.apache.hadoop.hbase.filter.ValueFilter): 4
Cell (org.apache.hadoop.hbase.Cell): 3
MultiRowMutationEndpoint (org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint): 3
KeyOnlyFilter (org.apache.hadoop.hbase.filter.KeyOnlyFilter): 3