Search in sources :

Example 1 with BinaryPrefixComparator

Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in the project pinpoint by Naver.

From the class HbaseTraceDaoV2, method createSpanQualifierFilter.

/**
 * Builds a filter that accepts only columns whose qualifier starts with the
 * span type marker byte ({@code SpanEncoder.TYPE_SPAN}).
 *
 * @return a {@link QualifierFilter} doing an EQUAL prefix match on the span marker
 */
public Filter createSpanQualifierFilter() {
    byte[] spanTypePrefix = new byte[] { SpanEncoder.TYPE_SPAN };
    return new QualifierFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator(spanTypePrefix));
}
Also used : ByteArrayComparable(org.apache.hadoop.hbase.filter.ByteArrayComparable) BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) ColumnCountGetFilter(org.apache.hadoop.hbase.filter.ColumnCountGetFilter) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter) Filter(org.apache.hadoop.hbase.filter.Filter) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)

Example 2 with BinaryPrefixComparator

Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in the project hadoop by Apache.

From the class FlowRunEntityReader, method constructFilterListBasedOnFields.

/**
 * Builds the column-level filter list for a flow run read, deciding whether
 * metric columns are fetched based on the requested fields.
 *
 * @return a MUST_PASS_ONE filter list over the INFO column family
 * @throws IOException if constructing the metric column filters fails
 */
@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
    // A row cell passes if ANY sub-list added below matches.
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    // By default fetch everything in INFO column family.
    FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
    TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
    // Metrics are always returned if we are reading a single entity.
    if (!isSingleEntityRead() && !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
        // METRICS not requested: accept cells that are in INFO OR whose
        // qualifier does not carry the metric column prefix.
        // NOTE(review): MUST_PASS_ONE here relies on metric cells living under
        // the INFO family with the METRIC qualifier prefix — confirm against
        // the flow run table schema.
        FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
        infoColFamilyList.addFilter(infoColumnFamily);
        infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL, new BinaryPrefixComparator(FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""))));
        list.addFilter(infoColFamilyList);
    } else {
        // Check if metricsToRetrieve are specified and if they are, create a
        // filter list for info column family by adding flow run tables columns
        // and a list for metrics to retrieve. Pls note that fieldsToRetrieve
        // will have METRICS added to it if metricsToRetrieve are specified
        // (in augmentParams()).
        TimelineFilterList metricsToRetrieve = dataToRetrieve.getMetricsToRetrieve();
        if (metricsToRetrieve != null && !metricsToRetrieve.getFilterList().isEmpty()) {
            // Default FilterList operator is MUST_PASS_ALL: cell must be in
            // INFO AND match one of the column filters below.
            FilterList infoColFamilyList = new FilterList();
            infoColFamilyList.addFilter(infoColumnFamily);
            // Fixed (non-metric) columns plus the explicitly requested metrics.
            FilterList columnsList = updateFixedColumns();
            columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(FlowRunColumnPrefix.METRIC, metricsToRetrieve));
            infoColFamilyList.addFilter(columnsList);
            list.addFilter(infoColFamilyList);
        }
    }
    return list;
}
Also used : BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FilterList(org.apache.hadoop.hbase.filter.FilterList) TimelineFilterList(org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList) FamilyFilter(org.apache.hadoop.hbase.filter.FamilyFilter) TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)

Example 3 with BinaryPrefixComparator

Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in the project hadoop by Apache.

From the class TimelineEntityReader, method createFiltersFromColumnQualifiers.

/**
 * Create a filter list of qualifier filters based on passed set of columns.
 * A cell matches if its qualifier starts with any of the given columns'
 * prefixes (MUST_PASS_ONE).
 *
 * @param <T> Describes the type of column prefix.
 * @param colPrefix Column Prefix.
 * @param columns set of column qualifiers.
 * @return filter list.
 */
protected <T> FilterList createFiltersFromColumnQualifiers(ColumnPrefix<T> colPrefix, Set<String> columns) {
    FilterList qualifierFilters = new FilterList(Operator.MUST_PASS_ONE);
    for (String columnName : columns) {
        // Columns with compound qualifiers (e.g. events) need the required
        // separator appended before prefix matching.
        byte[] qualifierWithSeparator = createColQualifierPrefix(colPrefix, columnName);
        BinaryPrefixComparator prefixMatcher = new BinaryPrefixComparator(colPrefix.getColumnPrefixBytes(qualifierWithSeparator));
        qualifierFilters.addFilter(new QualifierFilter(CompareOp.EQUAL, prefixMatcher));
    }
    return qualifierFilters;
}
Also used : BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) FilterList(org.apache.hadoop.hbase.filter.FilterList) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)

Example 4 with BinaryPrefixComparator

Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in the project pinpoint by Naver.

From the class HbaseApplicationTraceIndexDao, method makeResponseTimeFilter.

/**
 * Makes the hbase filter that selects y-axis values (response time) so that
 * transactions can be picked for the scatter chart. To use this filter, the
 * column qualifier must be prefixed with the 4-byte elapsed time.
 *
 * @param area selected scatter area providing the response-time range
 * @param offsetTransactionId paging offset transaction id; may be null
 * @param offsetTransactionElapsed elapsed time of the offset transaction
 * @return a MUST_PASS_ALL filter list bounding the qualifier prefix
 */
private Filter makeResponseTimeFilter(final SelectedScatterArea area, final TransactionId offsetTransactionId, int offsetTransactionElapsed) {
    // Bound the qualifier's leading elapsed-time bytes on both sides.
    final ResponseTimeRange timeRange = area.getResponseTimeRange();
    final byte[] lowerBound = Bytes.toBytes(timeRange.getFrom());
    final byte[] upperBound = Bytes.toBytes(timeRange.getTo());
    final FilterList filters = new FilterList(Operator.MUST_PASS_ALL);
    filters.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(lowerBound)));
    filters.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryPrefixComparator(upperBound)));
    // When paging, also skip everything at or before the offset qualifier
    // (strict GREATER makes the offset itself exclusive).
    if (offsetTransactionId != null) {
        final Buffer offsetBuffer = new AutomaticBuffer(32);
        offsetBuffer.putInt(offsetTransactionElapsed);
        offsetBuffer.putPrefixedString(offsetTransactionId.getAgentId());
        offsetBuffer.putSVLong(offsetTransactionId.getAgentStartTime());
        offsetBuffer.putVLong(offsetTransactionId.getTransactionSequence());
        filters.addFilter(new QualifierFilter(CompareOp.GREATER, new BinaryPrefixComparator(offsetBuffer.getBuffer())));
    }
    return filters;
}
Also used : AutomaticBuffer(com.navercorp.pinpoint.common.buffer.AutomaticBuffer) Buffer(com.navercorp.pinpoint.common.buffer.Buffer) BinaryPrefixComparator(org.apache.hadoop.hbase.filter.BinaryPrefixComparator) ResponseTimeRange(com.navercorp.pinpoint.web.vo.ResponseTimeRange) AutomaticBuffer(com.navercorp.pinpoint.common.buffer.AutomaticBuffer) FilterList(org.apache.hadoop.hbase.filter.FilterList) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)

Aggregations

BinaryPrefixComparator (org.apache.hadoop.hbase.filter.BinaryPrefixComparator)4 QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter)4 FilterList (org.apache.hadoop.hbase.filter.FilterList)3 AutomaticBuffer (com.navercorp.pinpoint.common.buffer.AutomaticBuffer)1 Buffer (com.navercorp.pinpoint.common.buffer.Buffer)1 ResponseTimeRange (com.navercorp.pinpoint.web.vo.ResponseTimeRange)1 BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator)1 ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable)1 ColumnCountGetFilter (org.apache.hadoop.hbase.filter.ColumnCountGetFilter)1 CompareFilter (org.apache.hadoop.hbase.filter.CompareFilter)1 FamilyFilter (org.apache.hadoop.hbase.filter.FamilyFilter)1 Filter (org.apache.hadoop.hbase.filter.Filter)1 TimelineDataToRetrieve (org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve)1 TimelineFilterList (org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList)1