Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in project pinpoint by naver.
The class HbaseTraceDaoV2, method createSpanQualifierFilter.
public Filter createSpanQualifierFilter() {
    byte indexPrefix = SpanEncoder.TYPE_SPAN;
    ByteArrayComparable prefixComparator = new BinaryPrefixComparator(new byte[] { indexPrefix });
    Filter qualifierPrefixFilter = new QualifierFilter(CompareFilter.CompareOp.EQUAL, prefixComparator);
    return qualifierPrefixFilter;
}
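A minimal sketch of how such a qualifier-prefix filter might be attached to a scan, assuming an HBase 1.x client, an illustrative table name, and a stand-in prefix byte in place of SpanEncoder.TYPE_SPAN:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.QualifierFilter;

public class SpanQualifierScanSketch {
    public static void main(String[] args) throws Exception {
        byte indexPrefix = 0; // stand-in for SpanEncoder.TYPE_SPAN
        Filter qualifierPrefixFilter = new QualifierFilter(CompareFilter.CompareOp.EQUAL,
                new BinaryPrefixComparator(new byte[] { indexPrefix }));

        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("TraceV2"))) { // illustrative table name
            Scan scan = new Scan();
            scan.setFilter(qualifierPrefixFilter);
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    // Only cells whose qualifier starts with the prefix byte are returned.
                    System.out.println(result);
                }
            }
        }
    }
}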
Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in project hadoop by apache.
The class FlowRunEntityReader, method constructFilterListBasedOnFields.
@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    // By default fetch everything in the INFO column family.
    FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
    TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
    // Metrics are always returned if we are reading a single entity.
    if (!isSingleEntityRead() && !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
        FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
        infoColFamilyList.addFilter(infoColumnFamily);
        infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL, new BinaryPrefixComparator(FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""))));
        list.addFilter(infoColFamilyList);
    } else {
        // Check if metricsToRetrieve are specified and, if they are, create a
        // filter list for the info column family by adding flow run table columns
        // and a list for metrics to retrieve. Please note that fieldsToRetrieve
        // will have METRICS added to it if metricsToRetrieve are specified
        // (in augmentParams()).
        TimelineFilterList metricsToRetrieve = dataToRetrieve.getMetricsToRetrieve();
        if (metricsToRetrieve != null && !metricsToRetrieve.getFilterList().isEmpty()) {
            FilterList infoColFamilyList = new FilterList();
            infoColFamilyList.addFilter(infoColumnFamily);
            FilterList columnsList = updateFixedColumns();
            columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(FlowRunColumnPrefix.METRIC, metricsToRetrieve));
            infoColFamilyList.addFilter(columnsList);
            list.addFilter(infoColFamilyList);
        }
    }
    return list;
}
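The exclude-by-prefix idea used above can be exercised without a cluster. The sketch below is a simplification, not the reader's exact wiring: it uses a MUST_PASS_ALL list and made-up family and qualifier names to show how a FamilyFilter plus a NOT_EQUAL BinaryPrefixComparator behaves against two hand-built cells.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ExcludeMetricPrefixSketch {
    public static void main(String[] args) throws Exception {
        byte[] row = Bytes.toBytes("flow-run-row");   // made-up row key
        byte[] infoFamily = Bytes.toBytes("i");       // made-up INFO family name
        byte[] metricPrefix = Bytes.toBytes("m!");    // made-up metric column prefix

        // Keep cells that are in the info family AND whose qualifier does not
        // start with the metric prefix (MUST_PASS_ALL requires both filters to accept).
        FilterList infoWithoutMetrics = new FilterList(Operator.MUST_PASS_ALL);
        infoWithoutMetrics.addFilter(new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(infoFamily)));
        infoWithoutMetrics.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL, new BinaryPrefixComparator(metricPrefix)));

        KeyValue plainInfoCell = new KeyValue(row, infoFamily, Bytes.toBytes("flow_name"), Bytes.toBytes("wordcount"));
        KeyValue metricCell = new KeyValue(row, infoFamily, Bytes.toBytes("m!cpu"), Bytes.toBytes(42L));

        System.out.println(infoWithoutMetrics.filterKeyValue(plainInfoCell)); // INCLUDE
        System.out.println(infoWithoutMetrics.filterKeyValue(metricCell));    // any other return code means the cell is dropped
    }
}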
Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in project hadoop by apache.
The class TimelineEntityReader, method createFiltersFromColumnQualifiers.
/**
* Create a filter list of qualifier filters based on the passed set of columns.
*
* @param <T> Describes the type of column prefix.
* @param colPrefix Column Prefix.
* @param columns set of column qualifiers.
* @return filter list.
*/
protected <T> FilterList createFiltersFromColumnQualifiers(ColumnPrefix<T> colPrefix, Set<String> columns) {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    for (String column : columns) {
        // For columns which have compound column qualifiers (e.g. events), we need
        // to include the required separator.
        byte[] compoundColQual = createColQualifierPrefix(colPrefix, column);
        list.addFilter(new QualifierFilter(CompareOp.EQUAL, new BinaryPrefixComparator(colPrefix.getColumnPrefixBytes(compoundColQual))));
    }
    return list;
}
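A standalone sketch (with made-up qualifier strings rather than the timeline service's real encoding) of the prefix-match behaviour the EQUAL comparison relies on: BinaryPrefixComparator compares only up to the length of its own value, so it returns 0 whenever the qualifier starts with that value.

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierPrefixMatchSketch {
    public static void main(String[] args) {
        // Hypothetical compound qualifier prefix; the real bytes come from
        // colPrefix.getColumnPrefixBytes(compoundColQual) above.
        BinaryPrefixComparator comparator = new BinaryPrefixComparator(Bytes.toBytes("event!START!"));

        // 0 means "equal up to the prefix length", so CompareOp.EQUAL keeps this qualifier...
        System.out.println(comparator.compareTo(Bytes.toBytes("event!START!0001")) == 0); // true

        // ...while a qualifier with a different prefix compares non-zero and is dropped.
        System.out.println(comparator.compareTo(Bytes.toBytes("event!STOP!0001")) == 0);  // false
    }
}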
Use of org.apache.hadoop.hbase.filter.BinaryPrefixComparator in project pinpoint by naver.
The class HbaseApplicationTraceIndexDao, method makeResponseTimeFilter.
/**
 * Makes the HBase filter that selects y-axis values (response time) so that transactions can be selected for the scatter chart.
 * A 4-byte elapsed time must be attached as the prefix of the column qualifier in order to use this filter.
 *
 * @param area selected scatter-chart area carrying the response time range
 * @param offsetTransactionId transaction id used as the paging offset; may be null
 * @param offsetTransactionElapsed elapsed time of the offset transaction
 * @return filter list matching qualifiers whose leading elapsed-time bytes fall within the range
 */
private Filter makeResponseTimeFilter(final SelectedScatterArea area, final TransactionId offsetTransactionId, int offsetTransactionElapsed) {
    // filter by response time
    ResponseTimeRange responseTimeRange = area.getResponseTimeRange();
    byte[] responseFrom = Bytes.toBytes(responseTimeRange.getFrom());
    byte[] responseTo = Bytes.toBytes(responseTimeRange.getTo());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ALL);
    filterList.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(responseFrom)));
    filterList.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryPrefixComparator(responseTo)));
    // add offset
    if (offsetTransactionId != null) {
        final Buffer buffer = new AutomaticBuffer(32);
        buffer.putInt(offsetTransactionElapsed);
        buffer.putPrefixedString(offsetTransactionId.getAgentId());
        buffer.putSVLong(offsetTransactionId.getAgentStartTime());
        buffer.putVLong(offsetTransactionId.getTransactionSequence());
        byte[] qualifierOffset = buffer.getBuffer();
        filterList.addFilter(new QualifierFilter(CompareOp.GREATER, new BinaryPrefixComparator(qualifierOffset)));
    }
    return filterList;
}
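A standalone sketch of why this prefix comparison acts as a numeric range check, assuming the elapsed time is encoded as a 4-byte big-endian int at the front of the qualifier (the qualifier bytes here are made up): for non-negative values, the byte-wise order of the encoding matches numeric order, so GREATER_OR_EQUAL and LESS_OR_EQUAL against the two prefixes keep exactly the qualifiers whose leading elapsed time lies inside the range.

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.util.Bytes;

public class ResponseTimePrefixSketch {
    public static void main(String[] args) {
        // Hypothetical response-time bounds in milliseconds.
        byte[] responseFrom = Bytes.toBytes(100);
        byte[] responseTo = Bytes.toBytes(500);

        // A qualifier whose first 4 bytes encode an elapsed time of 250 ms,
        // followed by arbitrary transaction-id bytes (purely illustrative).
        byte[] qualifier = Bytes.add(Bytes.toBytes(250), Bytes.toBytes("agentId#startTime#seq"));

        // BinaryPrefixComparator looks only at the first 4 bytes of the qualifier here,
        // and compareTo() returns (comparator value) compared to (qualifier prefix).
        int cmpFrom = new BinaryPrefixComparator(responseFrom).compareTo(qualifier);
        int cmpTo = new BinaryPrefixComparator(responseTo).compareTo(qualifier);
        System.out.println("from <= elapsed: " + (cmpFrom <= 0)); // true
        System.out.println("elapsed <= to:   " + (cmpTo >= 0));   // true
    }
}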