use of org.apache.hadoop.hbase.filter.QualifierFilter in project hadoop by apache.
the class FlowRunEntityReader method constructFilterListBasedOnFields.
@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
  FilterList list = new FilterList(Operator.MUST_PASS_ONE);
  // By default fetch everything in INFO column family.
  FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL,
      new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
  TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
  // Metrics are always returned if we are reading a single entity.
  if (!isSingleEntityRead()
      && !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
    FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
    infoColFamilyList.addFilter(infoColumnFamily);
    infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
        new BinaryPrefixComparator(
            FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""))));
    list.addFilter(infoColFamilyList);
  } else {
    // Check if metricsToRetrieve are specified and if they are, create a
    // filter list for info column family by adding flow run tables columns
    // and a list for metrics to retrieve. Please note that fieldsToRetrieve
    // will have METRICS added to it if metricsToRetrieve are specified
    // (in augmentParams()).
    TimelineFilterList metricsToRetrieve =
        dataToRetrieve.getMetricsToRetrieve();
    if (metricsToRetrieve != null
        && !metricsToRetrieve.getFilterList().isEmpty()) {
      FilterList infoColFamilyList = new FilterList();
      infoColFamilyList.addFilter(infoColumnFamily);
      FilterList columnsList = updateFixedColumns();
      columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(
          FlowRunColumnPrefix.METRIC, metricsToRetrieve));
      infoColFamilyList.addFilter(columnsList);
      list.addFilter(infoColFamilyList);
    }
  }
  return list;
}
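For context, here is a minimal, self-contained sketch of the same idea: excluding every column whose qualifier starts with a given prefix by pairing a NOT_EQUAL QualifierFilter with a BinaryPrefixComparator. The table name "flowrun", the family "i", and the metric prefix "m!" are placeholders for this sketch, not the actual timeline service schema.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ExcludePrefixedQualifiersScan {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("flowrun"))) { // placeholder table
      Scan scan = new Scan();
      scan.addFamily(Bytes.toBytes("i")); // placeholder "info" family
      // Keep only cells whose qualifier does NOT start with the metric prefix.
      scan.setFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
          new BinaryPrefixComparator(Bytes.toBytes("m!")))); // placeholder metric prefix
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(result);
        }
      }
    }
  }
}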
use of org.apache.hadoop.hbase.filter.QualifierFilter in project hadoop by apache.
the class TimelineEntityReader method createFiltersFromColumnQualifiers.
/**
* Create a filter list of qualifier filters based on passed set of columns.
*
* @param <T> Describes the type of column prefix.
* @param colPrefix Column Prefix.
* @param columns set of column qualifiers.
* @return filter list.
*/
protected <T> FilterList createFiltersFromColumnQualifiers(
    ColumnPrefix<T> colPrefix, Set<String> columns) {
  FilterList list = new FilterList(Operator.MUST_PASS_ONE);
  for (String column : columns) {
    // For columns which have compound column qualifiers (eg. events), we need
    // to include the required separator.
    byte[] compoundColQual = createColQualifierPrefix(colPrefix, column);
    list.addFilter(new QualifierFilter(CompareOp.EQUAL,
        new BinaryPrefixComparator(
            colPrefix.getColumnPrefixBytes(compoundColQual))));
  }
  return list;
}
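As a rough illustration of the same pattern outside the timeline service, the helper below ORs together one prefix-matching QualifierFilter per requested column. The class and method names are made up for this sketch.

import java.util.Set;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierPrefixFilters {
  /** OR together one prefix-match QualifierFilter per requested column prefix. */
  public static FilterList anyOfPrefixes(Set<String> columnPrefixes) {
    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
    for (String prefix : columnPrefixes) {
      list.addFilter(new QualifierFilter(CompareOp.EQUAL,
          new BinaryPrefixComparator(Bytes.toBytes(prefix))));
    }
    return list;
  }
}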
use of org.apache.hadoop.hbase.filter.QualifierFilter in project hbase by apache.
the class AccessControlLists method removeTablePermissions.
private static void removeTablePermissions(TableName tableName, byte[] column,
    Table table, boolean closeTable) throws IOException {
  Scan scan = new Scan();
  scan.addFamily(ACL_LIST_FAMILY);
  String columnName = Bytes.toString(column);
  scan.setFilter(new QualifierFilter(CompareOp.EQUAL,
      new RegexStringComparator(
          String.format("(%s%s%s)|(%s%s)$", ACL_KEY_DELIMITER, columnName,
              ACL_KEY_DELIMITER, ACL_KEY_DELIMITER, columnName))));
  Set<byte[]> qualifierSet = new TreeSet<>(Bytes.BYTES_COMPARATOR);
  ResultScanner scanner = null;
  try {
    scanner = table.getScanner(scan);
    for (Result res : scanner) {
      for (byte[] q : res.getFamilyMap(ACL_LIST_FAMILY).navigableKeySet()) {
        qualifierSet.add(q);
      }
    }
    if (qualifierSet.size() > 0) {
      Delete d = new Delete(tableName.getName());
      for (byte[] qualifier : qualifierSet) {
        d.addColumns(ACL_LIST_FAMILY, qualifier);
      }
      table.delete(d);
    }
  } finally {
    if (scanner != null) {
      scanner.close();
    }
    if (closeTable) {
      table.close();
    }
  }
}
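The snippet below is a stripped-down sketch of this regex-based approach: it builds a Scan whose QualifierFilter matches qualifiers that carry the given column name between delimiters or at the end of the qualifier. The class name is hypothetical, and the family is left as a parameter rather than assuming the ACL table layout.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;

public class RegexQualifierScan {
  /** Build a Scan matching qualifiers that contain columnName between delimiters or at the end. */
  public static Scan scanForColumn(byte[] family, String columnName, char delimiter) {
    String regex = String.format("(%s%s%s)|(%s%s)$",
        delimiter, columnName, delimiter, delimiter, columnName);
    Scan scan = new Scan();
    scan.addFamily(family);
    // Only cells whose qualifier matches the regex pass the filter.
    scan.setFilter(new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator(regex)));
    return scan;
  }
}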
use of org.apache.hadoop.hbase.filter.QualifierFilter in project pinpoint by naver.
the class HbaseApplicationTraceIndexDao method makeResponseTimeFilter.
/**
 * Makes the HBase filter for selecting values on the y-axis (response time)
 * so that transactions can be selected for the scatter chart. The 4-byte
 * elapsed time must be attached as the prefix of the column qualifier for
 * this filter to work.
 *
 * @param area selected scatter area bounding the response-time range
 * @param offsetTransactionId transaction id to continue after, may be null
 * @param offsetTransactionElapsed elapsed time of the offset transaction
 * @return the composed qualifier filter
 */
private Filter makeResponseTimeFilter(final SelectedScatterArea area,
    final TransactionId offsetTransactionId, int offsetTransactionElapsed) {
  // filter by response time
  ResponseTimeRange responseTimeRange = area.getResponseTimeRange();
  byte[] responseFrom = Bytes.toBytes(responseTimeRange.getFrom());
  byte[] responseTo = Bytes.toBytes(responseTimeRange.getTo());
  FilterList filterList = new FilterList(Operator.MUST_PASS_ALL);
  filterList.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL,
      new BinaryPrefixComparator(responseFrom)));
  filterList.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL,
      new BinaryPrefixComparator(responseTo)));
  // add offset
  if (offsetTransactionId != null) {
    final Buffer buffer = new AutomaticBuffer(32);
    buffer.putInt(offsetTransactionElapsed);
    buffer.putPrefixedString(offsetTransactionId.getAgentId());
    buffer.putSVLong(offsetTransactionId.getAgentStartTime());
    buffer.putVLong(offsetTransactionId.getTransactionSequence());
    byte[] qualifierOffset = buffer.getBuffer();
    filterList.addFilter(new QualifierFilter(CompareOp.GREATER,
        new BinaryPrefixComparator(qualifierOffset)));
  }
  return filterList;
}
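In isolation, the qualifier range part of this filter can be sketched as below. The class name is illustrative, and it assumes the qualifier starts with a 4-byte big-endian int, as the Javadoc above describes for the elapsed-time prefix.

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class QualifierRangeFilter {
  /**
   * Keep cells whose 4-byte qualifier prefix (an int) lies in [from, to].
   * Works for non-negative values, where the big-endian encoding preserves numeric order.
   */
  public static Filter elapsedTimeRange(int from, int to) {
    FilterList range = new FilterList(Operator.MUST_PASS_ALL);
    range.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL,
        new BinaryPrefixComparator(Bytes.toBytes(from))));
    range.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL,
        new BinaryPrefixComparator(Bytes.toBytes(to))));
    return range;
  }
}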
use of org.apache.hadoop.hbase.filter.QualifierFilter in project hbase by apache.
the class TestScannersWithFilters method testSkipFilter.
@Test
public void testSkipFilter() throws Exception {
  // Skip any row containing the qualifier "testQualifierOne-2".
  // Should only get rows from the second group, and all keys.
  Filter f = new SkipFilter(new QualifierFilter(CompareOperator.NOT_EQUAL,
      new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
  Scan s = new Scan();
  s.setFilter(f);
  KeyValue[] kvs = {
      // testRowTwo-0
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-2
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
      // testRowTwo-3
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]) };
  verifyScanFull(s, kvs);
}
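A compact sketch of the same SkipFilter pattern: wrapping a NOT_EQUAL QualifierFilter in a SkipFilter drops every row that contains even one cell with the named qualifier. The class and method names here are illustrative.

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.SkipFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SkipRowsWithQualifier {
  /** Drop every row that contains at least one cell with the given qualifier. */
  public static Scan skipRowsContaining(String qualifier) {
    Scan scan = new Scan();
    scan.setFilter(new SkipFilter(new QualifierFilter(CompareOperator.NOT_EQUAL,
        new BinaryComparator(Bytes.toBytes(qualifier)))));
    return scan;
  }
}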