use of org.apache.hadoop.hbase.filter.FamilyFilter in project hadoop by apache.
the class TimelineFilterUtils method createFilterForConfsOrMetricsToRetrieve.
/**
* Create filters for confs or metrics to retrieve. This list includes a
* configs/metrics family filter and relevant filters for confs/metrics to
* retrieve, if present.
*
* @param <T> Describes the type of column prefix.
* @param confsOrMetricToRetrieve configs/metrics to retrieve.
* @param columnFamily config or metric column family.
* @param columnPrefix config or metric column prefix.
* @return a filter list.
* @throws IOException if any problem occurs while creating the filters.
*/
public static <T> Filter createFilterForConfsOrMetricsToRetrieve(
    TimelineFilterList confsOrMetricToRetrieve, ColumnFamily<T> columnFamily,
    ColumnPrefix<T> columnPrefix) throws IOException {
  Filter familyFilter = new FamilyFilter(CompareOp.EQUAL,
      new BinaryComparator(columnFamily.getBytes()));
  if (confsOrMetricToRetrieve != null &&
      !confsOrMetricToRetrieve.getFilterList().isEmpty()) {
    // If confsOrMetricsToRetrieve are specified, create a filter list based
    // on it and the family filter.
    FilterList filter = new FilterList(familyFilter);
    filter.addFilter(
        createHBaseFilterList(columnPrefix, confsOrMetricToRetrieve));
    return filter;
  } else {
    // Only the family filter needs to be added.
    return familyFilter;
  }
}
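For context, a minimal sketch of how the returned filter might be attached to a read; the confsToRetrieve list, the configs family/prefix constants, and the table handle are assumptions for illustration, not part of the snippet above:

// Hypothetical usage: build the confs filter and attach it to a Scan.
// EntityColumnFamily.CONFIGS, EntityColumnPrefix.CONFIG and entityTable are
// placeholders for whatever family, prefix and table the caller actually reads.
Filter confsFilter = TimelineFilterUtils.createFilterForConfsOrMetricsToRetrieve(
    confsToRetrieve, EntityColumnFamily.CONFIGS, EntityColumnPrefix.CONFIG);
Scan scan = new Scan();
scan.setFilter(confsFilter);
ResultScanner scanner = entityTable.getScanner(scan);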
use of org.apache.hadoop.hbase.filter.FamilyFilter in project hadoop by apache.
the class ApplicationEntityReader method constructFilterListBasedOnFields.
@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
  if (!needCreateFilterListBasedOnFields()) {
    // Fetch all the columns. No need of a filter.
    return null;
  }
  FilterList listBasedOnFields = new FilterList(Operator.MUST_PASS_ONE);
  FilterList infoColFamilyList = new FilterList();
  // By default fetch everything in INFO column family.
  FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL,
      new BinaryComparator(ApplicationColumnFamily.INFO.getBytes()));
  infoColFamilyList.addFilter(infoColumnFamily);
  if (!isSingleEntityRead() && fetchPartialColsFromInfoFamily()) {
    // We can fetch only some of the columns from info family.
    infoColFamilyList.addFilter(createFilterListForColsOfInfoFamily());
  } else {
    // Exclude column prefixes in info column family which are not required
    // based on fields to retrieve.
    excludeFieldsFromInfoColFamily(infoColFamilyList);
  }
  listBasedOnFields.addFilter(infoColFamilyList);
  updateFilterForConfsAndMetricsToRetrieve(listBasedOnFields);
  return listBasedOnFields;
}
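As a side note on how these lists combine: a FilterList built without an operator defaults to MUST_PASS_ALL, so infoColFamilyList requires every added filter to match, while listBasedOnFields (MUST_PASS_ONE) passes a cell if any of its branches matches. A minimal sketch, with FAMILY, QUALIFIER and OTHER_FAMILY as placeholder byte arrays:

// MUST_PASS_ALL (the default): a cell must match both the family filter and
// the qualifier filter.
FilterList and = new FilterList(
    new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(FAMILY)),
    new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(QUALIFIER)));
// MUST_PASS_ONE: a cell may match either branch.
FilterList or = new FilterList(Operator.MUST_PASS_ONE);
or.addFilter(and);
or.addFilter(new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(OTHER_FAMILY)));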
use of org.apache.hadoop.hbase.filter.FamilyFilter in project phoenix by apache.
the class ScannerBuilder method getColumnFilters.
/**
* @param columns columns to filter
* @return filter that will skip any {@link KeyValue} that doesn't match one of the passed columns
*         and, when a specific qualifier is referenced, that qualifier
*/
private Filter getColumnFilters(Collection<? extends ColumnReference> columns) {
  // each column needs to be added as an OR, so we need to separate them out
  FilterList columnFilters = new FilterList(FilterList.Operator.MUST_PASS_ONE);
  // create a filter that matches each column reference
  for (ColumnReference ref : columns) {
    Filter columnFilter = new FamilyFilter(CompareOp.EQUAL,
        new BinaryComparator(ref.getFamily()));
    // combine with a match for the qualifier, if the qualifier is a specific qualifier
    if (!Bytes.equals(ColumnReference.ALL_QUALIFIERS, ref.getQualifier())) {
      columnFilter = new FilterList(columnFilter,
          new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(ref.getQualifier())));
    }
    columnFilters.addFilter(columnFilter);
  }
  return columnFilters;
}
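A hedged usage sketch, assuming a two-argument ColumnReference constructor and a caller that attaches the result to a Scan; the family and qualifier names are placeholders:

// Hypothetical usage: only keep cells belonging to the referenced columns.
Collection<ColumnReference> columns = Arrays.asList(
    new ColumnReference(Bytes.toBytes("fam"), Bytes.toBytes("qual")),
    // ALL_QUALIFIERS means "every qualifier in this family".
    new ColumnReference(Bytes.toBytes("otherFam"), ColumnReference.ALL_QUALIFIERS));
Scan scan = new Scan();
scan.setFilter(getColumnFilters(columns));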
use of org.apache.hadoop.hbase.filter.FamilyFilter in project hbase by apache.
the class TestAsyncTable method testCheckAndMutateWithTimestampFilter.
@Test
public void testCheckAndMutateWithTimestampFilter() throws Throwable {
  AsyncTable<?> table = getTable.get();
  // Put with specifying the timestamp
  table.put(new Put(row).addColumn(FAMILY, Bytes.toBytes("A"), 100, Bytes.toBytes("a"))).get();
  // Put with success
  CheckAndMutateResult result = table.checkAndMutate(CheckAndMutate.newBuilder(row)
      .ifMatches(new FilterList(
          new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator(FAMILY)),
          new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("A"))),
          new TimestampsFilter(Collections.singletonList(100L))))
      .build(new Put(row).addColumn(FAMILY, Bytes.toBytes("B"), Bytes.toBytes("b")))).get();
  assertTrue(result.isSuccess());
  assertNull(result.getResult());
  Result r = table.get(new Get(row).addColumn(FAMILY, Bytes.toBytes("B"))).get();
  assertEquals("b", Bytes.toString(r.getValue(FAMILY, Bytes.toBytes("B"))));
  // Put with failure
  result = table.checkAndMutate(CheckAndMutate.newBuilder(row)
      .ifMatches(new FilterList(
          new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator(FAMILY)),
          new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("A"))),
          new TimestampsFilter(Collections.singletonList(101L))))
      .build(new Put(row).addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("c")))).get();
  assertFalse(result.isSuccess());
  assertNull(result.getResult());
  assertFalse(table.exists(new Get(row).addColumn(FAMILY, Bytes.toBytes("C"))).get());
}
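To make the condition explicit, roughly the same check expressed as a plain existence test; this is a sketch for illustration, not part of the test:

// Hypothetical equivalent of the ifMatches condition above: is there a cell in
// FAMILY, qualifier "A", written exactly at timestamp 100?
Get condition = new Get(row)
    .setFilter(new FilterList(
        new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator(FAMILY)),
        new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("A"))),
        new TimestampsFilter(Collections.singletonList(100L))));
boolean conditionMet = table.exists(condition).get();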
use of org.apache.hadoop.hbase.filter.FamilyFilter in project hadoop by apache.
the class FlowRunEntityReader method constructFilterListBasedOnFields.
@Override
protected FilterList constructFilterListBasedOnFields() throws IOException {
  FilterList list = new FilterList(Operator.MUST_PASS_ONE);
  // By default fetch everything in INFO column family.
  FamilyFilter infoColumnFamily = new FamilyFilter(CompareOp.EQUAL,
      new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
  TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
  // Metrics are always returned if we are reading a single entity.
  if (!isSingleEntityRead() &&
      !hasField(dataToRetrieve.getFieldsToRetrieve(), Field.METRICS)) {
    FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
    infoColFamilyList.addFilter(infoColumnFamily);
    infoColFamilyList.addFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
        new BinaryPrefixComparator(FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(""))));
    list.addFilter(infoColFamilyList);
  } else {
    // Check if metricsToRetrieve are specified and, if they are, create a
    // filter list for the info column family by adding the flow run table's
    // fixed columns and a list for the metrics to retrieve. Please note that
    // fieldsToRetrieve will have METRICS added to it if metricsToRetrieve are
    // specified (in augmentParams()).
    TimelineFilterList metricsToRetrieve = dataToRetrieve.getMetricsToRetrieve();
    if (metricsToRetrieve != null && !metricsToRetrieve.getFilterList().isEmpty()) {
      FilterList infoColFamilyList = new FilterList();
      infoColFamilyList.addFilter(infoColumnFamily);
      FilterList columnsList = updateFixedColumns();
      columnsList.addFilter(TimelineFilterUtils.createHBaseFilterList(
          FlowRunColumnPrefix.METRIC, metricsToRetrieve));
      infoColFamilyList.addFilter(columnsList);
      list.addFilter(infoColFamilyList);
    }
  }
  return list;
}
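For completeness, a minimal sketch of how a reader typically applies the constructed list, assuming a Scan over the flow run table; the table handle is a placeholder:

// Hypothetical usage: attach the field-based filter list to the reader's Scan.
FilterList filterList = constructFilterListBasedOnFields();
Scan scan = new Scan();
if (filterList != null && !filterList.getFilters().isEmpty()) {
  scan.setFilter(filterList);
}
ResultScanner results = flowRunTable.getScanner(scan);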