Example 1 with InterleaveFilter

Use of com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter in project java-bigtable-hbase by googleapis.

From the class FuzzyRowFilterAdapter, method adapt:

/**
 * {@inheritDoc}
 */
@Override
public Filter adapt(FilterAdapterContext context, FuzzyRowFilter filter) throws IOException {
    List<Pair<byte[], byte[]>> pairs = extractFuzzyRowFilterPairs(filter);
    if (pairs.isEmpty()) {
        return FILTERS.pass();
    }
    InterleaveFilter interleave = FILTERS.interleave();
    for (Pair<byte[], byte[]> pair : pairs) {
        Preconditions.checkArgument(pair.getFirst().length == pair.getSecond().length, "Fuzzy info and match mask must have the same length");
        interleave.filter(createSingleRowFilter(pair.getFirst(), pair.getSecond()));
    }
    return interleave;
}
Also used: InterleaveFilter(com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter), Pair(org.apache.hadoop.hbase.util.Pair)
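
The interleave here acts as a logical OR: a row is emitted if it matches at least one of the per-pair filters produced by createSingleRowFilter (a private helper of the adapter, not shown above). A minimal standalone sketch of the same shape, unioning two row-key regex filters and attaching the result to a query; the table id and the regex patterns are illustrative placeholders, not values taken from the adapter:

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.cloud.bigtable.data.v2.models.Filters;
import com.google.cloud.bigtable.data.v2.models.Query;

public class InterleaveAsUnionSketch {
    public static void main(String[] args) {
        // Each sub-filter matches a different row-key pattern; the interleave
        // passes rows that satisfy at least one of them (a union).
        Filters.Filter union =
            FILTERS
                .interleave()
                .filter(FILTERS.key().regex("user-\\d{4}#\\C*"))   // placeholder pattern
                .filter(FILTERS.key().regex("admin-\\d{4}#\\C*")); // placeholder pattern

        // "my-table" is a placeholder table id.
        Query query = Query.create("my-table").filter(union);
        System.out.println(query);
    }
}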

Example 2 with InterleaveFilter

Use of com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter in project java-bigtable-hbase by googleapis.

From the class MultipleColumnPrefixFilterAdapter, method adapt:

/**
 * {@inheritDoc}
 */
@Override
public Filter adapt(FilterAdapterContext context, MultipleColumnPrefixFilter filter) throws IOException {
    InterleaveFilter interleave = FILTERS.interleave();
    ByteString.Output outputStream = null;
    for (byte[] prefix : filter.getPrefix()) {
        if (outputStream == null) {
            outputStream = ByteString.newOutput(prefix.length * 2);
        } else {
            outputStream.reset();
        }
        ReaderExpressionHelper.writeQuotedExpression(outputStream, prefix);
        outputStream.write(ReaderExpressionHelper.ALL_QUALIFIERS_BYTES);
        interleave.filter(FILTERS.qualifier().regex(outputStream.toByteString()));
    }
    return interleave;
}
Also used: ByteString(com.google.protobuf.ByteString), InterleaveFilter(com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter)
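
Each HBase prefix becomes one qualifier-regex sub-filter of the interleave, so the combined filter matches any column whose qualifier starts with one of the prefixes. A rough standalone sketch of that shape, assuming plain ASCII prefixes that need no escaping (the adapter itself quotes the prefix bytes through ReaderExpressionHelper before appending a catch-all suffix):

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.cloud.bigtable.data.v2.models.Filters;

public class ColumnPrefixUnionSketch {
    public static void main(String[] args) {
        // Illustrative prefixes; the real adapter takes them from the HBase
        // MultipleColumnPrefixFilter via filter.getPrefix().
        String[] prefixes = {"attr_", "meta_"};

        Filters.InterleaveFilter interleave = FILTERS.interleave();
        for (String prefix : prefixes) {
            // "\\C*" (any remaining bytes) plays the role of the all-qualifiers
            // suffix the adapter appends after the quoted prefix.
            interleave.filter(FILTERS.qualifier().regex(prefix + "\\C*"));
        }
        System.out.println(interleave);
    }
}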

Example 3 with InterleaveFilter

Use of com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter in project java-bigtable-hbase by googleapis.

From the class ScanAdapter, method createColFamilyTimeRange:

private Optional<Filters.Filter> createColFamilyTimeRange(Scan scan) {
    if (scan.getColumnFamilyTimeRange().isEmpty()) {
        return Optional.absent();
    }
    // Builds filter of the form
    // ("family1" & "rangeStart, rangeEnd") | ("family2" & "rangeStart2, rangeEnd2")
    InterleaveFilter interleave = FILTERS.interleave();
    Map<byte[], TimeRange> range = scan.getColumnFamilyTimeRange();
    for (Map.Entry<byte[], TimeRange> entry : range.entrySet()) {
        interleave.filter(FILTERS.chain().filter(createFamilyFilter(entry.getKey())).filter(createTimeRangeFilter(entry.getValue())));
    }
    return Optional.<Filters.Filter>of(interleave);
}
Also used: TimeRange(org.apache.hadoop.hbase.io.TimeRange), InterleaveFilter(com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter), ChainFilter(com.google.cloud.bigtable.data.v2.models.Filters.ChainFilter), TimestampRangeFilter(com.google.cloud.bigtable.data.v2.models.Filters.TimestampRangeFilter), Filter(org.apache.hadoop.hbase.filter.Filter), Map(java.util.Map)
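
For every column family in the map, the adapter chains a family filter with a timestamp-range filter and then interleaves the chains, which produces the OR-of-ANDs shape sketched in the comment. A hedged sketch of one such branch built directly against the Filters API; note that HBase TimeRange values are in milliseconds while Bigtable timestamp filters expect microseconds, so the adapter's createTimeRangeFilter (not shown) has to convert. The family name and range below are placeholders:

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.cloud.bigtable.data.v2.models.Filters;

public class FamilyTimeRangeSketch {
    public static void main(String[] args) {
        long startMillis = 1_600_000_000_000L; // placeholder range start (ms), inclusive
        long endMillis = 1_600_000_600_000L;   // placeholder range end (ms), exclusive

        // One branch of the interleave: "family & [start, end)".
        Filters.Filter branch =
            FILTERS
                .chain()
                .filter(FILTERS.family().exactMatch("cf1")) // placeholder family name
                .filter(
                    FILTERS
                        .timestamp()
                        .range()
                        .startClosed(startMillis * 1000L) // convert ms -> microseconds
                        .endOpen(endMillis * 1000L));

        // The adapter adds one such branch per entry of Scan#getColumnFamilyTimeRange().
        Filters.Filter union = FILTERS.interleave().filter(branch);
        System.out.println(union);
    }
}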

Example 4 with InterleaveFilter

Use of com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter in project java-bigtable-hbase by googleapis.

From the class ScanAdapter, method createColumnFamilyFilter:

private Optional<Filters.Filter> createColumnFamilyFilter(Scan scan) {
    if (!scan.hasFamilies()) {
        return Optional.absent();
    }
    // Build a filter of the form:
    // (fam1 | (qual1 + qual2 + qual3)) + (fam2 | qual1) + (fam3)
    InterleaveFilter interleave = FILTERS.interleave();
    Map<byte[], NavigableSet<byte[]>> familyMap = scan.getFamilyMap();
    for (Map.Entry<byte[], NavigableSet<byte[]>> entry : familyMap.entrySet()) {
        Filters.Filter familyFilter = createFamilyFilter(entry.getKey());
        NavigableSet<byte[]> qualifiers = entry.getValue();
        // Add a qualifier filter for each specified qualifier:
        if (qualifiers != null) {
            InterleaveFilter columnFilters = FILTERS.interleave();
            for (byte[] qualifier : qualifiers) {
                columnFilters.filter(createColumnQualifierFilter(qualifier));
            }
            // Build filter of the form "family | (qual1 + qual2 + qual3)"
            interleave.filter(FILTERS.chain().filter(familyFilter).filter(columnFilters));
        } else {
            interleave.filter(familyFilter);
        }
    }
    return Optional.<Filters.Filter>of(interleave);
}
Also used: NavigableSet(java.util.NavigableSet), Filters(com.google.cloud.bigtable.data.v2.models.Filters), InterleaveFilter(com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter), ChainFilter(com.google.cloud.bigtable.data.v2.models.Filters.ChainFilter), TimestampRangeFilter(com.google.cloud.bigtable.data.v2.models.Filters.TimestampRangeFilter), Filter(org.apache.hadoop.hbase.filter.Filter), Map(java.util.Map)
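
The outer interleave unions one branch per requested family: either the bare family filter, or a chain of the family filter with an inner interleave over that family's qualifiers. A minimal sketch of the two-branch case, with placeholder family and qualifier names:

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.cloud.bigtable.data.v2.models.Filters;

public class FamilyQualifierFilterSketch {
    public static void main(String[] args) {
        // Branch 1: family "cf1" restricted to qualifiers "q1" and "q2",
        // i.e. chain(family, interleave(q1, q2)).
        Filters.Filter fam1Branch =
            FILTERS
                .chain()
                .filter(FILTERS.family().exactMatch("cf1"))             // placeholder family
                .filter(
                    FILTERS
                        .interleave()
                        .filter(FILTERS.qualifier().exactMatch("q1"))   // placeholder qualifier
                        .filter(FILTERS.qualifier().exactMatch("q2"))); // placeholder qualifier

        // Branch 2: the whole family "cf2", no qualifier restriction.
        Filters.Filter fam2Branch = FILTERS.family().exactMatch("cf2"); // placeholder family

        Filters.Filter combined = FILTERS.interleave().filter(fam1Branch).filter(fam2Branch);
        System.out.println(combined);
    }
}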

Aggregations

InterleaveFilter (com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter) 4
ChainFilter (com.google.cloud.bigtable.data.v2.models.Filters.ChainFilter) 2
TimestampRangeFilter (com.google.cloud.bigtable.data.v2.models.Filters.TimestampRangeFilter) 2
Map (java.util.Map) 2
Filter (org.apache.hadoop.hbase.filter.Filter) 2
Filters (com.google.cloud.bigtable.data.v2.models.Filters) 1
ByteString (com.google.protobuf.ByteString) 1
NavigableSet (java.util.NavigableSet) 1
TimeRange (org.apache.hadoop.hbase.io.TimeRange) 1
Pair (org.apache.hadoop.hbase.util.Pair) 1