
Example 1 with Filter

Use of com.google.cloud.bigtable.data.v2.models.Filters.Filter in project java-bigtable-hbase by googleapis.

From the class ColumnPaginationFilterAdapter, method createChain:

/**
 * Create a filter chain that keeps only the latest value for each qualifier, applies an
 * optional intermediate filter, and limits the number of cells returned per row.
 */
private Filter createChain(ColumnPaginationFilter filter, Filter intermediate) {
    ChainFilter chain = FILTERS.chain();
    chain.filter(FILTERS.limit().cellsPerColumn(1));
    if (intermediate != null) {
        chain.filter(intermediate);
    }
    chain.filter(FILTERS.limit().cellsPerRow(filter.getLimit()));
    return chain;
}
Also used : ChainFilter(com.google.cloud.bigtable.data.v2.models.Filters.ChainFilter)
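
For reference, the chain built above can be reproduced directly against the Bigtable Filters API. The following is a minimal standalone sketch; the table id ("my-table") and page limit (10) are illustrative, not values taken from the adapter.

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.cloud.bigtable.data.v2.models.Filters.Filter;
import com.google.cloud.bigtable.data.v2.models.Query;

public class ChainFilterSketch {
    public static void main(String[] args) {
        // Keep only the newest cell per column, then cap the row at 10 cells,
        // mirroring what createChain produces for a ColumnPaginationFilter.
        Filter chain = FILTERS.chain()
            .filter(FILTERS.limit().cellsPerColumn(1))
            .filter(FILTERS.limit().cellsPerRow(10));

        // Attach the filter to a read; "my-table" is a placeholder table id.
        Query query = Query.create("my-table").filter(chain);
        System.out.println(query);
    }
}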

Example 2 with Filter

Use of com.google.cloud.bigtable.data.v2.models.Filters.Filter in project java-bigtable-hbase by googleapis.

From the class FuzzyRowFilterAdapter, method adapt:

/**
 * {@inheritDoc}
 */
@Override
public Filter adapt(FilterAdapterContext context, FuzzyRowFilter filter) throws IOException {
    List<Pair<byte[], byte[]>> pairs = extractFuzzyRowFilterPairs(filter);
    if (pairs.isEmpty()) {
        return FILTERS.pass();
    }
    InterleaveFilter interleave = FILTERS.interleave();
    for (Pair<byte[], byte[]> pair : pairs) {
        Preconditions.checkArgument(pair.getFirst().length == pair.getSecond().length, "Fuzzy info and match mask must have the same length");
        interleave.filter(createSingleRowFilter(pair.getFirst(), pair.getSecond()));
    }
    return interleave;
}
Also used : InterleaveFilter(com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter) Pair(org.apache.hadoop.hbase.util.Pair)
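
The adapter above interleaves one row filter per fuzzy pair; on the HBase side those pairs come from a FuzzyRowFilter built from a fuzzy row key and a mask of equal length. The following is a minimal sketch with an illustrative row-key layout and mask values.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class FuzzyRowFilterSketch {
    public static void main(String[] args) {
        // Fuzzy key and mask must have the same length, as the adapter asserts.
        // Mask byte 0 = this position must match, 1 = any byte is acceptable.
        byte[] fuzzyKey = Bytes.toBytes("2023-????-region1");
        byte[] mask = new byte[fuzzyKey.length];
        for (int i = 5; i <= 8; i++) {
            mask[i] = 1; // wildcard the four "?" positions
        }

        List<Pair<byte[], byte[]>> pairs = Arrays.asList(Pair.newPair(fuzzyKey, mask));
        Scan scan = new Scan();
        scan.setFilter(new FuzzyRowFilter(pairs));
        System.out.println(scan);
    }
}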

Example 3 with Filter

Use of com.google.cloud.bigtable.data.v2.models.Filters.Filter in project java-bigtable-hbase by googleapis.

From the class MultipleColumnPrefixFilterAdapter, method adapt:

/**
 * {@inheritDoc}
 */
@Override
public Filter adapt(FilterAdapterContext context, MultipleColumnPrefixFilter filter) throws IOException {
    InterleaveFilter interleave = FILTERS.interleave();
    ByteString.Output outputStream = null;
    for (byte[] prefix : filter.getPrefix()) {
        if (outputStream == null) {
            outputStream = ByteString.newOutput(prefix.length * 2);
        } else {
            outputStream.reset();
        }
        ReaderExpressionHelper.writeQuotedExpression(outputStream, prefix);
        outputStream.write(ReaderExpressionHelper.ALL_QUALIFIERS_BYTES);
        interleave.filter(FILTERS.qualifier().regex(outputStream.toByteString()));
    }
    return interleave;
}
Also used : ByteString(com.google.protobuf.ByteString) InterleaveFilter(com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter)
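
The adapter above quotes each prefix with ReaderExpressionHelper and appends an all-qualifiers pattern before turning it into a qualifier regex. The following is a minimal sketch of the same shape built directly with the Filters API; the prefixes are illustrative and assumed to contain no regex metacharacters, so the quoting step is skipped.

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.cloud.bigtable.data.v2.models.Filters.Filter;
import com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter;

public class QualifierPrefixSketch {
    public static void main(String[] args) {
        // One qualifier regex per prefix, combined with an interleave, mirroring
        // what the adapter emits for a MultipleColumnPrefixFilter.
        String[] prefixes = {"user_", "order_"};
        InterleaveFilter interleave = FILTERS.interleave();
        for (String prefix : prefixes) {
            // RE2's \C matches any byte, so "prefix\C*" accepts any trailing bytes.
            interleave.filter(FILTERS.qualifier().regex(prefix + "\\C*"));
        }
        Filter filter = interleave;
        System.out.println(filter.toProto());
    }
}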

Example 4 with Filter

Use of com.google.cloud.bigtable.data.v2.models.Filters.Filter in project java-bigtable-hbase by googleapis.

From the class TestQualifierFilterAdapter, method assertAdaptedForm:

private void assertAdaptedForm(ByteArrayComparable comparable, CompareFilter.CompareOp op, Filters.Filter expectedFilter) throws IOException {
    QualifierFilter filter = new QualifierFilter(op, comparable);
    Filters.Filter actualFilter = adapter.adapt(scanWithOnFamilyScanContext, filter);
    Assert.assertEquals(expectedFilter.toProto(), actualFilter.toProto());
}
Also used : Filters(com.google.cloud.bigtable.data.v2.models.Filters) QualifierFilter(org.apache.hadoop.hbase.filter.QualifierFilter)
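
The helper compares adapted filters by their protobuf form rather than by object identity. The following is a minimal sketch of that assertion pattern, using an illustrative qualifier value.

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.cloud.bigtable.data.v2.models.Filters.Filter;
import org.junit.Assert;
import org.junit.Test;

public class FilterProtoEqualitySketch {
    @Test
    public void equivalentFiltersHaveEqualProtos() {
        // Filters built the same way serialize to identical RowFilter protos,
        // which is the property assertAdaptedForm relies on; "q1" is illustrative.
        Filter expected = FILTERS.qualifier().exactMatch("q1");
        Filter actual = FILTERS.qualifier().exactMatch("q1");
        Assert.assertEquals(expected.toProto(), actual.toProto());
    }
}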

Example 5 with Filter

Use of com.google.cloud.bigtable.data.v2.models.Filters.Filter in project java-bigtable-hbase by googleapis.

From the class TestSingleColumnValueFilterAdapter, method latestVersionOnlyComparisonsAreDone:

@Test
public void latestVersionOnlyComparisonsAreDone() throws IOException {
    byte[] filterValue = Bytes.toBytes("foobar");
    byte[] qualifier = Bytes.toBytes("someColumn");
    byte[] family = Bytes.toBytes("f");
    SingleColumnValueFilter filter = new SingleColumnValueFilter(family, qualifier, CompareFilter.CompareOp.EQUAL, new BinaryComparator(filterValue));
    filter.setFilterIfMissing(false);
    filter.setLatestVersionOnly(true);
    Filters.Filter adaptedFilter = UNDER_TEST.adapt(new FilterAdapterContext(new Scan(), null), filter);
    assertFilterIfNotMIssingMatches(family, qualifier, filterValue, true, adaptedFilter);
}
Also used : Filters(com.google.cloud.bigtable.data.v2.models.Filters) SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Scan(org.apache.hadoop.hbase.client.Scan) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator) Test(org.junit.Test)
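
The test adapts a latest-version-only SingleColumnValueFilter. The following is a minimal sketch of how that same HBase filter would be configured and attached to a Scan in client code; family, qualifier, and value mirror the test but are otherwise illustrative.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleColumnValueFilterSketch {
    public static void main(String[] args) {
        SingleColumnValueFilter filter = new SingleColumnValueFilter(
            Bytes.toBytes("f"),
            Bytes.toBytes("someColumn"),
            CompareFilter.CompareOp.EQUAL,
            new BinaryComparator(Bytes.toBytes("foobar")));
        // Keep rows that lack the column entirely ...
        filter.setFilterIfMissing(false);
        // ... and compare only the newest cell in the column.
        filter.setLatestVersionOnly(true);

        Scan scan = new Scan();
        scan.setFilter(filter);
        System.out.println(scan);
    }
}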

Aggregations

Test (org.junit.Test): 41
Filters (com.google.cloud.bigtable.data.v2.models.Filters): 39
Filter (com.google.cloud.bigtable.data.v2.models.Filters.Filter): 23
Scan (org.apache.hadoop.hbase.client.Scan): 13
Query (com.google.cloud.bigtable.data.v2.models.Query): 10
RowKeyWrapper (com.google.cloud.bigtable.hbase.util.RowKeyWrapper): 10
Row (com.google.cloud.bigtable.data.v2.models.Row): 9
MultiRowRangeFilter (org.apache.hadoop.hbase.filter.MultiRowRangeFilter): 9
RowRange (org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange): 9
ByteString (com.google.protobuf.ByteString): 7
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 6
ChainFilter (com.google.cloud.bigtable.data.v2.models.Filters.ChainFilter): 5
InterleaveFilter (com.google.cloud.bigtable.data.v2.models.Filters.InterleaveFilter): 5
ReadModifyWriteRow (com.google.cloud.bigtable.data.v2.models.ReadModifyWriteRow): 5
Result (org.apache.hadoop.hbase.client.Result): 5
Filter (org.apache.hadoop.hbase.filter.Filter): 5
QualifierFilter (org.apache.hadoop.hbase.filter.QualifierFilter): 5
BigtableDataClient (com.google.cloud.bigtable.data.v2.BigtableDataClient): 4
IOException (java.io.IOException): 4
ValueFilter (org.apache.hadoop.hbase.filter.ValueFilter): 4