
Example 1 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hadoop by apache.

From the class TimelineFilterUtils, method createHBaseSingleColValueFilter:

/**
   * Creates an HBase {@link SingleColumnValueFilter}.
   *
   * @param columnFamily Column Family represented as bytes.
   * @param columnQualifier Column Qualifier represented as bytes.
   * @param value Value.
   * @param compareOp Compare operator.
   * @param filterIfMissing This flag decides if we should filter the row if the
   *     specified column is missing. This is based on the filter's keyMustExist
   *     field.
   * @return a {@link SingleColumnValueFilter} object
   * @throws IOException
   */
private static SingleColumnValueFilter createHBaseSingleColValueFilter(byte[] columnFamily, byte[] columnQualifier, byte[] value, CompareOp compareOp, boolean filterIfMissing) throws IOException {
    SingleColumnValueFilter singleColValFilter = new SingleColumnValueFilter(columnFamily, columnQualifier, compareOp, new BinaryComparator(value));
    singleColValFilter.setLatestVersionOnly(true);
    singleColValFilter.setFilterIfMissing(filterIfMissing);
    return singleColValFilter;
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) BinaryComparator(org.apache.hadoop.hbase.filter.BinaryComparator)
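
Below is a minimal, hypothetical sketch (not part of the Hadoop source) of how a filter produced by this helper is typically wired into a Scan. The table name "timeline", column family "info", qualifier "status" and value "RUNNING" are made-up placeholders; the filter settings mirror createHBaseSingleColValueFilter above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleColumnValueFilterSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Placeholder table and column names; adjust to your own schema.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("timeline"))) {
            SingleColumnValueFilter filter = new SingleColumnValueFilter(
                Bytes.toBytes("info"), Bytes.toBytes("status"),
                CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("RUNNING")));
            filter.setLatestVersionOnly(true);  // compare only the newest cell version
            filter.setFilterIfMissing(true);    // drop rows that have no info:status cell
            Scan scan = new Scan();
            scan.setFilter(filter);
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    System.out.println(Bytes.toString(result.getRow()));
                }
            }
        }
    }
}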

Example 2 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class ReplicationTableBase, method getQueuesBelongingToServer:

/**
   * Get the queue ids and metadata (Owner and History) for the queues belonging to the named
   * server
   *
   * @param server name of the server
   * @return a ResultScanner over the QueueIds belonging to the server
   * @throws IOException
   */
protected ResultScanner getQueuesBelongingToServer(String server) throws IOException {
    Scan scan = new Scan();
    SingleColumnValueFilter filterMyQueues = new SingleColumnValueFilter(CF_QUEUE, COL_QUEUE_OWNER, CompareFilter.CompareOp.EQUAL, Bytes.toBytes(server));
    scan.setFilter(filterMyQueues);
    scan.addColumn(CF_QUEUE, COL_QUEUE_OWNER);
    scan.addColumn(CF_QUEUE, COL_QUEUE_OWNER_HISTORY);
    try (Table replicationTable = getOrBlockOnReplicationTable()) {
        ResultScanner results = replicationTable.getScanner(scan);
        return results;
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan)
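
A hypothetical caller of getQueuesBelongingToServer, assuming the CF_QUEUE, COL_QUEUE_OWNER and COL_QUEUE_OWNER_HISTORY constants of ReplicationTableBase shown above; the server name string is a placeholder. Since the method hands back an open ResultScanner, the caller is responsible for closing it, as the try-with-resources block does here.

// Hypothetical caller; the server name below is a placeholder.
try (ResultScanner queues = getQueuesBelongingToServer("regionserver1,16020,1500000000000")) {
    for (Result queue : queues) {
        String queueId = Bytes.toString(queue.getRow());
        String owner = Bytes.toString(queue.getValue(CF_QUEUE, COL_QUEUE_OWNER));
        String ownerHistory = Bytes.toString(queue.getValue(CF_QUEUE, COL_QUEUE_OWNER_HISTORY));
        // process one replication queue row per iteration
    }
}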

Example 3 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class TableBasedReplicationQueuesImpl, method getResultIfOwner:

/**
   * Attempts to run a Get on some queue. Will only return a non-null result if we currently own
   * the queue.
   *
   * @param get The Get that we want to query
   * @return The result of the Get if this server is the owner of the queue. Else it returns null.
   * @throws IOException
   */
private Result getResultIfOwner(Get get) throws IOException {
    Scan scan = new Scan(get);
    // Check if the Get currently contains all columns or only specific columns
    if (scan.getFamilyMap().size() > 0) {
        // Add the OWNER column if the scan is already only over specific columns
        scan.addColumn(CF_QUEUE, COL_QUEUE_OWNER);
    }
    scan.setMaxResultSize(1);
    SingleColumnValueFilter checkOwner = new SingleColumnValueFilter(CF_QUEUE, COL_QUEUE_OWNER, CompareFilter.CompareOp.EQUAL, serverNameBytes);
    scan.setFilter(checkOwner);
    ResultScanner scanner = null;
    try (Table replicationTable = getOrBlockOnReplicationTable()) {
        scanner = replicationTable.getScanner(scan);
        Result result = scanner.next();
        return (result == null || result.isEmpty()) ? null : result;
    } finally {
        if (scanner != null) {
            scanner.close();
        }
    }
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Table(org.apache.hadoop.hbase.client.Table) Scan(org.apache.hadoop.hbase.client.Scan) Result(org.apache.hadoop.hbase.client.Result)
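
A hypothetical caller, assuming the CF_QUEUE and COL_QUEUE_OWNER constants and the serverNameBytes field of TableBasedReplicationQueuesImpl; the row key is a placeholder. Note that the SingleColumnValueFilter constructor taking a raw byte[] value, as used above, wraps it in a BinaryComparator, so ownership is decided by an exact byte-wise comparison.

// Hypothetical caller; "queue-1" is an illustrative queue row key.
Get get = new Get(Bytes.toBytes("queue-1"));
get.addColumn(CF_QUEUE, COL_QUEUE_OWNER);
Result result = getResultIfOwner(get);
if (result == null) {
    // the queue row does not exist, or another server currently owns it
} else {
    byte[] owner = result.getValue(CF_QUEUE, COL_QUEUE_OWNER);  // matches serverNameBytes here
}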

Example 4 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project hbase by apache.

From the class TestFromClientSide, method buildScanner:

private ResultScanner buildScanner(String keyPrefix, String value, Table ht) throws IOException {
    // OurFilterList allFilters = new OurFilterList();
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
    SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
    filter.setFilterIfMissing(true);
    allFilters.addFilter(filter);
    // allFilters.addFilter(new
    // RowExcludingSingleColumnValueFilter(Bytes.toBytes("trans-tags"),
    // Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value)));
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("trans-blob"));
    scan.addFamily(Bytes.toBytes("trans-type"));
    scan.addFamily(Bytes.toBytes("trans-date"));
    scan.addFamily(Bytes.toBytes("trans-tags"));
    scan.addFamily(Bytes.toBytes("trans-group"));
    scan.setFilter(allFilters);
    return ht.getScanner(scan);
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) FilterList(org.apache.hadoop.hbase.filter.FilterList)
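
A hypothetical, test-style call to buildScanner; the key prefix, value and assertions are illustrative. Because the FilterList above uses its default MUST_PASS_ALL operator, a row is returned only if it passes both the PrefixFilter and the SingleColumnValueFilter, and setFilterIfMissing(true) additionally drops rows without a trans-tags:qual2 cell.

// Hypothetical usage; "prefix-" and "tag-value" are placeholder strings.
try (ResultScanner scanner = buildScanner("prefix-", "tag-value", ht)) {
    for (Result result : scanner) {
        // Row keys matched the PrefixFilter...
        assertTrue(Bytes.toString(result.getRow()).startsWith("prefix-"));
        // ...and trans-tags:qual2 matched the SingleColumnValueFilter.
        assertEquals("tag-value",
            Bytes.toString(result.getValue(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"))));
    }
}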

Example 5 with SingleColumnValueFilter

Use of org.apache.hadoop.hbase.filter.SingleColumnValueFilter in project phoenix by apache.

From the class PhoenixRuntimeIT, method getUserTableAndViewsFilter:

private static Filter getUserTableAndViewsFilter() {
    SingleColumnValueFilter tableFilter = new SingleColumnValueFilter(TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.TABLE_TYPE_BYTES, CompareOp.EQUAL, Bytes.toBytes(PTableType.TABLE.getSerializedValue()));
    tableFilter.setFilterIfMissing(true);
    SingleColumnValueFilter viewFilter = new SingleColumnValueFilter(TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.TABLE_TYPE_BYTES, CompareOp.EQUAL, Bytes.toBytes(PTableType.VIEW.getSerializedValue()));
    viewFilter.setFilterIfMissing(true);
    FilterList filter = new FilterList(FilterList.Operator.MUST_PASS_ONE, Arrays.asList(new Filter[] { tableFilter, viewFilter }));
    return filter;
}
Also used : SingleColumnValueFilter(org.apache.hadoop.hbase.filter.SingleColumnValueFilter) FirstKeyOnlyFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter) Filter(org.apache.hadoop.hbase.filter.Filter) FilterList(org.apache.hadoop.hbase.filter.FilterList)
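
A hypothetical use of the filter returned by this helper; the connection variable and the scan over Phoenix's SYSTEM.CATALOG table are illustrative assumptions, not part of the test. Because the list uses MUST_PASS_ONE, it acts as an OR: a metadata row passes if its TABLE_TYPE column matches either the TABLE or the VIEW branch, and setFilterIfMissing(true) on both branches drops rows that have no TABLE_TYPE cell at all.

// Hypothetical usage; "connection" is an assumed, already-open HBase Connection.
Scan scan = new Scan();
scan.setFilter(getUserTableAndViewsFilter());  // passes user tables OR views, nothing else
try (Table sysCatalog = connection.getTable(TableName.valueOf("SYSTEM.CATALOG"));
     ResultScanner scanner = sysCatalog.getScanner(scan)) {
    for (Result result : scanner) {
        byte[] tableType = result.getValue(TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.TABLE_TYPE_BYTES);
        // tableType holds the serialized PTableType of the matched table or view
    }
}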

Aggregations

SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 71 usages
Test (org.junit.Test): 39 usages
FilterList (org.apache.hadoop.hbase.filter.FilterList): 28 usages
BinaryComparator (org.apache.hadoop.hbase.filter.BinaryComparator): 16 usages
Scan (org.apache.hadoop.hbase.client.Scan): 15 usages
Put (org.apache.hadoop.hbase.client.Put): 13 usages
Result (org.apache.hadoop.hbase.client.Result): 13 usages
Filter (org.apache.hadoop.hbase.filter.Filter): 12 usages
Delete (org.apache.hadoop.hbase.client.Delete): 8 usages
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 8 usages
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 8 usages
TableName (org.apache.hadoop.hbase.TableName): 7 usages
Table (org.apache.hadoop.hbase.client.Table): 7 usages
BitComparator (org.apache.hadoop.hbase.filter.BitComparator): 7 usages
ArrayList (java.util.ArrayList): 6 usages
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 6 usages
Get (org.apache.hadoop.hbase.client.Get): 6 usages
Mutation (org.apache.hadoop.hbase.client.Mutation): 6 usages
ByteArrayComparable (org.apache.hadoop.hbase.filter.ByteArrayComparable): 6 usages
CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp): 6 usages