
Example 1 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class TestScannersWithFilters, method testPrefixFilter.

@Test
public void testPrefixFilter() throws Exception {
    // Grab rows from group one (half of total)
    long expectedRows = numRows / 2;
    long expectedKeys = colsPerRow;
    Scan s = new Scan();
    s.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
    verifyScan(s, expectedRows, expectedKeys);
}
Also used : PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter), Scan (org.apache.hadoop.hbase.client.Scan), Test (org.junit.Test)
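
A behavioral note, with a minimal sketch: PrefixFilter compares each row key against the prefix but does not seek to it, so the scan above still reads from the start of the table until it reaches the matching rows. Pairing the filter with an explicit start row, or using the Scan.setRowPrefixFilter convenience (present in 1.x-era and later clients; it only derives start and stop rows from the prefix), avoids that. The prefix below reuses the one from the test.

byte[] prefix = Bytes.toBytes("testRowOne");
Scan s = new Scan();
// Begin at the prefix instead of the first row of the table.
s.setStartRow(prefix);
s.setFilter(new PrefixFilter(prefix));
// Equivalent shorthand: sets start and stop rows, no Filter involved.
Scan s2 = new Scan();
s2.setRowPrefixFilter(prefix);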

Example 2 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class TestFromClientSide, method buildScanner.

private ResultScanner buildScanner(String keyPrefix, String value, Table ht) throws IOException {
    // OurFilterList allFilters = new OurFilterList();
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
    SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
    filter.setFilterIfMissing(true);
    allFilters.addFilter(filter);
    // allFilters.addFilter(new
    // RowExcludingSingleColumnValueFilter(Bytes.toBytes("trans-tags"),
    // Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value)));
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("trans-blob"));
    scan.addFamily(Bytes.toBytes("trans-type"));
    scan.addFamily(Bytes.toBytes("trans-date"));
    scan.addFamily(Bytes.toBytes("trans-tags"));
    scan.addFamily(Bytes.toBytes("trans-group"));
    scan.setFilter(allFilters);
    return ht.getScanner(scan);
}
Also used : SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter), PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter), FilterList (org.apache.hadoop.hbase.filter.FilterList)
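
The FilterList here defaults to Operator.MUST_PASS_ALL, so a row must satisfy both the prefix and the column-value check, and setFilterIfMissing(true) additionally drops rows that lack trans-tags:qual2 entirely. A minimal usage sketch (the key prefix, value, and the qualifier read back are illustrative):

try (ResultScanner scanner = buildScanner("blob", "someValue", ht)) {
    // ResultScanner is Iterable, so the matching rows stream through for-each.
    for (Result result : scanner) {
        byte[] tag = result.getValue(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
        // process the row ...
    }
}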

Example 3 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class TestHRegion, method buildScanner.

private InternalScanner buildScanner(String keyPrefix, String value, HRegion r) throws IOException {
    // Defaults FilterList.Operator.MUST_PASS_ALL.
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
    // Only return rows where this column value exists in the row.
    SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
    filter.setFilterIfMissing(true);
    allFilters.addFilter(filter);
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("trans-blob"));
    scan.addFamily(Bytes.toBytes("trans-type"));
    scan.addFamily(Bytes.toBytes("trans-date"));
    scan.addFamily(Bytes.toBytes("trans-tags"));
    scan.addFamily(Bytes.toBytes("trans-group"));
    scan.setFilter(allFilters);
    return r.getScanner(scan);
}
Also used : SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter), PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter), FilterList (org.apache.hadoop.hbase.filter.FilterList), Scan (org.apache.hadoop.hbase.client.Scan)
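
Unlike the client-side example above, this variant returns the region-level InternalScanner, which is drained with next(List<Cell>); each call fills the list with one row's cells and returns true while more rows remain. A minimal sketch, assuming the method above (arguments illustrative):

InternalScanner scanner = buildScanner("blob", "someValue", region);
List<Cell> cells = new ArrayList<>();
boolean moreRows;
do {
    // One row's cells per call; true means the region has more rows.
    moreRows = scanner.next(cells);
    // process cells ...
    cells.clear();
} while (moreRows);
scanner.close();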

Example 4 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class TestScanner, method testFilters.

@Test
public void testFilters() throws IOException {
    try {
        this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
        HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
        byte[] prefix = Bytes.toBytes("ab");
        Filter newFilter = new PrefixFilter(prefix);
        Scan scan = new Scan();
        scan.setFilter(newFilter);
        rowPrefixFilter(scan);
        byte[] stopRow = Bytes.toBytes("bbc");
        newFilter = new WhileMatchFilter(new InclusiveStopFilter(stopRow));
        scan = new Scan();
        scan.setFilter(newFilter);
        rowInclusiveStopFilter(scan, stopRow);
    } finally {
        HBaseTestingUtility.closeRegionAndWAL(this.region);
    }
}
Also used : PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter), InclusiveStopFilter (org.apache.hadoop.hbase.filter.InclusiveStopFilter), WhileMatchFilter (org.apache.hadoop.hbase.filter.WhileMatchFilter), Filter (org.apache.hadoop.hbase.filter.Filter), Scan (org.apache.hadoop.hbase.client.Scan), Test (org.junit.Test)
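
The second half of the test wraps InclusiveStopFilter in WhileMatchFilter, which turns the wrapped filter's first rejection into end-of-scan via filterAllRemaining, so the scan terminates at the stop row instead of merely filtering everything after it. The same wrapping applies to the prefix case; a minimal sketch:

Scan scan = new Scan();
// End the scan outright once a row no longer starts with "ab".
scan.setFilter(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("ab"))));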

Example 5 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class TableResource, method getScanResource.

@Path("{scanspec: .*[*]$}")
public TableScanResource getScanResource(@Context final UriInfo uriInfo,
        @PathParam("scanspec") final String scanSpec,
        @HeaderParam("Accept") final String contentType,
        @DefaultValue(Integer.MAX_VALUE + "") @QueryParam(Constants.SCAN_LIMIT) int userRequestedLimit,
        @DefaultValue("") @QueryParam(Constants.SCAN_START_ROW) String startRow,
        @DefaultValue("") @QueryParam(Constants.SCAN_END_ROW) String endRow,
        @DefaultValue("") @QueryParam(Constants.SCAN_COLUMN) List<String> column,
        @DefaultValue("1") @QueryParam(Constants.SCAN_MAX_VERSIONS) int maxVersions,
        @DefaultValue("-1") @QueryParam(Constants.SCAN_BATCH_SIZE) int batchSize,
        @DefaultValue("0") @QueryParam(Constants.SCAN_START_TIME) long startTime,
        @DefaultValue(Long.MAX_VALUE + "") @QueryParam(Constants.SCAN_END_TIME) long endTime,
        @DefaultValue("true") @QueryParam(Constants.SCAN_CACHE_BLOCKS) boolean cacheBlocks,
        @DefaultValue("false") @QueryParam(Constants.SCAN_REVERSED) boolean reversed,
        @DefaultValue("") @QueryParam(Constants.SCAN_FILTER) String filters) {
    try {
        Filter filter = null;
        Scan tableScan = new Scan();
        if (scanSpec.indexOf('*') > 0) {
            String prefix = scanSpec.substring(0, scanSpec.indexOf('*'));
            byte[] prefixBytes = Bytes.toBytes(prefix);
            filter = new PrefixFilter(prefixBytes);
            if (startRow.isEmpty()) {
                tableScan.setStartRow(prefixBytes);
            }
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Query parameters  : Table Name = > " + this.table + " Start Row => " + startRow + " End Row => " + endRow + " Columns => " + column + " Start Time => " + startTime + " End Time => " + endTime + " Cache Blocks => " + cacheBlocks + " Max Versions => " + maxVersions + " Batch Size => " + batchSize);
        }
        Table hTable = RESTServlet.getInstance().getTable(this.table);
        tableScan.setBatch(batchSize);
        tableScan.setMaxVersions(maxVersions);
        tableScan.setTimeRange(startTime, endTime);
        if (!startRow.isEmpty()) {
            tableScan.setStartRow(Bytes.toBytes(startRow));
        }
        tableScan.setStopRow(Bytes.toBytes(endRow));
        for (String csplit : column) {
            String[] familysplit = csplit.trim().split(":");
            if (familysplit.length == 2) {
                if (familysplit[1].length() > 0) {
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Scan family and column : " + familysplit[0] + "  " + familysplit[1]);
                    }
                    tableScan.addColumn(Bytes.toBytes(familysplit[0]), Bytes.toBytes(familysplit[1]));
                } else {
                    tableScan.addFamily(Bytes.toBytes(familysplit[0]));
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Scan family : " + familysplit[0] + " and empty qualifier.");
                    }
                    tableScan.addColumn(Bytes.toBytes(familysplit[0]), null);
                }
            } else if (StringUtils.isNotEmpty(familysplit[0])) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Scan family : " + familysplit[0]);
                }
                tableScan.addFamily(Bytes.toBytes(familysplit[0]));
            }
        }
        FilterList filterList = null;
        if (StringUtils.isNotEmpty(filters)) {
            ParseFilter pf = new ParseFilter();
            Filter filterParam = pf.parseFilterString(filters);
            if (filter != null) {
                filterList = new FilterList(filter, filterParam);
            } else {
                filter = filterParam;
            }
        }
        if (filterList != null) {
            tableScan.setFilter(filterList);
        } else if (filter != null) {
            tableScan.setFilter(filter);
        }
        int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
        tableScan.setCaching(fetchSize);
        tableScan.setReversed(reversed);
        return new TableScanResource(hTable.getScanner(tableScan), userRequestedLimit);
    } catch (IOException exp) {
        servlet.getMetrics().incrementFailedScanRequests(1);
        processException(exp);
        LOG.warn(exp);
        return null;
    }
}
Also used : Table (org.apache.hadoop.hbase.client.Table), PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter), ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter), Filter (org.apache.hadoop.hbase.filter.Filter), Scan (org.apache.hadoop.hbase.client.Scan), FilterList (org.apache.hadoop.hbase.filter.FilterList), IOException (java.io.IOException), Path (javax.ws.rs.Path)
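
The filters query parameter above is handed to ParseFilter, so the PrefixFilter-plus-SingleColumnValueFilter combination from the earlier examples can also arrive as a filter string. A minimal sketch (prefix, column, and value are illustrative; parseFilterString throws CharacterCodingException):

ParseFilter pf = new ParseFilter();
Filter parsed = pf.parseFilterString(
    "PrefixFilter ('testRowOne') AND "
    + "SingleColumnValueFilter ('trans-tags', 'qual2', =, 'binary:someValue', true, true)");
Scan scan = new Scan();
scan.setFilter(parsed);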

Aggregations

PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 13
Filter (org.apache.hadoop.hbase.filter.Filter): 6
Scan (org.apache.hadoop.hbase.client.Scan): 5
FilterList (org.apache.hadoop.hbase.filter.FilterList): 4
Test (org.junit.Test): 4
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 3
DateExpression (org.apache.drill.common.expression.ValueExpressions.DateExpression): 2
IntExpression (org.apache.drill.common.expression.ValueExpressions.IntExpression): 2
LongExpression (org.apache.drill.common.expression.ValueExpressions.LongExpression): 2
QuotedString (org.apache.drill.common.expression.ValueExpressions.QuotedString): 2
TimeExpression (org.apache.drill.common.expression.ValueExpressions.TimeExpression): 2
TimeStampExpression (org.apache.drill.common.expression.ValueExpressions.TimeStampExpression): 2
Table (org.apache.hadoop.hbase.client.Table): 2
RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator): 2
SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 2
IOException (java.io.IOException): 1
Map (java.util.Map): 1
NavigableSet (java.util.NavigableSet): 1
Path (javax.ws.rs.Path): 1
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 1