Example 6 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class VerifyReplication, the method setRowPrefixFilter:

private static void setRowPrefixFilter(Scan scan, String rowPrefixes) {
    if (rowPrefixes != null && !rowPrefixes.isEmpty()) {
        String[] rowPrefixArray = rowPrefixes.split(",");
        // Sort so the smallest and largest prefixes bound the scan range below.
        Arrays.sort(rowPrefixArray);
        // MUST_PASS_ONE ORs the prefix filters: a row matches if any prefix matches.
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        for (String prefix : rowPrefixArray) {
            Filter filter = new PrefixFilter(Bytes.toBytes(prefix));
            filterList.addFilter(filter);
        }
        scan.setFilter(filterList);
        // Narrow the scan to [smallest prefix, just past the largest prefix].
        byte[] startPrefixRow = Bytes.toBytes(rowPrefixArray[0]);
        byte[] lastPrefixRow = Bytes.toBytes(rowPrefixArray[rowPrefixArray.length - 1]);
        setStartAndStopRows(scan, startPrefixRow, lastPrefixRow);
    }
}
Also used : PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter) FilterList(org.apache.hadoop.hbase.filter.FilterList)
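
The helper setStartAndStopRows is not shown on this page. A minimal sketch of what such a helper could do, assuming its goal is to restrict the scan to the byte range covered by the sorted prefixes: start at the smallest prefix, and stop just past the largest one by incrementing its last non-0xFF byte (the body below is an illustration, not the HBase source):

private static void setStartAndStopRows(Scan scan, byte[] startPrefixRow, byte[] lastPrefixRow) {
    // Every row matching the smallest prefix sorts at or after the prefix itself.
    scan.setStartRow(startPrefixRow);
    // Find the rightmost byte that can be incremented, dropping trailing 0xFF bytes.
    int offset = lastPrefixRow.length;
    while (offset > 0 && lastPrefixRow[offset - 1] == (byte) 0xFF) {
        offset--;
    }
    if (offset > 0) {
        byte[] stopRow = Arrays.copyOf(lastPrefixRow, offset);
        // Exclusive stop row: the first key greater than every row with the prefix.
        stopRow[offset - 1]++;
        scan.setStopRow(stopRow);
    }
    // If the largest prefix is all 0xFF bytes there is no finite stop row; leave it open.
}

The range only needs to be a coarse bound; the MUST_PASS_ONE filter list above still does the exact per-row matching.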

Example 7 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class TestImportExport, the method testWithFilter:

/**
   * Creates a simple table, runs an Export job on it, imports with filtering on, verifies counts,
   * then attempts a run with invalid values.
   */
@Test
public void testWithFilter() throws Exception {
    // Create simple table to export
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
    UTIL.getAdmin().createTable(desc);
    Table exportTable = UTIL.getConnection().getTable(desc.getTableName());
    Put p1 = new Put(ROW1);
    p1.addColumn(FAMILYA, QUAL, now, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 1, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 2, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 3, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 4, QUAL);
    // Having another row would actually test the filter.
    Put p2 = new Put(ROW2);
    p2.addColumn(FAMILYA, QUAL, now, QUAL);
    exportTable.put(Arrays.asList(p1, p2));
    // Export the simple table
    String[] args = new String[] { name.getMethodName(), FQ_OUTPUT_DIR, "1000" };
    assertTrue(runExport(args));
    // Import to a new table
    final String IMPORT_TABLE = name.getMethodName() + "import";
    desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
    UTIL.getAdmin().createTable(desc);
    Table importTable = UTIL.getConnection().getTable(desc.getTableName());
    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(), "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, FQ_OUTPUT_DIR, "1000" };
    assertTrue(runImport(args));
    // get the count of the source table for that time range
    PrefixFilter filter = new PrefixFilter(ROW1);
    int count = getCount(exportTable, filter);
    Assert.assertEquals("Unexpected row count between export and import tables", count, getCount(importTable, null));
    // and then test that a broken command doesn't bork everything - easier here because we don't
    // need to re-run the export job
    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(), "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", name.getMethodName(), FQ_OUTPUT_DIR, "1000" };
    assertFalse(runImport(args));
    // cleanup
    exportTable.close();
    importTable.close();
}
Also used : Table(org.apache.hadoop.hbase.client.Table) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
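
The getCount helper is likewise not shown. A plausible minimal version, assuming it simply scans the table with the given filter (null meaning no filter) and tallies what comes back; the body is an assumption based on the call sites above:

private static int getCount(Table table, Filter filter) throws IOException {
    Scan scan = new Scan();
    if (filter != null) {
        scan.setFilter(filter);
    }
    int count = 0;
    try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
            // Counting cells instead of rows would also work here, as long as the
            // export and import sides are counted the same way.
            count++;
        }
    }
    return count;
}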

Example 8 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class Export, the method getExportFilter:

private static Filter getExportFilter(String[] args) {
    Filter exportFilter = null;
    // The optional filter criterion is the sixth positional argument.
    String filterCriteria = (args.length > 5) ? args[5] : null;
    if (filterCriteria == null) {
        return null;
    }
    if (filterCriteria.startsWith("^")) {
        // A leading '^' marks the criterion as a regular expression over row keys.
        String regexPattern = filterCriteria.substring(1);
        exportFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator(regexPattern));
    } else {
        // Otherwise treat it as a row-key prefix; toBytesBinary allows \xNN escapes.
        exportFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
    }
    return exportFilter;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter)
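
Since getExportFilter reads args[5], the criterion is the sixth positional argument on the Export command line. A short sketch of the two branches with made-up argument values (the comments show what the method would return):

// Positional args: table, output dir, versions, start time, end time, criterion.
String[] prefixArgs = { "mytable", "/export/mytable", "1", "0", "9999999999999", "row1" };
// -> new PrefixFilter(Bytes.toBytesBinary("row1"))

String[] regexArgs = { "mytable", "/export/mytable", "1", "0", "9999999999999", "^row[0-9]+" };
// -> new RowFilter(CompareOp.EQUAL, new RegexStringComparator("row[0-9]+"))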

Example 9 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class CellCounter, the method getRowFilter:

private static Filter getRowFilter(String[] args) {
    Filter rowFilter = null;
    // The optional filter criterion is the fourth positional argument.
    String filterCriteria = (args.length > 3) ? args[3] : null;
    if (filterCriteria == null) {
        return null;
    }
    if (filterCriteria.startsWith("^")) {
        // Same convention as Export: '^' selects the regex branch.
        String regexPattern = filterCriteria.substring(1);
        rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regexPattern));
    } else {
        rowFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
    }
    return rowFilter;
}
Also used : RegexStringComparator(org.apache.hadoop.hbase.filter.RegexStringComparator) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) CompareFilter(org.apache.hadoop.hbase.filter.CompareFilter) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Filter(org.apache.hadoop.hbase.filter.Filter)
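
Both of these parsers use Bytes.toBytesBinary rather than Bytes.toBytes, so a criterion can carry non-printable bytes through \xNN escape sequences. A small illustration (the row keys are made up):

byte[] plain  = Bytes.toBytesBinary("row1");     // identical to Bytes.toBytes("row1")
byte[] binary = Bytes.toBytesBinary("row\\x00"); // bytes 'r', 'o', 'w', 0x00

This matters for prefix filtering on tables whose row keys are not printable strings.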

Example 10 with PrefixFilter

Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.

From the class TestSerialization, the method testScan:

@Test
public void testScan() throws Exception {
    byte[] startRow = "startRow".getBytes();
    byte[] stopRow = "stopRow".getBytes();
    byte[] fam = "fam".getBytes();
    byte[] qf1 = "qf1".getBytes();
    long ts = System.currentTimeMillis();
    int maxVersions = 2;
    Scan scan = new Scan(startRow, stopRow);
    scan.addColumn(fam, qf1);
    scan.setTimeRange(ts, ts + 1);
    scan.setMaxVersions(maxVersions);
    ClientProtos.Scan scanProto = ProtobufUtil.toScan(scan);
    Scan desScan = ProtobufUtil.toScan(scanProto);
    assertTrue(Bytes.equals(scan.getStartRow(), desScan.getStartRow()));
    assertTrue(Bytes.equals(scan.getStopRow(), desScan.getStopRow()));
    assertEquals(scan.getCacheBlocks(), desScan.getCacheBlocks());
    Set<byte[]> set = null;
    Set<byte[]> desSet = null;
    for (Map.Entry<byte[], NavigableSet<byte[]>> entry : scan.getFamilyMap().entrySet()) {
        assertTrue(desScan.getFamilyMap().containsKey(entry.getKey()));
        set = entry.getValue();
        desSet = desScan.getFamilyMap().get(entry.getKey());
        for (byte[] column : set) {
            assertTrue(desSet.contains(column));
        }
        // Test filters are serialized properly. Note that this reassigns scan and
        // desScan, so the assertions after the loop run against the filter scan.
        scan = new Scan(startRow);
        final String name = "testScan";
        byte[] prefix = Bytes.toBytes(name);
        scan.setFilter(new PrefixFilter(prefix));
        scanProto = ProtobufUtil.toScan(scan);
        desScan = ProtobufUtil.toScan(scanProto);
        Filter f = desScan.getFilter();
        assertTrue(f instanceof PrefixFilter);
    }
    assertEquals(scan.getMaxVersions(), desScan.getMaxVersions());
    TimeRange tr = scan.getTimeRange();
    TimeRange desTr = desScan.getTimeRange();
    assertEquals(tr.getMax(), desTr.getMax());
    assertEquals(tr.getMin(), desTr.getMin());
}
Also used : NavigableSet(java.util.NavigableSet) PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) TimeRange(org.apache.hadoop.hbase.io.TimeRange) RowFilter(org.apache.hadoop.hbase.filter.RowFilter) Filter(org.apache.hadoop.hbase.filter.Filter) Scan(org.apache.hadoop.hbase.client.Scan) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) Map(java.util.Map) Test(org.junit.Test)
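
The round trip above goes through a whole Scan; the same check can be made on a filter by itself. A minimal sketch, assuming the toFilter pair in the same shaded ProtobufUtil used above:

// Serialize the filter to its protobuf form and back again.
PrefixFilter original = new PrefixFilter(Bytes.toBytes("testScan"));
FilterProtos.Filter proto = ProtobufUtil.toFilter(original);
Filter restored = ProtobufUtil.toFilter(proto);
assertTrue(restored instanceof PrefixFilter);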

Aggregations

PrefixFilter (org.apache.hadoop.hbase.filter.PrefixFilter): 13
Filter (org.apache.hadoop.hbase.filter.Filter): 6
Scan (org.apache.hadoop.hbase.client.Scan): 5
FilterList (org.apache.hadoop.hbase.filter.FilterList): 4
Test (org.junit.Test): 4
RowFilter (org.apache.hadoop.hbase.filter.RowFilter): 3
DateExpression (org.apache.drill.common.expression.ValueExpressions.DateExpression): 2
IntExpression (org.apache.drill.common.expression.ValueExpressions.IntExpression): 2
LongExpression (org.apache.drill.common.expression.ValueExpressions.LongExpression): 2
QuotedString (org.apache.drill.common.expression.ValueExpressions.QuotedString): 2
TimeExpression (org.apache.drill.common.expression.ValueExpressions.TimeExpression): 2
TimeStampExpression (org.apache.drill.common.expression.ValueExpressions.TimeStampExpression): 2
Table (org.apache.hadoop.hbase.client.Table): 2
RegexStringComparator (org.apache.hadoop.hbase.filter.RegexStringComparator): 2
SingleColumnValueFilter (org.apache.hadoop.hbase.filter.SingleColumnValueFilter): 2
IOException (java.io.IOException): 1
Map (java.util.Map): 1
NavigableSet (java.util.NavigableSet): 1
Path (javax.ws.rs.Path): 1
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 1