Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.
The class VerifyReplication, method setRowPrefixFilter.
private static void setRowPrefixFilter(Scan scan, String rowPrefixes) {
  if (rowPrefixes != null && !rowPrefixes.isEmpty()) {
    String[] rowPrefixArray = rowPrefixes.split(",");
    Arrays.sort(rowPrefixArray);
    // OR together one PrefixFilter per comma-separated prefix.
    FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    for (String prefix : rowPrefixArray) {
      Filter filter = new PrefixFilter(Bytes.toBytes(prefix));
      filterList.addFilter(filter);
    }
    scan.setFilter(filterList);
    // Narrow the scan range to the span covered by the sorted prefixes.
    byte[] startPrefixRow = Bytes.toBytes(rowPrefixArray[0]);
    byte[] lastPrefixRow = Bytes.toBytes(rowPrefixArray[rowPrefixArray.length - 1]);
    setStartAndStopRows(scan, startPrefixRow, lastPrefixRow);
  }
}
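setStartAndStopRows is not shown on this page. As a minimal sketch, here is the Scan the method above effectively builds for the hypothetical input "bar,foo"; the stop-row computation is my assumption about what setStartAndStopRows does, not the method itself:

// Sketch only: equivalent Scan for rowPrefixes = "bar,foo".
Scan scan = new Scan();
FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ONE);
filters.addFilter(new PrefixFilter(Bytes.toBytes("bar")));
filters.addFilter(new PrefixFilter(Bytes.toBytes("foo")));
scan.setFilter(filters);
scan.setStartRow(Bytes.toBytes("bar"));
// Assumed stop row: the largest prefix incremented by one (treating the
// bytes as unsigned), so every row starting with "foo" stays inside
// the half-open range [start, stop).
scan.setStopRow(Bytes.unsignedCopyAndIncrement(Bytes.toBytes("foo")));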
Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.
The class TestImportExport, method testWithFilter.
/**
 * Create a simple table, run an Export Job on it, Import with filtering on, verify counts,
 * attempt with invalid values.
 */
@Test
public void testWithFilter() throws Exception {
  // Create simple table to export
  HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
  UTIL.getAdmin().createTable(desc);
  Table exportTable = UTIL.getConnection().getTable(desc.getTableName());
  Put p1 = new Put(ROW1);
  p1.addColumn(FAMILYA, QUAL, now, QUAL);
  p1.addColumn(FAMILYA, QUAL, now + 1, QUAL);
  p1.addColumn(FAMILYA, QUAL, now + 2, QUAL);
  p1.addColumn(FAMILYA, QUAL, now + 3, QUAL);
  p1.addColumn(FAMILYA, QUAL, now + 4, QUAL);
  // Having another row would actually test the filter.
  Put p2 = new Put(ROW2);
  p2.addColumn(FAMILYA, QUAL, now, QUAL);
  exportTable.put(Arrays.asList(p1, p2));
  // Export the simple table
  String[] args = new String[] { name.getMethodName(), FQ_OUTPUT_DIR, "1000" };
  assertTrue(runExport(args));
  // Import to a new table
  final String IMPORT_TABLE = name.getMethodName() + "import";
  desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
  desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
  UTIL.getAdmin().createTable(desc);
  Table importTable = UTIL.getConnection().getTable(desc.getTableName());
  args = new String[] {
    "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
    "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1),
    IMPORT_TABLE, FQ_OUTPUT_DIR, "1000" };
  assertTrue(runImport(args));
  // get the count of the source table for that time range
  PrefixFilter filter = new PrefixFilter(ROW1);
  int count = getCount(exportTable, filter);
  Assert.assertEquals("Unexpected row count between export and import tables",
    count, getCount(importTable, null));
  // and then test that a broken command doesn't bork everything - easier here because we don't
  // need to re-run the export job
  args = new String[] {
    "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
    "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1),
    name.getMethodName(), FQ_OUTPUT_DIR, "1000" };
  assertFalse(runImport(args));
  // cleanup
  exportTable.close();
  importTable.close();
}
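The getCount helper the test relies on is not shown on this page. A minimal sketch of what such a helper could look like, assuming it lives in the same test class (alongside the existing HBase client imports) and tallies the cells returned by a filtered scan; the actual counting unit in TestImportExport may differ:

// Hypothetical sketch of the getCount helper used above.
private static int getCount(Table table, Filter filter) throws IOException {
  Scan scan = new Scan();
  if (filter != null) {
    scan.setFilter(filter);
  }
  int count = 0;
  try (ResultScanner scanner = table.getScanner(scan)) {
    for (Result result : scanner) {
      count += result.size(); // tally cells; counting rows would be count++
    }
  }
  return count;
}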
Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.
The class Export, method getExportFilter.
private static Filter getExportFilter(String[] args) {
  Filter exportFilter = null;
  String filterCriteria = (args.length > 5) ? args[5] : null;
  if (filterCriteria == null) {
    return null;
  }
  if (filterCriteria.startsWith("^")) {
    // A leading "^" marks the criteria as a regular expression over row keys.
    String regexPattern = filterCriteria.substring(1);
    exportFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator(regexPattern));
  } else {
    // Otherwise treat the criteria as a (possibly binary-escaped) row prefix.
    exportFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
  }
  return exportFilter;
}
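For illustration, here are the filters the two branches would build for a couple of hypothetical criteria strings (the values are made up; the constructors are the ones used in the method above):

// Criteria "^row-\\d+"  ->  regex match over the full row key
Filter regex = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("row-\\d+"));
// Criteria "row-\\x00"  ->  prefix match on the binary-decoded bytes
// { 'r', 'o', 'w', '-', 0x00 }; toBytesBinary decodes the \x escapes
Filter prefix = new PrefixFilter(Bytes.toBytesBinary("row-\\x00"));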
Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.
The class CellCounter, method getRowFilter.
private static Filter getRowFilter(String[] args) {
  Filter rowFilter = null;
  String filterCriteria = (args.length > 3) ? args[3] : null;
  if (filterCriteria == null) {
    return null;
  }
  if (filterCriteria.startsWith("^")) {
    // Same convention as Export above: "^" switches to regex row matching.
    String regexPattern = filterCriteria.substring(1);
    rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(regexPattern));
  } else {
    rowFilter = new PrefixFilter(Bytes.toBytesBinary(filterCriteria));
  }
  return rowFilter;
}
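A short sketch of how the returned filter would be wired into the Scan that drives the counting job (an assumption about the job-setup code that calls getRowFilter, which is not shown here):

// Sketch: attach the optional row filter to the job's Scan.
Scan scan = new Scan();
Filter rowFilter = getRowFilter(args);
if (rowFilter != null) {
  scan.setFilter(rowFilter); // only rows matching the criteria are counted
}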
Use of org.apache.hadoop.hbase.filter.PrefixFilter in project hbase by apache.
The class TestSerialization, method testScan.
@Test
public void testScan() throws Exception {
  byte[] startRow = Bytes.toBytes("startRow");
  byte[] stopRow = Bytes.toBytes("stopRow");
  byte[] fam = Bytes.toBytes("fam");
  byte[] qf1 = Bytes.toBytes("qf1");
  long ts = System.currentTimeMillis();
  int maxVersions = 2;
  Scan scan = new Scan(startRow, stopRow);
  scan.addColumn(fam, qf1);
  scan.setTimeRange(ts, ts + 1);
  scan.setMaxVersions(maxVersions);
  ClientProtos.Scan scanProto = ProtobufUtil.toScan(scan);
  Scan desScan = ProtobufUtil.toScan(scanProto);
  assertTrue(Bytes.equals(scan.getStartRow(), desScan.getStartRow()));
  assertTrue(Bytes.equals(scan.getStopRow(), desScan.getStopRow()));
  assertEquals(scan.getCacheBlocks(), desScan.getCacheBlocks());
  Set<byte[]> set = null;
  Set<byte[]> desSet = null;
  for (Map.Entry<byte[], NavigableSet<byte[]>> entry : scan.getFamilyMap().entrySet()) {
    assertTrue(desScan.getFamilyMap().containsKey(entry.getKey()));
    set = entry.getValue();
    desSet = desScan.getFamilyMap().get(entry.getKey());
    for (byte[] column : set) {
      assertTrue(desSet.contains(column));
    }
    // Test filters are serialized properly.
    scan = new Scan(startRow);
    final String name = "testScan";
    byte[] prefix = Bytes.toBytes(name);
    scan.setFilter(new PrefixFilter(prefix));
    scanProto = ProtobufUtil.toScan(scan);
    desScan = ProtobufUtil.toScan(scanProto);
    Filter f = desScan.getFilter();
    assertTrue(f instanceof PrefixFilter);
  }
  assertEquals(scan.getMaxVersions(), desScan.getMaxVersions());
  TimeRange tr = scan.getTimeRange();
  TimeRange desTr = desScan.getTimeRange();
  assertEquals(tr.getMax(), desTr.getMax());
  assertEquals(tr.getMin(), desTr.getMin());
}
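The test above round-trips the whole Scan through its protobuf form. As a minimal standalone sketch, the same PrefixFilter round trip can be exercised with the filter's own serialization methods (my choice for a self-contained example; the test itself goes through ProtobufUtil.toScan):

import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterRoundTripSketch {
  public static void main(String[] args) throws Exception {
    PrefixFilter original = new PrefixFilter(Bytes.toBytes("testScan"));
    // Serialize to the filter's protobuf representation and back.
    byte[] serialized = original.toByteArray();
    PrefixFilter restored = PrefixFilter.parseFrom(serialized);
    // The prefix survives the round trip.
    System.out.println(Bytes.equals(original.getPrefix(), restored.getPrefix())); // true
  }
}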