Use of org.apache.hadoop.hbase.filter.FilterList in project hbase by apache.
From the class TestHRegion, method testIndexesScanWithOneDeletedRow.
@Test
public void testIndexesScanWithOneDeletedRow() throws IOException {
    byte[] family = Bytes.toBytes("family");
    // Setting up region
    this.region = initHRegion(tableName, method, CONF, family);
    try {
        // Write one row, flush it to a store file, then delete it.
        Put put = new Put(Bytes.toBytes(1L));
        put.addColumn(family, qual1, 1L, Bytes.toBytes(1L));
        region.put(put);
        region.flush(true);
        Delete delete = new Delete(Bytes.toBytes(1L), 1L);
        region.delete(delete);
        // Write a second row that should survive the scan.
        put = new Put(Bytes.toBytes(2L));
        put.addColumn(family, qual1, 2L, Bytes.toBytes(2L));
        region.put(put);
        // Range filter 0 <= qual1 <= 3: both bounds must pass (MUST_PASS_ALL).
        Scan idxScan = new Scan();
        idxScan.addFamily(family);
        idxScan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL,
            Arrays.<Filter>asList(
                new SingleColumnValueFilter(family, qual1, CompareOp.GREATER_OR_EQUAL,
                    new BinaryComparator(Bytes.toBytes(0L))),
                new SingleColumnValueFilter(family, qual1, CompareOp.LESS_OR_EQUAL,
                    new BinaryComparator(Bytes.toBytes(3L))))));
        InternalScanner scanner = region.getScanner(idxScan);
        List<Cell> res = new ArrayList<>();
        while (scanner.next(res)) {
            // Drain the scanner; cells accumulate in res.
        }
        // Only the undeleted row should remain.
        assertEquals(1L, res.size());
    } finally {
        HBaseTestingUtility.closeRegionAndWAL(this.region);
        this.region = null;
    }
}
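The same range check under the HBase 2.x client API, where CompareFilter.CompareOp is deprecated in favor of CompareOperator. A minimal sketch, not part of the test above; the family and qualifier names are illustrative:

import java.util.Arrays;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

byte[] family = Bytes.toBytes("family");
byte[] qualifier = Bytes.toBytes("qual1");
// MUST_PASS_ALL of a lower and an upper bound keeps rows with 0 <= value <= 3.
Filter range = new FilterList(FilterList.Operator.MUST_PASS_ALL, Arrays.<Filter>asList(
    new SingleColumnValueFilter(family, qualifier, CompareOperator.GREATER_OR_EQUAL,
        new BinaryComparator(Bytes.toBytes(0L))),
    new SingleColumnValueFilter(family, qualifier, CompareOperator.LESS_OR_EQUAL,
        new BinaryComparator(Bytes.toBytes(3L)))));
Scan scan = new Scan().addFamily(family).setFilter(range);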
Use of org.apache.hadoop.hbase.filter.FilterList in project pinpoint by naver.
From the class HbaseAgentEventDao, method getAgentEvents.
@Override
public List<AgentEventBo> getAgentEvents(String agentId, Range range, Set<AgentEventType> excludeEventTypes) {
    if (agentId == null) {
        throw new NullPointerException("agentId must not be null");
    }
    if (range == null) {
        throw new NullPointerException("range must not be null");
    }
    Scan scan = new Scan();
    scan.setMaxVersions(1);
    scan.setCaching(SCANNER_CACHE_SIZE);
    // The start row is built from range.getTo() and the stop row from range.getFrom(),
    // implying the row key encodes a reversed timestamp (newest rows sort first).
    scan.setStartRow(createRowKey(agentId, range.getTo()));
    scan.setStopRow(createRowKey(agentId, range.getFrom()));
    scan.addFamily(HBaseTables.AGENT_EVENT_CF_EVENTS);
    if (!CollectionUtils.isEmpty(excludeEventTypes)) {
        // MUST_PASS_ALL of NOT_EQUAL qualifier filters: a column passes only if it
        // matches none of the excluded event type codes.
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        for (AgentEventType excludeEventType : excludeEventTypes) {
            byte[] excludeQualifier = Bytes.toBytes(excludeEventType.getCode());
            filterList.addFilter(new QualifierFilter(CompareFilter.CompareOp.NOT_EQUAL, new BinaryComparator(excludeQualifier)));
        }
        scan.setFilter(filterList);
    }
    List<AgentEventBo> agentEvents = this.hbaseOperations2.find(HBaseTables.AGENT_EVENT, scan, agentEventResultsExtractor);
    logger.debug("agentEvents found. {}", agentEvents);
    return agentEvents;
}
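The complementary inclusion pattern flips both the operator and the comparison: a MUST_PASS_ONE list of EQUAL filters keeps only the listed event types. A minimal sketch; includeEventTypes is a hypothetical parameter, not part of the Pinpoint DAO:

// Keep only the listed event types: one EQUAL match is enough to pass.
FilterList includeList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
for (AgentEventType includeEventType : includeEventTypes) { // hypothetical parameter
    byte[] includeQualifier = Bytes.toBytes(includeEventType.getCode());
    includeList.addFilter(new QualifierFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(includeQualifier)));
}
scan.setFilter(includeList);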
Use of org.apache.hadoop.hbase.filter.FilterList in project phoenix by apache.
From the class PhoenixRuntimeIT, method getUserTableAndViewsFilter.
private static Filter getUserTableAndViewsFilter() {
    // Match rows whose TABLE_TYPE column marks them as a user table.
    SingleColumnValueFilter tableFilter = new SingleColumnValueFilter(TABLE_FAMILY_BYTES,
        PhoenixDatabaseMetaData.TABLE_TYPE_BYTES, CompareOp.EQUAL,
        Bytes.toBytes(PTableType.TABLE.getSerializedValue()));
    // Drop rows that lack the column entirely instead of passing them through.
    tableFilter.setFilterIfMissing(true);
    // Match rows whose TABLE_TYPE column marks them as a view.
    SingleColumnValueFilter viewFilter = new SingleColumnValueFilter(TABLE_FAMILY_BYTES,
        PhoenixDatabaseMetaData.TABLE_TYPE_BYTES, CompareOp.EQUAL,
        Bytes.toBytes(PTableType.VIEW.getSerializedValue()));
    viewFilter.setFilterIfMissing(true);
    // MUST_PASS_ONE: a row qualifies if it is either a table or a view.
    FilterList filter = new FilterList(FilterList.Operator.MUST_PASS_ONE, Arrays.asList(new Filter[] { tableFilter, viewFilter }));
    return filter;
}
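A hedged usage sketch: apply the filter to a scan over Phoenix's SYSTEM.CATALOG table. The connection handle and the literal table name are assumptions for illustration:

Scan scan = new Scan();
scan.setFilter(getUserTableAndViewsFilter());
// connection is an org.apache.hadoop.hbase.client.Connection, assumed to exist.
try (Table catalog = connection.getTable(TableName.valueOf("SYSTEM.CATALOG"));
        ResultScanner results = catalog.getScanner(scan)) {
    for (Result result : results) {
        // Each surviving row describes a user table or a view.
    }
}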
Use of org.apache.hadoop.hbase.filter.FilterList in project phoenix by apache.
From the class ScanUtil, method intersectScanRange.
public static boolean intersectScanRange(Scan scan, byte[] startKey, byte[] stopKey, boolean useSkipScan) {
    boolean mayHaveRows = false;
    int offset = 0;
    if (ScanUtil.isLocalIndex(scan)) {
        offset = startKey.length != 0 ? startKey.length : stopKey.length;
    }
    // Narrow the requested range by the scan's existing bounds: the larger
    // start key and the smaller stop key win.
    byte[] existingStartKey = scan.getStartRow();
    byte[] existingStopKey = scan.getStopRow();
    if (existingStartKey.length > 0) {
        if (startKey.length == 0 || Bytes.compareTo(existingStartKey, startKey) > 0) {
            startKey = existingStartKey;
        }
    } else {
        mayHaveRows = true;
    }
    if (existingStopKey.length > 0) {
        if (stopKey.length == 0 || Bytes.compareTo(existingStopKey, stopKey) < 0) {
            stopKey = existingStopKey;
        }
    } else {
        mayHaveRows = true;
    }
    scan.setStartRow(startKey);
    scan.setStopRow(stopKey);
    if (offset > 0 && useSkipScan) {
        // For local indexes, strip the leading offset bytes before
        // intersecting with the skip scan filter.
        byte[] temp = null;
        if (startKey.length != 0) {
            temp = new byte[startKey.length - offset];
            System.arraycopy(startKey, offset, temp, 0, startKey.length - offset);
            startKey = temp;
        }
        if (stopKey.length != 0) {
            temp = new byte[stopKey.length - offset];
            System.arraycopy(stopKey, offset, temp, 0, stopKey.length - offset);
            stopKey = temp;
        }
    }
    mayHaveRows = mayHaveRows || Bytes.compareTo(scan.getStartRow(), scan.getStopRow()) < 0;
    // If the scan is using a skip scan filter, intersect and replace the filter.
    if (mayHaveRows && useSkipScan) {
        Filter filter = scan.getFilter();
        if (filter instanceof SkipScanFilter) {
            SkipScanFilter oldFilter = (SkipScanFilter) filter;
            SkipScanFilter newFilter = oldFilter.intersect(startKey, stopKey);
            if (newFilter == null) {
                return false;
            }
            // Intersection found: replace the skip scan filter with the intersected one.
            scan.setFilter(newFilter);
        } else if (filter instanceof FilterList) {
            // Rebuild the list, intersecting any nested SkipScanFilter and
            // carrying the other filters over unchanged.
            FilterList oldList = (FilterList) filter;
            FilterList newList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            for (Filter f : oldList.getFilters()) {
                if (f instanceof SkipScanFilter) {
                    SkipScanFilter newFilter = ((SkipScanFilter) f).intersect(startKey, stopKey);
                    if (newFilter == null) {
                        return false;
                    }
                    newList.addFilter(newFilter);
                } else {
                    newList.addFilter(f);
                }
            }
            scan.setFilter(newList);
        }
    }
    return mayHaveRows;
}
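A worked sketch of the range intersection, using string keys for readability; the outcome follows directly from the method above:

// The scan's existing bounds are ["b", "f").
Scan scan = new Scan();
scan.setStartRow(Bytes.toBytes("b"));
scan.setStopRow(Bytes.toBytes("f"));
// Intersect with the requested range ["a", "d"): the larger start key ("b")
// and the smaller stop key ("d") win.
boolean mayHaveRows = ScanUtil.intersectScanRange(scan, Bytes.toBytes("a"), Bytes.toBytes("d"), false);
// scan now spans ["b", "d"), and mayHaveRows is true since "b" < "d".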
Use of org.apache.hadoop.hbase.filter.FilterList in project phoenix by apache.
From the class ScannerBuilder, method getColumnFilters.
/**
 * @param columns columns to filter
 * @return filter that will skip any {@link KeyValue} that doesn't match one of the passed
 *         columns
 */
private Filter getColumnFilters(Collection<? extends ColumnReference> columns) {
    // Each column is an alternative, so the per-column filters are ORed together.
    FilterList columnFilters = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    // Create a filter that matches each column reference.
    for (ColumnReference ref : columns) {
        Filter columnFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(ref.getFamily()));
        // Combine with a match on the qualifier, if the reference names a specific qualifier.
        if (!Bytes.equals(ColumnReference.ALL_QUALIFIERS, ref.getQualifier())) {
            columnFilter = new FilterList(columnFilter,
                new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(ref.getQualifier())));
        }
        columnFilters.addFilter(columnFilter);
    }
    return columnFilters;
}
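A hedged usage sketch, assuming Phoenix's ColumnReference(byte[] family, byte[] qualifier) constructor; the family and qualifier names are illustrative:

// Pass cells from f:q1, from f:q2, or from any column in family g.
Collection<ColumnReference> columns = Arrays.asList(
    new ColumnReference(Bytes.toBytes("f"), Bytes.toBytes("q1")),
    new ColumnReference(Bytes.toBytes("f"), Bytes.toBytes("q2")),
    new ColumnReference(Bytes.toBytes("g"), ColumnReference.ALL_QUALIFIERS));
Filter filter = getColumnFilters(columns);
Scan scan = new Scan();
scan.setFilter(filter);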