Use of org.apache.hadoop.hbase.filter.Filter in project phoenix by apache.
From class DistinctPrefixFilterTest, method testVariableWithNull:
@Test
public void testVariableWithNull() throws Exception {
    // Distinct over the first of two variable-length row key columns.
    Filter f = createFilter(new int[] { -2, -2 }, 1);
    assertInclude("\00aa", f);
    assertSeekAndHint("\00aa", f, "\01");
    assertSeekAndHint("\00aa", f, "\01");
    // Distinct over both columns.
    f = createFilter(new int[] { -2, -2 }, 2);
    assertInclude("\00\00", f);
    assertSeekAndHint("\00\00", f, "\00\00\01");
    assertSeekAndHint("\00\00", f, "\00\00\01");
}
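The snippet above leans on helpers defined elsewhere in DistinctPrefixFilterTest. A minimal sketch of what they might look like against the plain HBase Filter API is shown below; the helper bodies and the expected ReturnCode values are assumptions for illustration, not the test's actual code (createFilter, which builds the filter from the column widths and prefix length, is omitted here).

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical helper methods; the real test class defines its own versions.
static void assertInclude(String rowKey, Filter f) throws IOException {
    Cell cell = KeyValueUtil.createFirstOnRow(Bytes.toBytes(rowKey));
    // A row starting a new distinct prefix is expected to pass through.
    assertEquals(ReturnCode.INCLUDE, f.filterKeyValue(cell));
}

static void assertSeekAndHint(String rowKey, Filter f, String expectedHint) throws IOException {
    Cell cell = KeyValueUtil.createFirstOnRow(Bytes.toBytes(rowKey));
    // A repeated prefix is expected to trigger a seek...
    assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, f.filterKeyValue(cell));
    // ...and the hint cell's row should be the next key to seek to.
    Cell hint = f.getNextCellHint(cell);
    assertArrayEquals(Bytes.toBytes(expectedHint), CellUtil.cloneRow(hint));
}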
Use of org.apache.hadoop.hbase.filter.Filter in project phoenix by apache.
From class DistinctPrefixFilterTest, method testFixedAfterVariable:
@Test
public void testFixedAfterVariable() throws Exception {
    // Variable-length first column followed by a fixed-width column; distinct over the first column only.
    Filter f = createFilter(new int[] { -5, 4 }, 1);
    assertInclude("00000\00aaaa", f);
    assertInclude("00001\00aaaa", f);
    assertSeekAndHint("00001\00aaaa", f, "00001\01");
    assertInclude("00003\00aaaa", f);
    assertInclude("00004\00aaaa", f);
    assertInclude("00005\00aaaa", f);
    assertSeekAndHint("00005\00aaaa", f, "00005\01");
    // Same schema, distinct over both columns: the hint advances the trailing fixed-width value.
    f = createFilter(new int[] { -5, 4 }, 2);
    assertInclude("00000\00aaaa", f);
    assertInclude("00001\00aaaa", f);
    assertSeekAndHint("00001\00aaaa", f, "00001\00aaab");
    assertInclude("00003\00aaaa", f);
    assertInclude("00004\00aaaa", f);
    assertInclude("00005\00aaaa", f);
    assertSeekAndHint("00005\00aaaa", f, "00005\00aaab");
}
Use of org.apache.hadoop.hbase.filter.Filter in project phoenix by apache.
From class ScanUtil, method intersectScanRange:
public static boolean intersectScanRange(Scan scan, byte[] startKey, byte[] stopKey, boolean useSkipScan) {
    boolean mayHaveRows = false;
    int offset = 0;
    if (ScanUtil.isLocalIndex(scan)) {
        offset = startKey.length != 0 ? startKey.length : stopKey.length;
    }
    byte[] existingStartKey = scan.getStartRow();
    byte[] existingStopKey = scan.getStopRow();
    // Keep the tighter of the scan's existing bounds and the supplied bounds.
    if (existingStartKey.length > 0) {
        if (startKey.length == 0 || Bytes.compareTo(existingStartKey, startKey) > 0) {
            startKey = existingStartKey;
        }
    } else {
        mayHaveRows = true;
    }
    if (existingStopKey.length > 0) {
        if (stopKey.length == 0 || Bytes.compareTo(existingStopKey, stopKey) < 0) {
            stopKey = existingStopKey;
        }
    } else {
        mayHaveRows = true;
    }
    scan.setStartRow(startKey);
    scan.setStopRow(stopKey);
    if (offset > 0 && useSkipScan) {
        // For local index scans, strip the leading offset from the keys before
        // intersecting them with the skip scan filter.
        byte[] temp = null;
        if (startKey.length != 0) {
            temp = new byte[startKey.length - offset];
            System.arraycopy(startKey, offset, temp, 0, startKey.length - offset);
            startKey = temp;
        }
        if (stopKey.length != 0) {
            temp = new byte[stopKey.length - offset];
            System.arraycopy(stopKey, offset, temp, 0, stopKey.length - offset);
            stopKey = temp;
        }
    }
    mayHaveRows = mayHaveRows || Bytes.compareTo(scan.getStartRow(), scan.getStopRow()) < 0;
    // If the scan is using skip scan filter, intersect and replace the filter.
    if (mayHaveRows && useSkipScan) {
        Filter filter = scan.getFilter();
        if (filter instanceof SkipScanFilter) {
            SkipScanFilter oldFilter = (SkipScanFilter) filter;
            SkipScanFilter newFilter = oldFilter.intersect(startKey, stopKey);
            if (newFilter == null) {
                return false;
            }
            // Intersect found: replace skip scan with intersected one
            scan.setFilter(newFilter);
        } else if (filter instanceof FilterList) {
            FilterList oldList = (FilterList) filter;
            FilterList newList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            for (Filter f : oldList.getFilters()) {
                if (f instanceof SkipScanFilter) {
                    SkipScanFilter newFilter = ((SkipScanFilter) f).intersect(startKey, stopKey);
                    if (newFilter == null) {
                        return false;
                    }
                    newList.addFilter(newFilter);
                } else {
                    newList.addFilter(f);
                }
            }
            scan.setFilter(newList);
        }
    }
    return mayHaveRows;
}
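A hedged usage sketch for the method above: clipping a scan to one region's key range before running it. The boundary values and the wrapper class are illustrative assumptions, not code from the project.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.util.ScanUtil;

public class IntersectScanRangeExample {
    public static void main(String[] args) {
        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes("a"));
        scan.setStopRow(Bytes.toBytes("m"));
        // Illustrative region boundaries; in practice they come from the region's metadata.
        byte[] regionStartKey = Bytes.toBytes("c");
        byte[] regionEndKey = Bytes.toBytes("k");
        // false here because this scan is not using a skip scan filter.
        boolean mayHaveRows = ScanUtil.intersectScanRange(scan, regionStartKey, regionEndKey, false);
        if (!mayHaveRows) {
            return; // No overlap: this region cannot contain matching rows and can be skipped.
        }
        // The scan now covers ["c", "k"), the intersection of ["a", "m") and ["c", "k").
        System.out.println(Bytes.toString(scan.getStartRow()) + " - " + Bytes.toString(scan.getStopRow()));
    }
}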
Use of org.apache.hadoop.hbase.filter.Filter in project phoenix by apache.
From class WhereCompilerTest, method testAndFilter:
@Test
public void testAndFilter() throws SQLException {
    String tenantId = "000000000000001";
    String query = "select * from atable where organization_id=? and a_integer=0 and a_string='foo'";
    List<Object> binds = Arrays.<Object>asList(tenantId);
    PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
    PhoenixPreparedStatement pstmt = newPreparedStatement(pconn, query);
    bindParams(pstmt, binds);
    QueryPlan plan = pstmt.optimizeQuery();
    Scan scan = plan.getContext().getScan();
    Filter filter = scan.getFilter();
    // organization_id is a leading primary key column, so its equality is compiled into the
    // scan's row key range; only the non-PK comparisons remain in the pushed-down filter.
    assertEquals(multiEncodedKVFilter(and(constantComparison(CompareOp.EQUAL, A_INTEGER, 0), constantComparison(CompareOp.EQUAL, A_STRING, "foo")), TWO_BYTE_QUALIFIERS), filter);
}
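The test above uses two helpers, newPreparedStatement and bindParams, defined in the surrounding test class. A plausible minimal version is sketched below; the actual WhereCompilerTest may implement them differently, so treat these bodies as assumptions.

import java.sql.SQLException;
import java.util.List;

import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixPreparedStatement;

// Hypothetical implementations, for illustration only.
static PhoenixPreparedStatement newPreparedStatement(PhoenixConnection pconn, String query) throws SQLException {
    // At runtime a PhoenixConnection hands back Phoenix's own PreparedStatement implementation.
    return (PhoenixPreparedStatement) pconn.prepareStatement(query);
}

static void bindParams(PhoenixPreparedStatement pstmt, List<Object> binds) throws SQLException {
    // JDBC bind parameter indexes are 1-based.
    for (int i = 0; i < binds.size(); i++) {
        pstmt.setObject(i + 1, binds.get(i));
    }
}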
Use of org.apache.hadoop.hbase.filter.Filter in project phoenix by apache.
From class WhereCompilerTest, method testAndPKAndNotPK:
@Test
public void testAndPKAndNotPK() throws SQLException {
    String query = "select * from bugTable where ID = 'i2' and company = 'c3'";
    PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
    pconn.createStatement().execute("create table bugTable(ID varchar primary key,company varchar)");
    PhoenixPreparedStatement pstmt = newPreparedStatement(pconn, query);
    QueryPlan plan = pstmt.optimizeQuery();
    Scan scan = plan.getContext().getScan();
    Filter filter = scan.getFilter();
    PColumn column = plan.getTableRef().getTable().getColumnForColumnName("COMPANY");
    // ID = 'i2' is a primary key condition and becomes the scan's row key range;
    // only the comparison on the non-PK COMPANY column is left in the filter.
    assertEquals(singleKVFilter(constantComparison(CompareOp.EQUAL, new KeyValueColumnExpression(column), "c3")), filter);
}