Use of org.apache.hadoop.hbase.filter.Filter in the Apache Phoenix project.
From the class ScannerBuilder, method getColumnFilters:
/**
* @param columns columns to filter
* @return filter that will skip any {@link KeyValue} that doesn't match one of the passed columns
* and the
*/
/**
 * Builds a filter that keeps only {@link KeyValue}s belonging to one of the given
 * column references.
 *
 * @param columns column references to match against
 * @return an OR-combined filter that passes a cell when its family (and, for
 *         references naming a specific qualifier, its qualifier) equals one of
 *         the passed columns
 */
private Filter getColumnFilters(Collection<? extends ColumnReference> columns) {
  // OR semantics: a cell passes when it matches any single column reference.
  FilterList matchAnyColumn = new FilterList(FilterList.Operator.MUST_PASS_ONE);
  for (ColumnReference ref : columns) {
    // The family must always match.
    Filter refFilter = new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(ref.getFamily()));
    // When the reference is not the ALL_QUALIFIERS wildcard, the qualifier must
    // match as well — combine family and qualifier with AND (default MUST_PASS_ALL).
    if (!Bytes.equals(ColumnReference.ALL_QUALIFIERS, ref.getQualifier())) {
      refFilter = new FilterList(refFilter,
          new QualifierFilter(CompareOp.EQUAL, new BinaryComparator(ref.getQualifier())));
    }
    matchAnyColumn.addFilter(refFilter);
  }
  return matchAnyColumn;
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Phoenix project.
From the class WhereOptimizerTest, method testOrDiffColExpression:
@Test
public void testOrDiffColExpression() throws SQLException {
    // An OR across two different PK columns cannot be converted into key
    // ranges, so the optimizer must fall back to a full scan guarded by a
    // row-key comparison filter.
    String organizationId = "000000000000001";
    String entityId = "002333333333331";
    String query = "select * from atable where organization_id = ? or entity_id = ?";
    StatementContext context =
            compileStatement(query, Arrays.<Object>asList(organizationId, entityId));
    Scan scan = context.getScan();
    Filter whereFilter = scan.getFilter();
    assertNotNull(whereFilter);
    assertTrue(whereFilter instanceof RowKeyComparisonFilter);
    // No key optimization is possible: everything is scanned, unbounded on
    // both ends.
    assertEquals(ScanRanges.EVERYTHING, context.getScanRanges());
    assertArrayEquals(HConstants.EMPTY_START_ROW, scan.getStartRow());
    assertArrayEquals(HConstants.EMPTY_END_ROW, scan.getStopRow());
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Phoenix project.
From the class WhereOptimizerTest, method testTrailingIsNullWithOr:
@Test
public void testTrailingIsNullWithOr() throws Exception {
    // Table with a two-column PK so the trailing PK column (b) can be null.
    String baseTableDDL = "CREATE TABLE t(\n " + " a VARCHAR,\n" + " b VARCHAR,\n" + " CONSTRAINT pk PRIMARY KEY (a, b))";
    // Fix: use try-with-resources so the connection is closed even if the DDL
    // throws; the previous explicit conn.close() leaked on failure.
    try (Connection conn = DriverManager.getConnection(getUrl())) {
        conn.createStatement().execute(baseTableDDL);
    }
    String query = "SELECT * FROM t WHERE a = 'a' and (b is null or b = 'b')";
    StatementContext context = compileStatement(query, Collections.<Object>emptyList());
    Scan scan = context.getScan();
    Filter filter = scan.getFilter();
    // a = 'a' pins slot 0; (b IS NULL OR b = 'b') yields two ranges in slot 1,
    // so the optimizer should produce a skip scan.
    assertTrue(filter instanceof SkipScanFilter);
    SkipScanFilter skipScan = (SkipScanFilter) filter;
    List<List<KeyRange>> slots = skipScan.getSlots();
    assertEquals(2, slots.size());
    assertEquals(1, slots.get(0).size());
    assertEquals(2, slots.get(1).size());
    assertEquals(KeyRange.getKeyRange(Bytes.toBytes("a")), slots.get(0).get(0));
    // IS NULL is represented by the shared IS_NULL_RANGE singleton, hence the
    // identity (==) comparison rather than equals.
    assertTrue(KeyRange.IS_NULL_RANGE == slots.get(1).get(0));
    assertEquals(KeyRange.getKeyRange(Bytes.toBytes("b")), slots.get(1).get(1));
    assertArrayEquals(Bytes.toBytes("a"), scan.getStartRow());
    assertArrayEquals(ByteUtil.concat(Bytes.toBytes("a"), QueryConstants.SEPARATOR_BYTE_ARRAY, Bytes.toBytes("b"), QueryConstants.SEPARATOR_BYTE_ARRAY), scan.getStopRow());
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Phoenix project.
From the class WhereOptimizerTest, method testLikeOptKeyExpression2:
@Test
public void testLikeOptKeyExpression2() throws SQLException {
    // For LIKE '002%003%' on substr(entity_id,1,10), only the leading literal
    // prefix '002' can be pushed into the key range; the rest of the pattern
    // must remain as a filter.
    String tenantId = "000000000000001";
    String keyPrefix = "002";
    String likeArg = keyPrefix + "%003%";
    String query = "select * from atable where organization_id = ? and substr(entity_id,1,10) LIKE '" + likeArg + "'";
    StatementContext context = compileStatement(query, Arrays.<Object>asList(tenantId));
    Scan scan = context.getScan();
    Filter filter = scan.getFilter();
    assertNotNull(filter);
    // The non-prefix portion of the LIKE is enforced via a row-key filter.
    assertEquals(rowKeyFilter(like(substr(ENTITY_ID, 1, 10), likeArg, context)), filter);
    byte[] tenantBytes = PVarchar.INSTANCE.toBytes(tenantId);
    byte[] prefixBytes = PVarchar.INSTANCE.toBytes(keyPrefix);
    // Key range: [tenant + pad('002'), tenant + pad(nextKey('002'))).
    byte[] expectedStartRow = ByteUtil.concat(tenantBytes, StringUtil.padChar(prefixBytes, 15));
    byte[] expectedStopRow = ByteUtil.concat(tenantBytes, StringUtil.padChar(ByteUtil.nextKey(prefixBytes), 15));
    assertArrayEquals(expectedStartRow, scan.getStartRow());
    assertArrayEquals(expectedStopRow, scan.getStopRow());
}
Use of org.apache.hadoop.hbase.filter.Filter in the Apache Phoenix project.
From the class WhereOptimizerTest, method testUseOfFunctionOnLHSInRVC:
@Test
public void testUseOfFunctionOnLHSInRVC() throws SQLException {
    // When the leading expression of a row-value constructor is a function
    // (substr), the comparison can only bound the scan on the start side;
    // the stop row stays open-ended.
    String tenantId = "000000000000001";
    String subStringTenantId = tenantId.substring(0, 3);
    String parentId = "000000000000002";
    Date createdDate = new Date(System.currentTimeMillis());
    String query = "select * from entity_history where (substr(organization_id, 1, 3), parent_id, created_date) >= (?,?,?)";
    StatementContext context =
            compileStatement(query, Arrays.<Object>asList(subStringTenantId, parentId, createdDate));
    Scan scan = context.getScan();
    Filter filter = scan.getFilter();
    assertNotNull(filter);
    // The full RVC comparison is enforced through a row-key filter.
    assertTrue(filter instanceof RowKeyComparisonFilter);
    // Start row is just the 3-character substring value; stop row is unbounded.
    assertArrayEquals(PVarchar.INSTANCE.toBytes(subStringTenantId), scan.getStartRow());
    assertArrayEquals(HConstants.EMPTY_END_ROW, scan.getStopRow());
}
Aggregations