use of org.apache.hadoop.hbase.filter.RowFilter in project hive by apache.
the class SampleHBasePredicateDecomposer method getScanRange.
@Override
public HBaseScanRange getScanRange(List<IndexSearchCondition> searchConditions) throws Exception {
  Map<String, List<IndexSearchCondition>> fieldConds = new HashMap<String, List<IndexSearchCondition>>();
  for (IndexSearchCondition condition : searchConditions) {
    String fieldName = condition.getFields()[0];
    List<IndexSearchCondition> fieldCond = fieldConds.get(fieldName);
    if (fieldCond == null) {
      fieldConds.put(fieldName, fieldCond = new ArrayList<IndexSearchCondition>());
    }
    fieldCond.add(condition);
  }
  List<Filter> filters = new ArrayList<Filter>();
  HBaseScanRange range = new HBaseScanRange();
  StructTypeInfo type = (StructTypeInfo) keyMapping.columnType;
  for (String name : type.getAllStructFieldNames()) {
    List<IndexSearchCondition> fieldCond = fieldConds.get(name);
    if (fieldCond == null || fieldCond.size() > 2) {
      continue;
    }
    for (IndexSearchCondition condition : fieldCond) {
      if (condition.getConstantDesc().getValue() == null) {
        continue;
      }
      String comparisonOp = condition.getComparisonOp();
      String constantVal = String.valueOf(condition.getConstantDesc().getValue());
      byte[] valueAsBytes = toBinary(constantVal, FIXED_LENGTH, false, false);
      if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) {
        filters.add(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(valueAsBytes)));
      } else if (comparisonOp.endsWith("UDFOPGreaterThan")) {
        filters.add(new RowFilter(CompareOp.GREATER, new BinaryComparator(valueAsBytes)));
      } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) {
        filters.add(new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(valueAsBytes)));
      } else if (comparisonOp.endsWith("UDFOPLessThan")) {
        filters.add(new RowFilter(CompareOp.LESS, new BinaryComparator(valueAsBytes)));
      } else {
        throw new IOException(comparisonOp + " is not a supported comparison operator");
      }
    }
  }
  if (!filters.isEmpty()) {
    range.addFilter(new FilterList(Operator.MUST_PASS_ALL, filters));
  }
  return range;
}
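Taken on its own, the pattern above is easy to reproduce: build one RowFilter per bound and AND them together with a MUST_PASS_ALL FilterList. Below is a minimal, self-contained sketch of that pattern, assuming the same HBase 1.x CompareOp-based RowFilter API; the class name and row keys are hypothetical.

// A minimal sketch of the RowFilter-per-bound pattern used by getScanRange();
// the class name and row keys are hypothetical.
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RowRangeFilterSketch {
  // AND two row-key bounds together, as getScanRange() does per key field.
  static FilterList rowKeyRange(byte[] lowInclusive, byte[] highExclusive) {
    List<Filter> filters = new ArrayList<>();
    // Row key must be >= lowInclusive ...
    filters.add(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(lowInclusive)));
    // ... and < highExclusive; MUST_PASS_ALL ANDs the two conditions.
    filters.add(new RowFilter(CompareOp.LESS, new BinaryComparator(highExclusive)));
    return new FilterList(Operator.MUST_PASS_ALL, filters);
  }

  public static void main(String[] args) {
    System.out.println(rowKeyRange(Bytes.toBytes("row-0010"), Bytes.toBytes("row-0020")));
  }
}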
use of org.apache.hadoop.hbase.filter.RowFilter in project uavstack by uavorg.
the class HBaseDataStore method query.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
protected List query(DataStoreMsg msg) {
  log.info(this, "DataStore Query Log data start");
  msg = (DataStoreMsg) adaptor.prepareQueryObj(msg, datasource.getDataStoreConnection());
  // Obtain the Table instance from the table name
  String tableName = (String) msg.get(DataStoreProtocol.HBASE_TABLE_NAME);
  // Obtain the column family for the scanner from the family name
  String cfName = (String) msg.get(DataStoreProtocol.HBASE_FAMILY_NAME);
  Scan scan = null;
  List<byte[]> kv = null;
  try (Table table = datasource.getSourceConnect().getTable(TableName.valueOf(tableName))) {
    scan = new Scan();
    DataStoreConnection con = datasource.getDataStoreConnection();
    scan.setCaching(Integer.parseInt((String) con.getContext(DataStoreProtocol.HBASE_QUERY_CACHING)));
    scan.setMaxResultSize(Long.parseLong((String) con.getContext(DataStoreProtocol.HBASE_QUERY_MAXRESULTSIZE)));
    scan.setReversed((boolean) msg.get(DataStoreProtocol.HBASE_QUERY_REVERSE));
    scan.addFamily(cfName.getBytes("UTF-8"));
    // Build the filters from the query info; currently only row-key filtering is supported
    FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    if (msg.containsKey(DataStoreProtocol.HBASE_QUERY_STARTROW)) {
      scan.setStartRow((byte[]) msg.get(DataStoreProtocol.HBASE_QUERY_STARTROW));
    }
    if (msg.containsKey(DataStoreProtocol.HBASE_QUERY_ENDROW)) {
      scan.setStopRow((byte[]) msg.get(DataStoreProtocol.HBASE_QUERY_ENDROW));
    }
    if (msg.containsKey(DataStoreProtocol.HBASE_QUERY_ROW_KEYVALUE)) {
      kv = (List<byte[]>) msg.get(DataStoreProtocol.HBASE_QUERY_ROW_KEYVALUE);
      for (byte[] b : kv) {
        flist.addFilter(new RowFilter(CompareOp.EQUAL, new SubstringComparator(new String(b))));
      }
    }
    flist.addFilter(new PageFilter((long) msg.get(DataStoreProtocol.HBASE_QUERY_PAGESIZE)));
    scan.setFilter(flist);
    log.info(this, "DataStore Query Log data: getFilter String:" + scan.getFilter().toString());
    try (ResultScanner result = table.getScanner(scan)) {
      List<NavigableMap<byte[], byte[]>> resultList = Lists.newArrayList();
      for (Result r : result) {
        NavigableMap<byte[], byte[]> map = r.getFamilyMap(cfName.getBytes());
        map.put("_timestamp".getBytes(), String.valueOf(r.rawCells()[0].getTimestamp()).getBytes());
        resultList.add(map);
      }
      return adaptor.handleQueryResult(resultList, msg, datasource.getDataStoreConnection());
    }
  } catch (IOException e) {
    log.err(this, "QUERY HBASE TABLE[" + tableName + "] FAMILY[" + cfName + "] FAIL:" + msg.toJSONString(), e);
    return null;
  }
}
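The query above combines substring matching on the row key with paging. Here is a minimal sketch of that scan setup, assuming the same HBase 1.x client API; the connection, table name, row-key fragment, and page size are hypothetical.

// A minimal sketch of a substring row-key scan with paging; the table name,
// row-key fragment, and page size are hypothetical.
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.util.Bytes;

public class SubstringRowScanSketch {
  static void scanByRowFragment(Connection conn) throws Exception {
    try (Table table = conn.getTable(TableName.valueOf("logs"))) {
      FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ALL);
      // Keep only rows whose key contains the fragment "app-01".
      flist.addFilter(new RowFilter(CompareOp.EQUAL, new SubstringComparator("app-01")));
      // Cap the result at roughly 100 rows (PageFilter is enforced per region server).
      flist.addFilter(new PageFilter(100L));
      Scan scan = new Scan();
      scan.setFilter(flist);
      try (ResultScanner rs = table.getScanner(scan)) {
        for (Result r : rs) {
          System.out.println(Bytes.toString(r.getRow()));
        }
      }
    }
  }
}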
use of org.apache.hadoop.hbase.filter.RowFilter in project drill by axbaretto.
the class MapRDBFilterBuilder method createHBaseScanSpec.
private HBaseScanSpec createHBaseScanSpec(FunctionCall call, MaprDBCompareFunctionsProcessor processor) {
  String functionName = processor.getFunctionName();
  SchemaPath field = processor.getPath();
  byte[] fieldValue = processor.getValue();
  boolean sortOrderAscending = processor.isSortOrderAscending();
  boolean isRowKey = field.getRootSegmentPath().equals(ROW_KEY);
  if (!(isRowKey || (!field.getRootSegment().isLastPath() && field.getRootSegment().getChild().isLastPath() && field.getRootSegment().getChild().isNamed()))) {
    /*
     * If the field in this function is neither the row_key nor a qualified HBase column, return.
     */
    return null;
  }
  if (processor.isRowKeyPrefixComparison()) {
    return createRowKeyPrefixScanSpec(call, processor);
  }
  CompareOp compareOp = null;
  boolean isNullTest = false;
  ByteArrayComparable comparator = new BinaryComparator(fieldValue);
  byte[] startRow = HConstants.EMPTY_START_ROW;
  byte[] stopRow = HConstants.EMPTY_END_ROW;
  switch (functionName) {
    case "equal":
      compareOp = CompareOp.EQUAL;
      if (isRowKey) {
        startRow = fieldValue;
        // stopRow should be just greater than 'value'
        stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
        compareOp = CompareOp.EQUAL;
      }
      break;
    case "not_equal":
      compareOp = CompareOp.NOT_EQUAL;
      break;
    case "greater_than_or_equal_to":
      if (sortOrderAscending) {
        compareOp = CompareOp.GREATER_OR_EQUAL;
        if (isRowKey) {
          startRow = fieldValue;
        }
      } else {
        compareOp = CompareOp.LESS_OR_EQUAL;
        if (isRowKey) {
          // stopRow should be just greater than 'value'
          stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
        }
      }
      break;
    case "greater_than":
      if (sortOrderAscending) {
        compareOp = CompareOp.GREATER;
        if (isRowKey) {
          // startRow should be just greater than 'value'
          startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
        }
      } else {
        compareOp = CompareOp.LESS;
        if (isRowKey) {
          stopRow = fieldValue;
        }
      }
      break;
    case "less_than_or_equal_to":
      if (sortOrderAscending) {
        compareOp = CompareOp.LESS_OR_EQUAL;
        if (isRowKey) {
          // stopRow should be just greater than 'value'
          stopRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
        }
      } else {
        compareOp = CompareOp.GREATER_OR_EQUAL;
        if (isRowKey) {
          startRow = fieldValue;
        }
      }
      break;
    case "less_than":
      if (sortOrderAscending) {
        compareOp = CompareOp.LESS;
        if (isRowKey) {
          stopRow = fieldValue;
        }
      } else {
        compareOp = CompareOp.GREATER;
        if (isRowKey) {
          // startRow should be just greater than 'value'
          startRow = Arrays.copyOf(fieldValue, fieldValue.length + 1);
        }
      }
      break;
    case "isnull":
    case "isNull":
    case "is null":
      if (isRowKey) {
        return null;
      }
      isNullTest = true;
      compareOp = CompareOp.EQUAL;
      comparator = new NullComparator();
      break;
    case "isnotnull":
    case "isNotNull":
    case "is not null":
      if (isRowKey) {
        return null;
      }
      compareOp = CompareOp.NOT_EQUAL;
      comparator = new NullComparator();
      break;
    case "like":
      /*
       * Convert the LIKE operand to a regular expression pattern so that we can
       * apply RegexStringComparator().
       */
      HBaseRegexParser parser = new HBaseRegexParser(call).parse();
      compareOp = CompareOp.EQUAL;
      comparator = new RegexStringComparator(parser.getRegexString());
      /*
       * We can possibly do better if the LIKE operator is on the row_key.
       */
      if (isRowKey) {
        String prefix = parser.getPrefixString();
        if (prefix != null) {
          /*
           * If there is a literal prefix, it can help us prune the scan to a sub range.
           */
          if (prefix.equals(parser.getLikeString())) {
            // The operand value is a literal. This turns the LIKE operator into an EQUAL operator.
            startRow = stopRow = fieldValue;
            compareOp = null;
          } else {
            startRow = prefix.getBytes(Charsets.UTF_8);
            stopRow = startRow.clone();
            boolean isMaxVal = true;
            for (int i = stopRow.length - 1; i >= 0; --i) {
              int nextByteValue = (0xff & stopRow[i]) + 1;
              if (nextByteValue < 0xff) {
                stopRow[i] = (byte) nextByteValue;
                isMaxVal = false;
                break;
              } else {
                stopRow[i] = 0;
              }
            }
            if (isMaxVal) {
              stopRow = HConstants.EMPTY_END_ROW;
            }
          }
        }
      }
      break;
  }
  if (compareOp != null || startRow != HConstants.EMPTY_START_ROW || stopRow != HConstants.EMPTY_END_ROW) {
    Filter filter = null;
    if (isRowKey) {
      if (compareOp != null) {
        filter = new RowFilter(compareOp, comparator);
      }
    } else {
      byte[] family = HBaseUtils.getBytes(field.getRootSegment().getPath());
      byte[] qualifier = HBaseUtils.getBytes(field.getRootSegment().getChild().getNameSegment().getPath());
      filter = new SingleColumnValueFilter(family, qualifier, compareOp, comparator);
      ((SingleColumnValueFilter) filter).setLatestVersionOnly(true);
      if (!isNullTest) {
        ((SingleColumnValueFilter) filter).setFilterIfMissing(true);
      }
    }
    return new HBaseScanSpec(groupScan.getTableName(), startRow, stopRow, filter);
  }
  // else
  return null;
}
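The LIKE branch above derives an exclusive stop row by incrementing the last incrementable byte of the literal prefix. Here is a standalone sketch of that computation, with a hypothetical helper name; note that the snippet's '< 0xff' test treats 0xfe the same as 0xff and carries instead of bumping, which can only widen the resulting range, never shrink it.

// A standalone sketch (hypothetical helper name) of the stop-row computation
// in the LIKE branch above.
import java.util.Arrays;
import org.apache.hadoop.hbase.HConstants;

public class PrefixStopRowSketch {
  static byte[] stopRowForPrefix(byte[] prefix) {
    byte[] stopRow = Arrays.copyOf(prefix, prefix.length);
    for (int i = stopRow.length - 1; i >= 0; --i) {
      int nextByteValue = (0xff & stopRow[i]) + 1;
      if (nextByteValue < 0xff) {
        stopRow[i] = (byte) nextByteValue;  // bumped: every prefix match sorts below this
        return stopRow;
      }
      stopRow[i] = 0;  // carry into the next more-significant byte
    }
    // Every byte carried over: no finite exclusive upper bound exists.
    return HConstants.EMPTY_END_ROW;
  }
}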
use of org.apache.hadoop.hbase.filter.RowFilter in project drill by axbaretto.
the class MapRDBFilterBuilder method parseTree.
public HBaseScanSpec parseTree() {
  HBaseScanSpec parsedSpec = le.accept(this, null);
  if (parsedSpec != null) {
    parsedSpec = mergeScanSpecs("booleanAnd", this.groupScan.getHBaseScanSpec(), parsedSpec);
    /*
     * If RowFilter is THE filter attached to the scan specification,
     * remove it since its effect is also achieved through startRow and stopRow.
     */
    Filter filter = parsedSpec.getFilter();
    if (filter instanceof RowFilter && ((RowFilter) filter).getOperator() != CompareOp.NOT_EQUAL && ((RowFilter) filter).getComparator() instanceof BinaryComparator) {
      parsedSpec = new HBaseScanSpec(parsedSpec.getTableName(), parsedSpec.getStartRow(), parsedSpec.getStopRow(), null);
    }
  }
  return parsedSpec;
}
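The check above relies on an equivalence: for a non-NOT_EQUAL comparison backed by a BinaryComparator, a row-key bound expressed as startRow/stopRow makes an extra RowFilter redundant. A small sketch of the two equivalent scans follows, assuming a client where both Scan.withStartRow and the CompareOp-based RowFilter constructor are available; the row key is hypothetical.

// A sketch (not from the Drill source) of the equivalence parseTree() exploits.
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RedundantRowFilterSketch {
  public static void main(String[] args) {
    byte[] start = Bytes.toBytes("row-0010");  // hypothetical row key
    // Both scans visit the same rows: the first enforces the lower bound at the
    // scan boundary, the second re-checks it per row with a RowFilter.
    Scan boundsOnly = new Scan().withStartRow(start);
    Scan filtered = new Scan().setFilter(
        new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(start)));
    System.out.println(boundsOnly + "\n" + filtered);
  }
}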
use of org.apache.hadoop.hbase.filter.RowFilter in project hbase by apache.
the class FromClientSideBase method createScanWithRowFilter.
/*
 * @param key key to compare row keys against
 * @param startRow row to start the scan at, or null to scan from the beginning
 * @param op comparison operator to apply
 * @return Scan with a RowFilter that applies CompareOperator op to the passed key.
 */
protected Scan createScanWithRowFilter(final byte[] key, final byte[] startRow, CompareOperator op) {
  // Make sure key is of some substance: non-null and greater than the first key.
  assertTrue(key != null && key.length > 0 && Bytes.BYTES_COMPARATOR.compare(key, new byte[] { 'a', 'a', 'a' }) >= 0);
  LOG.info("Key=" + Bytes.toString(key));
  Scan s = startRow == null ? new Scan() : new Scan().withStartRow(startRow);
  Filter f = new RowFilter(op, new BinaryComparator(key));
  f = new WhileMatchFilter(f);
  s.setFilter(f);
  return s;
}
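A hypothetical call into the helper above, assuming a populated test table and the LOG from the surrounding test class: scan from the first row and keep matching while the row key stays less than or equal to 'aad'; the WhileMatchFilter ends the scan at the first key beyond it.

// Hypothetical usage inside the same test class; 'table' is assumed from the fixture.
Scan scan = createScanWithRowFilter(Bytes.toBytes("aad"), null, CompareOperator.LESS_OR_EQUAL);
try (ResultScanner scanner = table.getScanner(scan)) {
  for (Result result : scanner) {
    // Rows arrive while rowKey <= 'aad'; WhileMatchFilter then ends the scan.
    LOG.info("Matched row: " + Bytes.toString(result.getRow()));
  }
}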