use of org.apache.hadoop.hive.ql.index.IndexSearchCondition in project hive by apache.
the class TestAccumuloPredicateHandler method testPushdownTuple.
@Test
public void testPushdownTuple() throws SerDeException, NoSuchPrimitiveComparisonException, NoSuchCompareOpException {
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children);
  assertNotNull(node);
  String filterExpr = SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf);
  assertEquals(sConditions.size(), 1);
  IndexSearchCondition sc = sConditions.get(0);
  PushdownTuple tuple = new PushdownTuple(sConditions.get(0),
      handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc),
      handler.getCompareOp(sc.getComparisonOp(), sc));
  byte[] expectedVal = new byte[4];
  ByteBuffer.wrap(expectedVal).putInt(5);
  assertArrayEquals(tuple.getConstVal(), expectedVal);
  assertEquals(tuple.getcOpt().getClass(), Equal.class);
  assertEquals(tuple.getpCompare().getClass(), IntCompare.class);
}
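For comparison, here is a minimal sketch (not part of the Hive test suite) of the same round trip with a range operator instead of equality. It assumes the same conf and handler fixtures as the test above, inside a test method with the same throws clause, and that GenericUDFOPGreaterThan resolves to the Accumulo GreaterThan compare op:

ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false);
ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
// field1 > 5 rather than field1 = 5
ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
    new GenericUDFOPGreaterThan(), Lists.newArrayList(column, constant));
conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, SerializationUtilities.serializeExpression(node));
IndexSearchCondition sc = handler.getSearchConditions(conf).get(0);
PushdownTuple tuple = new PushdownTuple(sc,
    handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc),
    handler.getCompareOp(sc.getComparisonOp(), sc));
// Expected: tuple.getpCompare() is still an IntCompare, while tuple.getcOpt()
// is now the GreaterThan compare op rather than Equal.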
use of org.apache.hadoop.hive.ql.index.IndexSearchCondition in project hive by apache.
the class AccumuloPredicateHandler method decompose.
/**
 * @param conf
 *          Configuration
 * @param desc
 *          predicate expression node
 * @return DecomposedPredicate containing translated search conditions the analyzer can support
 */
public DecomposedPredicate decompose(Configuration conf, ExprNodeDesc desc) {
  IndexPredicateAnalyzer analyzer = newAnalyzer(conf);
  List<IndexSearchCondition> sConditions = new ArrayList<IndexSearchCondition>();
  ExprNodeDesc residualPredicate = analyzer.analyzePredicate(desc, sConditions);
  if (sConditions.size() == 0) {
    LOG.info("nothing to decompose. Returning");
    return null;
  }
  DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
  decomposedPredicate.pushedPredicate = analyzer.translateSearchConditions(sConditions);
  decomposedPredicate.residualPredicate = (ExprNodeGenericFuncDesc) residualPredicate;
  return decomposedPredicate;
}
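A caller-side sketch of how this method is typically consumed; the names predicateHandler, conf, and predicate are illustrative, assumed to be in scope:

DecomposedPredicate decomposed = predicateHandler.decompose(conf, predicate);
if (decomposed == null) {
  // No search conditions could be extracted; Hive evaluates the entire predicate itself.
} else {
  // Conditions translated for evaluation on the Accumulo side:
  ExprNodeGenericFuncDesc pushed = decomposed.pushedPredicate;
  // Whatever could not be translated; re-evaluated by Hive after the scan:
  ExprNodeGenericFuncDesc residual = decomposed.residualPredicate;
}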
use of org.apache.hadoop.hive.ql.index.IndexSearchCondition in project hive by apache.
the class HBaseStorageHandler method decomposePredicate.
public static DecomposedPredicate decomposePredicate(JobConf jobConf, HBaseSerDe hBaseSerDe, ExprNodeDesc predicate) {
  ColumnMapping keyMapping = hBaseSerDe.getHBaseSerdeParam().getKeyColumnMapping();
  ColumnMapping tsMapping = hBaseSerDe.getHBaseSerdeParam().getTimestampColumnMapping();
  IndexPredicateAnalyzer analyzer = HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(keyMapping.columnName,
      keyMapping.isComparable(), tsMapping == null ? null : tsMapping.columnName);
  List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
  ExprNodeGenericFuncDesc pushedPredicate = null;
  ExprNodeGenericFuncDesc residualPredicate = (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, conditions);
  for (List<IndexSearchCondition> searchConditions : HiveHBaseInputFormatUtil.decompose(conditions).values()) {
    int scSize = searchConditions.size();
    if (scSize < 1 || 2 < scSize) {
      // Either there was nothing which could be pushed down (size = 0),
      // or there were complex predicates which we don't support yet.
      // Currently supported predicates take one of the forms:
      // 1. key < 20 (size = 1)
      // 2. key = 20 (size = 1)
      // 3. key < 20 and key > 10 (size = 2)
      // Add to residual
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    if (scSize == 2 && (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())
        || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqual.class.getName()))) {
      // If one of the predicates is =, combining it with any other predicate is illegal.
      // Add to residual
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    boolean sameType = sameTypeIndexSearchConditions(searchConditions);
    if (!sameType) {
      // If the types of the column and the constant differ, we currently do not support pushing them
      residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
      continue;
    }
    TypeInfo typeInfo = searchConditions.get(0).getColumnDesc().getTypeInfo();
    if (typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
        ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) == PrimitiveGrouping.NUMERIC_GROUP) {
      // If the predicate is on a numeric column and specifies an open range, e.g. key < 20,
      // we do not support conversion: negative values are stored lexicographically after
      // positive values, so they would (incorrectly) be returned.
      if (scSize == 2) {
        boolean lowerBound = false;
        boolean upperBound = false;
        if (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqualOrLessThan.class.getName())
            || searchConditions.get(0).getComparisonOp().equals(GenericUDFOPLessThan.class.getName())) {
          lowerBound = true;
        } else {
          upperBound = true;
        }
        if (searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqualOrGreaterThan.class.getName())
            || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPGreaterThan.class.getName())) {
          upperBound = true;
        } else {
          lowerBound = true;
        }
        if (!upperBound || !lowerBound) {
          // Not a valid range, add to residual
          residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
          continue;
        }
      } else {
        // scSize == 1
        if (!searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())) {
          // Not a valid range, add to residual
          residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
          continue;
        }
      }
    }
    // This one can be pushed
    pushedPredicate = extractStorageHandlerCondition(analyzer, searchConditions, pushedPredicate);
  }
  DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
  decomposedPredicate.pushedPredicate = pushedPredicate;
  decomposedPredicate.residualPredicate = residualPredicate;
  return decomposedPredicate;
}
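To make the branch logic concrete, here is a hedged sketch (jobConf and hBaseSerDe are assumed fixtures, and the table's key column is a numeric "key") of a bounded range that passes every check above and is pushed in full:

ExprNodeDesc key = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);
ExprNodeDesc lower = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
    new GenericUDFOPGreaterThan(), Lists.newArrayList(key,
        new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 10)));
ExprNodeDesc upper = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
    new GenericUDFOPLessThan(), Lists.newArrayList(key,
        new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 20)));
// key > 10 AND key < 20
ExprNodeDesc predicate = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
    new GenericUDFOPAnd(), Lists.newArrayList(lower, upper));
DecomposedPredicate dp = HBaseStorageHandler.decomposePredicate(jobConf, hBaseSerDe, predicate);
// Expected: dp.pushedPredicate covers both bounds and dp.residualPredicate is null.
// A single open range on the same numeric key (key < 20 alone) would instead land
// in the residual, per the open-range comment above.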
use of org.apache.hadoop.hive.ql.index.IndexSearchCondition in project hive by apache.
the class HiveHBaseTableInputFormat method createFilterScan.
/**
 * Converts a filter (which has been pushed down from Hive's optimizer)
 * into corresponding restrictions on the HBase scan. The
 * filter should already be in a form which can be fully converted.
 *
 * @param jobConf configuration for the scan
 *
 * @param iKey 0-based offset of the key column within the Hive table
 *
 * @param iTimestamp 0-based offset of the timestamp column within the Hive table, or negative if there is none
 *
 * @param isKeyBinary whether the row key is stored in binary format
 *
 * @return the configured Scan, with key and time ranges applied if a filter was pushed down
 */
private Scan createFilterScan(JobConf jobConf, int iKey, int iTimestamp, boolean isKeyBinary) throws IOException {
  // TODO: assert iKey is HBaseSerDe#HBASE_KEY_COL
  Scan scan = new Scan();
  String filterObjectSerialized = jobConf.get(TableScanDesc.FILTER_OBJECT_CONF_STR);
  if (filterObjectSerialized != null) {
    HiveHBaseInputFormatUtil.setupScanRange(scan, filterObjectSerialized, jobConf, false);
    return scan;
  }
  String filterExprSerialized = jobConf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
  if (filterExprSerialized == null) {
    return scan;
  }
  ExprNodeGenericFuncDesc filterExpr = SerializationUtilities.deserializeExpression(filterExprSerialized);
  String keyColName = jobConf.get(serdeConstants.LIST_COLUMNS).split(",")[iKey];
  ArrayList<TypeInfo> cols = TypeInfoUtils.getTypeInfosFromTypeString(jobConf.get(serdeConstants.LIST_COLUMN_TYPES));
  String colType = cols.get(iKey).getTypeName();
  boolean isKeyComparable = isKeyBinary || "string".equalsIgnoreCase(colType);
  String tsColName = null;
  if (iTimestamp >= 0) {
    tsColName = jobConf.get(serdeConstants.LIST_COLUMNS).split(",")[iTimestamp];
  }
  IndexPredicateAnalyzer analyzer = newIndexPredicateAnalyzer(keyColName, isKeyComparable, tsColName);
  List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
  ExprNodeDesc residualPredicate = analyzer.analyzePredicate(filterExpr, conditions);
  // THIS IGNORES RESIDUAL PARSING FROM HBaseStorageHandler#decomposePredicate
  if (residualPredicate != null) {
    LOG.debug("Ignoring residual predicate " + residualPredicate.getExprString());
  }
  Map<String, List<IndexSearchCondition>> split = HiveHBaseInputFormatUtil.decompose(conditions);
  List<IndexSearchCondition> keyConditions = split.get(keyColName);
  if (keyConditions != null && !keyConditions.isEmpty()) {
    HiveHBaseInputFormatUtil.setupKeyRange(scan, keyConditions, isKeyBinary);
  }
  List<IndexSearchCondition> tsConditions = split.get(tsColName);
  if (tsConditions != null && !tsConditions.isEmpty()) {
    HiveHBaseInputFormatUtil.setupTimeRange(scan, tsConditions);
  }
  return scan;
}
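A sketch of the job configuration this method reads; the column layout and the previously built filterExpr are illustrative, not taken from Hive:

JobConf jobConf = new JobConf();
jobConf.set(serdeConstants.LIST_COLUMNS, "key,value,ts");
jobConf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,timestamp");
// The pushed-down predicate arrives serialized under FILTER_EXPR_CONF_STR:
jobConf.set(TableScanDesc.FILTER_EXPR_CONF_STR,
    SerializationUtilities.serializeExpression(filterExpr)); // 'filterExpr' assumed built elsewhere
// With iKey = 0 and iTimestamp = 2, the analyzer splits the conditions by column:
// conditions on "key" become a row range on the Scan, conditions on "ts" a time range.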
use of org.apache.hadoop.hive.ql.index.IndexSearchCondition in project hive by apache.
the class SampleHBasePredicateDecomposer method getScanRange.
@Override
public HBaseScanRange getScanRange(List<IndexSearchCondition> searchConditions) throws Exception {
  Map<String, List<IndexSearchCondition>> fieldConds = new HashMap<String, List<IndexSearchCondition>>();
  for (IndexSearchCondition condition : searchConditions) {
    String fieldName = condition.getFields()[0];
    List<IndexSearchCondition> fieldCond = fieldConds.get(fieldName);
    if (fieldCond == null) {
      fieldConds.put(fieldName, fieldCond = new ArrayList<IndexSearchCondition>());
    }
    fieldCond.add(condition);
  }
  List<Filter> filters = new ArrayList<Filter>();
  HBaseScanRange range = new HBaseScanRange();
  StructTypeInfo type = (StructTypeInfo) keyMapping.columnType;
  for (String name : type.getAllStructFieldNames()) {
    List<IndexSearchCondition> fieldCond = fieldConds.get(name);
    if (fieldCond == null || fieldCond.size() > 2) {
      continue;
    }
    for (IndexSearchCondition condition : fieldCond) {
      if (condition.getConstantDesc().getValue() == null) {
        continue;
      }
      String comparisonOp = condition.getComparisonOp();
      String constantVal = String.valueOf(condition.getConstantDesc().getValue());
      byte[] valueAsBytes = toBinary(constantVal, FIXED_LENGTH, false, false);
      if (comparisonOp.endsWith("UDFOPEqualOrGreaterThan")) {
        filters.add(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(valueAsBytes)));
      } else if (comparisonOp.endsWith("UDFOPGreaterThan")) {
        filters.add(new RowFilter(CompareOp.GREATER, new BinaryComparator(valueAsBytes)));
      } else if (comparisonOp.endsWith("UDFOPEqualOrLessThan")) {
        filters.add(new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(valueAsBytes)));
      } else if (comparisonOp.endsWith("UDFOPLessThan")) {
        filters.add(new RowFilter(CompareOp.LESS, new BinaryComparator(valueAsBytes)));
      } else {
        throw new IOException(comparisonOp + " is not a supported comparison operator");
      }
    }
  }
  if (!filters.isEmpty()) {
    range.addFilter(new FilterList(Operator.MUST_PASS_ALL, filters));
  }
  return range;
}
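For a single key field constrained by, say, f >= low and f <= high, the resulting range should carry a FilterList equivalent to the hand-built sketch below; lowBytes and highBytes are hypothetical stand-ins for the fixed-length padded binary values produced by toBinary above:

List<Filter> filters = new ArrayList<Filter>();
filters.add(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryComparator(lowBytes)));
filters.add(new RowFilter(CompareOp.LESS_OR_EQUAL, new BinaryComparator(highBytes)));
HBaseScanRange range = new HBaseScanRange();
// MUST_PASS_ALL ANDs the row filters together, so a row must satisfy both bounds.
range.addFilter(new FilterList(Operator.MUST_PASS_ALL, filters));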