Usage of org.apache.hadoop.hive.ql.index.IndexSearchCondition in the Apache Hive project.
Class AbstractHBaseKeyPredicateDecomposer, method decomposePredicate:
/**
 * Splits the given predicate into a portion that can be pushed down to the
 * storage layer (conditions on the key column) and a residual portion that
 * Hive must still evaluate itself.
 *
 * @param keyColName the name of the key column the analyzer is allowed to accept
 * @param predicate  the full filter predicate to decompose
 * @return the decomposed predicate, or {@code null} if a scan range could not
 *         be derived from the pushed-down conditions
 */
public DecomposedPredicate decomposePredicate(String keyColName, ExprNodeDesc predicate) {
  IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(true);
  analyzer.allowColumnName(keyColName);
  analyzer.setAcceptsFields(true);
  analyzer.setFieldValidator(getFieldValidator());

  List<IndexSearchCondition> searchConditions = new ArrayList<IndexSearchCondition>();
  DecomposedPredicate result = new DecomposedPredicate();
  // Whatever the analyzer cannot handle stays behind as the residual predicate.
  result.residualPredicate =
      (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, searchConditions);
  if (searchConditions.isEmpty()) {
    return result;
  }
  result.pushedPredicate = analyzer.translateSearchConditions(searchConditions);
  try {
    result.pushedPredicateObject = getScanRange(searchConditions);
  } catch (Exception e) {
    // Bail out of pushdown entirely if the scan range cannot be built.
    LOG.warn("Failed to decompose predicates", e);
    return null;
  }
  return result;
}
Usage of org.apache.hadoop.hive.ql.index.IndexSearchCondition in the Apache Hive project.
Class HiveHBaseInputFormatUtil, method setupKeyRange:
/**
 * Converts the given key-column search conditions into a {@code [startRow, stopRow)}
 * restriction on the HBase scan.
 *
 * <p>Each condition's constant operand is evaluated and serialized with
 * {@code getConstantVal}; the comparison operator then tightens the start or
 * stop row accordingly. Note that later conditions overwrite earlier bounds
 * rather than intersecting with them.
 *
 * @param scan       the scan whose row range is being restricted (mutated in place)
 * @param conditions key-column conditions to translate
 * @param isBinary   whether the key is stored in binary format
 * @throws IOException if a constant is not a primitive type, evaluation fails,
 *                     or an unsupported comparison operator is encountered
 */
static void setupKeyRange(Scan scan, List<IndexSearchCondition> conditions, boolean isBinary) throws IOException {
  // Convert the search condition into a restriction on the HBase scan
  byte[] startRow = HConstants.EMPTY_START_ROW, stopRow = HConstants.EMPTY_END_ROW;
  for (IndexSearchCondition sc : conditions) {
    ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(sc.getConstantDesc());
    PrimitiveObjectInspector objInspector;
    Object writable;
    try {
      objInspector = (PrimitiveObjectInspector) eval.initialize(null);
      writable = eval.evaluate(null);
    } catch (ClassCastException cce) {
      // Fixed typo in user-facing message: "primitve" -> "primitive".
      throw new IOException("Currently only primitive types are supported. Found: "
          + sc.getConstantDesc().getTypeString());
    } catch (HiveException e) {
      throw new IOException(e);
    }
    byte[] constantVal = getConstantVal(writable, objInspector, isBinary);
    String comparisonOp = sc.getComparisonOp();
    switch (comparisonOp) {
      case "org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual":
        // key == v  =>  [v, next(v))
        startRow = constantVal;
        stopRow = getNextBA(constantVal);
        break;
      case "org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan":
        // key < v  =>  stop at v (exclusive)
        stopRow = constantVal;
        break;
      case "org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan":
        // key >= v  =>  start at v (inclusive)
        startRow = constantVal;
        break;
      case "org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan":
        // key > v  =>  start at next(v)
        startRow = getNextBA(constantVal);
        break;
      case "org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan":
        // key <= v  =>  stop at next(v) (exclusive)
        stopRow = getNextBA(constantVal);
        break;
      default:
        throw new IOException(comparisonOp + " is not a supported comparison operator");
    }
  }
  scan.setStartRow(startRow);
  scan.setStopRow(stopRow);
  if (LOG.isDebugEnabled()) {
    LOG.debug(Bytes.toStringBinary(startRow) + " ~ " + Bytes.toStringBinary(stopRow));
  }
}
Usage of org.apache.hadoop.hive.ql.index.IndexSearchCondition in the Apache Hive project.
Class HiveHBaseInputFormatUtil, method setupTimeRange:
/**
 * Translates the given timestamp conditions into a {@code [start, end)} time
 * range on the HBase scan. Exclusive upper bounds are produced by adding one
 * to the constant timestamp; later conditions overwrite earlier bounds.
 *
 * @param scan       the scan whose time range is being restricted (mutated in place)
 * @param conditions timestamp conditions to translate
 * @throws IOException if an unsupported comparison operator is encountered
 */
static void setupTimeRange(Scan scan, List<IndexSearchCondition> conditions) throws IOException {
  long rangeStart = 0;
  long rangeEnd = Long.MAX_VALUE;
  for (IndexSearchCondition condition : conditions) {
    long ts = getTimestampVal(condition);
    String op = condition.getComparisonOp();
    if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual".equals(op)) {
      // ts == v  =>  [v, v + 1)
      rangeStart = ts;
      rangeEnd = ts + 1;
    } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan".equals(op)) {
      rangeEnd = ts;
    } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan".equals(op)) {
      rangeStart = ts;
    } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan".equals(op)) {
      rangeStart = ts + 1;
    } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan".equals(op)) {
      rangeEnd = ts + 1;
    } else {
      throw new IOException(op + " is not a supported comparison operator");
    }
  }
  scan.setTimeRange(rangeStart, rangeEnd);
}
Usage of org.apache.hadoop.hive.ql.index.IndexSearchCondition in the Apache Hive project.
Class HiveHBaseInputFormatUtil, method decompose:
/**
 * Groups the given search conditions by the column name they reference.
 *
 * @param searchConditions conditions to group
 * @return a map from column name to the conditions on that column; never null,
 *         empty if {@code searchConditions} is empty
 */
public static Map<String, List<IndexSearchCondition>> decompose(List<IndexSearchCondition> searchConditions) {
  Map<String, List<IndexSearchCondition>> result = new HashMap<String, List<IndexSearchCondition>>();
  for (IndexSearchCondition condition : searchConditions) {
    // computeIfAbsent replaces the manual get/null-check/put idiom and avoids
    // calling getColumnDesc().getColumn() twice per condition.
    String column = condition.getColumnDesc().getColumn();
    result.computeIfAbsent(column, k -> new ArrayList<IndexSearchCondition>()).add(condition);
  }
  return result;
}
Usage of org.apache.hadoop.hive.ql.index.IndexSearchCondition in the Apache Hive project.
Class SampleHBaseKeyFactory2, method decomposePredicate:
/**
 * Decomposes the given predicate into conditions on the mapped key column
 * that can be pushed down as an HBase filter, plus a residual predicate that
 * Hive evaluates itself.
 *
 * @param jobConf      job configuration (unused here)
 * @param deserializer table deserializer (unused here)
 * @param predicate    the full filter predicate to decompose
 * @return the decomposed predicate; the pushed-predicate object is populated
 *         only when at least one condition on the key column was extracted
 * @throws RuntimeException if building the filter from the conditions fails
 */
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer, ExprNodeDesc predicate) {
  String keyColumn = keyMapping.columnName;

  IndexPredicateAnalyzer analyzer = IndexPredicateAnalyzer.createAnalyzer(false);
  analyzer.allowColumnName(keyColumn);
  analyzer.setAcceptsFields(true);

  List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
  DecomposedPredicate result = new DecomposedPredicate();
  result.residualPredicate =
      (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, conditions);
  if (conditions.isEmpty()) {
    return result;
  }
  result.pushedPredicate = analyzer.translateSearchConditions(conditions);
  try {
    result.pushedPredicateObject = setupFilter(keyColumn, conditions);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return result;
}
Aggregations