Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
The class VectorizationContext, method getWhenExpression.
private VectorExpression getWhenExpression(List<ExprNodeDesc> childExpr,
    VectorExpressionDescriptor.Mode mode, TypeInfo returnType) throws HiveException {
  if (mode != VectorExpressionDescriptor.Mode.PROJECTION) {
    return null;
  }
  final int size = childExpr.size();
  final ExprNodeDesc whenDesc = childExpr.get(0);
  final ExprNodeDesc thenDesc = childExpr.get(1);
  final ExprNodeDesc elseDesc;
  if (size == 2) {
    // No ELSE branch: default to a typed NULL constant.
    elseDesc = new ExprNodeConstantDesc(returnType, null);
  } else if (size == 3) {
    elseDesc = childExpr.get(2);
  } else {
    // More than one WHEN/THEN pair: fold the remaining pairs into a residual CASE WHEN.
    final GenericUDFWhen udfWhen = new GenericUDFWhen();
    elseDesc = new ExprNodeGenericFuncDesc(returnType, udfWhen, udfWhen.getUdfName(),
        childExpr.subList(2, childExpr.size()));
  }
  // Transform the CASE WHEN into an IF statement: IF(when, then, else), where the
  // ELSE may itself be the residual CASE WHEN built above.
  final GenericUDFIf genericUDFIf = new GenericUDFIf();
  final List<ExprNodeDesc> ifChildExpr =
      Arrays.<ExprNodeDesc>asList(whenDesc, thenDesc, elseDesc);
  return getIfExpression(genericUDFIf, ifChildExpr, mode, returnType);
}
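To make the reduction concrete, here is a minimal sketch (the names c1, v1, c2, v2 and returnType are placeholders assumed for illustration, not part of the Hive source) of how a two-branch CASE WHEN is peeled: the first WHEN/THEN pair becomes the IF's condition and true-branch, and the remaining pair is folded into a residual CASE WHEN serving as the ELSE.

  // CASE WHEN c1 THEN v1 WHEN c2 THEN v2 END, as ExprNodeDesc trees (assumed inputs).
  GenericUDFWhen udfWhen = new GenericUDFWhen();
  ExprNodeDesc residual = new ExprNodeGenericFuncDesc(returnType, udfWhen,
      udfWhen.getUdfName(), Arrays.<ExprNodeDesc>asList(c2, v2));
  // getWhenExpression then vectorizes the equivalent IF(c1, v1, CASE WHEN c2 THEN v2 END);
  // the residual CASE is presumably reduced the same way when that child is vectorized.
  List<ExprNodeDesc> ifChildren = Arrays.<ExprNodeDesc>asList(c1, v1, residual);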
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
The class VectorizationContext, method evaluateCastOnConstants.
/**
 * Handles only the special cases of a cast or unary plus/minus operator applied to a constant.
 * @param exprDesc the expression to attempt to fold
 * @return the same expression if no evaluation was done, else the folded constant expression
 * @throws HiveException
 */
ExprNodeDesc evaluateCastOnConstants(ExprNodeDesc exprDesc) throws HiveException {
  if (!(exprDesc instanceof ExprNodeGenericFuncDesc)) {
    return exprDesc;
  }
  if (exprDesc.getChildren() == null || (exprDesc.getChildren().size() != 1)) {
    return exprDesc;
  }
  ExprNodeConstantDesc foldedChild = null;
  if (!(exprDesc.getChildren().get(0) instanceof ExprNodeConstantDesc)) {
    // Try recursive folding: the child may itself be a cast over a constant.
    ExprNodeDesc expr = evaluateCastOnConstants(exprDesc.getChildren().get(0));
    if (expr instanceof ExprNodeConstantDesc) {
      foldedChild = (ExprNodeConstantDesc) expr;
    }
  } else {
    foldedChild = (ExprNodeConstantDesc) exprDesc.getChildren().get(0);
  }
  if (foldedChild == null) {
    return exprDesc;
  }
  ObjectInspector childoi = foldedChild.getWritableObjectInspector();
  GenericUDF gudf = ((ExprNodeGenericFuncDesc) exprDesc).getGenericUDF();
  // Only evaluate unary plus/minus or a cast on a constant (including recursive casting).
  if (gudf instanceof GenericUDFOPNegative || gudf instanceof GenericUDFOPPositive
      || castExpressionUdfs.contains(gudf.getClass())
      || ((gudf instanceof GenericUDFBridge)
          && castExpressionUdfs.contains(((GenericUDFBridge) gudf).getUdfClass()))) {
    ExprNodeEvaluator<?> evaluator = ExprNodeEvaluatorFactory.get(exprDesc);
    ObjectInspector output = evaluator.initialize(childoi);
    // All inputs are constants, so no input row is needed for evaluation.
    Object constant = evaluator.evaluate(null);
    Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
    return new ExprNodeConstantDesc(exprDesc.getTypeInfo(), java);
  }
  return exprDesc;
}
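A minimal usage sketch, assuming a VectorizationContext instance named vContext and Hive's legacy UDFToLong cast wrapped in a GenericUDFBridge (the literal "5" and the variable names are illustrative, not from the Hive source):

  // Build CAST('5' AS BIGINT) over a string constant.
  ExprNodeDesc five = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "5");
  GenericUDFBridge toLong =
      new GenericUDFBridge("bigint", false, UDFToLong.class.getName());
  ExprNodeDesc cast = new ExprNodeGenericFuncDesc(TypeInfoFactory.longTypeInfo,
      toLong, Arrays.<ExprNodeDesc>asList(five));
  // Folding should collapse the cast tree into an ExprNodeConstantDesc holding 5L.
  ExprNodeDesc folded = vContext.evaluateCastOnConstants(cast);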
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
The class VectorizationContext, method getGenericUDFBridgeVectorExpression.
/**
 * Invoke special handling for expressions that can't be vectorized by the regular
 * descriptor-based lookup.
 */
private VectorExpression getGenericUDFBridgeVectorExpression(GenericUDFBridge udf,
    List<ExprNodeDesc> childExpr, VectorExpressionDescriptor.Mode mode,
    TypeInfo returnType) throws HiveException {
  Class<? extends UDF> cl = udf.getUdfClass();
  VectorExpression ve = null;
  if (isCastToIntFamily(cl)) {
    PrimitiveCategory integerPrimitiveCategory =
        getAnyIntegerPrimitiveCategoryFromUdfClass(cl);
    ve = getCastToLongExpression(childExpr, integerPrimitiveCategory);
  } else if (cl.equals(UDFToBoolean.class)) {
    ve = getCastToBoolean(childExpr);
  } else if (isCastToFloatFamily(cl)) {
    ve = getCastToDoubleExpression(cl, childExpr, returnType);
  } else if (cl.equals(UDFToString.class)) {
    ve = getCastToString(childExpr, returnType);
  }
  // NOTE: childExpr is a List<ExprNodeDesc>, so this instanceof check can never
  // succeed at runtime (it compiles only because List is an interface); the
  // fallback to getCustomUDFExpression is effectively dead code here.
  if (ve == null && childExpr instanceof ExprNodeGenericFuncDesc) {
    ve = getCustomUDFExpression((ExprNodeGenericFuncDesc) childExpr, mode);
  }
  return ve;
}
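Given the note above, a caller that actually wants the custom-UDF fallback would presumably pass the original function descriptor rather than its child list; a hedged sketch, where expr is an assumed ExprNodeGenericFuncDesc wrapping the bridge UDF (not a parameter of this method):

  // Hypothetical fallback for a bridge UDF that matches none of the cast branches.
  if (ve == null) {
    ve = getCustomUDFExpression(expr, mode);
  }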
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
The class TestOperatorSignature, method getTsOp.
private Operator<TableScanDesc> getTsOp(int i) {
  Table tblMetadata = new Table("db", "table");
  // FIXME: I think this shouldn't be sensitive to the alias; but currently it's
  // included in logicalEquals... check that.
  TableScanDesc desc = new TableScanDesc("alias", /*+ cCtx.nextOperatorId()*/ tblMetadata);
  List<ExprNodeDesc> as = Lists.newArrayList(
      new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i)),
      new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false));
  ExprNodeGenericFuncDesc f1 =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, as);
  desc.setFilterExpr(f1);
  Operator<TableScanDesc> ts = OperatorFactory.get(cCtx, desc);
  return ts;
}
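The snippet references two fixture fields, cCtx and udf, defined elsewhere in TestOperatorSignature; a plausible reconstruction of that setup (an assumption, not the verbatim test source) is:

  // Assumed fixture fields (hypothetical reconstruction of the test class setup).
  private final CompilationOpContext cCtx = new CompilationOpContext();
  private final GenericUDF udf = new GenericUDFConcat();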
Use of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc in project hive by apache.
The class TestAccumuloRangeGenerator, method testRangeOverNonRowIdField.
@Test
public void testRangeOverNonRowIdField() throws Exception {
  // foo >= 'f'
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "foo", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPEqualOrGreaterThan(), children);
  assertNotNull(node);
  // foo <= 'm'
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "foo", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPEqualOrLessThan(), children2);
  assertNotNull(node2);
  // And UDF
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPAnd(), bothFilters);
  AccumuloRangeGenerator rangeGenerator =
      new AccumuloRangeGenerator(conf, handler, rowIdMapping, "rid");
  Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator,
      Collections.<Rule, NodeProcessor>emptyMap(), null);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(both);
  HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
  try {
    ogw.startWalking(topNodes, nodeOutput);
  } catch (SemanticException ex) {
    throw new RuntimeException(ex);
  }
  // Filters are not over the rowid, therefore scan everything
  Object result = nodeOutput.get(both);
  Assert.assertNull(result);
}
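For contrast, a hedged sketch of the positive case (not part of this test): had the two comparisons referenced the rowid column that rowIdMapping covers ("rid" here) instead of "foo", the generator would be expected to return a non-null result bounded by the 'f' and 'm' constants.

  // Hypothetical variant: same >=/<= conjunction, but over the "rid" rowid column.
  // The walker output should then be a list of Accumulo Ranges spanning 'f' to 'm',
  // rather than null (null signals "scan everything").
  Assert.assertNotNull(nodeOutput.get(both));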