Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge in project hive by apache.
The class StatsUtils, method getNDVFor: when estimating the number of distinct values an expression can produce, the method unwraps GenericUDFBridge so that the NDV annotation is looked up on the wrapped legacy UDF class rather than on the bridge itself.
private static long getNDVFor(ExprNodeGenericFuncDesc engfd, long numRows, Statistics parentStats) {
  GenericUDF udf = engfd.getGenericUDF();
  // A UDF that is neither deterministic nor a runtime constant can produce up to numRows distinct values.
  if (!FunctionRegistry.isDeterministic(udf) && !FunctionRegistry.isRuntimeConstant(udf)) {
    return numRows;
  }
  List<Long> ndvs = Lists.newArrayList();
  // For a bridged legacy UDF, look up the NDV annotation on the wrapped UDF class, not on the bridge.
  Class<?> udfClass = udf instanceof GenericUDFBridge ? ((GenericUDFBridge) udf).getUdfClass() : udf.getClass();
  NDV ndv = AnnotationUtils.getAnnotation(udfClass, NDV.class);
  long udfNDV = Long.MAX_VALUE;
  if (ndv != null) {
    udfNDV = ndv.maxNdv();
  } else {
    // No annotation: fall back to the NDVs of the function's input columns.
    for (String col : engfd.getCols()) {
      ColStatistics stats = parentStats.getColumnStatisticsFromColName(col);
      if (stats != null) {
        ndvs.add(stats.getCountDistint());
      }
    }
  }
  long countDistincts = ndvs.isEmpty() ? numRows : addWithExpDecay(ndvs);
  return Collections.min(Lists.newArrayList(countDistincts, udfNDV, numRows));
}
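The annotation branch above only fires when the UDF class (for a bridge, the legacy class returned by getUdfClass()) carries the NDV annotation. A minimal sketch of a legacy UDF that this branch would pick up; the class and its logic are hypothetical, and the package of NDV is assumed to match the snippet above:

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.udf.generic.NDV;

// Hypothetical legacy UDF, for illustration only: its result can only be
// true, false, or null, so it advertises a maximum NDV of 2 that getNDVFor
// reads after unwrapping the GenericUDFBridge.
@NDV(maxNdv = 2)
public class UDFIsPositive extends UDF {
  public Boolean evaluate(Integer value) {
    return value == null ? null : value > 0;
  }
}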
Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge in project hive by apache.
The class TestAccumuloRangeGenerator, method testCastExpression: the test constructs a GenericUDFBridge by hand to express cast(40 + 50 as string) and checks that the range generator does not fail on a predicate containing the bridged cast.
@Test
public void testCastExpression() throws Exception {
  // 40 and 50
  ExprNodeDesc fourty = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 40),
      fifty = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 50);
  // +
  GenericUDFOPPlus plus = new GenericUDFOPPlus();
  // 40 + 50
  ExprNodeGenericFuncDesc addition =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, plus, Arrays.asList(fourty, fifty));
  // cast(.... as string)
  UDFToString stringCast = new UDFToString();
  GenericUDFBridge stringCastBridge = new GenericUDFBridge("cast", false, stringCast.getClass().getName());
  // cast (40 + 50 as string)
  ExprNodeGenericFuncDesc cast = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, stringCastBridge,
      "cast", Collections.<ExprNodeDesc>singletonList(addition));
  // key >= cast(40 + 50 as string)
  ExprNodeDesc key = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "key", null, false);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
      new GenericUDFOPEqualOrGreaterThan(), Arrays.asList(key, cast));
  // Walk the predicate with the Accumulo range generator
  AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(conf, handler, rowIdMapping, "key");
  Dispatcher disp = new DefaultRuleDispatcher(rangeGenerator, Collections.<Rule, NodeProcessor>emptyMap(), null);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(node);
  HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
  try {
    ogw.startWalking(topNodes, nodeOutput);
  } catch (SemanticException ex) {
    throw new RuntimeException(ex);
  }
  // Don't fail -- would be better to actually compute a range of [90,+inf)
  Object result = nodeOutput.get(node);
  Assert.assertNull(result);
}
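Although the test only asserts that the walker produces no range, the bridged cast built above is itself evaluable with the same ExprNodeEvaluator pattern used in foldConstant below. A minimal sketch, continuing from the cast variable in the test; the printed value 90 is an assumption about how UDFToString renders the integer result, not something the original test asserts:

// Sketch only: evaluate cast(40 + 50 as string) through the GenericUDFBridge.
ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(cast);
ObjectInspector output = evaluator.initialize(null);
Object writable = evaluator.evaluate(null);
Object java = ObjectInspectorUtils.copyToStandardJavaObject(writable, output);
System.out.println(java); // expected: 90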
Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge in project hive by apache.
The class ExprNodeDescUtils, method foldConstant: before folding a constant expression at compile time, bridged UDFs are unwrapped so the underlying legacy UDF can be checked for required files or jars, in which case folding is skipped.
private static ExprNodeConstantDesc foldConstant(ExprNodeGenericFuncDesc func) {
  GenericUDF udf = func.getGenericUDF();
  // Only fold functions whose result is consistent within a single query.
  if (!FunctionRegistry.isConsistentWithinQuery(udf)) {
    return null;
  }
  try {
    // resources may not be available at compile time.
    if (udf instanceof GenericUDFBridge) {
      UDF internal = ReflectionUtils.newInstance(((GenericUDFBridge) udf).getUdfClass(), null);
      if (internal.getRequiredFiles() != null || internal.getRequiredJars() != null) {
        return null;
      }
    } else {
      if (udf.getRequiredFiles() != null || udf.getRequiredJars() != null) {
        return null;
      }
    }
    // Every child must already be a constant or itself fold to one.
    if (func.getChildren() != null) {
      for (ExprNodeDesc child : func.getChildren()) {
        if (child instanceof ExprNodeConstantDesc) {
          continue;
        }
        if (child instanceof ExprNodeGenericFuncDesc) {
          if (foldConstant((ExprNodeGenericFuncDesc) child) != null) {
            continue;
          }
        }
        return null;
      }
    }
    // Evaluate the expression at compile time and wrap the result as a constant.
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(func);
    ObjectInspector output = evaluator.initialize(null);
    Object constant = evaluator.evaluate(null);
    Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
    return new ExprNodeConstantDesc(java);
  } catch (Exception e) {
    return null;
  }
}
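The GenericUDFBridge branch exists because legacy UDFs can declare side resources that are only localized at runtime, so folding such a call at compile time could fail. A minimal sketch of a legacy UDF that foldConstant would refuse to fold; the class name, file name, and lookup logic are hypothetical:

import org.apache.hadoop.hive.ql.exec.UDF;

// Hypothetical legacy UDF, for illustration only: it depends on a side file,
// so getRequiredFiles() is non-null and foldConstant returns null for it.
public class UDFLookupCode extends UDF {
  @Override
  public String[] getRequiredFiles() {
    return new String[] { "codes.txt" }; // illustrative file name
  }

  public String evaluate(String key) {
    // Would read codes.txt at runtime; lookup omitted here.
    return key;
  }
}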