Usage example of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project:
class TestValueBoundaryScanner, method testDateEquals.
@Test
public void testDateEquals() {
  // Build a date-typed order expression so the scanner compares DateWritableV2 values.
  PTFExpressionDef exprDef = new PTFExpressionDef();
  exprDef.setOI(PrimitiveObjectInspectorFactory.writableDateObjectInspector);
  DateValueBoundaryScanner scanner =
      new DateValueBoundaryScanner(null, null, new OrderExpressionDef(exprDef), false);

  Date oneDayAfterEpoch = new Date();
  // epoch+1 day
  oneDayAfterEpoch.setTimeInMillis(86400000);
  DateWritableV2 first = new DateWritableV2(oneDayAfterEpoch);
  DateWritableV2 second = new DateWritableV2(oneDayAfterEpoch);
  // empty
  DateWritableV2 empty = new DateWritableV2();

  // Same underlying date must compare equal in either argument order.
  Assert.assertTrue(scanner.isEqual(first, second));
  Assert.assertTrue(scanner.isEqual(second, first));
  // empty == epoch
  Assert.assertTrue(scanner.isEqual(empty, new DateWritableV2(new Date())));
  // empty != another non-epoch
  Assert.assertFalse(scanner.isEqual(empty, first));
  // A single null operand is never equal; two nulls are.
  Assert.assertFalse(scanner.isEqual(null, second));
  Assert.assertFalse(scanner.isEqual(first, null));
  Assert.assertTrue(scanner.isEqual(null, null));
}
Usage example of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project:
class TestValueBoundaryScanner, method testStringEquals.
@Test
public void testStringEquals() {
  // String-typed order expression: the scanner compares Text values.
  PTFExpressionDef exprDef = new PTFExpressionDef();
  exprDef.setOI(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  StringValueBoundaryScanner scanner =
      new StringValueBoundaryScanner(null, null, new OrderExpressionDef(exprDef), false);

  Text textA = new Text("a");
  Text textB = new Text("b");

  // Reflexive equality, and inequality in both argument orders.
  Assert.assertTrue(scanner.isEqual(textA, textA));
  Assert.assertFalse(scanner.isEqual(textA, textB));
  Assert.assertFalse(scanner.isEqual(textB, textA));
  // A single null operand is never equal; two nulls are.
  Assert.assertFalse(scanner.isEqual(null, textB));
  Assert.assertFalse(scanner.isEqual(textA, null));
  Assert.assertTrue(scanner.isEqual(null, null));
}
Usage example of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project:
class BasePartitionEvaluator, method calcFunctionValue.
/**
 * Given a partition iterator, calculate the function value by feeding every row
 * of the partition into the wrapped aggregation evaluator.
 * @param pItr the partition pointer
 * @param leadLagInfo LEAD/LAG expressions that must be bound to this partition
 * @return the function value, copied into standard object form
 * @throws HiveException
 */
protected Object calcFunctionValue(PTFPartitionIterator<Object> pItr, LeadLagInfo leadLagInfo) throws HiveException {
  // To handle the case like SUM(LAG(f)) over(), aggregation function includes
  // LAG/LEAD call
  PTFOperator.connectLeadLagFunctionsToPartition(leadLagInfo, pItr);

  AggregationBuffer aggBuffer = wrappedEvaluator.getNewAggregationBuffer();
  if (isCountEvaluator && parameters == null) {
    // COUNT(*) shortcut: no need to iterate through entire iterator and read rowContainer again
    return ObjectInspectorUtils.copyToStandardObject(new LongWritable(pItr.count()), outputOI);
  }

  int argCount = (parameters == null) ? 0 : parameters.size();
  Object[] argValues = new Object[argCount];
  while (pItr.hasNext()) {
    Object row = pItr.next();
    if (parameters != null) {
      int idx = 0;
      for (PTFExpressionDef param : parameters) {
        argValues[idx++] = param.getExprEvaluator().evaluate(row);
      }
    }
    wrappedEvaluator.aggregate(aggBuffer, argValues);
  }

  // The object is reused during evaluating, make a copy here
  return ObjectInspectorUtils.copyToStandardObject(wrappedEvaluator.evaluate(aggBuffer), outputOI);
}
Usage example of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project:
class Vectorizer, method fillInPTFEvaluators.
/**
 * Populates the parallel evaluator arrays (function name, window frame, input
 * expressions) from the given window function definitions, one slot per function.
 * Slots of {@code evaluatorInputExprNodeDescLists} whose function has no
 * arguments are left untouched (null).
 */
private static void fillInPTFEvaluators(List<WindowFunctionDef> windowsFunctions, String[] evaluatorFunctionNames, WindowFrameDef[] evaluatorWindowFrameDefs, List<ExprNodeDesc>[] evaluatorInputExprNodeDescLists) throws HiveException {
  final int count = windowsFunctions.size();
  for (int idx = 0; idx < count; idx++) {
    WindowFunctionDef windowFunction = windowsFunctions.get(idx);
    evaluatorFunctionNames[idx] = windowFunction.getName();
    evaluatorWindowFrameDefs[idx] = windowFunction.getWindowFrame();

    List<PTFExpressionDef> functionArgs = windowFunction.getArgs();
    if (functionArgs == null) {
      continue;
    }
    // Collect the ExprNodeDesc of each argument for this function's slot.
    List<ExprNodeDesc> inputExprs = new ArrayList<ExprNodeDesc>();
    for (PTFExpressionDef functionArg : functionArgs) {
      inputExprs.add(functionArg.getExprNode());
    }
    evaluatorInputExprNodeDescLists[idx] = inputExprs;
  }
}
Usage example of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in the Apache Hive project:
class PTFDeserializer, method initialize.
protected void initialize(PartitionedTableFunctionDef def) throws HiveException {
  ShapeDetails inputShape = def.getInput().getOutputShape();

  /*
   * 1. initialize args
   */
  if (def.getArgs() != null) {
    for (PTFExpressionDef argDef : def.getArgs()) {
      initialize(argDef, inputShape);
    }
  }

  /*
   * 2. setup resolver, make connections
   */
  TableFunctionEvaluator evaluator = def.getTFunction();
  // TableFunctionResolver tResolver = FunctionRegistry.getTableFunctionResolver(def.getName());
  TableFunctionResolver resolver = constructResolver(def.getResolverClassName());
  resolver.initialize(hConf, ptfDesc, def, evaluator);

  /*
   * 3. give Evaluator chance to setup for RawInput execution; setup RawInput shape
   */
  if (evaluator.isTransformsRawInput()) {
    resolver.initializeRawInputOI();
    initialize(def.getRawInputShape(), evaluator.getRawInputOI());
  } else {
    // Raw input passes through untransformed: reuse the input shape.
    def.setRawInputShape(inputShape);
  }
  inputShape = def.getRawInputShape();

  /*
   * 4. give Evaluator chance to setup for Output execution; setup Output shape.
   */
  resolver.initializeOutputOI();
  initialize(def.getOutputShape(), evaluator.getOutputOI());
}
Aggregations