Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in project hive by apache: class PTFTranslator, method buildExpressionDef.
/*
 * Expr translation helper methods
 */
public PTFExpressionDef buildExpressionDef(ShapeDetails inpShape, ASTNode arg) throws HiveException {
  // Resolve the AST node into an expression descriptor against the input
  // shape's row resolver and type-check context.
  ExprNodeDesc desc = semAly.genExprNodeDesc(arg, inpShape.getRr(), inpShape.getTypeCheckCtx());
  // Wrap the descriptor in an evaluator aware of windowing lead/lag info.
  ExprNodeEvaluator evaluator = WindowingExprNodeEvaluatorFactory.get(llInfo, desc);
  ObjectInspector inspector = initExprNodeEvaluator(evaluator, desc, inpShape);

  PTFExpressionDef def = new PTFExpressionDef();
  def.setExpressionTreeString(arg.toStringTree());
  def.setExprNode(desc);
  def.setExprEvaluator(evaluator);
  def.setOI(inspector);
  return def;
}
Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in project hive by apache: class PTFTranslator, method translate (partition spec).
/**
 * Translates a {@link PartitionSpec} into a {@link PartitionDef}, translating
 * each partition expression against the given input shape.
 *
 * @param inpShape shape the partition expressions are resolved against
 * @param spec     parsed partition spec; may be null or empty
 * @return the translated PartitionDef, or null when there is nothing to partition by
 * @throws SemanticException if an expression fails to translate
 */
private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec) throws SemanticException {
  // Idiom: isEmpty() instead of size() == 0.
  if (spec == null || spec.getExpressions() == null || spec.getExpressions().isEmpty()) {
    return null;
  }
  PartitionDef pDef = new PartitionDef();
  for (PartitionExpression pExpr : spec.getExpressions()) {
    pDef.addExpression(translate(inpShape, pExpr));
  }
  return pDef;
}
Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in project hive by apache: class PTFTranslator, method translate (partition expression).
/**
 * Translates a single partition expression, validating that its result type
 * is comparable (required for partitioning).
 */
private PTFExpressionDef translate(ShapeDetails inpShape, PartitionExpression pExpr) throws SemanticException {
  PTFExpressionDef def;
  try {
    def = buildExpressionDef(inpShape, pExpr.getExpression());
  } catch (HiveException he) {
    // Surface translation failures as semantic errors, preserving the cause.
    throw new SemanticException(he);
  }
  PTFTranslator.validateComparable(def.getOI(),
      String.format("Partition Expression %s is not a comparable expression",
          pExpr.getExpression().toStringTree()));
  return def;
}
Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in project hive by apache: class PTFTranslator, method buildOrderExpressions.
/**
 * Collect order expressions for RANGE based windowing
 * @throws SemanticException
 */
private OrderDef buildOrderExpressions(ShapeDetails inpShape, List<OrderExpression> orderExpressions) throws SemanticException {
  OrderDef orderDef = new OrderDef();
  for (OrderExpression orderExpr : orderExpressions) {
    // RANGE boundaries cannot reference lead/lag.
    PTFTranslator.validateNoLeadLagInValueBoundarySpec(orderExpr.getExpression());

    PTFExpressionDef exprDef;
    try {
      exprDef = buildExpressionDef(inpShape, orderExpr.getExpression());
    } catch (HiveException he) {
      // Re-throw as a semantic error, keeping the original cause.
      throw new SemanticException(he);
    }
    // The value-boundary expression must be of a type usable in RANGE frames.
    PTFTranslator.validateValueBoundaryExprType(exprDef.getOI());

    OrderExpressionDef oeDef = new OrderExpressionDef(exprDef);
    oeDef.setOrder(orderExpr.getOrder());
    oeDef.setNullOrder(orderExpr.getNullOrder());
    orderDef.addExpression(oeDef);
  }
  return orderDef;
}
Use of org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef in project hive by apache: class WindowingTableFunction, method processRow.
/*
 * (non-Javadoc)
 *
 * @see
 * org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator#processRow(java
 * .lang.Object)
 *
 * - hand row to each Function, provided there are enough rows for Function's
 * window. - call getNextObject on each Function. - output as many rows as
 * possible, based on minimum sz of Output List
 */
@Override
public List<Object> processRow(Object row) throws HiveException {
  /*
   * Once enough rows have been output, there is no need to process input rows.
   */
  if (streamingState.rankLimitReached()) {
    return null;
  }
  streamingState.rollingPart.append(row);
  // Re-read the appended row: append() may have converted it to the
  // partition's ObjectInspector representation.
  row = streamingState.rollingPart.getAt(streamingState.rollingPart.size() - 1);

  WindowTableFunctionDef tabDef = (WindowTableFunctionDef) tableDef;
  for (int i = 0; i < tabDef.getWindowFunctions().size(); i++) {
    WindowFunctionDef wFn = tabDef.getWindowFunctions().get(i);
    GenericUDAFEvaluator fnEval = wFn.getWFnEval();

    // Evaluate this window function's arguments against the current row.
    int a = 0;
    if (wFn.getArgs() != null) {
      for (PTFExpressionDef arg : wFn.getArgs()) {
        streamingState.funcArgs[i][a++] = arg.getExprEvaluator().evaluate(row);
      }
    }

    // instanceof is false for null, so the extra null check was redundant.
    if (fnEval instanceof ISupportStreamingModeForWindowing) {
      // Streaming-capable UDAF: aggregate incrementally and emit a result as
      // soon as one is available.
      fnEval.aggregate(streamingState.aggBuffers[i], streamingState.funcArgs[i]);
      Object out = ((ISupportStreamingModeForWindowing) fnEval)
          .getNextResult(streamingState.aggBuffers[i]);
      if (out != null) {
        streamingState.fnOutputs[i]
            .add(out == ISupportStreamingModeForWindowing.NULL_RESULT ? null : out);
      }
    } else {
      // Non-streaming function: evaluate only once its window frame has
      // accumulated enough rows.
      int rowToProcess = streamingState.rollingPart.rowToProcess(wFn.getWindowFrame());
      if (rowToProcess >= 0) {
        Object out = evaluateWindowFunction(wFn, rowToProcess, streamingState.rollingPart);
        streamingState.fnOutputs[i].add(out);
      }
    }
  }

  // Drain every fully-formed output row; callers expect null when none exist.
  List<Object> oRows = new ArrayList<Object>();
  while (streamingState.hasOutputRow()) {
    oRows.add(streamingState.nextOutputRow());
  }
  return oRows.isEmpty() ? null : oRows;
}
Aggregations