Usage of org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef in the Apache Hive project — class PTFTranslator, method buildOrderExpressions.
/**
 * Builds the {@code OrderDef} used for RANGE-based windowing by translating
 * each order expression against the given input shape.
 *
 * @param inpShape the input row shape the expressions are resolved against
 * @param orderExpressions the order expressions taken from the window spec
 * @return an OrderDef containing one OrderExpressionDef per input expression
 * @throws SemanticException if an expression contains lead/lag, fails to
 *         translate, or has a type unsupported for a value boundary
 */
private OrderDef buildOrderExpressions(ShapeDetails inpShape, List<OrderExpression> orderExpressions) throws SemanticException {
  OrderDef result = new OrderDef();
  for (OrderExpression orderExpr : orderExpressions) {
    // lead/lag calls are not permitted inside a RANGE (value boundary) expression
    PTFTranslator.validateNoLeadLagInValueBoundarySpec(orderExpr.getExpression());
    final PTFExpressionDef translated;
    try {
      translated = buildExpressionDef(inpShape, orderExpr.getExpression());
    } catch (HiveException he) {
      // surface translation failures as semantic errors, preserving the cause
      throw new SemanticException(he);
    }
    // RANGE boundaries only support a restricted set of primitive types
    PTFTranslator.validateValueBoundaryExprType(translated.getOI());
    OrderExpressionDef orderExprDef = new OrderExpressionDef(translated);
    orderExprDef.setOrder(orderExpr.getOrder());
    orderExprDef.setNullOrder(orderExpr.getNullOrder());
    result.addExpression(orderExprDef);
  }
  return result;
}
Usage of org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef in the Apache Hive project — class SemanticAnalyzer, method buildPTFReduceSinkDetails.
/**
 * Construct the data structures containing ExprNodeDesc for partition
 * columns and order columns. Use the input definition to construct the list
 * of output columns for the ReduceSinkOperator.
 *
 * @param tabDef the partitioned table function definition being translated
 * @param inputRR row resolver for the input (unused here but part of the contract)
 * @param partCols output: partition key expressions (deduplicated)
 * @param orderCols output: sort key expressions (partition cols first, then order cols)
 * @param orderString output: one '+'/'-' direction char per entry in orderCols
 * @param nullOrderString output: one 'a'/'z' null-ordering char per entry in orderCols
 * @throws SemanticException declared for interface consistency
 */
void buildPTFReduceSinkDetails(PartitionedTableFunctionDef tabDef, RowResolver inputRR, ArrayList<ExprNodeDesc> partCols, ArrayList<ExprNodeDesc> orderCols, StringBuilder orderString, StringBuilder nullOrderString) throws SemanticException {
  // Partition columns: add each distinct expression with default direction
  // ('+', ascending) and default null ordering ('a', nulls first).
  for (PTFExpressionDef partDef : tabDef.getPartition().getExpressions()) {
    ExprNodeDesc partExpr = partDef.getExprNode();
    if (ExprNodeDescUtils.indexOf(partExpr, partCols) >= 0) {
      continue; // already registered
    }
    partCols.add(partExpr);
    orderCols.add(partExpr);
    orderString.append('+');
    nullOrderString.append('a');
  }
  /*
   * Order columns are used as key columns for constructing
   * the ReduceSinkOperator
   * Since we do not explicitly add these to outputColumnNames,
   * we need to set includeKeyCols = false while creating the
   * ReduceSinkDesc
   */
  for (OrderExpressionDef ordDef : tabDef.getOrder().getExpressions()) {
    char directionChar = ordDef.getOrder() == PTFInvocationSpec.Order.ASC ? '+' : '-';
    char nullDirectionChar = ordDef.getNullOrder() == PTFInvocationSpec.NullOrder.NULLS_FIRST ? 'a' : 'z';
    int pos = ExprNodeDescUtils.indexOf(ordDef.getExprNode(), orderCols);
    if (pos < 0) {
      // new sort key: append expression and its direction/null-order chars
      orderCols.add(ordDef.getExprNode());
      orderString.append(directionChar);
      nullOrderString.append(nullDirectionChar);
    } else {
      // expression already present (e.g. also a partition column):
      // override the defaults with the explicitly requested ordering
      orderString.setCharAt(pos, directionChar);
      nullOrderString.setCharAt(pos, nullDirectionChar);
    }
  }
}
Usage of org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef in the Apache Hive project — class MultiValueBoundaryScanner, method getScanner.
/**
 * Factory for the SingleValueBoundaryScanner matching the single order
 * expression's primitive category.
 *
 * @param start the window frame's start boundary
 * @param end the window frame's end boundary
 * @param orderDef must contain exactly one order expression
 * @return a scanner specialized for the order expression's primitive type
 * @throws HiveException if there is not exactly one order expression, or its
 *         primitive category has no scanner implementation
 */
public static SingleValueBoundaryScanner getScanner(BoundaryDef start, BoundaryDef end, OrderDef orderDef) throws HiveException {
  if (orderDef.getExpressions().size() != 1) {
    throw new HiveException("Internal error: initializing SingleValueBoundaryScanner with" + " multiple expression for sorting");
  }
  OrderExpressionDef sortKey = orderDef.getExpressions().get(0);
  PrimitiveObjectInspector primitiveOI = (PrimitiveObjectInspector) sortKey.getOI();
  switch(primitiveOI.getPrimitiveCategory()) {
    // integral types and TIMESTAMP share the long-based scanner
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case TIMESTAMP:
      return new LongValueBoundaryScanner(start, end, sortKey);
    case FLOAT:
    case DOUBLE:
      return new DoubleValueBoundaryScanner(start, end, sortKey);
    case DECIMAL:
      return new HiveDecimalValueBoundaryScanner(start, end, sortKey);
    case DATE:
      return new DateValueBoundaryScanner(start, end, sortKey);
    case STRING:
      return new StringValueBoundaryScanner(start, end, sortKey);
    default:
      // no scanner implemented for this primitive category
      throw new HiveException(String.format("Internal Error: attempt to setup a Window for datatype %s", primitiveOI.getPrimitiveCategory()));
  }
}
Usage of org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef in the Apache Hive project — class PTFTranslator, method translate.
/**
 * Translates an OrderSpec into an OrderDef against the given input shape.
 * A null spec yields an empty OrderDef.
 *
 * @param inpShape the input row shape the order expressions resolve against
 * @param spec the order spec to translate; may be null
 * @param partitionDef not read here; kept for signature compatibility —
 *        NOTE(review): presumably used by overloads/callers, confirm upstream
 * @return the translated OrderDef (possibly empty, never null)
 * @throws SemanticException if an order expression fails to translate
 */
private OrderDef translate(ShapeDetails inpShape, OrderSpec spec, PartitionDef partitionDef) throws SemanticException {
  OrderDef orderDef = new OrderDef();
  if (spec != null) {
    for (OrderExpression orderExpr : spec.getExpressions()) {
      orderDef.addExpression(translate(inpShape, orderExpr));
    }
  }
  return orderDef;
}
Usage of org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef in the Apache Hive project — class PTFTranslator, method setupRankingArgs.
/**
 * Wires the window's order expressions in as the implicit arguments of a
 * ranking function (ranking functions rank by the window's ORDER BY keys).
 *
 * @param wdwTFnDef the enclosing window table function, supplying the order keys
 * @param wFnDef the ranking function definition receiving the arguments
 * @param wSpec the original function spec; must carry no explicit arguments
 * @throws SemanticException if the spec has any explicit arguments
 */
private void setupRankingArgs(WindowTableFunctionDef wdwTFnDef, WindowFunctionDef wFnDef, WindowFunctionSpec wSpec) throws SemanticException {
  if (!wSpec.getArgs().isEmpty()) {
    throw new SemanticException("Ranking Functions can take no arguments");
  }
  // each ORDER BY expression becomes an implicit argument, in order
  for (OrderExpressionDef orderExpr : wdwTFnDef.getOrder().getExpressions()) {
    wFnDef.addArg(orderExpr);
  }
}
Aggregations