Use of org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec in project hive by apache.
The class SemanticAnalyzer, method processPTFPartitionSpec.
private PartitioningSpec processPTFPartitionSpec(ASTNode pSpecNode) {
  PartitioningSpec partitioning = new PartitioningSpec();
  ASTNode firstChild = (ASTNode) pSpecNode.getChild(0);
  int type = firstChild.getType();
  if (type == HiveParser.TOK_DISTRIBUTEBY || type == HiveParser.TOK_CLUSTERBY) {
    PartitionSpec pSpec = processPartitionSpec(firstChild);
    partitioning.setPartSpec(pSpec);
    ASTNode sortNode = pSpecNode.getChildCount() > 1 ? (ASTNode) pSpecNode.getChild(1) : null;
    if (sortNode != null) {
      OrderSpec oSpec = processOrderSpec(sortNode);
      partitioning.setOrderSpec(oSpec);
    }
  } else if (type == HiveParser.TOK_SORTBY || type == HiveParser.TOK_ORDERBY) {
    OrderSpec oSpec = processOrderSpec(firstChild);
    partitioning.setOrderSpec(oSpec);
  }
  return partitioning;
}
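The result carries at most a partition spec and an order spec. A minimal caller sketch, assuming the getter counterparts getPartSpec() and getOrderSpec() and an already-located pSpecNode of type TOK_PARTITIONINGSPEC (both assumptions, not shown in this listing):

PartitioningSpec partitioning = processPTFPartitionSpec(pSpecNode);
if (partitioning.getPartSpec() != null) {
  // DISTRIBUTE BY / CLUSTER BY was present.
}
if (partitioning.getOrderSpec() != null) {
  // Either an explicit SORT BY / ORDER BY, or the optional sort node after DISTRIBUTE BY.
}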
Use of org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec in project hive by apache.
The class ExprNodeConverter, method visitOver.
@Override
public ExprNodeDesc visitOver(RexOver over) {
  if (!deep) {
    return null;
  }
  final RexWindow window = over.getWindow();
  // Build the window specification: partitioning plus window frame.
  final WindowSpec windowSpec = new WindowSpec();
  final PartitioningSpec partitioningSpec = getPSpec(window);
  windowSpec.setPartitioning(partitioningSpec);
  final WindowFrameSpec windowFrameSpec = getWindowRange(window);
  windowSpec.setWindowFrame(windowFrameSpec);
  // Build the window function spec from the converted function AST.
  WindowFunctionSpec wfs = new WindowFunctionSpec();
  wfs.setWindowSpec(windowSpec);
  final Schema schema = new Schema(tabAlias, inputRowType.getFieldList());
  final ASTNode wUDAFAst = new ASTConverter.RexVisitor(schema).visitOver(over);
  wfs.setExpression(wUDAFAst);
  ASTNode nameNode = (ASTNode) wUDAFAst.getChild(0);
  wfs.setName(nameNode.getText());
  // Copy the argument children, skipping child 0 (the function name) and the last child.
  for (int i = 1; i < wUDAFAst.getChildCount() - 1; i++) {
    ASTNode child = (ASTNode) wUDAFAst.getChild(i);
    wfs.addArg(child);
  }
  if (wUDAFAst.getText().equals("TOK_FUNCTIONSTAR")) {
    wfs.setStar(true);
  }
  // Register the window function under a generated column alias and return a reference to it.
  String columnAlias = getWindowColumnAlias();
  wfs.setAlias(columnAlias);
  this.windowFunctionSpecs.add(wfs);
  return new ExprNodeColumnDesc(TypeConverter.convert(over.getType()), columnAlias, tabAlias, false);
}
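Note that the ExprNodeColumnDesc returned here does not evaluate the aggregate itself: it only refers to columnAlias. The WindowFunctionSpec collected in windowFunctionSpecs is what is later planned to produce that column, and it is the kind of spec that grouping code such as groupFunctions below consumes.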
Use of org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec in project hive by apache.
The class ExprNodeConverter, method getPSpec.
private PartitioningSpec getPSpec(RexWindow window) {
  PartitioningSpec partitioning = new PartitioningSpec();
  Schema schema = new Schema(tabAlias, inputRowType.getFieldList());
  if (window.partitionKeys != null && !window.partitionKeys.isEmpty()) {
    PartitionSpec pSpec = new PartitionSpec();
    for (RexNode pk : window.partitionKeys) {
      PartitionExpression exprSpec = new PartitionExpression();
      ASTNode astNode = pk.accept(new RexVisitor(schema));
      exprSpec.setExpression(astNode);
      pSpec.addExpression(exprSpec);
    }
    partitioning.setPartSpec(pSpec);
  }
  if (window.orderKeys != null && !window.orderKeys.isEmpty()) {
    OrderSpec oSpec = new OrderSpec();
    for (RexFieldCollation ok : window.orderKeys) {
      OrderExpression exprSpec = new OrderExpression();
      Order order = ok.getDirection() == RelFieldCollation.Direction.ASCENDING ? Order.ASC : Order.DESC;
      NullOrder nullOrder;
      if (ok.right.contains(SqlKind.NULLS_FIRST)) {
        nullOrder = NullOrder.NULLS_FIRST;
      } else if (ok.right.contains(SqlKind.NULLS_LAST)) {
        nullOrder = NullOrder.NULLS_LAST;
      } else {
        // Default: NULLS FIRST for ascending keys, NULLS LAST for descending keys.
        nullOrder = ok.getDirection() == RelFieldCollation.Direction.ASCENDING ? NullOrder.NULLS_FIRST : NullOrder.NULLS_LAST;
      }
      exprSpec.setOrder(order);
      exprSpec.setNullOrder(nullOrder);
      ASTNode astNode = ok.left.accept(new RexVisitor(schema));
      exprSpec.setExpression(astNode);
      oSpec.addExpression(exprSpec);
    }
    partitioning.setOrderSpec(oSpec);
  }
  return partitioning;
}
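The same PartitioningSpec API can be exercised by hand with the classes used above. A minimal sketch, assuming colAst is an ASTNode for the partitioning/ordering column (a hypothetical placeholder), building the equivalent of PARTITION BY c1 ORDER BY c1 DESC NULLS LAST:

PartitioningSpec partitioning = new PartitioningSpec();

PartitionSpec pSpec = new PartitionSpec();
PartitionExpression pExpr = new PartitionExpression();
pExpr.setExpression(colAst);          // hypothetical ASTNode for column c1
pSpec.addExpression(pExpr);
partitioning.setPartSpec(pSpec);

OrderSpec oSpec = new OrderSpec();
OrderExpression oExpr = new OrderExpression();
oExpr.setExpression(colAst);          // same hypothetical ASTNode
oExpr.setOrder(Order.DESC);
oExpr.setNullOrder(NullOrder.NULLS_LAST);
oSpec.addExpression(oExpr);
partitioning.setOrderSpec(oSpec);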
Use of org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec in project hive by apache.
The class SemanticAnalyzer, method processPTFChain.
/*
 * - tree form is
 *   ^(TOK_PTBLFUNCTION name alias? partitionTableFunctionSource partitioningSpec? arguments*)
 * - a partitionTableFunctionSource can be a tableReference, a SubQuery or another
 *   PTF invocation.
 */
private PartitionedTableFunctionSpec processPTFChain(QB qb, ASTNode ptf) throws SemanticException {
  int child_count = ptf.getChildCount();
  if (child_count < 2) {
    throw new SemanticException(generateErrorMessage(ptf, "Not enough Children " + child_count));
  }
  PartitionedTableFunctionSpec ptfSpec = new PartitionedTableFunctionSpec();
  ptfSpec.setAstNode(ptf);
  /*
   * name
   */
  ASTNode nameNode = (ASTNode) ptf.getChild(0);
  ptfSpec.setName(nameNode.getText());
  int inputIdx = 1;
  /*
   * alias
   */
  ASTNode secondChild = (ASTNode) ptf.getChild(1);
  if (secondChild.getType() == HiveParser.Identifier) {
    ptfSpec.setAlias(secondChild.getText());
    inputIdx++;
  }
  /*
   * input
   */
  ASTNode inputNode = (ASTNode) ptf.getChild(inputIdx);
  ptfSpec.setInput(processPTFSource(qb, inputNode));
  int argStartIdx = inputIdx + 1;
  /*
   * partitioning spec
   */
  int pSpecIdx = inputIdx + 1;
  ASTNode pSpecNode = ptf.getChildCount() > pSpecIdx ? (ASTNode) ptf.getChild(pSpecIdx) : null;
  if (pSpecNode != null && pSpecNode.getType() == HiveParser.TOK_PARTITIONINGSPEC) {
    PartitioningSpec partitioning = processPTFPartitionSpec(pSpecNode);
    ptfSpec.setPartitioning(partitioning);
    argStartIdx++;
  }
  /*
   * arguments
   */
  for (int i = argStartIdx; i < ptf.getChildCount(); i++) {
    ptfSpec.addArg((ASTNode) ptf.getChild(i));
  }
  return ptfSpec;
}
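For orientation, the index bookkeeping above corresponds to a child layout roughly like the following (a sketch based on the tree-form comment; the names are illustrative only, with noop standing in for an example PTF):

TOK_PTBLFUNCTION
  noop                                          child 0: function name
  src_alias                                     optional Identifier alias (moves inputIdx to 2)
  <tableReference | SubQuery | TOK_PTBLFUNCTION>  the input, at inputIdx
  TOK_PARTITIONINGSPEC                          optional, handled by processPTFPartitionSpec
  <remaining children>                          added as arguments from argStartIdx onward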
Use of org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec in project hive by apache.
The class WindowingComponentizer, method groupFunctions.
private void groupFunctions() throws SemanticException {
  for (WindowExpressionSpec expr : originalSpec.getWindowExpressions()) {
    WindowFunctionSpec wFn = (WindowFunctionSpec) expr;
    // Window functions that share a PartitioningSpec end up in the same WindowingSpec.
    PartitioningSpec wFnGrp = wFn.getWindowSpec().getPartitioning();
    WindowingSpec wSpec = groups.get(wFnGrp);
    if (wSpec == null) {
      wSpec = new WindowingSpec();
      groups.put(wFnGrp, wSpec);
    }
    wSpec.addWindowFunction(wFn);
  }
}
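The grouping relies on groups being keyed by PartitioningSpec, which presupposes value-based equals/hashCode on that class. A minimal sketch of the assumed surrounding state (the field declaration and Map implementation are assumptions, not shown in this listing):

// Assumed shape of the grouping state.
Map<PartitioningSpec, WindowingSpec> groups = new LinkedHashMap<>();

// After groupFunctions() runs, each entry holds every window function that can be
// evaluated with a single partitioning/ordering of the input.
for (Map.Entry<PartitioningSpec, WindowingSpec> e : groups.entrySet()) {
  WindowingSpec perPartitioning = e.getValue();  // carries the WindowFunctionSpecs added above
}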