Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFrameSpec in the Apache Hive project.
Class SemanticAnalyzer, method processWindowFrame.
/**
 * Converts a window-frame AST node into a {@link WindowFrameSpec}.
 *
 * <p>A frame node always carries a start boundary as its first child; the
 * BETWEEN form additionally supplies an end boundary as a second child.
 *
 * @param node the TOK_WINDOWRANGE / TOK_WINDOWVALUES AST node
 * @return the frame spec (end boundary is {@code null} when absent)
 * @throws SemanticException if a boundary cannot be processed
 */
private WindowFrameSpec processWindowFrame(ASTNode node) throws SemanticException {
  BoundarySpec start = processBoundary((ASTNode) node.getChild(0));
  BoundarySpec end = node.getChildCount() > 1
      ? processBoundary((ASTNode) node.getChild(1))
      : null;
  // Grammar token naming is inverted with respect to SQL: TOK_WINDOWVALUES
  // denotes a RANGE frame, while TOK_WINDOWRANGE denotes a ROWS frame.
  WindowType frameType = node.getType() == HiveParser.TOK_WINDOWVALUES
      ? WindowType.RANGE
      : WindowType.ROWS;
  return new WindowFrameSpec(frameType, start, end);
}
Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFrameSpec in the Apache Hive project.
Class ExprNodeConverter, method visitOver.
/**
 * Translates a Calcite {@link RexOver} (windowed aggregate call) into a Hive
 * {@link WindowFunctionSpec}, registers that spec on this converter, and
 * returns a column reference to the window function's output under a freshly
 * generated alias. Returns {@code null} when {@code deep} is false.
 */
@Override
public ExprNodeDesc visitOver(RexOver over) {
  // Shallow conversion requested: skip window expressions entirely.
  if (!deep) {
    return null;
  }

  final RexWindow window = over.getWindow();

  // Assemble the Hive window spec (partitioning + frame) from the Calcite window.
  final WindowSpec hiveWindowSpec = new WindowSpec();
  hiveWindowSpec.setPartitioning(getPSpec(window));
  hiveWindowSpec.setWindowFrame(getWindowRange(window));

  // Translate the OVER expression back into a Hive AST and wrap it in a
  // WindowFunctionSpec carrying the spec built above.
  final WindowFunctionSpec winFnSpec = new WindowFunctionSpec();
  winFnSpec.setWindowSpec(hiveWindowSpec);
  final Schema schema = new Schema(tabAlias, inputRowType.getFieldList());
  final ASTNode udafAst = new ASTConverter.RexVisitor(schema).visitOver(over);
  winFnSpec.setExpression(udafAst);

  // Child 0 holds the function name; children 1 .. count-2 are its arguments.
  // The last child is deliberately excluded — presumably the window-spec
  // subtree, which is represented by hiveWindowSpec instead; TODO confirm.
  winFnSpec.setName(((ASTNode) udafAst.getChild(0)).getText());
  final int argEnd = udafAst.getChildCount() - 1;
  for (int idx = 1; idx < argEnd; idx++) {
    winFnSpec.addArg((ASTNode) udafAst.getChild(idx));
  }
  if (udafAst.getText().equals("TOK_FUNCTIONSTAR")) {
    // COUNT(*)-style call: mark the star form on the spec.
    winFnSpec.setStar(true);
  }

  // Record the spec under a fresh alias and hand back a column reference to it.
  final String columnAlias = getWindowColumnAlias();
  winFnSpec.setAlias(columnAlias);
  this.windowFunctionSpecs.add(winFnSpec);
  return new ExprNodeColumnDesc(TypeConverter.convert(over.getType()), columnAlias, tabAlias, false);
}
Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFrameSpec in the Apache Hive project.
Class ExprNodeConverter, method getWindowRange.
/**
 * Builds a Hive {@link WindowFrameSpec} from a Calcite {@link RexWindow}'s
 * lower/upper bounds. Either boundary may be {@code null} when absent.
 */
private WindowFrameSpec getWindowRange(RexWindow window) {
  // NOTE: in the Hive AST, Rows -> Range (physical) and Range -> Values (logical).
  final RexWindowBound lower = window.getLowerBound();
  final RexWindowBound upper = window.getUpperBound();
  final BoundarySpec startBound = (lower == null) ? null : getWindowBound(lower);
  final BoundarySpec endBound = (upper == null) ? null : getWindowBound(upper);
  final WindowType frameType = window.isRows() ? WindowType.ROWS : WindowType.RANGE;
  return new WindowFrameSpec(frameType, startBound, endBound);
}
Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFrameSpec in the Apache Hive project.
Class SemanticAnalyzer, method processWindowSpec.
/**
 * Builds a {@link WindowSpec} from a window-spec AST node. The node may carry
 * up to three recognized children, in any order, each optional: an
 * {@code Identifier} naming a source window, a {@code TOK_PARTITIONINGSPEC},
 * and a window frame ({@code TOK_WINDOWRANGE} or {@code TOK_WINDOWVALUES}).
 * If a child kind repeats, the last occurrence wins (matches original behavior).
 *
 * @param node the window-spec AST node
 * @return the populated spec; unrecognized children are ignored
 * @throws SemanticException if the partitioning or frame child cannot be processed
 */
private WindowSpec processWindowSpec(ASTNode node) throws SemanticException {
  // The original code tracked a boolean flag alongside each index; the flags
  // were redundant — "present" is exactly "index != -1" — so only the
  // sentinel-initialized indexes are kept.
  int srcIdIdx = -1;
  int partIdx = -1;
  int wfIdx = -1;
  for (int i = 0; i < node.getChildCount(); i++) {
    switch (node.getChild(i).getType()) {
    case HiveParser.Identifier:
      srcIdIdx = i;
      break;
    case HiveParser.TOK_PARTITIONINGSPEC:
      partIdx = i;
      break;
    case HiveParser.TOK_WINDOWRANGE:
    case HiveParser.TOK_WINDOWVALUES:
      wfIdx = i;
      break;
    }
  }

  WindowSpec ws = new WindowSpec();
  if (srcIdIdx != -1) {
    // Reference to a named window defined elsewhere in the query.
    ASTNode nameNode = (ASTNode) node.getChild(srcIdIdx);
    ws.setSourceId(nameNode.getText());
  }
  if (partIdx != -1) {
    ASTNode partNode = (ASTNode) node.getChild(partIdx);
    ws.setPartitioning(processPTFPartitionSpec(partNode));
  }
  if (wfIdx != -1) {
    ASTNode wfNode = (ASTNode) node.getChild(wfIdx);
    ws.setWindowFrame(processWindowFrame(wfNode));
  }
  return ws;
}
Aggregations