Usage example of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec in the Apache Hive project: class SemanticAnalyzer, method processQueryWindowClause.
/**
 * Registers a named window definition coming from a query-level WINDOW clause.
 *
 * <p>Child 0 of {@code node} is the window name identifier; child 1 is the
 * window specification subtree. Rejects a name that is already registered on
 * {@code spec}, since redefining a window alias is not allowed.
 *
 * @param spec the windowing spec accumulating all named windows for the query
 * @param node the TOK_WINDOWDEF AST node for one WINDOW-clause entry
 * @throws SemanticException if the window name is already defined
 */
private void processQueryWindowClause(WindowingSpec spec, ASTNode node) throws SemanticException {
  ASTNode nameNode = (ASTNode) node.getChild(0);
  ASTNode wsNode = (ASTNode) node.getChild(1);
  String windowName = nameNode.getText();
  // A second definition under the same alias would silently shadow the first.
  boolean alreadyDefined =
      spec.getWindowSpecs() != null && spec.getWindowSpecs().containsKey(windowName);
  if (alreadyDefined) {
    throw new SemanticException(generateErrorMessage(nameNode,
        "Duplicate definition of window " + windowName + " is not allowed"));
  }
  spec.addWindowSpec(windowName, processWindowSpec(wsNode));
}
Usage example of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec in the Apache Hive project: class SemanticAnalyzer, method processWindowFunction.
/**
 * Builds a {@link WindowFunctionSpec} from a window-function invocation AST.
 *
 * <p>Child 0 of {@code node} is the function name; children 1..count-2 are the
 * argument expressions (the last child is deliberately excluded from the
 * argument list). If {@code wsNode} is non-null it is translated into the
 * function's window specification.
 *
 * @param node   the function-call AST (TOK_FUNCTION / TOK_FUNCTIONSTAR / TOK_FUNCTIONDI)
 * @param wsNode the OVER-clause window spec AST, or null if none
 * @return the populated window function spec
 * @throws SemanticException if the window spec cannot be translated
 */
private WindowFunctionSpec processWindowFunction(ASTNode node, ASTNode wsNode) throws SemanticException {
  WindowFunctionSpec wfSpec = new WindowFunctionSpec();

  // The token type distinguishes f(*) and f(DISTINCT ...) from a plain call.
  int tokenType = node.getType();
  if (tokenType == HiveParser.TOK_FUNCTIONSTAR) {
    wfSpec.setStar(true);
  } else if (tokenType == HiveParser.TOK_FUNCTIONDI) {
    wfSpec.setDistinct(true);
  }

  wfSpec.setExpression(node);
  wfSpec.setName(((ASTNode) node.getChild(0)).getText());

  // Arguments occupy children [1, childCount - 1); the final child is skipped.
  int argEnd = node.getChildCount() - 1;
  for (int i = 1; i < argEnd; i++) {
    wfSpec.addArg((ASTNode) node.getChild(i));
  }

  if (wsNode != null) {
    wfSpec.setWindowSpec(processWindowSpec(wsNode));
  }
  return wfSpec;
}
Usage example of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec in the Apache Hive project: class ExprNodeConverter, method visitOver.
/**
 * Converts a Calcite {@link RexOver} (windowed aggregate call) into a Hive
 * {@link WindowFunctionSpec}, records it on this converter, and returns a
 * column reference to the aliased window-function output.
 *
 * <p>Returns null when {@code deep} conversion is disabled. As a side effect
 * the built spec is appended to {@code windowFunctionSpecs}.
 */
@Override
public ExprNodeDesc visitOver(RexOver over) {
  if (!deep) {
    return null;
  }

  // Translate the Calcite window (partitioning + frame) into a Hive WindowSpec.
  final RexWindow window = over.getWindow();
  final WindowSpec windowSpec = new WindowSpec();
  windowSpec.setPartitioning(getPSpec(window));
  windowSpec.setWindowFrame(getWindowRange(window));

  final WindowFunctionSpec wfs = new WindowFunctionSpec();
  wfs.setWindowSpec(windowSpec);

  // Re-render the UDAF invocation as a Hive AST and pull name/args from it.
  final Schema schema = new Schema(tabAlias, inputRowType.getFieldList());
  final ASTNode wUDAFAst = new ASTConverter.RexVisitor(schema).visitOver(over);
  wfs.setExpression(wUDAFAst);
  wfs.setName(((ASTNode) wUDAFAst.getChild(0)).getText());

  // Arguments are children [1, childCount - 1); the last child is excluded.
  final int argEnd = wUDAFAst.getChildCount() - 1;
  for (int i = 1; i < argEnd; i++) {
    wfs.addArg((ASTNode) wUDAFAst.getChild(i));
  }
  if (wUDAFAst.getText().equals("TOK_FUNCTIONSTAR")) {
    wfs.setStar(true);
  }

  // The window function result surfaces as a fresh column alias.
  final String columnAlias = getWindowColumnAlias();
  wfs.setAlias(columnAlias);
  this.windowFunctionSpecs.add(wfs);

  return new ExprNodeColumnDesc(TypeConverter.convert(over.getType()), columnAlias, tabAlias, false);
}
Usage example of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec in the Apache Hive project: class PTFTranslator, method translate.
/**
 * Translates a resolved {@link WindowFunctionSpec} into an executable
 * {@link WindowFunctionDef} for the given windowing table function.
 *
 * <p>Resolves the function against the registry, translates its arguments
 * against the raw input shape, wires ranking-function arguments, translates
 * the window frame, and sets up the UDAF evaluator. For functions that do not
 * support a window (e.g. ranking functions), both frame boundaries must be
 * UNBOUNDED.
 *
 * @param wdwTFnDef the enclosing window table function definition (supplies input shape)
 * @param spec      the parsed window function spec
 * @return the fully-populated window function definition
 * @throws SemanticException if the function is unknown, an argument fails to
 *         translate, a bounded frame is used with a non-windowing function, or
 *         evaluator setup fails
 */
private WindowFunctionDef translate(WindowTableFunctionDef wdwTFnDef, WindowFunctionSpec spec) throws SemanticException {
  WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(spec.getName());
  if (wFnInfo == null) {
    throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(spec.getName()));
  }
  WindowFunctionDef def = new WindowFunctionDef();
  def.setName(spec.getName());
  def.setAlias(spec.getAlias());
  def.setDistinct(spec.isDistinct());
  def.setExpressionTreeString(spec.getExpression().toStringTree());
  def.setStar(spec.isStar());
  def.setPivotResult(wFnInfo.isPivotResult());
  ShapeDetails inpShape = wdwTFnDef.getRawInputShape();
  /*
   * translate args
   */
  ArrayList<ASTNode> args = spec.getArgs();
  if (args != null) {
    for (ASTNode expr : args) {
      PTFExpressionDef argDef = null;
      try {
        argDef = buildExpressionDef(inpShape, expr);
      } catch (HiveException he) {
        // Preserve the cause so argument-translation failures stay diagnosable.
        throw new SemanticException(he);
      }
      def.addArg(argDef);
    }
  }
  if (FunctionRegistry.isRankingFunction(spec.getName())) {
    setupRankingArgs(wdwTFnDef, def, spec);
  }
  WindowSpec wdwSpec = spec.getWindowSpec();
  if (wdwSpec != null) {
    String desc = spec.toString();
    WindowFrameDef wdwFrame = translate(spec.getName(), inpShape, wdwSpec);
    if (!wFnInfo.isSupportsWindow()) {
      BoundarySpec start = wdwSpec.getWindowFrame().getStart();
      if (start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
        throw new SemanticException(String.format("Expecting left window frame boundary for " + "function %s to be unbounded. Found : %d", desc, start.getAmt()));
      }
      BoundarySpec end = wdwSpec.getWindowFrame().getEnd();
      if (end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
        // Fix: the message previously reported start.getAmt() for the END
        // boundary, which produced a misleading (often wrong) diagnostic.
        throw new SemanticException(String.format("Expecting right window frame boundary for " + "function %s to be unbounded. Found : %d", desc, end.getAmt()));
      }
    }
    def.setWindowFrame(wdwFrame);
  }
  try {
    setupWdwFnEvaluator(def);
  } catch (HiveException he) {
    throw new SemanticException(he);
  }
  return def;
}
Usage example of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec in the Apache Hive project: class SemanticAnalyzer, method processWindowSpec.
/**
 * Translates a window specification AST into a {@link WindowSpec}.
 *
 * <p>Scans the node's children for an optional source-window identifier (for
 * {@code WINDOW w AS (w0 ...)} chaining), an optional partitioning spec, and
 * an optional window frame (ROWS/RANGE). When a child kind appears more than
 * once, the last occurrence wins, matching the original scan order.
 *
 * @param node the window spec AST node
 * @return the populated window spec
 * @throws SemanticException if a partitioning or frame subtree fails to translate
 */
private WindowSpec processWindowSpec(ASTNode node) throws SemanticException {
  // -1 marks "not present"; indices record where each optional part was found.
  int srcIdIdx = -1;
  int partIdx = -1;
  int frameIdx = -1;
  for (int i = 0; i < node.getChildCount(); i++) {
    switch (node.getChild(i).getType()) {
      case HiveParser.Identifier:
        srcIdIdx = i;
        break;
      case HiveParser.TOK_PARTITIONINGSPEC:
        partIdx = i;
        break;
      case HiveParser.TOK_WINDOWRANGE:
      case HiveParser.TOK_WINDOWVALUES:
        frameIdx = i;
        break;
    }
  }

  WindowSpec ws = new WindowSpec();
  if (srcIdIdx >= 0) {
    ws.setSourceId(((ASTNode) node.getChild(srcIdIdx)).getText());
  }
  if (partIdx >= 0) {
    ws.setPartitioning(processPTFPartitionSpec((ASTNode) node.getChild(partIdx)));
  }
  if (frameIdx >= 0) {
    ws.setWindowFrame(processWindowFrame((ASTNode) node.getChild(frameIdx)));
  }
  return ws;
}
Aggregations