Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitioningSpec in the Apache Flink project.
From the class HiveParserSemanticAnalyzer, method processPTFChain.
/*
 * Tree form is
 *   ^(TOK_PTBLFUNCTION name alias? partitionTableFunctionSource partitioningSpec? arguments*)
 * where a partitionTableFunctionSource can be a tableReference, a SubQuery or another
 * PTF invocation.
 */
private PartitionedTableFunctionSpec processPTFChain(HiveParserQB qb, HiveParserASTNode ptf)
        throws SemanticException {
    int childCount = ptf.getChildCount();
    // A PTF invocation needs at least a function name and an input source.
    if (childCount < 2) {
        throw new SemanticException(
                HiveParserUtils.generateErrorMessage(ptf, "Not enough Children " + childCount));
    }
    PartitionedTableFunctionSpec ptfSpec = new PartitionedTableFunctionSpec();
    ptfSpec.setAstNode(ptf);
    // Child 0: the PTF name.
    HiveParserASTNode nameNode = (HiveParserASTNode) ptf.getChild(0);
    ptfSpec.setName(nameNode.getText());
    int inputIdx = 1;
    // Optional alias: an Identifier directly after the name shifts the input index by one.
    HiveParserASTNode secondChild = (HiveParserASTNode) ptf.getChild(1);
    if (secondChild.getType() == HiveASTParser.Identifier) {
        ptfSpec.setAlias(secondChild.getText());
        inputIdx++;
    }
    // Input source: a table reference, a subquery, or a nested PTF invocation.
    HiveParserASTNode inputNode = (HiveParserASTNode) ptf.getChild(inputIdx);
    ptfSpec.setInput(processPTFSource(qb, inputNode));
    // Optional partitioning spec sits directly after the input; any children after it
    // are the function arguments.
    int argStartIdx = inputIdx + 1;
    // Bound-check against the index actually read. The original code checked
    // childCount > inputIdx while reading child inputIdx + 1, which only worked
    // because ANTLR's getChild returns null for out-of-range indices.
    HiveParserASTNode pSpecNode =
            childCount > argStartIdx ? (HiveParserASTNode) ptf.getChild(argStartIdx) : null;
    if (pSpecNode != null && pSpecNode.getType() == HiveASTParser.TOK_PARTITIONINGSPEC) {
        ptfSpec.setPartitioning(processPTFPartitionSpec(pSpecNode));
        argStartIdx++;
    }
    // Remaining children are the PTF arguments.
    for (int i = argStartIdx; i < childCount; i++) {
        ptfSpec.addArg((HiveParserASTNode) ptf.getChild(i));
    }
    return ptfSpec;
}
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitioningSpec in the Apache Flink project.
From the class HiveParserBaseSemanticAnalyzer, method processPTFPartitionSpec.
/**
 * Converts a TOK_PARTITIONINGSPEC AST node into a {@link PartitioningSpec}.
 *
 * <p>The first child determines the shape: DISTRIBUTE BY / CLUSTER BY carries a partition
 * spec (with an optional trailing sort spec as the second child), while SORT BY / ORDER BY
 * carries only an order spec.
 */
static PartitioningSpec processPTFPartitionSpec(HiveParserASTNode pSpecNode) {
    PartitioningSpec partitioning = new PartitioningSpec();
    HiveParserASTNode head = (HiveParserASTNode) pSpecNode.getChild(0);
    switch (head.getType()) {
        case HiveASTParser.TOK_DISTRIBUTEBY:
        case HiveASTParser.TOK_CLUSTERBY:
            partitioning.setPartSpec(processPartitionSpec(head));
            // An optional sort spec may follow the partition columns.
            if (pSpecNode.getChildCount() > 1) {
                partitioning.setOrderSpec(
                        processOrderSpec((HiveParserASTNode) pSpecNode.getChild(1)));
            }
            break;
        case HiveASTParser.TOK_SORTBY:
        case HiveASTParser.TOK_ORDERBY:
            partitioning.setOrderSpec(processOrderSpec(head));
            break;
        default:
            // Any other token type yields an empty PartitioningSpec, as before.
            break;
    }
    return partitioning;
}
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitioningSpec in the Apache Flink project.
From the class HiveParserBaseSemanticAnalyzer, method processWindowSpec.
/**
 * Builds a {@link HiveParserWindowingSpec.WindowSpec} from a window-spec AST node.
 *
 * <p>The node may contain, in any position: an Identifier naming a source window spec,
 * a TOK_PARTITIONINGSPEC, and a window frame (TOK_WINDOWRANGE or TOK_WINDOWVALUES).
 * Each part is optional; when a type occurs more than once, the last occurrence wins,
 * matching the original scan.
 */
static HiveParserWindowingSpec.WindowSpec processWindowSpec(HiveParserASTNode node)
        throws SemanticException {
    // Sentinel -1 means "not present"; a scan over all children records each part's index.
    int srcIdIdx = -1;
    int partIdx = -1;
    int wfIdx = -1;
    for (int i = 0; i < node.getChildCount(); i++) {
        int type = node.getChild(i).getType();
        if (type == HiveASTParser.Identifier) {
            srcIdIdx = i;
        } else if (type == HiveASTParser.TOK_PARTITIONINGSPEC) {
            partIdx = i;
        } else if (type == HiveASTParser.TOK_WINDOWRANGE
                || type == HiveASTParser.TOK_WINDOWVALUES) {
            wfIdx = i;
        }
    }
    HiveParserWindowingSpec.WindowSpec ws = new HiveParserWindowingSpec.WindowSpec();
    if (srcIdIdx >= 0) {
        ws.setSourceId(((HiveParserASTNode) node.getChild(srcIdIdx)).getText());
    }
    if (partIdx >= 0) {
        ws.setPartitioning(
                processPTFPartitionSpec((HiveParserASTNode) node.getChild(partIdx)));
    }
    if (wfIdx >= 0) {
        ws.setWindowFrame(processWindowFrame((HiveParserASTNode) node.getChild(wfIdx)));
    }
    return ws;
}
Aggregations