Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project flink by apache.
In the class HiveParserUtils, the method getSqlOperator:
public static SqlOperator getSqlOperator(
        String funcName, SqlOperatorTable opTable, SqlFunctionCategory category) {
    funcName = funcName.toLowerCase();
    String[] names = funcName.split("\\.");
    SqlIdentifier identifier = new SqlIdentifier(Arrays.asList(names), SqlParserPos.ZERO);
    List<SqlOperator> operators = new ArrayList<>();
    try {
        opTable.lookupOperatorOverloads(
                identifier,
                category,
                SqlSyntax.FUNCTION,
                operators,
                SqlNameMatchers.withCaseSensitive(false));
    } catch (Exception e) {
        LOG.warn("Error trying to resolve function " + funcName, e);
    }
    if (operators.isEmpty()) {
        return null;
    } else {
        return operators.get(0);
    }
}
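A minimal usage sketch, assuming an operator table obtained from the planner's FrameworkConfig; the function name and the exception message are illustrative:

// Hedged sketch: resolve a user-defined scalar function by name.
// "frameworkConfig" is assumed to be in scope; "my_udf" is a placeholder name.
SqlOperator op =
        HiveParserUtils.getSqlOperator(
                "my_udf",
                frameworkConfig.getOperatorTable(),
                SqlFunctionCategory.USER_DEFINED_FUNCTION);
if (op == null) {
    // Lookup failures are only logged inside getSqlOperator, so the caller
    // must handle the null result itself.
    throw new SemanticException("Invalid function my_udf");
}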
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project flink by apache.
In the class HiveParserUtils, the method getGenericUDAFEvaluator:
// Returns the GenericUDAFEvaluator for the aggregation. This is called once for each GroupBy
// aggregation.
// TODO: Requiring a GenericUDAFEvaluator means we only support Hive UDAFs. Need to avoid this
// to support Flink UDAFs.
public static GenericUDAFEvaluator getGenericUDAFEvaluator(
        String aggName,
        ArrayList<ExprNodeDesc> aggParameters,
        HiveParserASTNode aggTree,
        boolean isDistinct,
        boolean isAllColumns,
        SqlOperatorTable opTable)
        throws SemanticException {
    ArrayList<ObjectInspector> originalParameterTypeInfos =
            getWritableObjectInspector(aggParameters);
    GenericUDAFEvaluator result =
            FunctionRegistry.getGenericUDAFEvaluator(
                    aggName, originalParameterTypeInfos, isDistinct, isAllColumns);
    if (result == null) {
        // this happens for temp functions
        SqlOperator sqlOperator =
                getSqlOperator(aggName, opTable, SqlFunctionCategory.USER_DEFINED_FUNCTION);
        if (sqlOperator instanceof HiveAggSqlFunction) {
            HiveGenericUDAF hiveGenericUDAF =
                    (HiveGenericUDAF)
                            ((HiveAggSqlFunction) sqlOperator)
                                    .makeFunction(new Object[0], new LogicalType[0]);
            result =
                    hiveGenericUDAF.createEvaluator(
                            originalParameterTypeInfos.toArray(new ObjectInspector[0]));
        }
    }
    if (null == result) {
        String reason =
                "Looking for UDAF Evaluator \""
                        + aggName
                        + "\" with parameters "
                        + originalParameterTypeInfos;
        throw new SemanticException(
                HiveParserErrorMsg.getMsg(
                        ErrorMsg.INVALID_FUNCTION_SIGNATURE, aggTree.getChild(0), reason));
    }
    return result;
}
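A hedged call-site sketch; the aggregation tree, parameter list, and operator table are assumed to come from the surrounding GroupBy analysis:

// Illustrative call site for a COUNT(DISTINCT ...) aggregation; "aggParameters",
// "aggTree", and "opTable" are assumed to be provided by the enclosing analyzer.
GenericUDAFEvaluator evaluator =
        HiveParserUtils.getGenericUDAFEvaluator(
                "count", aggParameters, aggTree,
                /* isDistinct */ true, /* isAllColumns */ false, opTable);
// The evaluator then drives Hive's aggregation lifecycle
// (init / iterate / merge / terminate).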
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project flink by apache.
In the class HiveParserUtils, the method extractLateralViewInfo:
// extracts useful information for a given lateral view node
public static LateralViewInfo extractLateralViewInfo(
        HiveParserASTNode lateralView,
        HiveParserRowResolver inputRR,
        HiveParserSemanticAnalyzer hiveAnalyzer,
        FrameworkConfig frameworkConfig,
        RelOptCluster cluster)
        throws SemanticException {
    // checks the left sub-tree
    HiveParserASTNode sel = (HiveParserASTNode) lateralView.getChild(0);
    Preconditions.checkArgument(sel.getToken().getType() == HiveASTParser.TOK_SELECT);
    Preconditions.checkArgument(sel.getChildCount() == 1);
    HiveParserASTNode selExpr = (HiveParserASTNode) sel.getChild(0);
    Preconditions.checkArgument(selExpr.getToken().getType() == HiveASTParser.TOK_SELEXPR);
    // decide function name and function
    HiveParserASTNode func = (HiveParserASTNode) selExpr.getChild(0);
    Preconditions.checkArgument(func.getToken().getType() == HiveASTParser.TOK_FUNCTION);
    String funcName = getFunctionText(func, true);
    SqlOperator sqlOperator =
            getSqlOperator(
                    funcName,
                    frameworkConfig.getOperatorTable(),
                    SqlFunctionCategory.USER_DEFINED_TABLE_FUNCTION);
    Preconditions.checkArgument(isUDTF(sqlOperator), funcName + " is not a valid UDTF");
    // decide operands
    List<ExprNodeDesc> operands = new ArrayList<>(func.getChildCount() - 1);
    List<ColumnInfo> operandColInfos = new ArrayList<>(func.getChildCount() - 1);
    HiveParserTypeCheckCtx typeCheckCtx =
            new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
    for (int i = 1; i < func.getChildCount(); i++) {
        ExprNodeDesc exprDesc =
                hiveAnalyzer.genExprNodeDesc(
                        (HiveParserASTNode) func.getChild(i), inputRR, typeCheckCtx);
        operands.add(exprDesc);
        operandColInfos.add(
                new ColumnInfo(
                        getColumnInternalName(i - 1),
                        exprDesc.getWritableObjectInspector(),
                        null,
                        false));
    }
    // decide table alias -- there must be a table alias
    HiveParserASTNode tabAliasNode =
            (HiveParserASTNode) selExpr.getChild(selExpr.getChildCount() - 1);
    Preconditions.checkArgument(
            tabAliasNode.getToken().getType() == HiveASTParser.TOK_TABALIAS);
    String tabAlias = unescapeIdentifier(tabAliasNode.getChild(0).getText().toLowerCase());
    // decide column aliases -- column aliases are optional
    List<String> colAliases = new ArrayList<>();
    for (int i = 1; i < selExpr.getChildCount() - 1; i++) {
        HiveParserASTNode child = (HiveParserASTNode) selExpr.getChild(i);
        Preconditions.checkArgument(child.getToken().getType() == HiveASTParser.Identifier);
        colAliases.add(unescapeIdentifier(child.getText().toLowerCase()));
    }
    return new LateralViewInfo(
            funcName, sqlOperator, operands, operandColInfos, colAliases, tabAlias);
}
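For orientation, a clause such as LATERAL VIEW explode(arr) t AS col produces the AST shape checked above. A hedged sketch of invoking the extraction, with all arguments assumed to be available from the enclosing semantic analysis:

// "lateralView" is assumed to be the TOK_LATERAL_VIEW node for:
//   LATERAL VIEW explode(arr) t AS col
LateralViewInfo info =
        HiveParserUtils.extractLateralViewInfo(
                lateralView, inputRR, hiveAnalyzer, frameworkConfig, cluster);
// info carries the UDTF operator (explode), its operands and their column
// infos, the mandatory table alias ("t"), and the optional column aliases.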
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project flink by apache.
In the class TableFunctionConverter, the method visitCall:
@Override
public RexNode visitCall(RexCall call) {
    SqlOperator operator = call.getOperator();
    if (isHiveCalciteSqlFn(operator)) {
        // explicitly use USER_DEFINED_TABLE_FUNCTION since Hive can set
        // USER_DEFINED_FUNCTION for UDTF
        SqlOperator convertedOperator =
                convertOperator(operator, SqlFunctionCategory.USER_DEFINED_TABLE_FUNCTION);
        List<RexNode> convertedOperands = new ArrayList<>();
        RelDataTypeFactory.Builder dataTypeBuilder = cluster.getTypeFactory().builder();
        dataTypeBuilder.addAll(leftRel.getRowType().getFieldList());
        dataTypeBuilder.addAll(call.getType().getFieldList());
        RelDataType correlType = dataTypeBuilder.uniquify().build();
        InputRefConverter inputRefConverter = new InputRefConverter(correlType, cluster);
        for (RexNode operand : call.getOperands()) {
            convertedOperands.add(operand.accept(inputRefConverter));
        }
        // create RexCall
        return builder.makeCall(call.getType(), convertedOperator, convertedOperands);
    }
    return super.visitCall(call);
}
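A hedged sketch of where such a shuttle is typically applied: rewriting the UDTF call on the right side of a correlate. The scan and converter variables here are hypothetical:

// "tableFunctionScan" and "converter" are illustrative locals; the converter
// is assumed to be constructed from the cluster, the left input relation,
// and a RexBuilder.
RexNode rewritten = tableFunctionScan.getCall().accept(converter);
// Operands that referenced the left input are now input refs into the
// combined (left input + UDTF output) row type built above.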
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project flink by apache.
In the class HiveParserSemanticAnalyzer, the method doPhase1GetAllAggregations:
// DFS-scan the expressionTree to find all aggregation subtrees and put them in aggregations.
private void doPhase1GetAllAggregations(
        HiveParserASTNode expressionTree,
        HashMap<String, HiveParserASTNode> aggregations,
        List<HiveParserASTNode> wdwFns)
        throws SemanticException {
    int exprTokenType = expressionTree.getToken().getType();
    if (exprTokenType == HiveASTParser.TOK_SUBQUERY_EXPR) {
        // we don't want to include aggregates from within a subquery
        return;
    }
    if (exprTokenType == HiveASTParser.TOK_FUNCTION
            || exprTokenType == HiveASTParser.TOK_FUNCTIONDI
            || exprTokenType == HiveASTParser.TOK_FUNCTIONSTAR) {
        assert (expressionTree.getChildCount() != 0);
        if (expressionTree.getChild(expressionTree.getChildCount() - 1).getType()
                == HiveASTParser.TOK_WINDOWSPEC) {
            // If it is a windowing spec, we include it in the list.
            // Further, we will examine its children AST nodes to check whether
            // there are aggregation functions within.
            wdwFns.add(expressionTree);
            doPhase1GetAllAggregations(
                    (HiveParserASTNode)
                            expressionTree.getChild(expressionTree.getChildCount() - 1),
                    aggregations,
                    wdwFns);
            return;
        }
        if (expressionTree.getChild(0).getType() == HiveASTParser.Identifier) {
            String functionName = unescapeIdentifier(expressionTree.getChild(0).getText());
            SqlOperator sqlOperator =
                    HiveParserUtils.getAnySqlOperator(
                            functionName, frameworkConfig.getOperatorTable());
            if (sqlOperator == null) {
                throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
            }
            if (FunctionRegistry.impliesOrder(functionName)) {
                throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
            }
            if (HiveParserUtils.isUDAF(sqlOperator)) {
                if (containsLeadLagUDF(expressionTree)) {
                    throw new SemanticException(
                            ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
                }
                aggregations.put(expressionTree.toStringTree(), expressionTree);
                if (!HiveParserUtils.isNative(sqlOperator)) {
                    unparseTranslator.addIdentifierTranslation(
                            (HiveParserASTNode) expressionTree.getChild(0));
                }
                return;
            }
        }
    }
    for (int i = 0; i < expressionTree.getChildCount(); i++) {
        doPhase1GetAllAggregations(
                (HiveParserASTNode) expressionTree.getChild(i), aggregations, wdwFns);
    }
}
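A hedged driver sketch; selectExpr stands in for one select-item AST node from the enclosing query block:

// "selectExpr" is a hypothetical HiveParserASTNode for a single select item.
HashMap<String, HiveParserASTNode> aggregations = new LinkedHashMap<>();
List<HiveParserASTNode> windowingFns = new ArrayList<>();
doPhase1GetAllAggregations(selectExpr, aggregations, windowingFns);
// Keying the map on toStringTree() deduplicates textually identical
// aggregation calls within the same query block.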