Example 46 with SqlOperator

Use of org.apache.calcite.sql.SqlOperator in project drill by apache.

Class PreProcessLogicalRel, method getConvertFunctionException:

private UserException getConvertFunctionException(final String functionName, final String typeName) {
    final String newFunctionName = functionName + typeName;
    final String typeNameToPrint = typeName.length() == 0 ? "<empty_string>" : typeName;
    final UserException.Builder exceptionBuilder = UserException.unsupportedError().message("%s does not support conversion %s type '%s'.", functionName, functionName.substring(8).toLowerCase(), typeNameToPrint);
    // Build a nice error message
    if (typeName.length() > 0) {
        List<String> ops = new ArrayList<>();
        for (SqlOperator op : table.getOperatorList()) {
            ops.add(op.getName());
        }
        final String bestMatch = ApproximateStringMatcher.getBestMatch(ops, newFunctionName);
        if (bestMatch != null && bestMatch.length() > functionName.length() && bestMatch.toLowerCase().startsWith("convert")) {
            final StringBuilder s = new StringBuilder("Did you mean ").append(bestMatch.substring(functionName.length())).append("?");
            exceptionBuilder.addContext(s.toString());
        }
    }
    return exceptionBuilder.build(logger);
}
Also used : SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) NlsString(org.apache.calcite.util.NlsString) UserException(org.apache.drill.common.exceptions.UserException)
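
The "Did you mean" hint above rests on approximate string matching against the operator table. Drill's ApproximateStringMatcher is an internal helper; the following is a minimal, self-contained sketch of the idea, assuming a plain Levenshtein edit distance (the class and method names here are illustrative, not Drill's API).

import java.util.List;

public class BestMatchSketch {

    // Classic two-row dynamic-programming edit distance between two strings.
    static int editDistance(String a, String b) {
        int[] prev = new int[b.length() + 1];
        int[] curr = new int[b.length() + 1];
        for (int j = 0; j <= b.length(); j++) {
            prev[j] = j;
        }
        for (int i = 1; i <= a.length(); i++) {
            curr[0] = i;
            for (int j = 1; j <= b.length(); j++) {
                int cost = a.charAt(i - 1) == b.charAt(j - 1) ? 0 : 1;
                curr[j] = Math.min(Math.min(curr[j - 1] + 1, prev[j] + 1), prev[j - 1] + cost);
            }
            int[] tmp = prev;
            prev = curr;
            curr = tmp;
        }
        return prev[b.length()];
    }

    // Returns the candidate closest to the target, or null if the list is empty.
    static String getBestMatch(List<String> candidates, String target) {
        String best = null;
        int bestDist = Integer.MAX_VALUE;
        for (String c : candidates) {
            int d = editDistance(c, target);
            if (d < bestDist) {
                bestDist = d;
                best = c;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        List<String> ops = List.of("CONVERT_FROMJSON", "CONVERT_TOJSON", "FLATTEN");
        // Suggests CONVERT_FROMJSON for the misspelled function+type name.
        System.out.println(getBestMatch(ops, "CONVERT_FROMJSN"));
    }
}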

Example 47 with SqlOperator

Use of org.apache.calcite.sql.SqlOperator in project drill by apache.

Class LocalFunctionRegistry, method registerOperatorsWithoutInference:

private void registerOperatorsWithoutInference(DrillOperatorTable operatorTable, Map<String, Collection<DrillFuncHolder>> registeredFunctions) {
    SqlOperator op;
    for (Entry<String, Collection<DrillFuncHolder>> function : registeredFunctions.entrySet()) {
        Set<Integer> argCounts = new HashSet<>();
        String name = function.getKey().toUpperCase();
        for (DrillFuncHolder func : function.getValue()) {
            if (argCounts.add(func.getParamCount())) {
                if (func.isAggregating()) {
                    op = new DrillSqlAggOperatorWithoutInference(name, func.getParamCount(), func.isVarArg());
                } else {
                    boolean isDeterministic;
                    // mark functions whose return types cannot be materialized as
                    // non-deterministic so the constant executor does not fold them
                    // into literals
                    if (DrillConstExecutor.NON_REDUCIBLE_TYPES.contains(func.getReturnType().getMinorType()) || func.isComplexWriterFuncHolder()) {
                        isDeterministic = false;
                    } else {
                        isDeterministic = func.isDeterministic();
                    }
                    op = new DrillSqlOperatorWithoutInference(name, func.getParamCount(), func.getReturnType(), isDeterministic, func.isNiladic(), func.isVarArg());
                }
                operatorTable.addOperatorWithoutInference(function.getKey(), op);
            }
        }
    }
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) DrillFuncHolder(org.apache.drill.exec.expr.fn.DrillFuncHolder) DrillSqlAggOperatorWithoutInference(org.apache.drill.exec.planner.sql.DrillSqlAggOperatorWithoutInference) DrillSqlOperator(org.apache.drill.exec.planner.sql.DrillSqlOperator) SqlOperator(org.apache.calcite.sql.SqlOperator) DrillSqlOperatorWithoutInference(org.apache.drill.exec.planner.sql.DrillSqlOperatorWithoutInference) Collection(java.util.Collection) HashSet(java.util.HashSet)
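
The Set.add guard above is what deduplicates overloads: for each function name, only the first holder seen per parameter count yields an operator. A minimal sketch of the pattern, with FuncHolder and Operator as hypothetical stand-ins for Drill's DrillFuncHolder and SqlOperator:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class RegisterSketch {
    record FuncHolder(String name, int paramCount) {}
    record Operator(String name, int argCount) {}

    static List<Operator> register(Map<String, List<FuncHolder>> functions) {
        List<Operator> ops = new ArrayList<>();
        for (Map.Entry<String, List<FuncHolder>> e : functions.entrySet()) {
            Set<Integer> argCounts = new HashSet<>();
            for (FuncHolder f : e.getValue()) {
                // Set.add returns false for a duplicate arity, so each
                // (name, arity) pair is registered exactly once.
                if (argCounts.add(f.paramCount())) {
                    ops.add(new Operator(e.getKey().toUpperCase(), f.paramCount()));
                }
            }
        }
        return ops;
    }

    public static void main(String[] args) {
        Map<String, List<FuncHolder>> fns = Map.of(
                "substr", List.of(new FuncHolder("substr", 2),
                                  new FuncHolder("substr", 3),
                                  new FuncHolder("substr", 2))); // duplicate arity
        System.out.println(register(fns)); // two operators, not three
    }
}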

Example 48 with SqlOperator

Use of org.apache.calcite.sql.SqlOperator in project drill by apache.

Class RewriteAsBinaryOperators, method visitCall:

@Override
public RexNode visitCall(RexCall call) {
    SqlOperator op = call.getOperator();
    SqlKind kind = op.getKind();
    RelDataType type = call.getType();
    if (kind == SqlKind.OR || kind == SqlKind.AND) {
        if (call.getOperands().size() > 2) {
            List<RexNode> children = new ArrayList<>(call.getOperands());
            RexNode left = children.remove(0).accept(this);
            RexNode right = builder.makeCall(type, op, children).accept(this);
            return builder.makeCall(type, op, ImmutableList.of(left, right));
        }
    }
    return builder.makeCall(type, op, visitChildren(call));
}
Also used : SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlKind(org.apache.calcite.sql.SqlKind) RexNode(org.apache.calcite.rex.RexNode)
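
The rewrite peels off the first operand and recurses on the remainder, so an n-ary AND/OR becomes right-nested binary calls, e.g. OR(a, b, c) -> OR(a, OR(b, c)). A minimal sketch of the same fold on a toy expression tree (Node/Call are hypothetical stand-ins for Calcite's RexNode/RexCall):

import java.util.ArrayList;
import java.util.List;

public class BinaryRewriteSketch {
    interface Node {}
    record Leaf(String name) implements Node {}
    record Call(String op, List<Node> operands) implements Node {}

    static Node rewrite(Node node) {
        if (!(node instanceof Call call)) {
            return node;
        }
        List<Node> children = new ArrayList<>(call.operands());
        if (("AND".equals(call.op()) || "OR".equals(call.op())) && children.size() > 2) {
            // Split off the first operand; fold the rest into one nested call.
            Node left = rewrite(children.remove(0));
            Node right = rewrite(new Call(call.op(), children));
            return new Call(call.op(), List.of(left, right));
        }
        // Binary or non-AND/OR calls: just rewrite the children in place.
        return new Call(call.op(), children.stream().map(BinaryRewriteSketch::rewrite).toList());
    }

    public static void main(String[] args) {
        Node n = new Call("OR", List.of(new Leaf("a"), new Leaf("b"), new Leaf("c")));
        // Prints OR(a, OR(b, c)) as nested Call records.
        System.out.println(rewrite(n));
    }
}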

Example 49 with SqlOperator

Use of org.apache.calcite.sql.SqlOperator in project flink by apache.

Class HiveParserCalcitePlanner, method genUDTFPlan:

private RelNode genUDTFPlan(SqlOperator sqlOperator, String genericUDTFName, String outputTableAlias, List<String> colAliases, HiveParserQB qb, List<RexNode> operands, List<ColumnInfo> opColInfos, RelNode input, boolean inSelect, boolean isOuter) throws SemanticException {
    Preconditions.checkState(!isOuter || !inSelect, "OUTER is not supported for SELECT UDTF");
    // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    if (inSelect && !qbp.getDestToGroupBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_GROUP_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToDistributeBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_DISTRIBUTE_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToSortBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_SORT_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToClusterBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg());
    }
    if (inSelect && !qbp.getAliasToLateralViews().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_LATERAL_VIEW.getMsg());
    }
    LOG.debug("Table alias: " + outputTableAlias + " Col aliases: " + colAliases);
    // Create the object inspector for the input columns and initialize the UDTF
    RelDataType relDataType = HiveParserUtils.inferReturnTypeForOperands(sqlOperator, operands, cluster.getTypeFactory());
    DataType dataType = HiveParserUtils.toDataType(relDataType);
    StructObjectInspector outputOI = (StructObjectInspector) HiveInspectors.getObjectInspector(HiveTypeUtil.toHiveTypeInfo(dataType, false));
    // this should only happen for select udtf
    if (outputTableAlias == null) {
        Preconditions.checkState(inSelect, "Table alias not specified for lateral view");
        String prefix = "select_" + genericUDTFName + "_alias_";
        int i = 0;
        while (qb.getAliases().contains(prefix + i)) {
            i++;
        }
        outputTableAlias = prefix + i;
    }
    if (colAliases.isEmpty()) {
        // user did not specify alias names, infer names from outputOI
        for (StructField field : outputOI.getAllStructFieldRefs()) {
            colAliases.add(field.getFieldName());
        }
    }
    // Make sure that the number of column aliases in the AS clause matches the number of
    // columns output by the UDTF
    int numOutputCols = outputOI.getAllStructFieldRefs().size();
    int numSuppliedAliases = colAliases.size();
    if (numOutputCols != numSuppliedAliases) {
        throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH.getMsg("expected " + numOutputCols + " aliases " + "but got " + numSuppliedAliases));
    }
    // Generate the output column infos / row resolver using internal names.
    ArrayList<ColumnInfo> udtfOutputCols = new ArrayList<>();
    Iterator<String> colAliasesIter = colAliases.iterator();
    for (StructField sf : outputOI.getAllStructFieldRefs()) {
        String colAlias = colAliasesIter.next();
        assert (colAlias != null);
        // Since the UDTF operator feeds into an LVJ operator that will rename all the
        // internal names, we can just use the field name from the UDTF's OI as the internal name
        ColumnInfo col = new ColumnInfo(sf.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()), outputTableAlias, false);
        udtfOutputCols.add(col);
    }
    // Create the row resolver for the table function scan
    HiveParserRowResolver udtfOutRR = new HiveParserRowResolver();
    for (int i = 0; i < udtfOutputCols.size(); i++) {
        udtfOutRR.put(outputTableAlias, colAliases.get(i), udtfOutputCols.get(i));
    }
    // Build row type from field <type, name>
    RelDataType retType = HiveParserTypeConverter.getType(cluster, udtfOutRR, null);
    List<RelDataType> argTypes = new ArrayList<>();
    RelDataTypeFactory dtFactory = cluster.getRexBuilder().getTypeFactory();
    for (ColumnInfo ci : opColInfos) {
        argTypes.add(HiveParserUtils.toRelDataType(ci.getType(), dtFactory));
    }
    SqlOperator calciteOp = HiveParserSqlFunctionConverter.getCalciteFn(genericUDTFName, argTypes, retType, false);
    RexNode rexNode = cluster.getRexBuilder().makeCall(calciteOp, operands);
    // convert the rex call
    TableFunctionConverter udtfConverter = new TableFunctionConverter(cluster, input, frameworkConfig.getOperatorTable(), catalogReader.nameMatcher());
    RexCall convertedCall = (RexCall) rexNode.accept(udtfConverter);
    SqlOperator convertedOperator = convertedCall.getOperator();
    Preconditions.checkState(convertedOperator instanceof SqlUserDefinedTableFunction, "Expect operator to be " + SqlUserDefinedTableFunction.class.getSimpleName() + ", actually got " + convertedOperator.getClass().getSimpleName());
    // TODO: how to decide this?
    Type elementType = Object[].class;
    // create LogicalTableFunctionScan
    RelNode tableFunctionScan = LogicalTableFunctionScan.create(input.getCluster(), Collections.emptyList(), convertedCall, elementType, retType, null);
    // remember the table alias for the UDTF so that we can reference the cols later
    qb.addAlias(outputTableAlias);
    RelNode correlRel;
    RexBuilder rexBuilder = cluster.getRexBuilder();
    // find correlation in the converted call
    Pair<List<CorrelationId>, ImmutableBitSet> correlUse = getCorrelationUse(convertedCall);
    // create correlate node
    if (correlUse == null) {
        correlRel = plannerContext.createRelBuilder(catalogManager.getCurrentCatalog(), catalogManager.getCurrentDatabase()).push(input).push(tableFunctionScan).join(isOuter ? JoinRelType.LEFT : JoinRelType.INNER, rexBuilder.makeLiteral(true)).build();
    } else {
        if (correlUse.left.size() > 1) {
            tableFunctionScan = DeduplicateCorrelateVariables.go(rexBuilder, correlUse.left.get(0), Util.skip(correlUse.left), tableFunctionScan);
        }
        correlRel = LogicalCorrelate.create(input, tableFunctionScan, correlUse.left.get(0), correlUse.right, isOuter ? JoinRelType.LEFT : JoinRelType.INNER);
    }
    // Add new rel & its RR to the maps
    relToHiveColNameCalcitePosMap.put(tableFunctionScan, buildHiveToCalciteColumnMap(udtfOutRR));
    relToRowResolver.put(tableFunctionScan, udtfOutRR);
    HiveParserRowResolver correlRR = HiveParserRowResolver.getCombinedRR(relToRowResolver.get(input), relToRowResolver.get(tableFunctionScan));
    relToHiveColNameCalcitePosMap.put(correlRel, buildHiveToCalciteColumnMap(correlRR));
    relToRowResolver.put(correlRel, correlRR);
    if (!inSelect) {
        return correlRel;
    }
    // create project node
    List<RexNode> projects = new ArrayList<>();
    HiveParserRowResolver projectRR = new HiveParserRowResolver();
    int j = 0;
    for (int i = input.getRowType().getFieldCount(); i < correlRel.getRowType().getFieldCount(); i++) {
        projects.add(cluster.getRexBuilder().makeInputRef(correlRel, i));
        ColumnInfo inputColInfo = correlRR.getRowSchema().getSignature().get(i);
        String colAlias = inputColInfo.getAlias();
        ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(j++), inputColInfo.getObjectInspector(), null, false);
        projectRR.put(null, colAlias, colInfo);
    }
    RelNode projectNode = LogicalProject.create(correlRel, Collections.emptyList(), projects, tableFunctionScan.getRowType());
    relToHiveColNameCalcitePosMap.put(projectNode, buildHiveToCalciteColumnMap(projectRR));
    relToRowResolver.put(projectNode, projectRR);
    return projectNode;
}
Also used : ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) RexCall(org.apache.calcite.rex.RexCall) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) SqlUserDefinedTableFunction(org.apache.calcite.sql.validate.SqlUserDefinedTableFunction) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) DataType(org.apache.flink.table.types.DataType) RexBuilder(org.apache.calcite.rex.RexBuilder) CompositeList(org.apache.calcite.util.CompositeList) List(java.util.List) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) JoinType(org.apache.hadoop.hive.ql.parse.JoinType) JoinRelType(org.apache.calcite.rel.core.JoinRelType) HiveParserBaseSemanticAnalyzer.obtainTableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.obtainTableType) Type(java.lang.reflect.Type) TableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.TableType) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) RelNode(org.apache.calcite.rel.RelNode) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) RexNode(org.apache.calcite.rex.RexNode)
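
One small but reusable detail above is the alias generation for a SELECT UDTF with no explicit alias: the planner probes select_<udtf>_alias_<i> until it finds a free name. A minimal sketch of that loop in isolation:

import java.util.Set;

public class UdtfAliasSketch {
    // Probe "select_<udtf>_alias_<i>" for increasing i until the name is free.
    static String uniqueAlias(Set<String> existing, String udtfName) {
        String prefix = "select_" + udtfName + "_alias_";
        int i = 0;
        while (existing.contains(prefix + i)) {
            i++;
        }
        return prefix + i;
    }

    public static void main(String[] args) {
        // With select_explode_alias_0 taken, the next free suffix is chosen.
        System.out.println(uniqueAlias(Set.of("select_explode_alias_0"), "explode"));
        // -> select_explode_alias_1
    }
}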

Example 50 with SqlOperator

Use of org.apache.calcite.sql.SqlOperator in project flink by apache.

Class HiveParserRexNodeConverter, method convertGenericFunc:

private RexNode convertGenericFunc(ExprNodeGenericFuncDesc func) throws SemanticException {
    ExprNodeDesc tmpExprNode;
    RexNode tmpRN;
    List<RexNode> childRexNodeLst = new ArrayList<>();
    List<RelDataType> argTypes = new ArrayList<>();
    // TODO: 1) Expand to other functions as needed 2) What about types other than primitive.
    TypeInfo tgtDT = null;
    GenericUDF tgtUdf = func.getGenericUDF();
    if (tgtUdf instanceof GenericUDFIn) {
        return convertIN(func);
    }
    boolean isNumeric = isNumericBinary(func);
    boolean isCompare = !isNumeric && tgtUdf instanceof GenericUDFBaseCompare;
    boolean isWhenCase = tgtUdf instanceof GenericUDFWhen || tgtUdf instanceof GenericUDFCase;
    boolean isTransformableTimeStamp = func.getGenericUDF() instanceof GenericUDFUnixTimeStamp && func.getChildren().size() != 0;
    if (isNumeric) {
        tgtDT = func.getTypeInfo();
        assert func.getChildren().size() == 2;
    // TODO: checking 2 children is useless, compare already does that.
    } else if (isCompare && (func.getChildren().size() == 2)) {
        tgtDT = FunctionRegistry.getCommonClassForComparison(func.getChildren().get(0).getTypeInfo(), func.getChildren().get(1).getTypeInfo());
    } else if (isWhenCase) {
        // CASE/WHEN children must be checked for stateful
        // functions as they are not allowed
        if (checkForStatefulFunctions(func.getChildren())) {
            throw new SemanticException("Stateful expressions cannot be used inside of CASE");
        }
    } else if (isTransformableTimeStamp) {
        func = ExprNodeGenericFuncDesc.newInstance(new GenericUDFToUnixTimeStamp(), func.getChildren());
    }
    for (ExprNodeDesc childExpr : func.getChildren()) {
        tmpExprNode = childExpr;
        if (tgtDT != null && TypeInfoUtils.isConversionRequiredForComparison(tgtDT, childExpr.getTypeInfo())) {
            if (isCompare) {
                // For compare, we will convert requisite children
                tmpExprNode = HiveASTParseUtils.createConversionCast(childExpr, (PrimitiveTypeInfo) tgtDT);
            } else if (isNumeric) {
                // For numeric, we'll do minimum necessary cast - if we cast to the type
                // of expression, bad things will happen.
                PrimitiveTypeInfo minArgType = HiveParserExprNodeDescUtils.deriveMinArgumentCast(childExpr, tgtDT);
                tmpExprNode = HiveASTParseUtils.createConversionCast(childExpr, minArgType);
            } else {
                throw new AssertionError("Unexpected " + tgtDT + " - not a numeric op or compare");
            }
        }
        argTypes.add(HiveParserTypeConverter.convert(tmpExprNode.getTypeInfo(), cluster.getTypeFactory()));
        tmpRN = convert(tmpExprNode);
        childRexNodeLst.add(tmpRN);
    }
    // process the function
    RelDataType retType = HiveParserTypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory());
    SqlOperator calciteOp = HiveParserSqlFunctionConverter.getCalciteOperator(func.getFuncText(), func.getGenericUDF(), argTypes, retType);
    if (calciteOp.getKind() == SqlKind.CASE) {
        // If it is a case operator, we need to rewrite it
        childRexNodeLst = rewriteCaseChildren(func, childRexNodeLst);
    }
    RexNode expr = cluster.getRexBuilder().makeCall(calciteOp, childRexNodeLst);
    // check whether we need a calcite cast
    RexNode cast = handleExplicitCast(func, childRexNodeLst, ((RexCall) expr).getOperator());
    if (cast != null) {
        expr = cast;
        retType = cast.getType();
    }
    // skip flattening CAST calls: flattening a cast can make Calcite's type
    // inference throw an exception
    if (flattenExpr && expr instanceof RexCall && !(((RexCall) expr).getOperator() instanceof SqlCastFunction)) {
        RexCall call = (RexCall) expr;
        expr = cluster.getRexBuilder().makeCall(retType, call.getOperator(), RexUtil.flatten(call.getOperands(), call.getOperator()));
    }
    return expr;
}
Also used : GenericUDFCase(org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase) SqlCastFunction(org.apache.calcite.sql.fun.SqlCastFunction) SqlOperator(org.apache.calcite.sql.SqlOperator) GenericUDFWhen(org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen) ArrayList(java.util.ArrayList) GenericUDFToUnixTimeStamp(org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp) RelDataType(org.apache.calcite.rel.type.RelDataType) GenericUDFUnixTimeStamp(org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnixTimeStamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) RexCall(org.apache.calcite.rex.RexCall) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) GenericUDFBaseCompare(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare) GenericUDFIn(org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) RexNode(org.apache.calcite.rex.RexNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
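
For comparisons, the loop above wraps any child whose type differs from the common comparison type in a conversion cast. A minimal sketch of that cast-insertion step on plain value objects (Expr is a hypothetical stand-in for Hive's ExprNodeDesc, and the string-based cast is purely illustrative):

import java.util.ArrayList;
import java.util.List;

public class ConversionCastSketch {
    record Expr(String name, String type) {}

    // Wrap each child in a cast when its type differs from the common target type.
    static List<Expr> castForComparison(List<Expr> children, String targetType) {
        List<Expr> out = new ArrayList<>();
        for (Expr child : children) {
            if (!child.type().equals(targetType)) {
                out.add(new Expr("cast(" + child.name() + " as " + targetType + ")", targetType));
            } else {
                out.add(child);
            }
        }
        return out;
    }

    public static void main(String[] args) {
        List<Expr> children = List.of(new Expr("col1", "int"), new Expr("col2", "double"));
        // Only the int child is cast; the double child already matches the common type.
        System.out.println(castForComparison(children, "double"));
    }
}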

Aggregations

SqlOperator (org.apache.calcite.sql.SqlOperator): 129
ArrayList (java.util.ArrayList): 44
RexNode (org.apache.calcite.rex.RexNode): 41
RelDataType (org.apache.calcite.rel.type.RelDataType): 25
SqlCall (org.apache.calcite.sql.SqlCall): 24
RexCall (org.apache.calcite.rex.RexCall): 21
SqlNode (org.apache.calcite.sql.SqlNode): 21
SqlKind (org.apache.calcite.sql.SqlKind): 15
List (java.util.List): 13
SqlFunction (org.apache.calcite.sql.SqlFunction): 13
RelNode (org.apache.calcite.rel.RelNode): 12
RexBuilder (org.apache.calcite.rex.RexBuilder): 11
RexInputRef (org.apache.calcite.rex.RexInputRef): 11
NlsString (org.apache.calcite.util.NlsString): 11
Test (org.junit.Test): 11
SqlBasicCall (org.apache.calcite.sql.SqlBasicCall): 10
SqlIdentifier (org.apache.calcite.sql.SqlIdentifier): 10
RexNode (org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexNode): 9
SqlOperator (org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator): 9
RexLiteral (org.apache.calcite.rex.RexLiteral): 9