
Example 26 with SqlParserPos

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.parser.SqlParserPos in project drill by axbaretto.

In the class DrillAvgVarianceConvertlet, the method expandAvg:

private SqlNode expandAvg(final SqlNode arg) {
    final SqlParserPos pos = SqlParserPos.ZERO;
    final SqlNode sum = DrillCalciteSqlAggFunctionWrapper.SUM.createCall(pos, arg);
    final SqlNode count = SqlStdOperatorTable.COUNT.createCall(pos, arg);
    final SqlNode sumAsDouble = CastHighOp.createCall(pos, sum);
    return SqlStdOperatorTable.DIVIDE.createCall(pos, sumAsDouble, count);
}
Also used : SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlNode(org.apache.calcite.sql.SqlNode)
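For reference, the same rewrite can be reproduced with core Calcite classes alone; the sketch below is an illustration, not Drill's implementation, since it drops the Drill-specific CastHighOp cast and the DrillCalciteSqlAggFunctionWrapper (it relies only on org.apache.calcite.sql.SqlNode, org.apache.calcite.sql.fun.SqlStdOperatorTable and SqlParserPos).

// Sketch only: expand AVG(x) into SUM(x) / COUNT(x); every rewritten node gets
// SqlParserPos.ZERO because the synthetic call has no real source position.
static SqlNode expandAvgSketch(SqlNode arg) {
    final SqlParserPos pos = SqlParserPos.ZERO;
    final SqlNode sum = SqlStdOperatorTable.SUM.createCall(pos, arg);
    final SqlNode count = SqlStdOperatorTable.COUNT.createCall(pos, arg);
    return SqlStdOperatorTable.DIVIDE.createCall(pos, sum, count);
}
// e.g. expandAvgSketch(new SqlIdentifier("x", SqlParserPos.ZERO)) unparses to
// something like SUM(`x`) / COUNT(`x`)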

Example 27 with SqlParserPos

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.parser.SqlParserPos in project flink by apache.

In the class HiveParserSqlFunctionConverter, the method getUDFInfo:

private static CalciteUDFInfo getUDFInfo(String hiveUdfName, List<RelDataType> calciteArgTypes, RelDataType calciteRetType) {
    CalciteUDFInfo udfInfo = new CalciteUDFInfo();
    udfInfo.udfName = hiveUdfName;
    String[] nameParts = hiveUdfName.split("\\.");
    if (nameParts.length > 1) {
        udfInfo.identifier = new SqlIdentifier(Arrays.stream(nameParts).collect(Collectors.toList()), new SqlParserPos(0, 0));
    }
    udfInfo.returnTypeInference = ReturnTypes.explicit(calciteRetType);
    udfInfo.operandTypeInference = InferTypes.explicit(calciteArgTypes);
    List<SqlTypeFamily> typeFamily = new ArrayList<>();
    for (RelDataType argType : calciteArgTypes) {
        typeFamily.add(Util.first(argType.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
    }
    udfInfo.operandTypeChecker = OperandTypes.family(Collections.unmodifiableList(typeFamily));
    return udfInfo;
}
Also used : SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlTypeFamily(org.apache.calcite.sql.type.SqlTypeFamily) ArrayList(java.util.ArrayList) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier)
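The only SqlParserPos here is the synthetic new SqlParserPos(0, 0) attached to the multi-part identifier built for a dotted UDF name. A minimal sketch of just that step, with a hypothetical name "mydb.myudf" (requires java.util.Arrays, org.apache.calcite.sql.SqlIdentifier and SqlParserPos):

// Sketch only: split a dotted function name and wrap it in a multi-part
// SqlIdentifier whose position did not come from a parser.
String hiveUdfName = "mydb.myudf";                  // hypothetical name
String[] nameParts = hiveUdfName.split("\\.");
SqlIdentifier identifier =
        new SqlIdentifier(Arrays.asList(nameParts), new SqlParserPos(0, 0));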

Example 28 with SqlParserPos

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.parser.SqlParserPos in project calcite by apache.

In the class SqlTesterImpl, the method buildQuery2:

/**
 * Builds a query that extracts all literals as columns in an underlying
 * select.
 *
 * <p>For example,</p>
 *
 * <blockquote>{@code 1 < 5}</blockquote>
 *
 * <p>becomes</p>
 *
 * <blockquote>{@code SELECT p0 < p1
 * FROM (VALUES (1, 5)) AS t(p0, p1)}</blockquote>
 *
 * <p>Null literals don't have enough type information to be extracted.
 * We push down {@code CAST(NULL AS type)} but raw nulls such as
 * {@code CASE 1 WHEN 2 THEN 'a' ELSE NULL END} are left as is.</p>
 *
 * @param expression Scalar expression
 * @return Query that evaluates a scalar expression
 */
private String buildQuery2(String expression) {
    // "values (1 < 5)"
    // becomes
    // "select p0 < p1 from (values (1, 5)) as t(p0, p1)"
    SqlNode x;
    final String sql = "values (" + expression + ")";
    try {
        x = parseQuery(sql);
    } catch (SqlParseException e) {
        throw new RuntimeException(e);
    }
    final Collection<SqlNode> literalSet = new LinkedHashSet<>();
    x.accept(new SqlShuttle() {

        private final List<SqlOperator> ops = ImmutableList.of(SqlStdOperatorTable.LITERAL_CHAIN, SqlStdOperatorTable.LOCALTIME, SqlStdOperatorTable.LOCALTIMESTAMP, SqlStdOperatorTable.CURRENT_TIME, SqlStdOperatorTable.CURRENT_TIMESTAMP);

        @Override
        public SqlNode visit(SqlLiteral literal) {
            if (!isNull(literal) && literal.getTypeName() != SqlTypeName.SYMBOL) {
                literalSet.add(literal);
            }
            return literal;
        }

        @Override
        public SqlNode visit(SqlCall call) {
            final SqlOperator operator = call.getOperator();
            if (operator == SqlStdOperatorTable.CAST && isNull(call.operand(0))) {
                literalSet.add(call);
                return call;
            } else if (ops.contains(operator)) {
                // Leave these calls as is; operators such as LITERAL_CHAIN and
                // LOCALTIME cannot be extracted into columns and must stay literal
                return call;
            } else {
                return super.visit(call);
            }
        }

        private boolean isNull(SqlNode sqlNode) {
            return sqlNode instanceof SqlLiteral && ((SqlLiteral) sqlNode).getTypeName() == SqlTypeName.NULL;
        }
    });
    final List<SqlNode> nodes = new ArrayList<>(literalSet);
    Collections.sort(nodes, new Comparator<SqlNode>() {

        public int compare(SqlNode o1, SqlNode o2) {
            final SqlParserPos pos0 = o1.getParserPosition();
            final SqlParserPos pos1 = o2.getParserPosition();
            int c = -Utilities.compare(pos0.getLineNum(), pos1.getLineNum());
            if (c != 0) {
                return c;
            }
            return -Utilities.compare(pos0.getColumnNum(), pos1.getColumnNum());
        }
    });
    String sql2 = sql;
    final List<Pair<String, String>> values = new ArrayList<>();
    int p = 0;
    for (SqlNode literal : nodes) {
        final SqlParserPos pos = literal.getParserPosition();
        final int start = SqlParserUtil.lineColToIndex(sql, pos.getLineNum(), pos.getColumnNum());
        final int end = SqlParserUtil.lineColToIndex(sql, pos.getEndLineNum(), pos.getEndColumnNum()) + 1;
        String param = "p" + (p++);
        values.add(Pair.of(sql2.substring(start, end), param));
        sql2 = sql2.substring(0, start) + param + sql2.substring(end);
    }
    if (values.isEmpty()) {
        values.add(Pair.of("1", "p0"));
    }
    return "select " + sql2.substring("values (".length(), sql2.length() - 1) + " from (values (" + Util.commaList(Pair.left(values)) + ")) as t(" + Util.commaList(Pair.right(values)) + ")";
}
Also used : LinkedHashSet(java.util.LinkedHashSet) SqlShuttle(org.apache.calcite.sql.util.SqlShuttle) SqlParseException(org.apache.calcite.sql.parser.SqlParseException) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlOperator(org.apache.calcite.sql.SqlOperator) SqlCall(org.apache.calcite.sql.SqlCall) ArrayList(java.util.ArrayList) SqlLiteral(org.apache.calcite.sql.SqlLiteral) SqlNode(org.apache.calcite.sql.SqlNode) Pair(org.apache.calcite.util.Pair)
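The substitution loop works because SqlParserPos records 1-based line and column numbers while Java string offsets are 0-based; SqlParserUtil.lineColToIndex performs the conversion. A short sketch of that mapping, assuming sql and literal come from the method above:

// Sketch only: turn a node's 1-based parser position into 0-based string offsets
// (the end position is inclusive, hence the +1 to get an exclusive substring bound).
SqlParserPos pos = literal.getParserPosition();
int start = SqlParserUtil.lineColToIndex(sql, pos.getLineNum(), pos.getColumnNum());
int end = SqlParserUtil.lineColToIndex(sql, pos.getEndLineNum(), pos.getEndColumnNum()) + 1;
String literalText = sql.substring(start, end);     // e.g. "1" or "5" in "values (1 < 5)"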

Example 29 with SqlParserPos

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.parser.SqlParserPos in project calcite by apache.

In the class DelegatingScope, the method fullyQualify:

/**
 * Converts an identifier into a fully-qualified identifier. For example,
 * the "empno" in "select empno from emp natural join dept" becomes
 * "emp.empno".
 *
 * <p>If the identifier cannot be resolved, this method throws a validation
 * error; it never returns null.</p>
 */
public SqlQualified fullyQualify(SqlIdentifier identifier) {
    if (identifier.isStar()) {
        return SqlQualified.create(this, 1, null, identifier);
    }
    final SqlIdentifier previous = identifier;
    final SqlNameMatcher nameMatcher = validator.catalogReader.nameMatcher();
    String columnName;
    final String tableName;
    final SqlValidatorNamespace namespace;
    switch(identifier.names.size()) {
        case 1:
            {
                columnName = identifier.names.get(0);
                final Map<String, ScopeChild> map = findQualifyingTableNames(columnName, identifier, nameMatcher);
                switch(map.size()) {
                    case 0:
                        if (nameMatcher.isCaseSensitive()) {
                            final SqlNameMatcher liberalMatcher = SqlNameMatchers.liberal();
                            final Map<String, ScopeChild> map2 = findQualifyingTableNames(columnName, identifier, liberalMatcher);
                            if (!map2.isEmpty()) {
                                final List<String> list = new ArrayList<>();
                                for (ScopeChild entry : map2.values()) {
                                    final RelDataTypeField field = liberalMatcher.field(entry.namespace.getRowType(), columnName);
                                    list.add(field.getName());
                                }
                                Collections.sort(list);
                                throw validator.newValidationError(identifier, RESOURCE.columnNotFoundDidYouMean(columnName, Util.sepList(list, "', '")));
                            }
                        }
                        throw validator.newValidationError(identifier, RESOURCE.columnNotFound(columnName));
                    case 1:
                        tableName = map.keySet().iterator().next();
                        namespace = map.get(tableName).namespace;
                        break;
                    default:
                        throw validator.newValidationError(identifier, RESOURCE.columnAmbiguous(columnName));
                }
                final ResolvedImpl resolved = new ResolvedImpl();
                resolveInNamespace(namespace, false, identifier.names, nameMatcher, Path.EMPTY, resolved);
                final RelDataTypeField field = nameMatcher.field(namespace.getRowType(), columnName);
                if (field != null) {
                    if (hasAmbiguousUnresolvedStar(namespace.getRowType(), field, columnName)) {
                        throw validator.newValidationError(identifier, RESOURCE.columnAmbiguous(columnName));
                    }
                    // use resolved field name
                    columnName = field.getName();
                }
                // todo: do implicit collation here
                final SqlParserPos pos = identifier.getParserPosition();
                identifier = new SqlIdentifier(ImmutableList.of(tableName, columnName), null, pos, ImmutableList.of(SqlParserPos.ZERO, pos));
            }
        // fall through
        default:
            {
                SqlValidatorNamespace fromNs = null;
                Path fromPath = null;
                RelDataType fromRowType = null;
                final ResolvedImpl resolved = new ResolvedImpl();
                int size = identifier.names.size();
                int i = size - 1;
                for (; i > 0; i--) {
                    final SqlIdentifier prefix = identifier.getComponent(0, i);
                    resolved.clear();
                    resolve(prefix.names, nameMatcher, false, resolved);
                    if (resolved.count() == 1) {
                        final Resolve resolve = resolved.only();
                        fromNs = resolve.namespace;
                        fromPath = resolve.path;
                        fromRowType = resolve.rowType();
                        break;
                    }
                    // Look for a table alias that is the wrong case.
                    if (nameMatcher.isCaseSensitive()) {
                        final SqlNameMatcher liberalMatcher = SqlNameMatchers.liberal();
                        resolved.clear();
                        resolve(prefix.names, liberalMatcher, false, resolved);
                        if (resolved.count() == 1) {
                            final Step lastStep = Util.last(resolved.only().path.steps());
                            throw validator.newValidationError(prefix, RESOURCE.tableNameNotFoundDidYouMean(prefix.toString(), lastStep.name));
                        }
                    }
                }
                if (fromNs == null || fromNs instanceof SchemaNamespace) {
                    // Look for a column not qualified by a table alias.
                    columnName = identifier.names.get(0);
                    final Map<String, ScopeChild> map = findQualifyingTableNames(columnName, identifier, nameMatcher);
                    switch(map.size()) {
                        default:
                            final SqlIdentifier prefix1 = identifier.skipLast(1);
                            throw validator.newValidationError(prefix1, RESOURCE.tableNameNotFound(prefix1.toString()));
                        case 1:
                            {
                                final Map.Entry<String, ScopeChild> entry = map.entrySet().iterator().next();
                                final String tableName2 = map.keySet().iterator().next();
                                fromNs = entry.getValue().namespace;
                                fromPath = Path.EMPTY;
                                // Adding the table name is only for RecordType columns with a
                                // peeking StructKind (PEEK_FIELDS, PEEK_FIELDS_DEFAULT or
                                // PEEK_FIELDS_NO_EXPAND). Access to a field in a RecordType
                                // column of any other StructKind must always be qualified with
                                // the table name.
                                final RelDataTypeField field = nameMatcher.field(fromNs.getRowType(), columnName);
                                if (field != null) {
                                    switch(field.getType().getStructKind()) {
                                        case PEEK_FIELDS:
                                        case PEEK_FIELDS_DEFAULT:
                                        case PEEK_FIELDS_NO_EXPAND:
                                            // use resolved field name
                                            columnName = field.getName();
                                            resolve(ImmutableList.of(tableName2), nameMatcher, false, resolved);
                                            if (resolved.count() == 1) {
                                                final Resolve resolve = resolved.only();
                                                fromNs = resolve.namespace;
                                                fromPath = resolve.path;
                                                fromRowType = resolve.rowType();
                                                identifier = identifier.setName(0, columnName).add(0, tableName2, SqlParserPos.ZERO);
                                                ++i;
                                                ++size;
                                            }
                                            break;
                                        default:
                                            // Throw an error if the table was not found.
                                            // If one or more of the child namespaces allows peeking
                                            // (e.g. if they are Phoenix column families) then we relax the SQL
                                            // standard requirement that record fields are qualified by table alias.
                                            final SqlIdentifier prefix = identifier.skipLast(1);
                                            throw validator.newValidationError(prefix, RESOURCE.tableNameNotFound(prefix.toString()));
                                    }
                                }
                            }
                    }
                }
                // change "e.empno" to "E.empno".
                if (fromNs.getEnclosingNode() != null && !(this instanceof MatchRecognizeScope)) {
                    String alias = SqlValidatorUtil.getAlias(fromNs.getEnclosingNode(), -1);
                    if (alias != null && i > 0 && !alias.equals(identifier.names.get(i - 1))) {
                        identifier = identifier.setName(i - 1, alias);
                    }
                }
                if (fromPath.stepCount() > 1) {
                    assert fromRowType != null;
                    for (Step p : fromPath.steps()) {
                        fromRowType = fromRowType.getFieldList().get(p.i).getType();
                    }
                    ++i;
                }
                final SqlIdentifier suffix = identifier.getComponent(i, size);
                resolved.clear();
                resolveInNamespace(fromNs, false, suffix.names, nameMatcher, Path.EMPTY, resolved);
                final Path path;
                switch(resolved.count()) {
                    case 0:
                        // Maybe the last component was correct, just wrong case
                        if (nameMatcher.isCaseSensitive()) {
                            SqlNameMatcher liberalMatcher = SqlNameMatchers.liberal();
                            resolved.clear();
                            resolveInNamespace(fromNs, false, suffix.names, liberalMatcher, Path.EMPTY, resolved);
                            if (resolved.count() > 0) {
                                int k = size - 1;
                                final SqlIdentifier prefix = identifier.getComponent(0, i);
                                final SqlIdentifier suffix3 = identifier.getComponent(i, k + 1);
                                final Step step = Util.last(resolved.resolves.get(0).path.steps());
                                throw validator.newValidationError(suffix3, RESOURCE.columnNotFoundInTableDidYouMean(suffix3.toString(), prefix.toString(), step.name));
                            }
                        }
                        // Find the shortest suffix that also fails. Suppose we cannot resolve
                        // "a.b.c"; we find we cannot resolve "a.b" but can resolve "a". So,
                        // the error will be "Column 'a.b' not found".
                        int k = size - 1;
                        for (; k > i; --k) {
                            SqlIdentifier suffix2 = identifier.getComponent(i, k);
                            resolved.clear();
                            resolveInNamespace(fromNs, false, suffix2.names, nameMatcher, Path.EMPTY, resolved);
                            if (resolved.count() > 0) {
                                break;
                            }
                        }
                        final SqlIdentifier prefix = identifier.getComponent(0, i);
                        final SqlIdentifier suffix3 = identifier.getComponent(i, k + 1);
                        throw validator.newValidationError(suffix3, RESOURCE.columnNotFoundInTable(suffix3.toString(), prefix.toString()));
                    case 1:
                        path = resolved.only().path;
                        break;
                    default:
                        final Comparator<Resolve> c = new Comparator<Resolve>() {

                            public int compare(Resolve o1, Resolve o2) {
                                // Name resolution that uses fewer implicit steps wins.
                                int c = Integer.compare(worstKind(o1.path), worstKind(o2.path));
                                if (c != 0) {
                                    return c;
                                }
                                // Shorter path wins
                                return Integer.compare(o1.path.stepCount(), o2.path.stepCount());
                            }

                            private int worstKind(Path path) {
                                int kind = -1;
                                for (Step step : path.steps()) {
                                    kind = Math.max(kind, step.kind.ordinal());
                                }
                                return kind;
                            }
                        };
                        Collections.sort(resolved.resolves, c);
                        if (c.compare(resolved.resolves.get(0), resolved.resolves.get(1)) == 0) {
                            throw validator.newValidationError(suffix, RESOURCE.columnAmbiguous(suffix.toString()));
                        }
                        path = resolved.resolves.get(0).path;
                }
                // Normalize case to match definition, make elided fields explicit,
                // and check that references to dynamic stars ("**") are unambiguous.
                int k = i;
                for (Step step : path.steps()) {
                    final String name = identifier.names.get(k);
                    if (step.i < 0) {
                        throw validator.newValidationError(identifier, RESOURCE.columnNotFound(name));
                    }
                    final RelDataTypeField field0 = step.rowType.getFieldList().get(step.i);
                    final String fieldName = field0.getName();
                    switch(step.kind) {
                        case PEEK_FIELDS:
                        case PEEK_FIELDS_DEFAULT:
                        case PEEK_FIELDS_NO_EXPAND:
                            identifier = identifier.add(k, fieldName, SqlParserPos.ZERO);
                            break;
                        default:
                            if (!fieldName.equals(name)) {
                                identifier = identifier.setName(k, fieldName);
                            }
                            if (hasAmbiguousUnresolvedStar(step.rowType, field0, name)) {
                                throw validator.newValidationError(identifier, RESOURCE.columnAmbiguous(name));
                            }
                    }
                    ++k;
                }
                // Multiple components may have been resolved as one step by a
                // CustomResolvingTable; trim the identifier to the resolved components.
                if (identifier.names.size() > k) {
                    identifier = identifier.getComponent(0, k);
                }
                if (i > 1) {
                    // Simplify overqualified identifiers.
                    // For example, schema.emp.deptno becomes emp.deptno.
                    // 
                    // It is safe to convert schema.emp or database.schema.emp to emp
                    // because it would not have resolved if the FROM item had an alias. The
                    // following query is invalid:
                    // SELECT schema.emp.deptno FROM schema.emp AS e
                    identifier = identifier.getComponent(i - 1, identifier.names.size());
                }
                if (!previous.equals(identifier)) {
                    validator.setOriginal(identifier, previous);
                }
                return SqlQualified.create(this, i, fromNs, identifier);
            }
    }
}
Also used : SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) Comparator(java.util.Comparator) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) SqlNodeList(org.apache.calcite.sql.SqlNodeList) Map(java.util.Map)
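The key SqlParserPos usage is the re-qualification at the end of case 1: the bare column keeps its original parser position while the inserted table component gets SqlParserPos.ZERO. A minimal sketch of that construction with assumed names "EMP" and "empno" (requires com.google.common.collect.ImmutableList, org.apache.calcite.sql.SqlIdentifier and SqlParserPos):

// Sketch only: qualify a bare column reference while preserving its source position.
SqlIdentifier bare = new SqlIdentifier("empno", new SqlParserPos(1, 8));   // hypothetical position
SqlParserPos pos = bare.getParserPosition();
SqlIdentifier qualified = new SqlIdentifier(
        ImmutableList.of("EMP", "empno"), null,
        pos, ImmutableList.of(SqlParserPos.ZERO, pos));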

Example 30 with SqlParserPos

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.parser.SqlParserPos in project calcite by apache.

In the class SqlAdvisor, the method getQualifiedName:

/**
 * Gets the fully qualified name for a {@link SqlIdentifier} at a given
 * position of a sql statement.
 *
 * @param sql    A syntactically correct SQL statement for which to retrieve
 *               the fully qualified name of a SQL identifier
 * @param cursor 0-based cursor position in the query, pointing at the SQL
 *               identifier whose fully qualified name is to be returned
 * @return a {@link SqlMoniker} containing the fully qualified name of the
 * specified SQL identifier, or null if none is found or the SQL
 * statement is invalid.
 */
public SqlMoniker getQualifiedName(String sql, int cursor) {
    SqlNode sqlNode;
    try {
        sqlNode = parseQuery(sql);
        validator.validate(sqlNode);
    } catch (Exception e) {
        return null;
    }
    SqlParserPos pos = new SqlParserPos(1, cursor + 1);
    try {
        return validator.lookupQualifiedName(sqlNode, pos);
    } catch (CalciteContextException e) {
        return null;
    } catch (java.lang.AssertionError e) {
        return null;
    }
}
Also used : CalciteContextException(org.apache.calcite.runtime.CalciteContextException) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) CalciteException(org.apache.calcite.runtime.CalciteException) SqlParseException(org.apache.calcite.sql.parser.SqlParseException) SqlNode(org.apache.calcite.sql.SqlNode)
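The only position arithmetic here maps the advisor's 0-based cursor offset onto the parser's 1-based column numbering. A minimal sketch of that conversion with a hypothetical query and cursor value:

// Sketch only: SqlParserPos lines and columns are 1-based, the cursor is a
// 0-based character offset into a single-line query, hence line 1 and cursor + 1.
String sql = "select empno from emp";    // hypothetical query
int cursor = 7;                          // 0-based offset of "empno"
SqlParserPos pos = new SqlParserPos(1, cursor + 1);    // line 1, column 8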

Aggregations

SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos) - 42 usages
SqlNode (org.apache.calcite.sql.SqlNode) - 17 usages
SqlIntervalQualifier (org.apache.calcite.sql.SqlIntervalQualifier) - 11 usages
RelDataType (org.apache.calcite.rel.type.RelDataType) - 9 usages
SqlIdentifier (org.apache.calcite.sql.SqlIdentifier) - 9 usages
BigDecimal (java.math.BigDecimal) - 8 usages
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime) - 6 usages
SqlCall (org.apache.calcite.sql.SqlCall) - 5 usages
ArrayList (java.util.ArrayList) - 4 usages
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField) - 4 usages
SqlNodeList (org.apache.calcite.sql.SqlNodeList) - 4 usages
ImmutableList (com.google.common.collect.ImmutableList) - 3 usages
HashMap (java.util.HashMap) - 3 usages
Map (java.util.Map) - 3 usages
RexNode (org.apache.calcite.rex.RexNode) - 3 usages
SqlNumericLiteral (org.apache.calcite.sql.SqlNumericLiteral) - 3 usages
SqlOperator (org.apache.calcite.sql.SqlOperator) - 3 usages
Calendar (java.util.Calendar) - 2 usages
IdentityHashMap (java.util.IdentityHashMap) - 2 usages
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory) - 2 usages