Example 6 with RelConversionException

use of org.apache.calcite.tools.RelConversionException in project druid by druid-io.

the class DruidPlanner method plan.

public PlannerResult plan(final String sql) throws SqlParseException, ValidationException, RelConversionException {
    SqlExplain explain = null;
    SqlNode parsed = planner.parse(sql);
    if (parsed.getKind() == SqlKind.EXPLAIN) {
        explain = (SqlExplain) parsed;
        parsed = explain.getExplicandum();
    }
    final SqlNode validated = planner.validate(parsed);
    final RelRoot root = planner.rel(validated);
    try {
        return planWithDruidConvention(explain, root);
    } catch (RelOptPlanner.CannotPlanException e) {
        // Try again with BINDABLE convention. Used for querying Values, metadata tables, and fallback.
        try {
            return planWithBindableConvention(explain, root);
        } catch (Exception e2) {
            e.addSuppressed(e2);
            throw e;
        }
    }
}
Also used : SqlExplain(org.apache.calcite.sql.SqlExplain) RelRoot(org.apache.calcite.rel.RelRoot) RelOptPlanner(org.apache.calcite.plan.RelOptPlanner) ValidationException(org.apache.calcite.tools.ValidationException) RelConversionException(org.apache.calcite.tools.RelConversionException) SqlParseException(org.apache.calcite.sql.parser.SqlParseException) SqlNode(org.apache.calcite.sql.SqlNode)
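
The DruidPlanner and CalcitePlanner examples on this page all wrap the same org.apache.calcite.tools.Planner pipeline: parse, validate, then convert to a RelRoot, with RelConversionException thrown by the last step. A minimal, self-contained sketch of that pipeline (not taken from any of the projects above; the Frameworks configuration and schema wiring are assumptions) could look like this:

import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;

public class PlannerPipelineSketch {

    public static RelRoot toRel(SchemaPlus rootSchema, String sql)
            throws SqlParseException, ValidationException, RelConversionException {
        // Hypothetical minimal config; real engines also set a parser config, operator table and rule programs.
        FrameworkConfig config = Frameworks.newConfigBuilder()
                .defaultSchema(rootSchema)
                .build();
        Planner planner = Frameworks.getPlanner(config);
        SqlNode parsed = planner.parse(sql);          // may throw SqlParseException
        SqlNode validated = planner.validate(parsed); // may throw ValidationException
        return planner.rel(validated);                // may throw RelConversionException
    }
}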

Example 7 with RelConversionException

use of org.apache.calcite.tools.RelConversionException in project drill by axbaretto.

the class DescribeTableHandler method rewrite.

/**
 * Rewrite the parse tree as SELECT ... FROM INFORMATION_SCHEMA.COLUMNS ...
 */
@Override
public SqlNode rewrite(SqlNode sqlNode) throws RelConversionException, ForemanSetupException {
    DrillSqlDescribeTable node = unwrap(sqlNode, DrillSqlDescribeTable.class);
    try {
        List<SqlNode> selectList = ImmutableList.of((SqlNode) new SqlIdentifier(COLS_COL_COLUMN_NAME, SqlParserPos.ZERO), new SqlIdentifier(COLS_COL_DATA_TYPE, SqlParserPos.ZERO), new SqlIdentifier(COLS_COL_IS_NULLABLE, SqlParserPos.ZERO));
        SqlNode fromClause = new SqlIdentifier(ImmutableList.of(IS_SCHEMA_NAME, TAB_COLUMNS), null, SqlParserPos.ZERO, null);
        final SqlIdentifier table = node.getTable();
        final SchemaPlus defaultSchema = config.getConverter().getDefaultSchema();
        final List<String> schemaPathGivenInCmd = Util.skipLast(table.names);
        final SchemaPlus schema = SchemaUtilites.findSchema(defaultSchema, schemaPathGivenInCmd);
        final String charset = Util.getDefaultCharset().name();
        if (schema == null) {
            SchemaUtilites.throwSchemaNotFoundException(defaultSchema, SchemaUtilites.SCHEMA_PATH_JOINER.join(schemaPathGivenInCmd));
        }
        if (SchemaUtilites.isRootSchema(schema)) {
            throw UserException.validationError().message("No schema selected.").build(logger);
        }
        final String tableName = Util.last(table.names);
        // find resolved schema path
        final String schemaPath = SchemaUtilites.unwrapAsDrillSchemaInstance(schema).getFullSchemaName();
        if (schema.getTable(tableName) == null) {
            throw UserException.validationError().message("Unknown table [%s] in schema [%s]", tableName, schemaPath).build(logger);
        }
        SqlNode schemaCondition = null;
        if (!SchemaUtilites.isRootSchema(schema)) {
            schemaCondition = DrillParserUtil.createCondition(new SqlIdentifier(SHRD_COL_TABLE_SCHEMA, SqlParserPos.ZERO), SqlStdOperatorTable.EQUALS, SqlLiteral.createCharString(schemaPath, charset, SqlParserPos.ZERO));
        }
        SqlNode where = DrillParserUtil.createCondition(new SqlIdentifier(SHRD_COL_TABLE_NAME, SqlParserPos.ZERO), SqlStdOperatorTable.EQUALS, SqlLiteral.createCharString(tableName, charset, SqlParserPos.ZERO));
        where = DrillParserUtil.createCondition(schemaCondition, SqlStdOperatorTable.AND, where);
        SqlNode columnFilter = null;
        if (node.getColumn() != null) {
            columnFilter = DrillParserUtil.createCondition(new SqlIdentifier(COLS_COL_COLUMN_NAME, SqlParserPos.ZERO), SqlStdOperatorTable.EQUALS, SqlLiteral.createCharString(node.getColumn().toString(), charset, SqlParserPos.ZERO));
        } else if (node.getColumnQualifier() != null) {
            columnFilter = DrillParserUtil.createCondition(new SqlIdentifier(COLS_COL_COLUMN_NAME, SqlParserPos.ZERO), SqlStdOperatorTable.LIKE, node.getColumnQualifier());
        }
        where = DrillParserUtil.createCondition(where, SqlStdOperatorTable.AND, columnFilter);
        return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(selectList, SqlParserPos.ZERO), fromClause, where, null, null, null, null, null, null);
    } catch (Exception ex) {
        throw UserException.planError(ex).message("Error while rewriting DESCRIBE query: %s", ex.getMessage()).build(logger);
    }
}
Also used : SqlSelect(org.apache.calcite.sql.SqlSelect) DrillSqlDescribeTable(org.apache.drill.exec.planner.sql.parser.DrillSqlDescribeTable) SchemaPlus(org.apache.calcite.schema.SchemaPlus) SqlNodeList(org.apache.calcite.sql.SqlNodeList) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) UserException(org.apache.drill.common.exceptions.UserException) ValidationException(org.apache.calcite.tools.ValidationException) ForemanSetupException(org.apache.drill.exec.work.foreman.ForemanSetupException) RelConversionException(org.apache.calcite.tools.RelConversionException) SqlNode(org.apache.calcite.sql.SqlNode)
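
The DrillParserUtil.createCondition calls above are thin wrappers over Calcite's SqlOperator.createCall. As a rough sketch of the same WHERE-clause construction using only Calcite's public API (the literal INFORMATION_SCHEMA column names stand in for Drill's SHRD_COL_* constants, and the charset handling of the original is omitted):

import java.util.Arrays;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;

public class DescribeRewriteSketch {

    /** Builds TABLE_SCHEMA = '<schemaPath>' AND TABLE_NAME = '<tableName>' with plain Calcite calls. */
    public static SqlNode buildWhere(String schemaPath, String tableName) {
        SqlNode schemaCond = SqlStdOperatorTable.EQUALS.createCall(SqlParserPos.ZERO,
                new SqlIdentifier("TABLE_SCHEMA", SqlParserPos.ZERO),
                SqlLiteral.createCharString(schemaPath, SqlParserPos.ZERO));
        SqlNode tableCond = SqlStdOperatorTable.EQUALS.createCall(SqlParserPos.ZERO,
                new SqlIdentifier("TABLE_NAME", SqlParserPos.ZERO),
                SqlLiteral.createCharString(tableName, SqlParserPos.ZERO));
        return SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO, schemaCond, tableCond);
    }

    /** Two-part identifier for INFORMATION_SCHEMA.COLUMNS, usable as a FROM clause. */
    public static SqlNode fromClause() {
        return new SqlIdentifier(Arrays.asList("INFORMATION_SCHEMA", "COLUMNS"), SqlParserPos.ZERO);
    }
}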

Example 8 with RelConversionException

use of org.apache.calcite.tools.RelConversionException in project herddb by diennea.

the class CalcitePlanner method translate.

@Override
public TranslatedQuery translate(String defaultTableSpace, String query, List<Object> parameters, boolean scan, boolean allowCache, boolean returnValues, int maxRows) throws StatementExecutionException {
    query = SQLPlanner.rewriteExecuteSyntax(query);
    if (query.startsWith("EXECUTE") || query.startsWith("CREATE") || query.startsWith("DROP") || query.startsWith("ALTER") || query.startsWith("TRUNCATE")) {
        return fallback.translate(defaultTableSpace, query, parameters, scan, allowCache, returnValues, maxRows);
    }
    if (parameters == null) {
        parameters = Collections.emptyList();
    }
    String cacheKey = "scan:" + scan + ",defaultTableSpace:" + defaultTableSpace + ",query:" + query + ",returnValues:" + returnValues + ",maxRows:" + maxRows;
    if (allowCache) {
        ExecutionPlan cached = cache.get(cacheKey);
        if (cached != null) {
            return new TranslatedQuery(cached, new SQLStatementEvaluationContext(query, parameters));
        }
    }
    if (!isCachable(query)) {
        allowCache = false;
    }
    try {
        if (query.startsWith("EXPLAIN ")) {
            query = query.substring("EXPLAIN ".length());
            PlannerResult plan = runPlanner(defaultTableSpace, query);
            PlannerOp finalPlan = convertRelNode(plan.topNode, plan.originalRowType, returnValues).optimize();
            ValuesOp values = new ValuesOp(manager.getNodeId(),
                    new String[] { "name", "value" },
                    new Column[] { column("name", ColumnTypes.STRING), column("value", ColumnTypes.STRING) },
                    java.util.Arrays.asList(
                            java.util.Arrays.asList(new ConstantExpression("query"), new ConstantExpression(query)),
                            java.util.Arrays.asList(new ConstantExpression("logicalplan"),
                                    new ConstantExpression(RelOptUtil.dumpPlan("", plan.logicalPlan, SqlExplainFormat.TEXT, SqlExplainLevel.ALL_ATTRIBUTES))),
                            java.util.Arrays.asList(new ConstantExpression("plan"),
                                    new ConstantExpression(RelOptUtil.dumpPlan("", plan.topNode, SqlExplainFormat.TEXT, SqlExplainLevel.ALL_ATTRIBUTES))),
                            java.util.Arrays.asList(new ConstantExpression("finalplan"), new ConstantExpression(finalPlan + ""))));
            ExecutionPlan executionPlan = ExecutionPlan.simple(new SQLPlannedOperationStatement(values));
            return new TranslatedQuery(executionPlan, new SQLStatementEvaluationContext(query, parameters));
        }
        PlannerResult plan = runPlanner(defaultTableSpace, query);
        SQLPlannedOperationStatement sqlPlannedOperationStatement = new SQLPlannedOperationStatement(convertRelNode(plan.topNode, plan.originalRowType, returnValues).optimize());
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "Query: {0} --HerdDB Plan {1}", new Object[] { query, sqlPlannedOperationStatement.getRootOp() });
        }
        if (!scan) {
            ScanStatement scanStatement = sqlPlannedOperationStatement.unwrap(ScanStatement.class);
            if (scanStatement != null) {
                Table tableDef = scanStatement.getTableDef();
                CompiledSQLExpression where = scanStatement.getPredicate().unwrap(CompiledSQLExpression.class);
                SQLRecordKeyFunction keyFunction = findIndexAccess(where, tableDef.getPrimaryKey(), tableDef, "=", tableDef);
                if (keyFunction == null || !keyFunction.isFullPrimaryKey()) {
                    throw new StatementExecutionException("unsupported GET not on PK, bad where clause: " + query);
                }
                GetStatement get = new GetStatement(scanStatement.getTableSpace(), scanStatement.getTable(), keyFunction, scanStatement.getPredicate(), true);
                ExecutionPlan executionPlan = ExecutionPlan.simple(get);
                if (allowCache) {
                    cache.put(cacheKey, executionPlan);
                }
                return new TranslatedQuery(executionPlan, new SQLStatementEvaluationContext(query, parameters));
            }
        }
        if (maxRows > 0) {
            PlannerOp op = new LimitOp(sqlPlannedOperationStatement.getRootOp(), new ConstantExpression(maxRows), new ConstantExpression(0)).optimize();
            sqlPlannedOperationStatement = new SQLPlannedOperationStatement(op);
        }
        ExecutionPlan executionPlan = ExecutionPlan.simple(sqlPlannedOperationStatement);
        if (allowCache) {
            cache.put(cacheKey, executionPlan);
        }
        return new TranslatedQuery(executionPlan, new SQLStatementEvaluationContext(query, parameters));
    } catch (CalciteContextException ex) {
        LOG.log(Level.INFO, "Error while parsing '" + ex.getOriginalStatement() + "'", ex);
        // TODO can this be done better ?
        throw new StatementExecutionException(ex.getMessage());
    } catch (RelConversionException | ValidationException | SqlParseException ex) {
        LOG.log(Level.INFO, "Error while parsing '" + query + "'", ex);
        // TODO can this be done better ?
        throw new StatementExecutionException(ex.getMessage().replace("org.apache.calcite.runtime.CalciteContextException: ", ""));
    } catch (MetadataStorageManagerException ex) {
        LOG.log(Level.INFO, "Error while parsing '" + query + "'", ex);
        throw new StatementExecutionException(ex);
    }
}
Also used : PlannerOp(herddb.model.planner.PlannerOp) Table(herddb.model.Table) RelOptTable(org.apache.calcite.plan.RelOptTable) ProjectableFilterableTable(org.apache.calcite.schema.ProjectableFilterableTable) ScannableTable(org.apache.calcite.schema.ScannableTable) AbstractTable(org.apache.calcite.schema.impl.AbstractTable) ModifiableTable(org.apache.calcite.schema.ModifiableTable) ValidationException(org.apache.calcite.tools.ValidationException) SqlParseException(org.apache.calcite.sql.parser.SqlParseException) ConstantExpression(herddb.sql.expressions.ConstantExpression) CompiledSQLExpression(herddb.sql.expressions.CompiledSQLExpression) LimitOp(herddb.model.planner.LimitOp) ValuesOp(herddb.model.planner.ValuesOp) StatementExecutionException(herddb.model.StatementExecutionException) RelConversionException(org.apache.calcite.tools.RelConversionException) SQLPlannedOperationStatement(herddb.model.commands.SQLPlannedOperationStatement) MetadataStorageManagerException(herddb.metadata.MetadataStorageManagerException) CalciteContextException(org.apache.calcite.runtime.CalciteContextException) ExecutionPlan(herddb.model.ExecutionPlan) GetStatement(herddb.model.commands.GetStatement) ScanStatement(herddb.model.commands.ScanStatement)
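
The catch blocks above collapse Calcite's three checked planning exceptions into HerdDB's StatementExecutionException. Stripped of the caching and EXPLAIN handling, that idiom can be sketched on its own; QueryPlanningException here is a hypothetical stand-in for the engine-specific exception type:

import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;

public class PlanningFacadeSketch {

    /** Hypothetical application-level exception, standing in for StatementExecutionException. */
    public static class QueryPlanningException extends RuntimeException {
        public QueryPlanningException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    /** Collapses Calcite's checked planning exceptions into one application exception. */
    public static RelRoot planOrFail(Planner planner, String sql) {
        try {
            return planner.rel(planner.validate(planner.parse(sql)));
        } catch (SqlParseException | ValidationException | RelConversionException ex) {
            throw new QueryPlanningException("Error while planning '" + sql + "': " + ex.getMessage(), ex);
        }
    }
}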

Example 9 with RelConversionException

use of org.apache.calcite.tools.RelConversionException in project druid by druid-io.

the class DruidPlanner method plan.

/**
 * Plan an SQL query for execution, returning a {@link PlannerResult} which can be used to actually execute the query.
 *
 * Ideally, the query can be planned into a native Druid query, using {@link #planWithDruidConvention}, but will
 * fall back to {@link #planWithBindableConvention} if this is not possible.
 *
 * In some future this could perhaps re-use some of the work done by {@link #validate()}
 * instead of repeating it, but that day is not today.
 */
public PlannerResult plan() throws SqlParseException, ValidationException, RelConversionException {
    resetPlanner();
    final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()));
    try {
        if (parsed.getIngestionGranularity() != null) {
            plannerContext.getQueryContext().put(DruidSqlInsert.SQL_INSERT_SEGMENT_GRANULARITY, plannerContext.getJsonMapper().writeValueAsString(parsed.getIngestionGranularity()));
        }
    } catch (JsonProcessingException e) {
        throw new ValidationException("Unable to serialize partition granularity.");
    }
    // the planner's type factory is not available until after parsing
    this.rexBuilder = new RexBuilder(planner.getTypeFactory());
    final SqlNode parameterizedQueryNode = rewriteDynamicParameters(parsed.getQueryNode());
    final SqlNode validatedQueryNode = planner.validate(parameterizedQueryNode);
    final RelRoot rootQueryRel = planner.rel(validatedQueryNode);
    try {
        return planWithDruidConvention(rootQueryRel, parsed.getExplainNode(), parsed.getInsertNode());
    } catch (Exception e) {
        Throwable cannotPlanException = Throwables.getCauseOfType(e, RelOptPlanner.CannotPlanException.class);
        if (null == cannotPlanException) {
            // Not a CannotPlanException, rethrow without trying the bindable convention
            throw e;
        }
        // If there is no INSERT clause, the query may still be plannable with the BINDABLE convention
        if (parsed.getInsertNode() == null) {
            // Try again with BINDABLE convention. Used for querying Values and metadata tables.
            try {
                return planWithBindableConvention(rootQueryRel, parsed.getExplainNode());
            } catch (Exception e2) {
                e.addSuppressed(e2);
            }
        }
        Logger logger = log;
        if (!QueryContexts.isDebug(plannerContext.getQueryContext())) {
            logger = log.noStackTrace();
        }
        String errorMessage = buildSQLPlanningErrorMessage(cannotPlanException);
        logger.warn(e, errorMessage);
        throw new UnsupportedSQLQueryException(errorMessage);
    }
}
Also used : ValidationException(org.apache.calcite.tools.ValidationException) RexBuilder(org.apache.calcite.rex.RexBuilder) RelRoot(org.apache.calcite.rel.RelRoot) Logger(org.apache.druid.java.util.common.logger.Logger) EmittingLogger(org.apache.druid.java.util.emitter.EmittingLogger) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) ValidationException(org.apache.calcite.tools.ValidationException) SqlParseException(org.apache.calcite.sql.parser.SqlParseException) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) RelConversionException(org.apache.calcite.tools.RelConversionException) SqlNode(org.apache.calcite.sql.SqlNode)
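
The fallback described in the Javadoc, first trying the native convention and then retrying with BINDABLE, can be approximated with the generic Planner.transform API. This is only a sketch under assumptions: planWithDruidConvention and planWithBindableConvention do considerably more (parameter substitution, EXPLAIN and INSERT handling), and the ruleSetIndex of 0 assumes a FrameworkConfig with a single program such as Programs.standard().

import org.apache.calcite.interpreter.BindableConvention;
import org.apache.calcite.plan.Convention;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.RelConversionException;

public class ConventionFallbackSketch {

    /** Optimizes for a preferred convention and falls back to BINDABLE when the planner cannot produce a plan. */
    public static RelNode toPhysicalPlan(Planner planner, RelRoot root, Convention preferred)
            throws RelConversionException {
        try {
            return planner.transform(0, root.rel.getTraitSet().replace(preferred), root.rel);
        } catch (RelOptPlanner.CannotPlanException e) {
            // Retry with the interpreter-backed BINDABLE convention (useful for VALUES and metadata tables).
            try {
                return planner.transform(0, root.rel.getTraitSet().replace(BindableConvention.INSTANCE), root.rel);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                throw e;
            }
        }
    }
}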

Example 10 with RelConversionException

use of org.apache.calcite.tools.RelConversionException in project drill by apache.

the class RewriteProjectToFlatten method visitProject.

@Override
public Prel visitProject(ProjectPrel project, Object unused) throws RelConversionException {
    List<RexNode> exprList = new ArrayList<>();
    boolean rewrite = false;
    List<RelDataTypeField> relDataTypes = new ArrayList<>();
    int i = 0;
    RexNode flatttenExpr = null;
    for (RexNode rex : project.getChildExps()) {
        RexNode newExpr = rex;
        if (rex instanceof RexCall) {
            RexCall function = (RexCall) rex;
            String functionName = function.getOperator().getName();
            if (functionName.equalsIgnoreCase("flatten")) {
                rewrite = true;
                if (function.getOperands().size() != 1) {
                    throw new RelConversionException("Flatten expression expects a single input.");
                }
                newExpr = function.getOperands().get(0);
                RexBuilder builder = new RexBuilder(factory);
                flatttenExpr = builder.makeInputRef(new RelDataTypeDrillImpl(new RelDataTypeHolder(), factory), i);
            }
        }
        relDataTypes.add(project.getRowType().getFieldList().get(i));
        i++;
        exprList.add(newExpr);
    }
    if (rewrite) {
        // TODO - figure out what is the right setting for the traits
        Prel newChild = ((Prel) project.getInput(0)).accept(this, null);
        ProjectPrel newProject = new ProjectPrel(project.getCluster(), project.getTraitSet(), newChild, exprList, new RelRecordType(relDataTypes));
        FlattenPrel flatten = new FlattenPrel(project.getCluster(), project.getTraitSet(), newProject, flatttenExpr);
        return flatten;
    }
    Prel child = ((Prel) project.getInput()).accept(this, null);
    if (child == project.getInput() && exprList.equals(project.getChildExps())) {
        return project;
    }
    return (Prel) project.copy(project.getTraitSet(), child, exprList, new RelRecordType(relDataTypes));
}
Also used : ProjectPrel(org.apache.drill.exec.planner.physical.ProjectPrel) ArrayList(java.util.ArrayList) RelDataTypeDrillImpl(org.apache.drill.exec.planner.types.RelDataTypeDrillImpl) RelRecordType(org.apache.calcite.rel.type.RelRecordType) RelConversionException(org.apache.calcite.tools.RelConversionException) Prel(org.apache.drill.exec.planner.physical.Prel) ProjectPrel(org.apache.drill.exec.planner.physical.ProjectPrel) FlattenPrel(org.apache.drill.exec.planner.physical.FlattenPrel) RexCall(org.apache.calcite.rex.RexCall) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) FlattenPrel(org.apache.drill.exec.planner.physical.FlattenPrel) RexBuilder(org.apache.calcite.rex.RexBuilder) RelDataTypeHolder(org.apache.drill.exec.planner.types.RelDataTypeHolder) RexNode(org.apache.calcite.rex.RexNode)

Aggregations

RelConversionException (org.apache.calcite.tools.RelConversionException)9 ValidationException (org.apache.calcite.tools.ValidationException)7 SqlNode (org.apache.calcite.sql.SqlNode)5 SqlParseException (org.apache.calcite.sql.parser.SqlParseException)4 RelRoot (org.apache.calcite.rel.RelRoot)3 RexBuilder (org.apache.calcite.rex.RexBuilder)3 IOException (java.io.IOException)2 ArrayList (java.util.ArrayList)2 RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField)2 RelRecordType (org.apache.calcite.rel.type.RelRecordType)2 RexCall (org.apache.calcite.rex.RexCall)2 RexNode (org.apache.calcite.rex.RexNode)2 SchemaPlus (org.apache.calcite.schema.SchemaPlus)2 FlattenPrel (org.apache.drill.exec.planner.physical.FlattenPrel)2 Prel (org.apache.drill.exec.planner.physical.Prel)2 ProjectPrel (org.apache.drill.exec.planner.physical.ProjectPrel)2 JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException)1 MetadataStorageManagerException (herddb.metadata.MetadataStorageManagerException)1 ExecutionPlan (herddb.model.ExecutionPlan)1 StatementExecutionException (herddb.model.StatementExecutionException)1