
Example 71 with SqlIdentifier

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project hazelcast by hazelcast.

the class HazelcastOperandTypeInference method inferOperandTypes.

@Override
public void inferOperandTypes(SqlCallBinding callBinding, RelDataType returnType, RelDataType[] operandTypes) {
    SqlCall call = callBinding.getCall();
    if (ValidationUtil.hasAssignment(call)) {
        RelDataTypeFactory typeFactory = callBinding.getTypeFactory();
        RelDataType[] parameterTypes = new RelDataType[parametersByName.size()];
        for (int i = 0; i < call.operandCount(); i++) {
            SqlCall assignment = call.operand(i);
            SqlIdentifier id = assignment.operand(1);
            String name = id.getSimple();
            HazelcastTableFunctionParameter parameter = parametersByName.get(name);
            if (parameter != null) {
                SqlTypeName parameterType = parameter.type();
                parameterTypes[parameter.ordinal()] = toType(parameterType, typeFactory);
            } else {
                throw SqlUtil.newContextException(id.getParserPosition(), RESOURCE.unknownArgumentName(name));
            }
        }
        // noinspection ResultOfMethodCallIgnored
        Arrays.stream(parameterTypes).filter(Objects::nonNull).toArray(ignored -> operandTypes);
    } else {
        positionalOperandTypeInference.inferOperandTypes(callBinding, returnType, operandTypes);
    }
}
Also used : SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) SqlCall(org.apache.calcite.sql.SqlCall) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) RelDataType(org.apache.calcite.rel.type.RelDataType) HazelcastTableFunctionParameter(com.hazelcast.jet.sql.impl.schema.HazelcastTableFunctionParameter) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier)
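
For context, a minimal sketch (not from the Hazelcast source; the identifier value is made up) of pulling a plain argument name out of a SqlIdentifier, as the assignment handling above does with assignment.operand(1):

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.parser.SqlParserPos;

// Build a simple (single-part) identifier such as the one naming a call argument.
SqlIdentifier id = new SqlIdentifier("file_name", SqlParserPos.ZERO);
// getSimple() is only valid for single-part identifiers, so guard with isSimple().
String name = id.isSimple() ? id.getSimple() : null;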

Example 72 with SqlIdentifier

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project drill by apache.

the class ShowFileHandler method getPlan.

@Override
public PhysicalPlan getPlan(SqlNode sqlNode) throws ValidationException, RelConversionException, IOException {
    SqlIdentifier from = ((SqlShowFiles) sqlNode).getDb();
    DrillFileSystem fs = null;
    String defaultLocation = null;
    String fromDir = "./";
    SchemaPlus defaultSchema = config.getConverter().getDefaultSchema();
    SchemaPlus drillSchema = defaultSchema;
    // Show files can be used without from clause, in which case we display the files in the default schema
    if (from != null) {
        // We are not sure if the full from clause is just the schema or includes table name,
        // first try to see if the full path specified is a schema
        drillSchema = SchemaUtilites.findSchema(defaultSchema, from.names);
        if (drillSchema == null) {
            // Entire from clause is not a schema, try to obtain the schema without the last part of the specified clause.
            drillSchema = SchemaUtilites.findSchema(defaultSchema, from.names.subList(0, from.names.size() - 1));
            fromDir = fromDir + from.names.get((from.names.size() - 1));
        }
        if (drillSchema == null) {
            throw UserException.validationError().message("Invalid FROM/IN clause [%s]", from.toString()).build(logger);
        }
    }
    WorkspaceSchema wsSchema;
    try {
        wsSchema = (WorkspaceSchema) drillSchema.unwrap(AbstractSchema.class).getDefaultSchema();
    } catch (ClassCastException e) {
        throw UserException.validationError().message("SHOW FILES is supported in workspace type schema only. Schema [%s] is not a workspace schema.", SchemaUtilites.getSchemaPath(drillSchema)).build(logger);
    }
    // Get the file system object
    fs = wsSchema.getFS();
    // Get the default path
    defaultLocation = wsSchema.getDefaultLocation();
    List<ShowFilesCommandResult> rows = new ArrayList<>();
    for (FileStatus fileStatus : fs.list(false, new Path(defaultLocation, fromDir))) {
        ShowFilesCommandResult result = new ShowFilesCommandResult(
            fileStatus.getPath().getName(),
            fileStatus.isDir(),
            !fileStatus.isDir(),
            fileStatus.getLen(),
            fileStatus.getOwner(),
            fileStatus.getGroup(),
            fileStatus.getPermission().toString(),
            fileStatus.getAccessTime(),
            fileStatus.getModificationTime());
        rows.add(result);
    }
    return DirectPlan.createDirectPlan(context.getCurrentEndpoint(), rows.iterator(), ShowFilesCommandResult.class);
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) WorkspaceSchema(org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.WorkspaceSchema) SchemaPlus(org.apache.calcite.schema.SchemaPlus) ArrayList(java.util.ArrayList) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) SqlShowFiles(org.apache.drill.exec.planner.sql.parser.SqlShowFiles) DrillFileSystem(org.apache.drill.exec.store.dfs.DrillFileSystem) AbstractSchema(org.apache.drill.exec.store.AbstractSchema)
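
A minimal sketch (illustrative names, not from the Drill source) of how a compound FROM identifier splits into a schema path and a trailing component, which is what the from.names.subList handling above relies on:

import java.util.Arrays;
import java.util.List;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.parser.SqlParserPos;

// A compound identifier such as dfs.tmp.logs exposes its parts via the public names field.
SqlIdentifier from = new SqlIdentifier(Arrays.asList("dfs", "tmp", "logs"), SqlParserPos.ZERO);
List<String> schemaPath = from.names.subList(0, from.names.size() - 1); // ["dfs", "tmp"]
String lastPart = from.names.get(from.names.size() - 1);                // "logs"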

Example 73 with SqlIdentifier

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project drill by axbaretto.

the class DescribeTableHandler method rewrite.

/**
 * Rewrite the parse tree as SELECT ... FROM INFORMATION_SCHEMA.COLUMNS ...
 */
@Override
public SqlNode rewrite(SqlNode sqlNode) throws RelConversionException, ForemanSetupException {
    DrillSqlDescribeTable node = unwrap(sqlNode, DrillSqlDescribeTable.class);
    try {
        List<SqlNode> selectList = ImmutableList.of(
            (SqlNode) new SqlIdentifier(COLS_COL_COLUMN_NAME, SqlParserPos.ZERO),
            new SqlIdentifier(COLS_COL_DATA_TYPE, SqlParserPos.ZERO),
            new SqlIdentifier(COLS_COL_IS_NULLABLE, SqlParserPos.ZERO));
        SqlNode fromClause = new SqlIdentifier(ImmutableList.of(IS_SCHEMA_NAME, TAB_COLUMNS), null, SqlParserPos.ZERO, null);
        final SqlIdentifier table = node.getTable();
        final SchemaPlus defaultSchema = config.getConverter().getDefaultSchema();
        final List<String> schemaPathGivenInCmd = Util.skipLast(table.names);
        final SchemaPlus schema = SchemaUtilites.findSchema(defaultSchema, schemaPathGivenInCmd);
        final String charset = Util.getDefaultCharset().name();
        if (schema == null) {
            SchemaUtilites.throwSchemaNotFoundException(defaultSchema, SchemaUtilites.SCHEMA_PATH_JOINER.join(schemaPathGivenInCmd));
        }
        if (SchemaUtilites.isRootSchema(schema)) {
            throw UserException.validationError().message("No schema selected.").build(logger);
        }
        final String tableName = Util.last(table.names);
        // find resolved schema path
        final String schemaPath = SchemaUtilites.unwrapAsDrillSchemaInstance(schema).getFullSchemaName();
        if (schema.getTable(tableName) == null) {
            throw UserException.validationError().message("Unknown table [%s] in schema [%s]", tableName, schemaPath).build(logger);
        }
        SqlNode schemaCondition = null;
        if (!SchemaUtilites.isRootSchema(schema)) {
            schemaCondition = DrillParserUtil.createCondition(new SqlIdentifier(SHRD_COL_TABLE_SCHEMA, SqlParserPos.ZERO), SqlStdOperatorTable.EQUALS, SqlLiteral.createCharString(schemaPath, charset, SqlParserPos.ZERO));
        }
        SqlNode where = DrillParserUtil.createCondition(new SqlIdentifier(SHRD_COL_TABLE_NAME, SqlParserPos.ZERO), SqlStdOperatorTable.EQUALS, SqlLiteral.createCharString(tableName, charset, SqlParserPos.ZERO));
        where = DrillParserUtil.createCondition(schemaCondition, SqlStdOperatorTable.AND, where);
        SqlNode columnFilter = null;
        if (node.getColumn() != null) {
            columnFilter = DrillParserUtil.createCondition(new SqlIdentifier(COLS_COL_COLUMN_NAME, SqlParserPos.ZERO), SqlStdOperatorTable.EQUALS, SqlLiteral.createCharString(node.getColumn().toString(), charset, SqlParserPos.ZERO));
        } else if (node.getColumnQualifier() != null) {
            columnFilter = DrillParserUtil.createCondition(new SqlIdentifier(COLS_COL_COLUMN_NAME, SqlParserPos.ZERO), SqlStdOperatorTable.LIKE, node.getColumnQualifier());
        }
        where = DrillParserUtil.createCondition(where, SqlStdOperatorTable.AND, columnFilter);
        return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(selectList, SqlParserPos.ZERO), fromClause, where, null, null, null, null, null, null);
    } catch (Exception ex) {
        throw UserException.planError(ex).message("Error while rewriting DESCRIBE query: %s", ex.getMessage()).build(logger);
    }
}
Also used : SqlSelect(org.apache.calcite.sql.SqlSelect) DrillSqlDescribeTable(org.apache.drill.exec.planner.sql.parser.DrillSqlDescribeTable) SchemaPlus(org.apache.calcite.schema.SchemaPlus) SqlNodeList(org.apache.calcite.sql.SqlNodeList) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) UserException(org.apache.drill.common.exceptions.UserException) ValidationException(org.apache.calcite.tools.ValidationException) ForemanSetupException(org.apache.drill.exec.work.foreman.ForemanSetupException) RelConversionException(org.apache.calcite.tools.RelConversionException) SqlNode(org.apache.calcite.sql.SqlNode)
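
For comparison, a hedged sketch of the same kind of equality predicate built with stock Calcite calls rather than Drill's DrillParserUtil helper; the column and table names below are made up:

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;

// TABLE_NAME = 'my_table' as a parse-tree fragment.
SqlNode predicate = SqlStdOperatorTable.EQUALS.createCall(
    SqlParserPos.ZERO,
    new SqlIdentifier("TABLE_NAME", SqlParserPos.ZERO),
    SqlLiteral.createCharString("my_table", SqlParserPos.ZERO));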

Example 74 with SqlIdentifier

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project drill by axbaretto.

the class UnsupportedOperatorsVisitor method detectMultiplePartitions.

/**
 * Disable multiple window partitions in a SELECT clause.
 * If multiple window definitions appear in the query,
 * SqlUnsupportedException is thrown to inform the user.
 * @param sqlSelect the SELECT clause of the query
 */
private void detectMultiplePartitions(SqlSelect sqlSelect) {
    // This is used to keep track of the window function which has been defined
    SqlNode definedWindow = null;
    for (SqlNode nodeInSelectList : sqlSelect.getSelectList()) {
        // If the window function is used with an alias,
        // enter the first operand of the AS operator
        if (nodeInSelectList.getKind() == SqlKind.AS && (((SqlCall) nodeInSelectList).getOperandList().get(0).getKind() == SqlKind.OVER)) {
            nodeInSelectList = ((SqlCall) nodeInSelectList).getOperandList().get(0);
        }
        if (nodeInSelectList.getKind() != SqlKind.OVER) {
            continue;
        }
        SqlNode window = ((SqlCall) nodeInSelectList).operand(1);
        // The window may be referenced by name (a SqlIdentifier),
        // which is defined in the window list
        if (window instanceof SqlIdentifier) {
            // Expand the SqlIdentifier as the expression defined in the window list
            for (SqlNode sqlNode : sqlSelect.getWindowList()) {
                if (((SqlWindow) sqlNode).getDeclName().equalsDeep(window, false)) {
                    window = sqlNode;
                    break;
                }
            }
            assert !(window instanceof SqlIdentifier) : "Identifier should have been expanded as a window defined in the window list";
        }
        // In a SELECT-SCOPE, only a partition can be defined
        if (definedWindow == null) {
            definedWindow = window;
        } else {
            if (!definedWindow.equalsDeep(window, false)) {
                unsupportedOperatorCollector.setException(SqlUnsupportedException.ExceptionType.FUNCTION, "Multiple window definitions in a single SELECT list is not currently supported \n" + "See Apache Drill JIRA: DRILL-3196");
                throw new UnsupportedOperationException();
            }
        }
    }
}
Also used : SqlCall(org.apache.calcite.sql.SqlCall) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) SqlNode(org.apache.calcite.sql.SqlNode)
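
A small sketch (an assumed helper, not part of the Drill visitor) of the distinction the code above relies on: operand 1 of an OVER call is a SqlIdentifier when the query references a named window, and a SqlWindow when the window is written inline:

import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;

// Returns true when the OVER call points at a named window, e.g. "SUM(x) OVER w".
static boolean referencesNamedWindow(SqlCall overCall) {
    SqlNode window = overCall.operand(1);
    return window instanceof SqlIdentifier;
}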

Example 75 with SqlIdentifier

use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project drill by axbaretto.

the class DrillCompoundIdentifier method getAsSqlNode.

public SqlNode getAsSqlNode() {
    if (ids.size() == 1) {
        return new SqlIdentifier(Collections.singletonList(ids.get(0).value), ids.get(0).parserPos);
    }
    int startIndex;
    SqlNode node;
    if (ids.get(1).isArray()) {
        // handle everything post zero index as item operator.
        startIndex = 1;
        node = new SqlIdentifier(ImmutableList.of(ids.get(0).value), null, ids.get(0).parserPos, ImmutableList.of(ids.get(0).parserPos));
    } else {
        // handle everything post two index as item operator.
        startIndex = 2;
        node = new SqlIdentifier(
            // Replaces star by empty string. See SqlIdentifier#isStar()
            ImmutableList.of(ids.get(0).value, STAR_TO_EMPTY.apply(ids.get(1).value)),
            null,
            ids.get(0).parserPos,
            ImmutableList.of(ids.get(0).parserPos, ids.get(1).parserPos));
    }
    for (int i = startIndex; i < ids.size(); i++) {
        node = ids.get(i).getNode(node);
    }
    return node;
}
Also used : SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) SqlNode(org.apache.calcite.sql.SqlNode)
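
A minimal sketch (illustrative values) of the SqlIdentifier constructor used above, which takes the component names, an optional collation, the overall position, and one parser position per component:

import java.util.Arrays;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.parser.SqlParserPos;

// Two-part identifier "dfs.tbl" with a position recorded for each component.
SqlIdentifier tableRef = new SqlIdentifier(
    Arrays.asList("dfs", "tbl"),
    null, // no collation
    SqlParserPos.ZERO,
    Arrays.asList(SqlParserPos.ZERO, SqlParserPos.ZERO));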

Aggregations

SqlIdentifier (org.apache.calcite.sql.SqlIdentifier) 131
SqlNode (org.apache.calcite.sql.SqlNode) 84
RelDataType (org.apache.calcite.rel.type.RelDataType) 46
SqlNodeList (org.apache.calcite.sql.SqlNodeList) 43
ArrayList (java.util.ArrayList) 41
SqlCall (org.apache.calcite.sql.SqlCall) 32
BitString (org.apache.calcite.util.BitString) 28
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField) 21
SqlSelect (org.apache.calcite.sql.SqlSelect) 21
SqlParserPos (org.apache.calcite.sql.parser.SqlParserPos) 12
SchemaPlus (org.apache.calcite.schema.SchemaPlus) 11
SqlOperator (org.apache.calcite.sql.SqlOperator) 11
NlsString (org.apache.calcite.util.NlsString) 11
List (java.util.List) 9
Map (java.util.Map) 9
RelOptTable (org.apache.calcite.plan.RelOptTable) 9
RexNode (org.apache.calcite.rex.RexNode) 9
AbstractSchema (org.apache.drill.exec.store.AbstractSchema) 9
ImmutableList (com.google.common.collect.ImmutableList) 8
RelNode (org.apache.calcite.rel.RelNode) 7