Example 1 with SqlTableConstraint

Use of org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint in project flink by apache.

The class SqlCreateHiveTable, method unparse.

@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
    writer.keyword("CREATE");
    if (isTemporary()) {
        writer.keyword("TEMPORARY");
    }
    if (isExternal) {
        writer.keyword("EXTERNAL");
    }
    writer.keyword("TABLE");
    if (ifNotExists) {
        writer.keyword("IF NOT EXISTS");
    }
    getTableName().unparse(writer, leftPrec, rightPrec);
    // columns
    SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.create("sds"), "(", ")");
    unparseColumns(creationContext, origColList, writer, leftPrec, rightPrec);
    for (SqlTableConstraint tableConstraint : creationContext.constraints) {
        printIndent(writer);
        tableConstraint.getConstraintNameIdentifier().ifPresent(name -> {
            writer.keyword("CONSTRAINT");
            name.unparse(writer, leftPrec, rightPrec);
        });
        writer.keyword("PRIMARY KEY");
        SqlWriter.Frame pkFrame = writer.startList("(", ")");
        tableConstraint.getColumns().unparse(writer, leftPrec, rightPrec);
        writer.endList(pkFrame);
        creationContext.pkTrait.unparse(writer, leftPrec, rightPrec);
    }
    writer.newlineAndIndent();
    writer.endList(frame);
    // table comment
    getComment().ifPresent(c -> {
        writer.keyword("COMMENT");
        c.unparse(writer, leftPrec, rightPrec);
    });
    // partitions
    if (origPartColList.size() > 0) {
        writer.newlineAndIndent();
        writer.keyword("PARTITIONED BY");
        SqlWriter.Frame partitionedByFrame = writer.startList("(", ")");
        unparseColumns(creationContext, origPartColList, writer, leftPrec, rightPrec);
        writer.newlineAndIndent();
        writer.endList(partitionedByFrame);
    }
    // row format
    unparseRowFormat(writer, leftPrec, rightPrec);
    // stored as
    unparseStoredAs(writer, leftPrec, rightPrec);
    // location
    if (location != null) {
        writer.newlineAndIndent();
        writer.keyword("LOCATION");
        location.unparse(writer, leftPrec, rightPrec);
    }
    // properties
    if (originPropList.size() > 0) {
        writer.newlineAndIndent();
        writer.keyword("TBLPROPERTIES");
        unparsePropList(originPropList, writer, leftPrec, rightPrec);
    }
}
Also used: SqlWriter (org.apache.calcite.sql.SqlWriter), SqlTableConstraint (org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint)
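
For orientation, here is a minimal sketch of Hive-dialect DDL of the shape this unparse method re-emits, executed through the Table API. It assumes a HiveCatalog is registered and set as the current catalog (omitted below); the table name, columns, location, and property are hypothetical, and DISABLE NOVALIDATE is Hive's syntax for the constraint trait (pkTrait) unparsed above.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class HiveDdlSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Switch the parser to the Hive dialect so the statement below is parsed
        // into SqlCreateHiveTable (requires flink-connector-hive on the classpath).
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tEnv.executeSql(
                "CREATE EXTERNAL TABLE IF NOT EXISTS logs ("
                        + "  id BIGINT,"
                        + "  msg STRING,"
                        + "  PRIMARY KEY (id) DISABLE NOVALIDATE"
                        + ") "
                        + "COMMENT 'raw logs' "
                        + "PARTITIONED BY (dt STRING) "
                        + "STORED AS PARQUET "
                        + "LOCATION '/warehouse/logs' "
                        + "TBLPROPERTIES ('external.table.purge'='true')");
    }
}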

Example 2 with SqlTableConstraint

Use of org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint in project flink by apache.

The class SqlCreateTable, method unparse.

@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
    writer.keyword("CREATE");
    if (isTemporary()) {
        writer.keyword("TEMPORARY");
    }
    writer.keyword("TABLE");
    if (isIfNotExists()) {
        writer.keyword("IF NOT EXISTS");
    }
    tableName.unparse(writer, leftPrec, rightPrec);
    if (columnList.size() > 0 || tableConstraints.size() > 0 || watermark != null) {
        SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.create("sds"), "(", ")");
        for (SqlNode column : columnList) {
            printIndent(writer);
            column.unparse(writer, leftPrec, rightPrec);
        }
        if (tableConstraints.size() > 0) {
            for (SqlTableConstraint constraint : tableConstraints) {
                printIndent(writer);
                constraint.unparse(writer, leftPrec, rightPrec);
            }
        }
        if (watermark != null) {
            printIndent(writer);
            watermark.unparse(writer, leftPrec, rightPrec);
        }
        writer.newlineAndIndent();
        writer.endList(frame);
    }
    if (comment != null) {
        writer.newlineAndIndent();
        writer.keyword("COMMENT");
        comment.unparse(writer, leftPrec, rightPrec);
    }
    if (this.partitionKeyList.size() > 0) {
        writer.newlineAndIndent();
        writer.keyword("PARTITIONED BY");
        SqlWriter.Frame partitionedByFrame = writer.startList("(", ")");
        this.partitionKeyList.unparse(writer, leftPrec, rightPrec);
        writer.endList(partitionedByFrame);
        writer.newlineAndIndent();
    }
    if (this.propertyList.size() > 0) {
        writer.keyword("WITH");
        SqlWriter.Frame withFrame = writer.startList("(", ")");
        for (SqlNode property : propertyList) {
            printIndent(writer);
            property.unparse(writer, leftPrec, rightPrec);
        }
        writer.newlineAndIndent();
        writer.endList(withFrame);
    }
    if (this.tableLike != null) {
        writer.newlineAndIndent();
        this.tableLike.unparse(writer, leftPrec, rightPrec);
    }
}
Also used: SqlWriter (org.apache.calcite.sql.SqlWriter), SqlTableConstraint (org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint), SqlNode (org.apache.calcite.sql.SqlNode), ExtendedSqlNode (org.apache.flink.sql.parser.ExtendedSqlNode)
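
For comparison, a minimal sketch of default-dialect DDL covering the optional clauses this unparse method emits: columns, a table constraint, a watermark, a comment, and a WITH clause. The table name, columns, and the datagen connector are hypothetical choices for illustration.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class FlinkDdlSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // This statement is parsed into SqlCreateTable; unparsing the resulting
        // node re-emits the same clause order as the method above.
        tEnv.executeSql(
                "CREATE TABLE orders ("
                        + "  order_id BIGINT NOT NULL,"
                        + "  order_time TIMESTAMP(3),"
                        + "  PRIMARY KEY (order_id) NOT ENFORCED,"
                        + "  WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND"
                        + ") "
                        + "COMMENT 'orders stream' "
                        + "WITH ('connector' = 'datagen')");
    }
}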

Example 3 with SqlTableConstraint

Use of org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint in project flink by apache.

The class SqlCreateTable, method getFullConstraints.

/**
 * Returns the column constraints plus the table constraints.
 */
public List<SqlTableConstraint> getFullConstraints() {
    List<SqlTableConstraint> ret = new ArrayList<>();
    this.columnList.forEach(column -> {
        SqlTableColumn tableColumn = (SqlTableColumn) column;
        if (tableColumn instanceof SqlRegularColumn) {
            SqlRegularColumn regularColumn = (SqlRegularColumn) tableColumn;
            regularColumn.getConstraint().map(ret::add);
        }
    });
    ret.addAll(this.tableConstraints);
    return ret;
}
Also used: SqlRegularColumn (org.apache.flink.sql.parser.ddl.SqlTableColumn.SqlRegularColumn), ArrayList (java.util.ArrayList), SqlTableConstraint (org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint)
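
This accessor matters because a primary key can be declared either inline on a column or as a table-level constraint; getFullConstraints flattens both into one list. A minimal sketch of the typical lookup, mirroring Example 5 below, assuming an already-parsed SqlCreateTable:

import java.util.Optional;

import org.apache.flink.sql.parser.ddl.SqlCreateTable;
import org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint;

class PrimaryKeyLookup {
    // How createTable was parsed is out of scope here; the point is that
    // column-level and table-level constraints come back in a single list.
    static Optional<SqlTableConstraint> findPrimaryKey(SqlCreateTable createTable) {
        return createTable.getFullConstraints().stream()
                .filter(SqlTableConstraint::isPrimaryKey)
                .findAny();
    }
}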

Example 4 with SqlTableConstraint

Use of org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint in project flink by apache.

The class SqlToOperationConverter, method convertAlterTable.

/**
 * Converts an ALTER TABLE statement.
 */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(String.format("Table %s doesn't exist or is a temporary table.", tableIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogView) {
        throw new ValidationException("ALTER TABLE for a view is not allowed");
    }
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        UnresolvedIdentifier newUnresolvedIdentifier = UnresolvedIdentifier.of(((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableOptions) {
        return convertAlterTableOptions(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableOptions) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableReset) {
        return convertAlterTableReset(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableReset) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableAddConstraint) {
        SqlTableConstraint constraint = ((SqlAlterTableAddConstraint) sqlAlterTable).getConstraint();
        validateTableConstraint(constraint);
        TableSchema oriSchema = TableSchema.fromResolvedSchema(baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        // Sanity check for constraint.
        TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriSchema);
        if (constraint.getConstraintName().isPresent()) {
            builder.primaryKey(constraint.getConstraintName().get(), constraint.getColumnNames());
        } else {
            builder.primaryKey(constraint.getColumnNames());
        }
        builder.build();
        return new AlterTableAddConstraintOperation(tableIdentifier, constraint.getConstraintName().orElse(null), constraint.getColumnNames());
    } else if (sqlAlterTable instanceof SqlAlterTableDropConstraint) {
        SqlAlterTableDropConstraint dropConstraint = ((SqlAlterTableDropConstraint) sqlAlterTable);
        String constraintName = dropConstraint.getConstraintName().getSimple();
        TableSchema oriSchema = TableSchema.fromResolvedSchema(baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        if (!oriSchema.getPrimaryKey().filter(pk -> pk.getName().equals(constraintName)).isPresent()) {
            throw new ValidationException(String.format("CONSTRAINT [%s] does not exist", constraintName));
        }
        return new AlterTableDropConstraintOperation(tableIdentifier, constraintName);
    } else if (sqlAlterTable instanceof SqlAddReplaceColumns) {
        return OperationConverterUtils.convertAddReplaceColumns(tableIdentifier, (SqlAddReplaceColumns) sqlAlterTable, (CatalogTable) baseTable, flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlChangeColumn) {
        return OperationConverterUtils.convertChangeColumn(tableIdentifier, (SqlChangeColumn) sqlAlterTable, (CatalogTable) baseTable, flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlAddPartitions) {
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        List<CatalogPartition> partitions = new ArrayList<>();
        SqlAddPartitions addPartitions = (SqlAddPartitions) sqlAlterTable;
        for (int i = 0; i < addPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(addPartitions.getPartitionKVs(i)));
            Map<String, String> props = OperationConverterUtils.extractProperties(addPartitions.getPartProps().get(i));
            partitions.add(new CatalogPartitionImpl(props, null));
        }
        return new AddPartitionsOperation(tableIdentifier, addPartitions.ifNotExists(), specs, partitions);
    } else if (sqlAlterTable instanceof SqlDropPartitions) {
        SqlDropPartitions dropPartitions = (SqlDropPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        for (int i = 0; i < dropPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(dropPartitions.getPartitionKVs(i)));
        }
        return new DropPartitionsOperation(tableIdentifier, dropPartitions.ifExists(), specs);
    } else if (sqlAlterTable instanceof SqlAlterTableCompact) {
        return convertAlterTableCompact(tableIdentifier, optionalCatalogTable.get(), (SqlAlterTableCompact) sqlAlterTable);
    } else {
        throw new ValidationException(String.format("[%s] needs to implement", sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used:
ValidationException (org.apache.flink.table.api.ValidationException)
TableSchema (org.apache.flink.table.api.TableSchema)
SqlAlterTableReset (org.apache.flink.sql.parser.ddl.SqlAlterTableReset)
ArrayList (java.util.ArrayList)
AlterTableAddConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation)
SqlAlterTableRename (org.apache.flink.sql.parser.ddl.SqlAlterTableRename)
AlterTableDropConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation)
SqlAddReplaceColumns (org.apache.flink.sql.parser.ddl.SqlAddReplaceColumns)
SqlAddPartitions (org.apache.flink.sql.parser.ddl.SqlAddPartitions)
SqlAlterTableAddConstraint (org.apache.flink.sql.parser.ddl.SqlAlterTableAddConstraint)
SqlAlterTableCompact (org.apache.flink.sql.parser.ddl.SqlAlterTableCompact)
List (java.util.List)
SqlNodeList (org.apache.calcite.sql.SqlNodeList)
SqlTableConstraint (org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint)
CatalogView (org.apache.flink.table.catalog.CatalogView)
AddPartitionsOperation (org.apache.flink.table.operations.ddl.AddPartitionsOperation)
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec)
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
DropPartitionsOperation (org.apache.flink.table.operations.ddl.DropPartitionsOperation)
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier)
CatalogTable (org.apache.flink.table.catalog.CatalogTable)
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable)
RelHint (org.apache.calcite.rel.hint.RelHint)
SqlAlterTableDropConstraint (org.apache.flink.sql.parser.ddl.SqlAlterTableDropConstraint)
SqlChangeColumn (org.apache.flink.sql.parser.ddl.SqlChangeColumn)
SqlAlterTableOptions (org.apache.flink.sql.parser.ddl.SqlAlterTableOptions)
AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation)
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)
Map (java.util.Map)
LinkedHashMap (java.util.LinkedHashMap)
HashMap (java.util.HashMap)
CatalogPartitionImpl (org.apache.flink.table.catalog.CatalogPartitionImpl)
SqlDropPartitions (org.apache.flink.sql.parser.ddl.SqlDropPartitions)
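
To make the dispatch above concrete, here is a minimal sketch of statements exercising the rename, options, and constraint branches. It assumes a permanent catalog table named orders already exists and has no primary key yet (temporary tables and views are rejected, as shown above); the table names and the option key are hypothetical.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterTableSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // SqlAlterTableRename -> AlterTableRenameOperation
        tEnv.executeSql("ALTER TABLE orders RENAME TO orders_v2");
        // SqlAlterTableOptions -> convertAlterTableOptions
        tEnv.executeSql("ALTER TABLE orders_v2 SET ('scan.startup.mode' = 'latest-offset')");
        // SqlAlterTableAddConstraint -> AlterTableAddConstraintOperation
        tEnv.executeSql(
                "ALTER TABLE orders_v2 ADD CONSTRAINT pk PRIMARY KEY (order_id) NOT ENFORCED");
        // SqlAlterTableDropConstraint -> AlterTableDropConstraintOperation
        tEnv.executeSql("ALTER TABLE orders_v2 DROP CONSTRAINT pk");
    }
}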

Example 5 with SqlTableConstraint

Use of org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint in project flink by apache.

The class SqlCreateTableConverter, method createCatalogTable.

private CatalogTable createCatalogTable(SqlCreateTable sqlCreateTable) {
    final TableSchema sourceTableSchema;
    final List<String> sourcePartitionKeys;
    final List<SqlTableLike.SqlTableLikeOption> likeOptions;
    final Map<String, String> sourceProperties;
    if (sqlCreateTable.getTableLike().isPresent()) {
        SqlTableLike sqlTableLike = sqlCreateTable.getTableLike().get();
        CatalogTable table = lookupLikeSourceTable(sqlTableLike);
        sourceTableSchema = TableSchema.fromResolvedSchema(table.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        sourcePartitionKeys = table.getPartitionKeys();
        likeOptions = sqlTableLike.getOptions();
        sourceProperties = table.getOptions();
    } else {
        sourceTableSchema = TableSchema.builder().build();
        sourcePartitionKeys = Collections.emptyList();
        likeOptions = Collections.emptyList();
        sourceProperties = Collections.emptyMap();
    }
    Map<SqlTableLike.FeatureOption, SqlTableLike.MergingStrategy> mergingStrategies = mergeTableLikeUtil.computeMergingStrategies(likeOptions);
    Map<String, String> mergedOptions = mergeOptions(sqlCreateTable, sourceProperties, mergingStrategies);
    Optional<SqlTableConstraint> primaryKey = sqlCreateTable.getFullConstraints().stream().filter(SqlTableConstraint::isPrimaryKey).findAny();
    TableSchema mergedSchema = mergeTableLikeUtil.mergeTables(mergingStrategies, sourceTableSchema, sqlCreateTable.getColumnList().getList(), sqlCreateTable.getWatermark().map(Collections::singletonList).orElseGet(Collections::emptyList), primaryKey.orElse(null));
    List<String> partitionKeys = mergePartitions(sourcePartitionKeys, sqlCreateTable.getPartitionKeyList(), mergingStrategies);
    verifyPartitioningColumnsExist(mergedSchema, partitionKeys);
    String tableComment = sqlCreateTable.getComment().map(comment -> comment.getNlsString().getValue()).orElse(null);
    return new CatalogTableImpl(mergedSchema, partitionKeys, mergedOptions, tableComment);
}
Also used:
CatalogManager (org.apache.flink.table.catalog.CatalogManager)
Arrays (java.util.Arrays)
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier)
SqlTableOption (org.apache.flink.sql.parser.ddl.SqlTableOption)
CatalogTable (org.apache.flink.table.catalog.CatalogTable)
HashMap (java.util.HashMap)
Function (java.util.function.Function)
SqlNode (org.apache.calcite.sql.SqlNode)
Map (java.util.Map)
SqlIdentifier (org.apache.calcite.sql.SqlIdentifier)
SqlCreateTable (org.apache.flink.sql.parser.ddl.SqlCreateTable)
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl)
Operation (org.apache.flink.table.operations.Operation)
SqlTableConstraint (org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint)
TableSchema (org.apache.flink.table.api.TableSchema)
Collectors (java.util.stream.Collectors)
Consumer (java.util.function.Consumer)
SqlTableLike (org.apache.flink.sql.parser.ddl.SqlTableLike)
List (java.util.List)
ValidationException (org.apache.flink.table.api.ValidationException)
FlinkCalciteSqlValidator (org.apache.flink.table.planner.calcite.FlinkCalciteSqlValidator)
Optional (java.util.Optional)
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation)
SqlNodeList (org.apache.calcite.sql.SqlNodeList)
Collections (java.util.Collections)
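
A minimal sketch of a CREATE TABLE ... LIKE statement that drives this merge logic: the options in the LIKE clause select the merging strategies, and the primary key found via getFullConstraints is handed to mergeTables. The table names and connectors are hypothetical.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CreateTableLikeSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.executeSql(
                "CREATE TABLE orders ("
                        + "  order_id BIGINT NOT NULL,"
                        + "  PRIMARY KEY (order_id) NOT ENFORCED"
                        + ") WITH ('connector' = 'datagen')");
        // EXCLUDING OPTIONS drops the datagen options (replaced by the WITH clause);
        // INCLUDING CONSTRAINTS carries the primary key into the derived table.
        tEnv.executeSql(
                "CREATE TABLE orders_archive "
                        + "WITH ('connector' = 'blackhole') "
                        + "LIKE orders (EXCLUDING OPTIONS, INCLUDING CONSTRAINTS)");
    }
}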

Aggregations

SqlTableConstraint (org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint): 5
ArrayList (java.util.ArrayList): 2
HashMap (java.util.HashMap): 2
List (java.util.List): 2
Map (java.util.Map): 2
SqlNode (org.apache.calcite.sql.SqlNode): 2
SqlNodeList (org.apache.calcite.sql.SqlNodeList): 2
SqlWriter (org.apache.calcite.sql.SqlWriter): 2
TableSchema (org.apache.flink.table.api.TableSchema): 2
ValidationException (org.apache.flink.table.api.ValidationException): 2
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 2
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 2
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 2
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 2
Arrays (java.util.Arrays): 1
Collections (java.util.Collections): 1
LinkedHashMap (java.util.LinkedHashMap): 1
Optional (java.util.Optional): 1
Consumer (java.util.function.Consumer): 1
Function (java.util.function.Function): 1