Example 1 with SqlTableColumn

Use of org.apache.flink.sql.parser.ddl.SqlTableColumn in project flink by apache, in the class SqlCreateHiveTable, method extractPartColIdentifiers.

// Extract the identifiers from the partition column list -- that's what
// SqlCreateTable expects for partition keys.
private static SqlNodeList extractPartColIdentifiers(SqlNodeList partCols) {
    if (partCols == null) {
        return null;
    }
    SqlNodeList res = new SqlNodeList(partCols.getParserPosition());
    for (SqlNode node : partCols) {
        SqlTableColumn partCol = (SqlTableColumn) node;
        res.add(partCol.getName());
    }
    return res;
}
Also used : SqlNodeList (org.apache.calcite.sql.SqlNodeList), SqlNode (org.apache.calcite.sql.SqlNode), SqlTableColumn (org.apache.flink.sql.parser.ddl.SqlTableColumn)
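
For reference, here is a minimal, self-contained sketch of the same extraction pattern. It uses plain Calcite SqlIdentifier nodes in place of SqlTableColumn so it runs without the Flink parser module; the class name and the column names (dt, region) are hypothetical, not taken from Flink.

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.parser.SqlParserPos;

public class PartColExtractionSketch {

    // Mirrors extractPartColIdentifiers: walk a SqlNodeList and collect one
    // identifier per element into a new SqlNodeList.
    static SqlNodeList extractIdentifiers(SqlNodeList cols) {
        if (cols == null) {
            return null;
        }
        SqlNodeList res = new SqlNodeList(cols.getParserPosition());
        for (SqlNode node : cols) {
            // In the Flink code each node is cast to SqlTableColumn and the
            // identifier comes from getName(); here each element already is
            // an identifier.
            res.add((SqlIdentifier) node);
        }
        return res;
    }

    public static void main(String[] args) {
        SqlNodeList cols = new SqlNodeList(SqlParserPos.ZERO);
        // Hypothetical partition column names.
        cols.add(new SqlIdentifier("dt", SqlParserPos.ZERO));
        cols.add(new SqlIdentifier("region", SqlParserPos.ZERO));
        for (SqlNode id : extractIdentifiers(cols)) {
            // SqlIdentifier.toString prints the dotted name.
            System.out.println(id);
        }
    }
}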

Example 2 with SqlTableColumn

Use of org.apache.flink.sql.parser.ddl.SqlTableColumn in project flink by apache, in the class OperationConverterUtils, method convertAddReplaceColumns.

public static Operation convertAddReplaceColumns(
        ObjectIdentifier tableIdentifier,
        SqlAddReplaceColumns addReplaceColumns,
        CatalogTable catalogTable,
        SqlValidator sqlValidator) {
    // This is only used by the Hive dialect at the moment. In Hive, only
    // non-partition columns can be added/replaced, and users will only define
    // non-partition columns in the new column list. We therefore require that
    // partition columns appear last in the schema (which is in line with Hive).
    // Otherwise, we won't be able to determine the column positions after the
    // non-partition columns are replaced.
    TableSchema oldSchema = catalogTable.getSchema();
    int numPartCol = catalogTable.getPartitionKeys().size();
    Set<String> lastCols =
            oldSchema
                    .getTableColumns()
                    .subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount())
                    .stream()
                    .map(TableColumn::getName)
                    .collect(Collectors.toSet());
    if (!lastCols.equals(new HashSet<>(catalogTable.getPartitionKeys()))) {
        throw new ValidationException("ADD/REPLACE COLUMNS on partitioned tables requires partition columns to appear last");
    }
    // set non-partition columns
    TableSchema.Builder builder = TableSchema.builder();
    if (!addReplaceColumns.isReplace()) {
        List<TableColumn> nonPartCols = oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, catalogTable.getSchema());
    }
    for (SqlNode sqlNode : addReplaceColumns.getNewColumns()) {
        builder.add(toTableColumn((SqlTableColumn) sqlNode, sqlValidator));
    }
    // set partition columns
    List<TableColumn> partCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    // set properties
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(addReplaceColumns.getProperties()));
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    builder.build(),
                    catalogTable.getPartitionKeys(),
                    newProperties,
                    catalogTable.getComment()));
}
Also used : ValidationException (org.apache.flink.table.api.ValidationException), TableSchema (org.apache.flink.table.api.TableSchema), HashMap (java.util.HashMap), TableColumn (org.apache.flink.table.api.TableColumn), SqlTableColumn (org.apache.flink.sql.parser.ddl.SqlTableColumn), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), AlterTableSchemaOperation (org.apache.flink.table.operations.ddl.AlterTableSchemaOperation), HashSet (java.util.HashSet), SqlNode (org.apache.calcite.sql.SqlNode)
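
As a side note, the "partition columns must appear last" precondition that the method validates can be illustrated with a small, self-contained sketch over plain Java collections. The class name and the column names below are hypothetical, not part of Flink.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class TrailingPartColCheckSketch {

    // Returns true if the last partitionKeys.size() columns of allColumns are
    // exactly the partition keys -- the same invariant convertAddReplaceColumns
    // enforces before rebuilding the schema.
    static boolean partitionColumnsAppearLast(List<String> allColumns, List<String> partitionKeys) {
        int numPartCol = partitionKeys.size();
        if (numPartCol > allColumns.size()) {
            return false;
        }
        Set<String> lastCols = new HashSet<>(
                allColumns.subList(allColumns.size() - numPartCol, allColumns.size()));
        return lastCols.equals(new HashSet<>(partitionKeys));
    }

    public static void main(String[] args) {
        // Hypothetical schema: two data columns followed by one partition column.
        List<String> cols = Arrays.asList("id", "name", "dt");
        System.out.println(partitionColumnsAppearLast(cols, Arrays.asList("dt")));   // true
        System.out.println(partitionColumnsAppearLast(cols, Arrays.asList("name"))); // false
    }
}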
