
Example 6 with TableColumn

use of org.apache.flink.table.api.TableColumn in project flink-mirror by flink-ci.

the class SchemaValidator method deriveTableSinkSchema.

/**
 * Derives the table schema for a table sink. A sink ignores a proctime attribute and needs to
 * track the origin of a rowtime field.
 *
 * @deprecated This method combines two separate concepts of table schema and field mapping.
 *     This should be split into two methods once we have support for the corresponding
 *     interfaces (see FLINK-9870).
 */
@Deprecated
public static TableSchema deriveTableSinkSchema(DescriptorProperties properties) {
    TableSchema.Builder builder = TableSchema.builder();
    TableSchema tableSchema = properties.getTableSchema(SCHEMA);
    for (int i = 0; i < tableSchema.getFieldCount(); i++) {
        final TableColumn tableColumn = tableSchema.getTableColumns().get(i);
        final String fieldName = tableColumn.getName();
        final DataType dataType = tableColumn.getType();
        if (!tableColumn.isPhysical()) {
            // skip non-physical column
            continue;
        }
        boolean isProctime = properties.getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME).orElse(false);
        String tsType = SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE;
        boolean isRowtime = properties.containsKey(tsType);
        if (!isProctime && !isRowtime) {
            // check for aliasing
            String aliasName = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM).orElse(fieldName);
            builder.field(aliasName, dataType);
        } else if (isRowtime) {
            // only use the rowtime attribute if it references a field
            switch (properties.getString(tsType)) {
                case ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD:
                    String field = properties.getString(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_FROM);
                    builder.field(field, dataType);
                    break;
                default:
                    // other timestamp strategies would require writing the timestamp back into the output
                    throw new TableException(format(
                            "Unsupported rowtime type '%s' for sink table schema. Currently only '%s' is supported for table sinks.",
                            dataType, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD));
            }
        }
    }
    return builder.build();
}
Also used : TableException(org.apache.flink.table.api.TableException) TableSchema(org.apache.flink.table.api.TableSchema) DataType(org.apache.flink.table.types.DataType) TableColumn(org.apache.flink.table.api.TableColumn)
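
For reference, a minimal sketch of driving this method from raw descriptor properties. The key names follow the Schema/Rowtime descriptor constants referenced above ("schema", "rowtime.timestamps.type", "from-field", ...), but the field names and the "data-type" key spelling are assumptions for illustration, not taken from the example itself:

DescriptorProperties properties = new DescriptorProperties();
properties.putString("schema.0.name", "user_id");
properties.putString("schema.0.data-type", "BIGINT");
properties.putString("schema.1.name", "ts");
properties.putString("schema.1.data-type", "TIMESTAMP(3)");
// mark column 1 as a rowtime attribute derived from the physical field "event_time"
properties.putString("schema.1.rowtime.timestamps.type", "from-field");
properties.putString("schema.1.rowtime.timestamps.from", "event_time");

// the sink schema maps the rowtime column back to its origin: (user_id, event_time)
TableSchema sinkSchema = SchemaValidator.deriveTableSinkSchema(properties);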

Example 7 with TableColumn

use of org.apache.flink.table.api.TableColumn in project flink-mirror by flink-ci.

the class TableFormatFactoryBase method deriveSchema.

// --------------------------------------------------------------------------------------------
/**
 * Finds the table schema that can be used for a format schema (without time attributes and
 * generated columns).
 */
public static TableSchema deriveSchema(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties();
    descriptorProperties.putProperties(properties);
    final TableSchema.Builder builder = TableSchema.builder();
    final TableSchema tableSchema = descriptorProperties.getTableSchema(SCHEMA);
    for (int i = 0; i < tableSchema.getFieldCount(); i++) {
        final TableColumn tableColumn = tableSchema.getTableColumns().get(i);
        final String fieldName = tableColumn.getName();
        final DataType dataType = tableColumn.getType();
        if (!tableColumn.isPhysical()) {
            // skip non-physical columns
            continue;
        }
        final boolean isProctime = descriptorProperties.getOptionalBoolean(SCHEMA + '.' + i + '.' + SCHEMA_PROCTIME).orElse(false);
        final String timestampKey = SCHEMA + '.' + i + '.' + ROWTIME_TIMESTAMPS_TYPE;
        final boolean isRowtime = descriptorProperties.containsKey(timestampKey);
        if (!isProctime && !isRowtime) {
            // check for aliasing
            final String aliasName = descriptorProperties.getOptionalString(SCHEMA + '.' + i + '.' + SCHEMA_FROM).orElse(fieldName);
            builder.field(aliasName, dataType);
        } else if (isRowtime && descriptorProperties.isValue(timestampKey, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD)) {
            // only use the rowtime attribute if it references a field
            final String aliasName = descriptorProperties.getString(SCHEMA + '.' + i + '.' + ROWTIME_TIMESTAMPS_FROM);
            builder.field(aliasName, dataType);
        }
    }
    return builder.build();
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) DataType(org.apache.flink.table.types.DataType) TableColumn(org.apache.flink.table.api.TableColumn)
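
A minimal sketch of calling this from plain connector properties; the key spellings follow the same descriptor conventions as above and are assumptions for illustration:

Map<String, String> properties = new HashMap<>();
properties.put("schema.0.name", "user_id");
properties.put("schema.0.data-type", "BIGINT");
properties.put("schema.1.name", "event_time");
properties.put("schema.1.data-type", "TIMESTAMP(3)");
properties.put("schema.1.rowtime.timestamps.type", "from-field");
properties.put("schema.1.rowtime.timestamps.from", "ts");

// the format schema replaces the rowtime attribute with its source field:
// (user_id BIGINT, ts TIMESTAMP(3))
TableSchema formatSchema = TableFormatFactoryBase.deriveSchema(properties);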

Example 8 with TableColumn

use of org.apache.flink.table.api.TableColumn in project flink-mirror by flink-ci.

the class OperationConverterUtils method convertChangeColumn.

public static Operation convertChangeColumn(ObjectIdentifier tableIdentifier, SqlChangeColumn changeColumn, CatalogTable catalogTable, SqlValidator sqlValidator) {
    String oldName = changeColumn.getOldName().getSimple();
    if (catalogTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = catalogTable.getSchema();
    boolean first = changeColumn.isFirst();
    String after = changeColumn.getAfter() == null ? null : changeColumn.getAfter().getSimple();
    TableColumn newTableColumn = toTableColumn(changeColumn.getNewColumn(), sqlValidator);
    TableSchema newSchema = changeColumn(oldSchema, oldName, newTableColumn, first, after);
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(changeColumn.getProperties()));
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(newSchema, catalogTable.getPartitionKeys(), newProperties, catalogTable.getComment()));
// TODO: handle watermark and constraints
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) TableColumn(org.apache.flink.table.api.TableColumn) SqlTableColumn(org.apache.flink.sql.parser.ddl.SqlTableColumn)
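
The SqlChangeColumn and SqlValidator arguments come from a parsed ALTER TABLE ... CHANGE COLUMN statement, so a full call is hard to show in isolation. The sketch below only illustrates the catalog-side inputs and the partition-key guard, with made-up table and column names:

TableSchema schema = TableSchema.builder()
        .field("id", DataTypes.BIGINT())
        .field("name", DataTypes.STRING())
        .field("dt", DataTypes.STRING())
        .build();
CatalogTable table = new CatalogTableImpl(
        schema, Collections.singletonList("dt"), new HashMap<>(), "demo table");
// CHANGE COLUMN on "dt" hits the ValidationException branch above, because "dt"
// is a partition key; changing "name" instead proceeds to changeColumn(...).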

Example 9 with TableColumn

use of org.apache.flink.table.api.TableColumn in project flink-mirror by flink-ci.

the class OperationConverterUtils method convertAddReplaceColumns.

public static Operation convertAddReplaceColumns(ObjectIdentifier tableIdentifier, SqlAddReplaceColumns addReplaceColumns, CatalogTable catalogTable, SqlValidator sqlValidator) {
    // This is only used by the Hive dialect at the moment. In Hive, only non-partition
    // columns can be added/replaced, and users will only define non-partition columns in
    // the new column list. Therefore, we require that partition columns appear last in
    // the schema (which is in line with Hive). Otherwise, we won't be able to determine
    // the column positions after the non-partition columns are replaced.
    TableSchema oldSchema = catalogTable.getSchema();
    int numPartCol = catalogTable.getPartitionKeys().size();
    Set<String> lastCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount()).stream().map(TableColumn::getName).collect(Collectors.toSet());
    if (!lastCols.equals(new HashSet<>(catalogTable.getPartitionKeys()))) {
        throw new ValidationException("ADD/REPLACE COLUMNS on partitioned tables requires partition columns to appear last");
    }
    // set non-partition columns
    TableSchema.Builder builder = TableSchema.builder();
    if (!addReplaceColumns.isReplace()) {
        List<TableColumn> nonPartCols = oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, catalogTable.getSchema());
    }
    for (SqlNode sqlNode : addReplaceColumns.getNewColumns()) {
        builder.add(toTableColumn((SqlTableColumn) sqlNode, sqlValidator));
    }
    // set partition columns
    List<TableColumn> partCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    // set properties
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(addReplaceColumns.getProperties()));
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(builder.build(), catalogTable.getPartitionKeys(), newProperties, catalogTable.getComment()));
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) TableColumn(org.apache.flink.table.api.TableColumn) SqlTableColumn(org.apache.flink.sql.parser.ddl.SqlTableColumn) SqlTableColumn(org.apache.flink.sql.parser.ddl.SqlTableColumn) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) HashSet(java.util.HashSet) SqlNode(org.apache.calcite.sql.SqlNode)
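
A hand-built illustration of the "partition columns last" precondition (names are made up; the parser-side arguments are omitted, as above):

TableSchema schema = TableSchema.builder()
        .field("id", DataTypes.BIGINT())
        .field("value", DataTypes.STRING())
        // partition column: must occupy the trailing position(s)
        .field("dt", DataTypes.STRING())
        .build();
CatalogTable table = new CatalogTableImpl(
        schema, Collections.singletonList("dt"), new HashMap<>(), "demo table");
// lastCols = {"dt"} equals the partition keys, so the validation above passes;
// if "dt" were not last, the ValidationException would be thrown instead.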

Example 10 with TableColumn

use of org.apache.flink.table.api.TableColumn in project flink-mirror by flink-ci.

the class OperationConverterUtils method changeColumn.

// change a column in the old table schema and return the updated table schema
public static TableSchema changeColumn(TableSchema oldSchema, String oldName, TableColumn newTableColumn, boolean first, String after) {
    int oldIndex = Arrays.asList(oldSchema.getFieldNames()).indexOf(oldName);
    if (oldIndex < 0) {
        throw new ValidationException(String.format("Old column %s not found for CHANGE COLUMN", oldName));
    }
    List<TableColumn> tableColumns = oldSchema.getTableColumns();
    if ((!first && after == null) || oldName.equals(after)) {
        tableColumns.set(oldIndex, newTableColumn);
    } else {
        // need to change column position
        tableColumns.remove(oldIndex);
        if (first) {
            tableColumns.add(0, newTableColumn);
        } else {
            int newIndex = tableColumns.stream().map(TableColumn::getName).collect(Collectors.toList()).indexOf(after);
            if (newIndex < 0) {
                throw new ValidationException(String.format("After column %s not found for CHANGE COLUMN", after));
            }
            tableColumns.add(newIndex + 1, newTableColumn);
        }
    }
    TableSchema.Builder builder = TableSchema.builder();
    for (TableColumn column : tableColumns) {
        builder.add(column);
    }
    setWatermarkAndPK(builder, oldSchema);
    return builder.build();
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) TableColumn(org.apache.flink.table.api.TableColumn) SqlTableColumn(org.apache.flink.sql.parser.ddl.SqlTableColumn)
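
Since changeColumn only touches TableSchema and TableColumn, it is easy to exercise directly; a minimal sketch with made-up column names:

TableSchema oldSchema = TableSchema.builder()
        .field("a", DataTypes.INT())
        .field("b", DataTypes.STRING())
        .field("c", DataTypes.DOUBLE())
        .build();

// rename "b" to "b_renamed" and move it after "c"
TableSchema newSchema = OperationConverterUtils.changeColumn(
        oldSchema, "b", TableColumn.physical("b_renamed", DataTypes.STRING()), false, "c");
// resulting field order: a, c, b_renamed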

Aggregations

TableColumn (org.apache.flink.table.api.TableColumn) 24
TableSchema (org.apache.flink.table.api.TableSchema) 23
HashMap (java.util.HashMap) 14
ValidationException (org.apache.flink.table.api.ValidationException) 12
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl) 12
AlterTableSchemaOperation (org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) 12
SqlTableColumn (org.apache.flink.sql.parser.ddl.SqlTableColumn) 9
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint) 7
LinkedHashMap (java.util.LinkedHashMap) 6
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 6
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier) 6
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) 6
DataType (org.apache.flink.table.types.DataType) 6
HashSet (java.util.HashSet) 3
SqlNode (org.apache.calcite.sql.SqlNode) 3
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) 3
TableException (org.apache.flink.table.api.TableException) 3
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 3
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable) 3
ObjectPath (org.apache.flink.table.catalog.ObjectPath) 3