Example 56 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class ConnectorCatalogTable method calculateSourceSchema.

public static <T1> TableSchema calculateSourceSchema(TableSource<T1> source, boolean isBatch) {
    TableSchema tableSchema = source.getTableSchema();
    if (isBatch) {
        // batch sources carry no time attributes, so the schema can be used as-is
        return tableSchema;
    }
    // copy the field types so that time indicator columns can be swapped in below
    DataType[] types = Arrays.copyOf(tableSchema.getFieldDataTypes(), tableSchema.getFieldCount());
    String[] fieldNames = tableSchema.getFieldNames();
    if (source instanceof DefinedRowtimeAttributes) {
        updateRowtimeIndicators((DefinedRowtimeAttributes) source, fieldNames, types);
    }
    if (source instanceof DefinedProctimeAttribute) {
        updateProctimeIndicator((DefinedProctimeAttribute) source, fieldNames, types);
    }
    return TableSchema.builder().fields(fieldNames, types).build();
}
Also used : DefinedProctimeAttribute(org.apache.flink.table.sources.DefinedProctimeAttribute) TableSchema(org.apache.flink.table.api.TableSchema) DefinedRowtimeAttributes(org.apache.flink.table.sources.DefinedRowtimeAttributes) DataType(org.apache.flink.table.types.DataType) AtomicDataType(org.apache.flink.table.types.AtomicDataType)
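
For context, here is a minimal, hypothetical caller of calculateSourceSchema. The CsvTableSource, its path, and the field names are illustrative assumptions, not part of the snippet above; passing isBatch = false exercises the time-attribute handling.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.ConnectorCatalogTable;
import org.apache.flink.table.sources.CsvTableSource;

public class SourceSchemaDemo {
    public static void main(String[] args) {
        // Hypothetical CSV source; the path and fields are placeholders.
        CsvTableSource source = CsvTableSource.builder()
                .path("/tmp/orders.csv")
                .field("id", DataTypes.BIGINT())
                .field("ts", DataTypes.TIMESTAMP(3))
                .build();
        // isBatch = false, so rowtime/proctime indicators would be rewritten
        // if the source declared any (a plain CsvTableSource declares none).
        TableSchema streamingSchema = ConnectorCatalogTable.calculateSourceSchema(source, false);
        System.out.println(streamingSchema);
    }
}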

Example 57 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class SchemaValidator method deriveTableSinkSchema.

/**
 * Derives the table schema for a table sink. A sink ignores a proctime attribute and needs to
 * track the origin of a rowtime field.
 *
 * @deprecated This method combines two separate concepts of table schema and field mapping.
 *     This should be split into two methods once we have support for the corresponding
 *     interfaces (see FLINK-9870).
 */
@Deprecated
public static TableSchema deriveTableSinkSchema(DescriptorProperties properties) {
    TableSchema.Builder builder = TableSchema.builder();
    TableSchema tableSchema = properties.getTableSchema(SCHEMA);
    for (int i = 0; i < tableSchema.getFieldCount(); i++) {
        final TableColumn tableColumn = tableSchema.getTableColumns().get(i);
        final String fieldName = tableColumn.getName();
        final DataType dataType = tableColumn.getType();
        if (!tableColumn.isPhysical()) {
            // skip non-physical column
            continue;
        }
        boolean isProctime = properties.getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME).orElse(false);
        String tsType = SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE;
        boolean isRowtime = properties.containsKey(tsType);
        if (!isProctime && !isRowtime) {
            // check for aliasing
            String aliasName = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM).orElse(fieldName);
            builder.field(aliasName, dataType);
        } else if (isRowtime) {
            // only use the rowtime attribute if it references a field
            switch (properties.getString(tsType)) {
                case ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD:
                    String field = properties.getString(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_FROM);
                    builder.field(field, dataType);
                    break;
                default:
                    // other strategies would insert the timestamp into the output,
                    // which sinks do not support
                    throw new TableException(format(
                            "Unsupported rowtime type '%s' for sink table schema."
                                    + " Currently only '%s' is supported for table sinks.",
                            dataType, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD));
            }
        }
    }
    return builder.build();
}
Also used : TableException(org.apache.flink.table.api.TableException) TableSchema(org.apache.flink.table.api.TableSchema) DataType(org.apache.flink.table.types.DataType) TableColumn(org.apache.flink.table.api.TableColumn)
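
A sketch of how this method might be driven; the property setup below is an assumption for illustration. putTableSchema writes the schema.# keys that deriveTableSinkSchema reads back, and the literal "schema" stands in for the SCHEMA constant used above.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.descriptors.SchemaValidator;

public class SinkSchemaDemo {
    public static void main(String[] args) {
        DescriptorProperties properties = new DescriptorProperties();
        // "schema" is the value of the SCHEMA key constant referenced above.
        properties.putTableSchema("schema", TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .build());
        // No proctime/rowtime keys are set, so both fields pass through unchanged.
        TableSchema sinkSchema = SchemaValidator.deriveTableSinkSchema(properties);
        System.out.println(sinkSchema);
    }
}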

Example 58 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class CsvTableSourceFactoryBase method createTableSource.

protected CsvTableSource createTableSource(Boolean isStreaming, Map<String, String> properties) {
    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);
    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);
    // build
    CsvTableSource.Builder csvTableSourceBuilder = new CsvTableSource.Builder();
    TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));
    // if a format schema is explicitly defined, it is used regardless of whether
    // schema derivation is enabled
    final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
    if (hasSchema) {
        TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
        // Ignore conversion classes in DataType
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format(
                    "Encodings that differ from the schema are not supported yet for CsvTableSource,"
                            + " format schema is '%s', but table schema is '%s'.",
                    formatSchema, tableSchema));
        }
    }
    params.getOptionalString(CONNECTOR_PATH).ifPresent(csvTableSourceBuilder::path);
    params.getOptionalString(FORMAT_FIELD_DELIMITER).ifPresent(csvTableSourceBuilder::fieldDelimiter);
    params.getOptionalString(FORMAT_LINE_DELIMITER).ifPresent(csvTableSourceBuilder::lineDelimiter);
    for (int i = 0; i < tableSchema.getFieldCount(); ++i) {
        csvTableSourceBuilder.field(tableSchema.getFieldNames()[i], tableSchema.getFieldDataTypes()[i]);
    }
    params.getOptionalCharacter(FORMAT_QUOTE_CHARACTER).ifPresent(csvTableSourceBuilder::quoteCharacter);
    params.getOptionalString(FORMAT_COMMENT_PREFIX).ifPresent(csvTableSourceBuilder::commentPrefix);
    params.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreFirstLine();
        }
    });
    params.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreParseErrors();
        }
    });
    return csvTableSourceBuilder.build();
}
Also used : TableException(org.apache.flink.table.api.TableException) TableSchema(org.apache.flink.table.api.TableSchema) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) OldCsvValidator(org.apache.flink.table.descriptors.OldCsvValidator) FileSystemValidator(org.apache.flink.table.descriptors.FileSystemValidator) SchemaValidator(org.apache.flink.table.descriptors.SchemaValidator)
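
The protected method above is normally invoked through a concrete factory; below is a rough sketch of the flat property map it consumes. The key names follow the validator constants referenced in the snippet (CONNECTOR_PATH, FORMAT_FIELD_DELIMITER, and so on) and should be treated as assumptions for this Flink version, not a verified contract.

import java.util.HashMap;
import java.util.Map;

public class CsvSourceProperties {
    public static Map<String, String> build() {
        Map<String, String> props = new HashMap<>();
        // Connector section, checked by FileSystemValidator.
        props.put("connector.type", "filesystem");
        props.put("connector.path", "/tmp/orders.csv");
        // Format section, checked by OldCsvValidator.
        props.put("format.type", "csv");
        props.put("format.field-delimiter", "|");
        // Schema section, checked by SchemaValidator.
        props.put("schema.0.name", "id");
        props.put("schema.0.data-type", "BIGINT");
        props.put("schema.1.name", "name");
        props.put("schema.1.data-type", "STRING");
        return props;
    }
}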

Example 59 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class TableSchemaUtils method builderWithGivenSchema.

/**
 * Creates a builder pre-populated with the given table schema.
 *
 * @param oriSchema Original schema
 * @return a builder carrying all the information from the given schema
 */
public static TableSchema.Builder builderWithGivenSchema(TableSchema oriSchema) {
    TableSchema.Builder builder = builderWithGivenColumns(oriSchema.getTableColumns());
    // Copy watermark specification.
    for (WatermarkSpec wms : oriSchema.getWatermarkSpecs()) {
        builder.watermark(wms.getRowtimeAttribute(), wms.getWatermarkExpr(), wms.getWatermarkExprOutputType());
    }
    // Copy primary key constraint.
    oriSchema.getPrimaryKey().map(pk -> builder.primaryKey(pk.getName(), pk.getColumns().toArray(new String[0])));
    return builder;
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec)
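
A small, illustrative use of builderWithGivenSchema: clone an existing schema, including its watermark spec, and extend the copy. The field names and watermark expression are made up for this example.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.utils.TableSchemaUtils;

public class CopySchemaDemo {
    public static void main(String[] args) {
        TableSchema original = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("ts", DataTypes.TIMESTAMP(3))
                .watermark("ts", "ts - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                .build();
        // The builder starts from a full copy (columns, watermark, primary key),
        // so the copy can be extended without mutating the original.
        TableSchema extended = TableSchemaUtils.builderWithGivenSchema(original)
                .field("name", DataTypes.STRING())
                .build();
        System.out.println(extended);
    }
}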

Example 60 with TableSchema

use of org.apache.flink.table.api.TableSchema in project flink by apache.

the class TableSchemaUtils method dropConstraint.

/**
 * Creates a new schema with the constraint of the given name dropped.
 */
public static TableSchema dropConstraint(TableSchema oriSchema, String constraintName) {
    // Validate that a constraint with the given name exists.
    Optional<UniqueConstraint> uniqueConstraintOpt = oriSchema.getPrimaryKey();
    if (!uniqueConstraintOpt.isPresent() || !uniqueConstraintOpt.get().getName().equals(constraintName)) {
        throw new ValidationException(String.format("Constraint %s to drop does not exist", constraintName));
    }
    TableSchema.Builder builder = builderWithGivenColumns(oriSchema.getTableColumns());
    // Copy watermark specification.
    for (WatermarkSpec wms : oriSchema.getWatermarkSpecs()) {
        builder.watermark(wms.getRowtimeAttribute(), wms.getWatermarkExpr(), wms.getWatermarkExprOutputType());
    }
    return builder.build();
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint)
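
A hypothetical round trip through dropConstraint; the constraint name "PK_id" and the columns are assumptions. Passing any name other than the current primary key's would hit the ValidationException shown above.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.utils.TableSchemaUtils;

public class DropConstraintDemo {
    public static void main(String[] args) {
        TableSchema withPk = TableSchema.builder()
                .field("id", DataTypes.BIGINT().notNull())
                .field("name", DataTypes.STRING())
                .primaryKey("PK_id", new String[]{"id"})
                .build();
        TableSchema withoutPk = TableSchemaUtils.dropConstraint(withPk, "PK_id");
        // The rebuilt schema copies columns and watermarks but not the primary key.
        System.out.println(withoutPk.getPrimaryKey().isPresent()); // prints false
    }
}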

Aggregations

TableSchema (org.apache.flink.table.api.TableSchema) 86
Test (org.junit.Test) 54
HashMap (java.util.HashMap) 26
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl) 21
SqlNode (org.apache.calcite.sql.SqlNode) 19
ObjectPath (org.apache.flink.table.catalog.ObjectPath) 19
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 18
DataType (org.apache.flink.table.types.DataType) 16
ValidationException (org.apache.flink.table.api.ValidationException) 14
TableColumn (org.apache.flink.table.api.TableColumn) 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint) 10
ArrayList (java.util.ArrayList) 9
List (java.util.List) 9
Map (java.util.Map) 9
FeatureOption (org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption) 9
MergingStrategy (org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy) 9
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 8
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier) 8
Arrays (java.util.Arrays) 7
Configuration (org.apache.flink.configuration.Configuration) 7