Example 6 with Column

Use of org.apache.flink.table.catalog.Column in project flink by apache.

The class ColumnJsonDeserializer, method deserialize:

@Override
public Column deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    String columnName = jsonNode.required(NAME).asText();
    String columnKind = Optional.ofNullable(jsonNode.get(KIND)).map(JsonNode::asText).orElse(KIND_PHYSICAL);
    Column column;
    switch(columnKind) {
        case KIND_PHYSICAL:
            column = deserializePhysicalColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_COMPUTED:
            column = deserializeComputedColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_METADATA:
            column = deserializeMetadataColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        default:
            throw new ValidationException(String.format("Cannot recognize column type '%s'. Allowed types: %s.", columnKind, SUPPORTED_KINDS));
    }
    return column.withComment(
            deserializeOptionalField(jsonNode, COMMENT, String.class, jsonParser.getCodec(), ctx)
                    .orElse(null));
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) ObjectNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode) Column(org.apache.flink.table.catalog.Column)
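
For context, here is a minimal sketch of the kind-dispatch pattern the deserializer above relies on, written against plain Jackson. The sample JSON shape and field names are illustrative assumptions, not Flink's actual compiled-plan format:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class KindDispatchSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Hypothetical column JSON; the real plan format may differ.
        String json = "{\"name\":\"user_id\",\"kind\":\"PHYSICAL\"}";
        JsonNode node = mapper.readTree(json);
        // required(...) throws if the field is absent, like jsonNode.required(NAME) above.
        String name = node.required("name").asText();
        // A missing "kind" falls back to PHYSICAL, mirroring the Optional chain above.
        String kind = node.has("kind") ? node.get("kind").asText() : "PHYSICAL";
        System.out.println(name + " is a " + kind + " column");
    }
}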

Example 7 with Column

Use of org.apache.flink.table.catalog.Column in project flink by apache.

The class ContextResolvedTableJsonDeserializer, method areResolvedSchemasEqual:

private boolean areResolvedSchemasEqual(ResolvedSchema schemaFromPlan, ResolvedSchema schemaFromCatalog) {
    // For schema equality we check:
    // * Columns size and order
    // * For each column: name, kind (class) and type
    // * Primary key equality
    List<Column> columnsFromPlan = schemaFromPlan.getColumns();
    List<Column> columnsFromCatalog = schemaFromCatalog.getColumns();
    if (columnsFromPlan.size() != columnsFromCatalog.size()) {
        return false;
    }
    for (int i = 0; i < columnsFromPlan.size(); i++) {
        Column columnFromPlan = columnsFromPlan.get(i);
        Column columnFromCatalog = columnsFromCatalog.get(i);
        if (!Objects.equals(columnFromPlan.getName(), columnFromCatalog.getName())
                || !Objects.equals(columnFromPlan.getClass(), columnFromCatalog.getClass())
                || !Objects.equals(columnFromPlan.getDataType(), columnFromCatalog.getDataType())) {
            return false;
        }
    }
    return Objects.equals(schemaFromPlan.getPrimaryKey(), schemaFromCatalog.getPrimaryKey());
}
Also used : Column(org.apache.flink.table.catalog.Column)
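
The column-order sensitivity of this check is easy to demonstrate. A small sketch, assuming flink-table-api-java on the classpath; the column names and types are illustrative:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;

public class SchemaOrderSketch {
    public static void main(String[] args) {
        ResolvedSchema a = ResolvedSchema.of(
                Column.physical("id", DataTypes.BIGINT()),
                Column.physical("name", DataTypes.STRING()));
        ResolvedSchema b = ResolvedSchema.of(
                Column.physical("name", DataTypes.STRING()),
                Column.physical("id", DataTypes.BIGINT()));
        // Same columns in a different order: the index-by-index comparison
        // in areResolvedSchemasEqual would report these as not equal.
        System.out.println(a.getColumns().equals(b.getColumns())); // false
    }
}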

Example 8 with Column

Use of org.apache.flink.table.catalog.Column in project flink by apache.

The class TableSchema, method fromResolvedSchema:

/**
 * Helps to migrate from the new {@link ResolvedSchema} to the old API methods.
 */
public static TableSchema fromResolvedSchema(ResolvedSchema resolvedSchema) {
    final TableSchema.Builder builder = TableSchema.builder();
    resolvedSchema.getColumns().stream().map(column -> {
        if (column instanceof Column.PhysicalColumn) {
            final Column.PhysicalColumn c = (Column.PhysicalColumn) column;
            return TableColumn.physical(c.getName(), c.getDataType());
        } else if (column instanceof Column.MetadataColumn) {
            final Column.MetadataColumn c = (Column.MetadataColumn) column;
            return TableColumn.metadata(c.getName(), c.getDataType(), c.getMetadataKey().orElse(null), c.isVirtual());
        } else if (column instanceof Column.ComputedColumn) {
            final Column.ComputedColumn c = (Column.ComputedColumn) column;
            return TableColumn.computed(c.getName(), c.getDataType(), c.getExpression().asSerializableString());
        }
        throw new IllegalArgumentException("Unsupported column type: " + column);
    }).forEach(builder::add);
    resolvedSchema
            .getWatermarkSpecs()
            .forEach(
                    spec ->
                            builder.watermark(
                                    spec.getRowtimeAttribute(),
                                    spec.getWatermarkExpression().asSerializableString(),
                                    spec.getWatermarkExpression().getOutputDataType()));
    resolvedSchema
            .getPrimaryKey()
            .ifPresent(
                    pk -> builder.primaryKey(pk.getName(), pk.getColumns().toArray(new String[0])));
    return builder.build();
}
Also used : IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) ROW(org.apache.flink.table.api.DataTypes.ROW) Column(org.apache.flink.table.catalog.Column) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) MetadataColumn(org.apache.flink.table.api.TableColumn.MetadataColumn) PublicEvolving(org.apache.flink.annotation.PublicEvolving) HashMap(java.util.HashMap) PhysicalColumn(org.apache.flink.table.api.TableColumn.PhysicalColumn) Function(java.util.function.Function) TypeConversions.fromDataTypeToLegacyInfo(org.apache.flink.table.types.utils.TypeConversions.fromDataTypeToLegacyInfo) ArrayList(java.util.ArrayList) Map(java.util.Map) TypeConversions.fromLegacyInfoToDataType(org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) FIELD(org.apache.flink.table.api.DataTypes.FIELD) Nullable(javax.annotation.Nullable) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) CompositeType(org.apache.flink.api.common.typeutils.CompositeType) LogicalTypeChecks.isCompositeType(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isCompositeType) UUID(java.util.UUID) Preconditions(org.apache.flink.util.Preconditions) StringUtils(org.apache.flink.util.StringUtils) Collectors(java.util.stream.Collectors) Field(org.apache.flink.table.api.DataTypes.Field) Objects(java.util.Objects) LogicalTypeChecks.canBeTimeAttributeType(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.canBeTimeAttributeType) LegacyTypeInformationType(org.apache.flink.table.types.logical.LegacyTypeInformationType) List(java.util.List) LogicalType(org.apache.flink.table.types.logical.LogicalType) Optional(java.util.Optional) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) Row(org.apache.flink.types.Row) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) Collections(java.util.Collections) LogicalTypeChecks(org.apache.flink.table.types.logical.utils.LogicalTypeChecks)
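
A usage sketch for this migration helper, assuming flink-table-api-java on the classpath. The columns are illustrative; a computed column is omitted because constructing one requires a ResolvedExpression:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;

public class FromResolvedSchemaSketch {
    public static void main(String[] args) {
        ResolvedSchema resolved = ResolvedSchema.of(
                // physical column backed by data
                Column.physical("id", DataTypes.BIGINT()),
                // virtual metadata column reading the "timestamp" key
                Column.metadata("ts", DataTypes.TIMESTAMP(3), "timestamp", true));
        // Bridges to the deprecated TableSchema API shown above.
        TableSchema legacy = TableSchema.fromResolvedSchema(resolved);
        System.out.println(legacy);
    }
}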

Example 9 with Column

Use of org.apache.flink.table.catalog.Column in project flink by apache.

The class TableEnvironmentImpl, method executeInternal:

private TableResultInternal executeInternal(List<Transformation<?>> transformations, List<String> sinkIdentifierNames) {
    final String defaultJobName = "insert-into_" + String.join(",", sinkIdentifierNames);
    Pipeline pipeline = execEnv.createPipeline(transformations, tableConfig.getConfiguration(), defaultJobName);
    try {
        JobClient jobClient = execEnv.executeAsync(pipeline);
        final List<Column> columns = new ArrayList<>();
        Long[] affectedRowCounts = new Long[transformations.size()];
        for (int i = 0; i < transformations.size(); ++i) {
            // use sink identifier name as field name
            columns.add(Column.physical(sinkIdentifierNames.get(i), DataTypes.BIGINT()));
            affectedRowCounts[i] = -1L;
        }
        return TableResultImpl.builder()
                .jobClient(jobClient)
                .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                .schema(ResolvedSchema.of(columns))
                .resultProvider(new InsertResultProvider(affectedRowCounts).setJobClient(jobClient))
                .build();
    } catch (Exception e) {
        throw new TableException("Failed to execute sql", e);
    }
}
Also used : TableException(org.apache.flink.table.api.TableException) Column(org.apache.flink.table.catalog.Column) ArrayList(java.util.ArrayList) JobClient(org.apache.flink.core.execution.JobClient) FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) DatabaseNotEmptyException(org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) SqlParserException(org.apache.flink.table.api.SqlParserException) ValidationException(org.apache.flink.table.api.ValidationException) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) Pipeline(org.apache.flink.api.dag.Pipeline)
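
executeInternal is internal, but its effect is visible through the public executeSql path. A runnable sketch, assuming the datagen and blackhole connectors that ship with Flink are available:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

public class InsertResultSketch {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        env.executeSql("CREATE TABLE src (x INT) WITH ('connector' = 'datagen', 'number-of-rows' = '5')");
        env.executeSql("CREATE TABLE snk (x INT) WITH ('connector' = 'blackhole')");
        TableResult result = env.executeSql("INSERT INTO snk SELECT x FROM src");
        // The result schema is built as in executeInternal above:
        // one BIGINT column named after each sink identifier.
        System.out.println(result.getResolvedSchema());
    }
}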

Example 10 with Column

Use of org.apache.flink.table.catalog.Column in project flink by apache.

The class TableauStyle, method columnWidthsByType:

// Package private and private static methods to deal with complexity of string writing and
// formatting
/**
 * Tries to derive column widths based on the column types. If the result set is not small enough
 * to be stored in Java heap memory, we cannot determine column widths based on column values.
 */
static int[] columnWidthsByType(List<Column> columns, int maxColumnWidth, boolean printNullAsEmpty, boolean printRowKind) {
    // fill width with field names first
    final int[] colWidths = columns.stream().mapToInt(col -> col.getName().length()).toArray();
    // determine proper column width based on types
    for (int i = 0; i < columns.size(); ++i) {
        LogicalType type = columns.get(i).getDataType().getLogicalType();
        int len;
        switch(type.getTypeRoot()) {
            case TINYINT:
                // extra for negative value
                len = TinyIntType.PRECISION + 1;
                break;
            case SMALLINT:
                // extra for negative value
                len = SmallIntType.PRECISION + 1;
                break;
            case INTEGER:
                // extra for negative value
                len = IntType.PRECISION + 1;
                break;
            case BIGINT:
                // extra for negative value
                len = BigIntType.PRECISION + 1;
                break;
            case DECIMAL:
                // extra for negative value and decimal point
                len = ((DecimalType) type).getPrecision() + 2;
                break;
            case BOOLEAN:
                // "true" or "false"
                len = 5;
                break;
            case DATE:
                // e.g. 9999-12-31
                len = 10;
                break;
            case TIME_WITHOUT_TIME_ZONE:
                int precision = ((TimeType) type).getPrecision();
                // 23:59:59[.999999999]
                len = precision == 0 ? 8 : precision + 9;
                break;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                precision = ((TimestampType) type).getPrecision();
                len = timestampTypeColumnWidth(precision);
                break;
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
                precision = ((LocalZonedTimestampType) type).getPrecision();
                len = timestampTypeColumnWidth(precision);
                break;
            default:
                len = maxColumnWidth;
        }
        // adjust column width with potential null values
        len = printNullAsEmpty ? len : Math.max(len, PrintStyle.NULL_VALUE.length());
        colWidths[i] = Math.max(colWidths[i], len);
    }
    // add an extra column for row kind if necessary
    if (printRowKind) {
        final int[] ret = new int[columns.size() + 1];
        ret[0] = ROW_KIND_COLUMN.length();
        System.arraycopy(colWidths, 0, ret, 1, columns.size());
        return ret;
    } else {
        return colWidths;
    }
}
Also used : LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) PrintWriter(java.io.PrintWriter) RowData(org.apache.flink.table.data.RowData) EncodingUtils(org.apache.flink.table.utils.EncodingUtils) UCharacter(com.ibm.icu.lang.UCharacter) Iterator(java.util.Iterator) TinyIntType(org.apache.flink.table.types.logical.TinyIntType) Column(org.apache.flink.table.catalog.Column) BigIntType(org.apache.flink.table.types.logical.BigIntType) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) IntType(org.apache.flink.table.types.logical.IntType) ArrayList(java.util.ArrayList) SmallIntType(org.apache.flink.table.types.logical.SmallIntType) TimestampType(org.apache.flink.table.types.logical.TimestampType) List(java.util.List) Stream(java.util.stream.Stream) DecimalType(org.apache.flink.table.types.logical.DecimalType) LogicalType(org.apache.flink.table.types.logical.LogicalType) Internal(org.apache.flink.annotation.Internal) TimeType(org.apache.flink.table.types.logical.TimeType) UProperty(com.ibm.icu.lang.UProperty) Collections(java.util.Collections) Nullable(javax.annotation.Nullable)
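
To see one of the width rules above in isolation, a small sketch that reproduces the DECIMAL branch for a single column; the column name and type are illustrative:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.types.logical.DecimalType;

public class ColumnWidthSketch {
    public static void main(String[] args) {
        Column col = Column.physical("price", DataTypes.DECIMAL(10, 2));
        DecimalType type = (DecimalType) col.getDataType().getLogicalType();
        // Same rule as the DECIMAL case above: precision plus room for a
        // minus sign and the decimal point.
        int byType = type.getPrecision() + 2;
        // The header also sets a lower bound, as in the mapToInt pass above.
        int width = Math.max(col.getName().length(), byType);
        System.out.println(width); // 12
    }
}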

Aggregations

Column (org.apache.flink.table.catalog.Column) 14
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema) 10
List (java.util.List) 9
DataType (org.apache.flink.table.types.DataType) 8
LogicalType (org.apache.flink.table.types.logical.LogicalType) 8
Collections (java.util.Collections) 7
ValidationException (org.apache.flink.table.api.ValidationException) 7
ArrayList (java.util.ArrayList) 6
Collectors (java.util.stream.Collectors) 6
Stream (java.util.stream.Stream) 6
RelDataType (org.apache.calcite.rel.type.RelDataType) 6
Internal (org.apache.flink.annotation.Internal) 6
Arrays (java.util.Arrays) 4
Map (java.util.Map) 4
Set (java.util.Set) 4
RexNode (org.apache.calcite.rex.RexNode) 4
ReadableConfig (org.apache.flink.configuration.ReadableConfig) 4
DataStream (org.apache.flink.streaming.api.datastream.DataStream) 4
TableConfig (org.apache.flink.table.api.TableConfig) 4
TableException (org.apache.flink.table.api.TableException) 4