Example 56 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class CanalJsonDecodingFormat, method createRuntimeDecoder:

@Override
public DeserializationSchema<RowData> createRuntimeDecoder(
        DynamicTableSource.Context context, DataType physicalDataType, int[][] projections) {
    // Narrow the physical type to the fields actually requested by the planner.
    physicalDataType = Projection.of(projections).project(physicalDataType);
    // Resolve each requested metadata key against the format's ReadableMetadata enum.
    final List<ReadableMetadata> readableMetadata =
            metadataKeys.stream()
                    .map(k -> Stream.of(ReadableMetadata.values())
                            .filter(rm -> rm.key.equals(k))
                            .findFirst()
                            .orElseThrow(IllegalStateException::new))
                    .collect(Collectors.toList());
    final List<DataTypes.Field> metadataFields =
            readableMetadata.stream()
                    .map(m -> DataTypes.FIELD(m.key, m.dataType))
                    .collect(Collectors.toList());
    // Produced type = projected physical columns + trailing metadata columns.
    final DataType producedDataType = DataTypeUtils.appendRowFields(physicalDataType, metadataFields);
    final TypeInformation<RowData> producedTypeInfo = context.createTypeInformation(producedDataType);
    return CanalJsonDeserializationSchema.builder(physicalDataType, readableMetadata, producedTypeInfo)
            .setDatabase(database)
            .setTable(table)
            .setIgnoreParseErrors(ignoreParseErrors)
            .setTimestampFormat(timestampFormat)
            .build();
}
Also used : MetadataConverter(org.apache.flink.formats.json.canal.CanalJsonDeserializationSchema.MetadataConverter) DataType(org.apache.flink.table.types.DataType) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) RowData(org.apache.flink.table.data.RowData) TimestampData(org.apache.flink.table.data.TimestampData) ChangelogMode(org.apache.flink.table.connector.ChangelogMode) DataTypes(org.apache.flink.table.api.DataTypes) TimestampFormat(org.apache.flink.formats.common.TimestampFormat) ProjectableDecodingFormat(org.apache.flink.table.connector.format.ProjectableDecodingFormat) Collectors(java.util.stream.Collectors) DeserializationSchema(org.apache.flink.api.common.serialization.DeserializationSchema) LinkedHashMap(java.util.LinkedHashMap) DecodingFormat(org.apache.flink.table.connector.format.DecodingFormat) List(java.util.List) GenericRowData(org.apache.flink.table.data.GenericRowData) Stream(java.util.stream.Stream) RowKind(org.apache.flink.types.RowKind) Map(java.util.Map) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) Collections(java.util.Collections) Nullable(javax.annotation.Nullable) Projection(org.apache.flink.table.connector.Projection) DataTypeUtils(org.apache.flink.table.types.utils.DataTypeUtils)
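
The ordering above matters: the pushed-down projection narrows the physical type first, and the requested metadata columns are then appended behind the remaining physical fields. Below is a minimal standalone sketch of that type derivation (not taken from the Flink sources); the field names are invented, and "ingestion-timestamp" stands in for one of the format's metadata keys.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.connector.Projection;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.utils.DataTypeUtils;

import java.util.Collections;

public class ProducedTypeSketch {
    public static void main(String[] args) {
        // Invented physical schema of the table.
        DataType physical = DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.BIGINT()),
                DataTypes.FIELD("name", DataTypes.STRING()),
                DataTypes.FIELD("price", DataTypes.DECIMAL(10, 2)));

        // Suppose the planner pushed down a projection keeping only "id" and "price".
        int[][] projections = {{0}, {2}};
        DataType projected = Projection.of(projections).project(physical);

        // Append one metadata column, mirroring a ReadableMetadata key/dataType pair.
        DataType produced = DataTypeUtils.appendRowFields(
                projected,
                Collections.singletonList(
                        DataTypes.FIELD("ingestion-timestamp", DataTypes.TIMESTAMP_LTZ(3))));

        // Prints something like:
        // ROW<`id` BIGINT, `price` DECIMAL(10, 2), `ingestion-timestamp` TIMESTAMP_LTZ(3)>
        System.out.println(produced);
    }
}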

Example 57 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class JsonFormatFactory, method createEncodingFormat:

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    JsonFormatOptionsUtil.validateEncodingFormatOptions(formatOptions);
    TimestampFormat timestampOption = JsonFormatOptionsUtil.getTimestampFormat(formatOptions);
    JsonFormatOptions.MapNullKeyMode mapNullKeyMode = JsonFormatOptionsUtil.getMapNullKeyMode(formatOptions);
    String mapNullKeyLiteral = formatOptions.get(MAP_NULL_KEY_LITERAL);
    final boolean encodeDecimalAsPlainNumber = formatOptions.get(ENCODE_DECIMAL_AS_PLAIN_NUMBER);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new JsonRowDataSerializationSchema(rowType, timestampOption, mapNullKeyMode, mapNullKeyLiteral, encodeDecimalAsPlainNumber);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
Also used : EncodingFormat(org.apache.flink.table.connector.format.EncodingFormat) RowData(org.apache.flink.table.data.RowData) DataType(org.apache.flink.table.types.DataType) RowType(org.apache.flink.table.types.logical.RowType) TimestampFormat(org.apache.flink.formats.common.TimestampFormat)
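
For context, here is a hedged sketch (not part of the factory itself) of the DDL that populates formatOptions; the connector settings and table name are invented, while the json.* keys are the documented option names read in createEncodingFormat.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class JsonFormatOptionsSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.executeSql(
                "CREATE TABLE orders_sink (\n"
                        + "  id BIGINT,\n"
                        + "  amount DECIMAL(10, 2),\n"
                        + "  ts TIMESTAMP(3)\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'orders',\n"
                        + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                        + "  'format' = 'json',\n"
                        // Each json.* option below surfaces in formatOptions above.
                        + "  'json.timestamp-format.standard' = 'ISO-8601',\n"
                        + "  'json.map-null-key.mode' = 'LITERAL',\n"
                        + "  'json.map-null-key.literal' = 'null',\n"
                        + "  'json.encode.decimal-as-plain-number' = 'true'\n"
                        + ")");
    }
}

Note that getChangelogMode() returns insertOnly(): plain JSON carries no change flags, so the planner rejects updating or retracting input into such a sink at validation time.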

Example 58 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class DependencyTest, method testTableFactoryDiscovery:

@Test
public void testTableFactoryDiscovery() throws Exception {
    final LocalExecutor executor = createLocalExecutor();
    try {
        final TableResult tableResult = executeSql(executor, SESSION_ID, "DESCRIBE TableNumber1");
        // Expected value first, actual second (JUnit's assertEquals contract).
        assertEquals(
                ResolvedSchema.physical(
                        new String[] {"name", "type", "null", "key", "extras", "watermark"},
                        new DataType[] {
                            DataTypes.STRING(), DataTypes.STRING(), DataTypes.BOOLEAN(),
                            DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING()
                        }),
                tableResult.getResolvedSchema());
        List<Row> schemaData =
                Arrays.asList(
                        Row.of("IntegerField1", "INT", true, null, null, null),
                        Row.of("StringField1", "STRING", true, null, null, null),
                        Row.of("rowtimeField", "TIMESTAMP(3) *ROWTIME*", true, null, null, "`rowtimeField`"));
        assertEquals(schemaData, CollectionUtil.iteratorToList(tableResult.collect()));
    } finally {
        executor.closeSession(SESSION_ID);
    }
}
Also used : TableResult(org.apache.flink.table.api.TableResult) DataType(org.apache.flink.table.types.DataType) Row(org.apache.flink.types.Row) Test(org.junit.Test)
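
A hedged standalone variant of the same round-trip, assuming a table named TableNumber1 has already been registered; it skips the SQL Client's LocalExecutor and session handling and simply prints the DESCRIBE rows.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

public class DescribeSketch {
    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Assumed to exist already: tEnv.executeSql("CREATE TABLE TableNumber1 (...)");
        // DESCRIBE yields one row per column: name, type, null, key, extras, watermark.
        try (CloseableIterator<Row> it =
                tEnv.executeSql("DESCRIBE TableNumber1").collect()) {
            it.forEachRemaining(System.out::println);
        }
    }
}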

Example 59 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class BaseMaterializedResultTest, method createInternalBinaryRowDataConverter:

static Function<Row, BinaryRowData> createInternalBinaryRowDataConverter(DataType dataType) {
    // Bridges external Row values into Flink's internal RowData representation.
    DataStructureConverter<Object, Object> converter = DataStructureConverters.getConverter(dataType);
    RowDataSerializer serializer = new RowDataSerializer((RowType) dataType.getLogicalType());
    // copy() is essential: toBinaryRow reuses an internal buffer across calls.
    return row -> serializer.toBinaryRow((RowData) converter.toInternalOrNull(row)).copy();
}
Also used : DataType(org.apache.flink.table.types.DataType) List(java.util.List) RowData(org.apache.flink.table.data.RowData) DataStructureConverter(org.apache.flink.table.data.conversion.DataStructureConverter) DataStructureConverters(org.apache.flink.table.data.conversion.DataStructureConverters) RowDataSerializer(org.apache.flink.table.runtime.typeutils.RowDataSerializer) Row(org.apache.flink.types.Row) Assertions.assertEquals(org.junit.jupiter.api.Assertions.assertEquals) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) RowType(org.apache.flink.table.types.logical.RowType) Function(java.util.function.Function) Collectors(java.util.stream.Collectors)
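
A hedged, self-contained version of the same conversion with the helper inlined; the row layout is invented, and the explicit open(...) call is a defensive addition, since some converters create their runtime machinery lazily.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.conversion.DataStructureConverter;
import org.apache.flink.table.data.conversion.DataStructureConverters;
import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.types.Row;

public class ConverterSketch {
    public static void main(String[] args) {
        // Invented row layout for illustration.
        DataType dataType = DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.INT()),
                DataTypes.FIELD("name", DataTypes.STRING()));
        DataStructureConverter<Object, Object> converter =
                DataStructureConverters.getConverter(dataType);
        // Defensive: ensure any lazily generated converter code is initialized.
        converter.open(Thread.currentThread().getContextClassLoader());
        RowDataSerializer serializer =
                new RowDataSerializer((RowType) dataType.getLogicalType());
        // Without copy(), successive results would alias the serializer's reuse buffer.
        BinaryRowData binary = serializer
                .toBinaryRow((RowData) converter.toInternalOrNull(Row.of(1, "flink")))
                .copy();
        System.out.println(binary.getArity()); // 2
    }
}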

Example 60 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class DescriptorProperties, method getOptionalTableSchema:

/**
 * Returns a table schema under the given key if it exists.
 */
public Optional<TableSchema> getOptionalTableSchema(String key) {
    // Count fields by matching keys of the form "<key>.<i>.name" ("<key>." is the prefix).
    final int fieldCount = properties.keySet().stream()
            .filter(k -> k.startsWith(key)
                    && SCHEMA_COLUMN_NAME_SUFFIX.matcher(k.substring(key.length() + 1)).matches())
            .mapToInt(k -> 1).sum();
    if (fieldCount == 0) {
        return Optional.empty();
    }
    // validate fields and build schema
    final TableSchema.Builder schemaBuilder = TableSchema.builder();
    for (int i = 0; i < fieldCount; i++) {
        final String nameKey = key + '.' + i + '.' + NAME;
        final String legacyTypeKey = key + '.' + i + '.' + TYPE;
        final String typeKey = key + '.' + i + '.' + DATA_TYPE;
        final String exprKey = key + '.' + i + '.' + EXPR;
        final String metadataKey = key + '.' + i + '.' + METADATA;
        final String virtualKey = key + '.' + i + '.' + VIRTUAL;
        final String name = optionalGet(nameKey).orElseThrow(exceptionSupplier(nameKey));
        final DataType type;
        if (containsKey(typeKey)) {
            type = getDataType(typeKey);
        } else if (containsKey(legacyTypeKey)) {
            type = TypeConversions.fromLegacyInfoToDataType(getType(legacyTypeKey));
        } else {
            throw exceptionSupplier(typeKey).get();
        }
        final Optional<String> expr = optionalGet(exprKey);
        final Optional<String> metadata = optionalGet(metadataKey);
        final boolean virtual = getOptionalBoolean(virtualKey).orElse(false);
        if (expr.isPresent()) {
            // computed column
            schemaBuilder.add(TableColumn.computed(name, type, expr.get()));
        } else if (metadata.isPresent()) {
            // metadata column
            final String metadataAlias = metadata.get();
            if (metadataAlias.equals(name)) {
                schemaBuilder.add(TableColumn.metadata(name, type, virtual));
            } else {
                schemaBuilder.add(TableColumn.metadata(name, type, metadataAlias, virtual));
            }
        } else {
            // physical column
            schemaBuilder.add(TableColumn.physical(name, type));
        }
    }
    // extract watermark information; count entries by their ".rowtime" suffix
    String watermarkPrefixKey = key + '.' + WATERMARK;
    final int watermarkCount = properties.keySet().stream()
            .filter(k -> k.startsWith(watermarkPrefixKey) && k.endsWith('.' + WATERMARK_ROWTIME))
            .mapToInt(k -> 1).sum();
    if (watermarkCount > 0) {
        for (int i = 0; i < watermarkCount; i++) {
            final String rowtimeKey = watermarkPrefixKey + '.' + i + '.' + WATERMARK_ROWTIME;
            final String exprKey = watermarkPrefixKey + '.' + i + '.' + WATERMARK_STRATEGY_EXPR;
            final String typeKey = watermarkPrefixKey + '.' + i + '.' + WATERMARK_STRATEGY_DATA_TYPE;
            final String rowtime = optionalGet(rowtimeKey).orElseThrow(exceptionSupplier(rowtimeKey));
            final String exprString = optionalGet(exprKey).orElseThrow(exceptionSupplier(exprKey));
            final String typeString = optionalGet(typeKey).orElseThrow(exceptionSupplier(typeKey));
            final DataType exprType = TypeConversions.fromLogicalToDataType(LogicalTypeParser.parse(typeString));
            schemaBuilder.watermark(rowtime, exprString, exprType);
        }
    }
    // Extract unique constraints.
    String pkConstraintNameKey = key + '.' + PRIMARY_KEY_NAME;
    final Optional<String> pkConstraintNameOpt = optionalGet(pkConstraintNameKey);
    if (pkConstraintNameOpt.isPresent()) {
        final String pkColumnsKey = key + '.' + PRIMARY_KEY_COLUMNS;
        final String columns = optionalGet(pkColumnsKey).orElseThrow(exceptionSupplier(pkColumnsKey));
        schemaBuilder.primaryKey(pkConstraintNameOpt.get(), columns.split(","));
    }
    return Optional.of(schemaBuilder.build());
}
Also used : DynamicTableFactory(org.apache.flink.table.factories.DynamicTableFactory) IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) EncodingUtils(org.apache.flink.table.utils.EncodingUtils) MetadataColumn(org.apache.flink.table.api.TableColumn.MetadataColumn) HashMap(java.util.HashMap) RowTypeInfo(org.apache.flink.api.java.typeutils.RowTypeInfo) Function(java.util.function.Function) Supplier(java.util.function.Supplier) MemorySize(org.apache.flink.configuration.MemorySize) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) BigDecimal(java.math.BigDecimal) InstantiationUtil(org.apache.flink.util.InstantiationUtil) Matcher(java.util.regex.Matcher) TableColumn(org.apache.flink.table.api.TableColumn) Duration(java.time.Duration) Map(java.util.Map) ConfigOption(org.apache.flink.configuration.ConfigOption) Preconditions.checkNotNull(org.apache.flink.util.Preconditions.checkNotNull) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) CatalogPropertiesUtil(org.apache.flink.table.catalog.CatalogPropertiesUtil) TableException(org.apache.flink.table.api.TableException) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) TypeStringUtils(org.apache.flink.table.utils.TypeStringUtils) Objects(java.util.Objects) Consumer(java.util.function.Consumer) List(java.util.List) TimeUtils(org.apache.flink.util.TimeUtils) Stream(java.util.stream.Stream) LogicalType(org.apache.flink.table.types.logical.LogicalType) ValidationException(org.apache.flink.table.api.ValidationException) LogicalTypeParser(org.apache.flink.table.types.logical.utils.LogicalTypeParser) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) Pattern(java.util.regex.Pattern) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) Collections(java.util.Collections)
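
To make the parsing above concrete, here is a hedged sketch of the flat property layout the method expects. The key suffixes follow the constants referenced in the snippet (NAME, DATA_TYPE, EXPR, WATERMARK_*, PRIMARY_KEY_*); all names and values are invented for illustration.

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class SchemaPropertiesSketch {
    public static void main(String[] args) {
        Map<String, String> map = new HashMap<>();
        // Physical columns: "<key>.<i>.name" / "<key>.<i>.data-type".
        map.put("schema.0.name", "id");
        map.put("schema.0.data-type", "BIGINT NOT NULL");
        map.put("schema.1.name", "ts");
        map.put("schema.1.data-type", "TIMESTAMP(3)");
        // A computed column carries an additional "expr".
        map.put("schema.2.name", "id_plus_one");
        map.put("schema.2.data-type", "BIGINT");
        map.put("schema.2.expr", "`id` + 1");
        // Watermark spec: rowtime attribute, strategy expression, and its type.
        map.put("schema.watermark.0.rowtime", "ts");
        map.put("schema.watermark.0.strategy.expr", "`ts` - INTERVAL '5' SECOND");
        map.put("schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
        // Primary key name and comma-separated column list.
        map.put("schema.primary-key.name", "PK_id");
        map.put("schema.primary-key.columns", "id");

        DescriptorProperties props = new DescriptorProperties();
        props.putProperties(map);
        Optional<TableSchema> schema = props.getOptionalTableSchema("schema");
        schema.ifPresent(System.out::println);
    }
}

The legacy "<key>.<i>.type" entries are still honored via TypeConversions.fromLegacyInfoToDataType, which is why the method probes the legacyTypeKey before failing.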

Aggregations

DataType (org.apache.flink.table.types.DataType): 260
Test (org.junit.Test): 72
RowType (org.apache.flink.table.types.logical.RowType): 59
LogicalType (org.apache.flink.table.types.logical.LogicalType): 58
RowData (org.apache.flink.table.data.RowData): 54
List (java.util.List): 38
FieldsDataType (org.apache.flink.table.types.FieldsDataType): 32
ValidationException (org.apache.flink.table.api.ValidationException): 31
ArrayList (java.util.ArrayList): 29
Collectors (java.util.stream.Collectors): 24
AtomicDataType (org.apache.flink.table.types.AtomicDataType): 24
Map (java.util.Map): 23
Internal (org.apache.flink.annotation.Internal): 23
TableException (org.apache.flink.table.api.TableException): 23
HashMap (java.util.HashMap): 22
GenericRowData (org.apache.flink.table.data.GenericRowData): 22
Row (org.apache.flink.types.Row): 22
TableSchema (org.apache.flink.table.api.TableSchema): 20
TypeConversions.fromLogicalToDataType (org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType): 19
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 18