
Example 61 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class DescriptorProperties, method putTableSchema.

/**
 * Adds a table schema under the given key.
 */
public void putTableSchema(String key, TableSchema schema) {
    checkNotNull(key);
    checkNotNull(schema);
    final String[] fieldNames = schema.getFieldNames();
    final DataType[] fieldTypes = schema.getFieldDataTypes();
    final String[] fieldExpressions = schema.getTableColumns().stream().map(column -> {
        if (column instanceof ComputedColumn) {
            return ((ComputedColumn) column).getExpression();
        }
        return null;
    }).toArray(String[]::new);
    final String[] fieldMetadata = schema.getTableColumns().stream().map(column -> {
        if (column instanceof MetadataColumn) {
            return ((MetadataColumn) column).getMetadataAlias().orElse(column.getName());
        }
        return null;
    }).toArray(String[]::new);
    final String[] fieldVirtual = schema.getTableColumns().stream().map(column -> {
        if (column instanceof MetadataColumn) {
            return Boolean.toString(((MetadataColumn) column).isVirtual());
        }
        return null;
    }).toArray(String[]::new);
    final List<List<String>> values = new ArrayList<>();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        values.add(
            Arrays.asList(
                fieldNames[i],
                fieldTypes[i].getLogicalType().asSerializableString(),
                fieldExpressions[i],
                fieldMetadata[i],
                fieldVirtual[i]));
    }
    putIndexedOptionalProperties(key, Arrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL), values);
    if (!schema.getWatermarkSpecs().isEmpty()) {
        final List<List<String>> watermarkValues = new ArrayList<>();
        for (WatermarkSpec spec : schema.getWatermarkSpecs()) {
            watermarkValues.add(
                Arrays.asList(
                    spec.getRowtimeAttribute(),
                    spec.getWatermarkExpr(),
                    spec.getWatermarkExprOutputType().getLogicalType().asSerializableString()));
        }
        putIndexedFixedProperties(
            key + '.' + WATERMARK,
            Arrays.asList(WATERMARK_ROWTIME, WATERMARK_STRATEGY_EXPR, WATERMARK_STRATEGY_DATA_TYPE),
            watermarkValues);
    }
    schema.getPrimaryKey().ifPresent(pk -> {
        putString(key + '.' + PRIMARY_KEY_NAME, pk.getName());
        putString(key + '.' + PRIMARY_KEY_COLUMNS, String.join(",", pk.getColumns()));
    });
}
Also used : DynamicTableFactory(org.apache.flink.table.factories.DynamicTableFactory) IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) EncodingUtils(org.apache.flink.table.utils.EncodingUtils) MetadataColumn(org.apache.flink.table.api.TableColumn.MetadataColumn) HashMap(java.util.HashMap) RowTypeInfo(org.apache.flink.api.java.typeutils.RowTypeInfo) Function(java.util.function.Function) Supplier(java.util.function.Supplier) MemorySize(org.apache.flink.configuration.MemorySize) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) BigDecimal(java.math.BigDecimal) InstantiationUtil(org.apache.flink.util.InstantiationUtil) Matcher(java.util.regex.Matcher) TableColumn(org.apache.flink.table.api.TableColumn) Duration(java.time.Duration) Map(java.util.Map) ConfigOption(org.apache.flink.configuration.ConfigOption) Preconditions.checkNotNull(org.apache.flink.util.Preconditions.checkNotNull) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) ComputedColumn(org.apache.flink.table.api.TableColumn.ComputedColumn) CatalogPropertiesUtil(org.apache.flink.table.catalog.CatalogPropertiesUtil) TableException(org.apache.flink.table.api.TableException) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) TypeStringUtils(org.apache.flink.table.utils.TypeStringUtils) Objects(java.util.Objects) Consumer(java.util.function.Consumer) List(java.util.List) TimeUtils(org.apache.flink.util.TimeUtils) Stream(java.util.stream.Stream) LogicalType(org.apache.flink.table.types.logical.LogicalType) ValidationException(org.apache.flink.table.api.ValidationException) LogicalTypeParser(org.apache.flink.table.types.logical.utils.LogicalTypeParser) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) Pattern(java.util.regex.Pattern) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) Collections(java.util.Collections)
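
For context, a minimal sketch of how putTableSchema might be called; the key "schema" and the column names are illustrative, while DescriptorProperties, TableSchema, and DataTypes are the Flink classes referenced above:

// Illustrative only: serialize a two-column schema under the key "schema".
TableSchema schema = TableSchema.builder()
    .field("id", DataTypes.INT())
    .field("name", DataTypes.STRING())
    .build();
DescriptorProperties properties = new DescriptorProperties();
properties.putTableSchema("schema", schema);
// The schema is now stored as indexed string properties,
// e.g. schema.0.name=id and schema.0.data-type=INT.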

Example 62 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class DataTypeUtilsTest, method testExpandDistinctType.

@Test
public void testExpandDistinctType() {
    FieldsDataType dataType =
        (FieldsDataType)
            ROW(
                FIELD("f0", INT()),
                FIELD("f1", STRING()),
                FIELD("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
                FIELD("f3", TIMESTAMP(3)));
    LogicalType originalLogicalType = dataType.getLogicalType();
    DistinctType distinctLogicalType =
        DistinctType.newBuilder(
                ObjectIdentifier.of("catalog", "database", "type"), originalLogicalType)
            .build();
    DataType distinctDataType = new FieldsDataType(distinctLogicalType, dataType.getChildren());
    ResolvedSchema schema = DataTypeUtils.expandCompositeTypeToSchema(distinctDataType);
    assertThat(schema)
        .isEqualTo(
            ResolvedSchema.of(
                Column.physical("f0", INT()),
                Column.physical("f1", STRING()),
                Column.physical("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
                Column.physical("f3", TIMESTAMP(3).bridgedTo(LocalDateTime.class))));
}
Also used : LocalDateTime(java.time.LocalDateTime) FieldsDataType(org.apache.flink.table.types.FieldsDataType) DistinctType(org.apache.flink.table.types.logical.DistinctType) LogicalType(org.apache.flink.table.types.logical.LogicalType) DataType(org.apache.flink.table.types.DataType) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) Timestamp(java.sql.Timestamp) Test(org.junit.Test)
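
A smaller sketch of the same utility applied to a plain ROW type, without the DISTINCT wrapper (field names are illustrative):

// Each field of the composite type becomes one physical column.
DataType rowType = ROW(FIELD("id", INT()), FIELD("name", STRING()));
ResolvedSchema expanded = DataTypeUtils.expandCompositeTypeToSchema(rowType);
// expanded equals ResolvedSchema.of(
//     Column.physical("id", INT()), Column.physical("name", STRING()))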

Example 63 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class DataTypeUtilsTest, method testExpandStructuredType.

@Test
public void testExpandStructuredType() {
    StructuredType logicalType =
        StructuredType.newBuilder(ObjectIdentifier.of("catalog", "database", "type"))
            .attributes(
                Arrays.asList(
                    new StructuredType.StructuredAttribute("f0", DataTypes.INT().getLogicalType()),
                    new StructuredType.StructuredAttribute("f1", DataTypes.STRING().getLogicalType()),
                    new StructuredType.StructuredAttribute("f2", DataTypes.TIMESTAMP(5).getLogicalType()),
                    new StructuredType.StructuredAttribute("f3", DataTypes.TIMESTAMP(3).getLogicalType())))
            .build();
    List<DataType> dataTypes =
        Arrays.asList(
            DataTypes.INT(),
            DataTypes.STRING(),
            DataTypes.TIMESTAMP(5).bridgedTo(Timestamp.class),
            DataTypes.TIMESTAMP(3));
    FieldsDataType dataType = new FieldsDataType(logicalType, dataTypes);
    ResolvedSchema schema = DataTypeUtils.expandCompositeTypeToSchema(dataType);
    assertThat(schema)
        .isEqualTo(
            ResolvedSchema.of(
                Column.physical("f0", INT()),
                Column.physical("f1", STRING()),
                Column.physical("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
                Column.physical("f3", TIMESTAMP(3).bridgedTo(LocalDateTime.class))));
}
Also used : LocalDateTime(java.time.LocalDateTime) FieldsDataType(org.apache.flink.table.types.FieldsDataType) DataType(org.apache.flink.table.types.DataType) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) Timestamp(java.sql.Timestamp) StructuredType(org.apache.flink.table.types.logical.StructuredType) Test(org.junit.Test)
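
The assertion above hinges on the conversion class each DataType carries; a brief sketch of that bridging behavior, using the DataTypes API from the test:

// bridgedTo(...) overrides the default conversion class and survives expansion.
DataType bridged = DataTypes.TIMESTAMP(5).bridgedTo(Timestamp.class);
// bridged.getConversionClass() == java.sql.Timestamp.class
// DataTypes.TIMESTAMP(3).getConversionClass() == java.time.LocalDateTime.class (the default)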

Example 64 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class TypeMappingUtilsTest, method testCheckPhysicalLogicalTypeCompatible.

@Test
public void testCheckPhysicalLogicalTypeCompatible() {
    TableSchema tableSchema =
        TableSchema.builder()
            .field("a", DataTypes.VARCHAR(2))
            .field("b", DataTypes.DECIMAL(20, 2))
            .build();
    TableSink tableSink = new TestTableSink(tableSchema);
    LegacyTypeInformationType legacyDataType = (LegacyTypeInformationType) tableSink.getConsumedDataType().getLogicalType();
    TypeInformation legacyTypeInfo = ((TupleTypeInfo) legacyDataType.getTypeInformation()).getTypeAt(1);
    DataType physicalType = TypeConversions.fromLegacyInfoToDataType(legacyTypeInfo);
    ResolvedSchema physicalSchema = DataTypeUtils.expandCompositeTypeToSchema(physicalType);
    DataType[] logicalDataTypes = tableSchema.getFieldDataTypes();
    List<DataType> physicalDataTypes = physicalSchema.getColumnDataTypes();
    for (int i = 0; i < logicalDataTypes.length; i++) {
        TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
            physicalDataTypes.get(i).getLogicalType(),
            logicalDataTypes[i].getLogicalType(),
            "physicalField",
            "logicalField",
            false);
    }
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) DataType(org.apache.flink.table.types.DataType) TableSink(org.apache.flink.table.sinks.TableSink) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) LegacyTypeInformationType(org.apache.flink.table.types.logical.LegacyTypeInformationType) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) TupleTypeInfo(org.apache.flink.api.java.typeutils.TupleTypeInfo) Test(org.junit.Test)
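
Conversely, an incompatible pair should make the check throw. A hypothetical sketch; the BIGINT/VARCHAR pairing is an assumption for illustration, not taken from the test above:

// Hypothetical mismatch: a physical BIGINT mapped onto a logical VARCHAR(2)
// is expected to be rejected by the compatibility check.
try {
    TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
        DataTypes.BIGINT().getLogicalType(),
        DataTypes.VARCHAR(2).getLogicalType(),
        "physicalField",
        "logicalField",
        false);
} catch (ValidationException e) {
    // expected: the physical type cannot be mapped onto the logical type
}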

Example 65 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class InternalDataUtils, method resolveToExternalOrNull.

static Function<RowData, Row> resolveToExternalOrNull(DataType dataType) {
    try {
        // Create the converter
        Method getConverter =
            Class.forName("org.apache.flink.table.data.conversion.DataStructureConverters")
                .getMethod("getConverter", DataType.class);
        Object converter = getConverter.invoke(null, dataType);
        // Open the converter
        converter.getClass()
            .getMethod("open", ClassLoader.class)
            .invoke(converter, Thread.currentThread().getContextClassLoader());
        Method toExternalOrNull = converter.getClass().getMethod("toExternalOrNull", Object.class);
        // Return the lambda to invoke the converter
        return rowData -> {
            try {
                return (Row) toExternalOrNull.invoke(converter, rowData);
            } catch (IllegalAccessException | InvocationTargetException e) {
                Assertions.fail("Something went wrong when trying to use the DataStructureConverter from flink-table-runtime", e);
                // For the compiler
                return null;
            }
        };
    } catch (ClassNotFoundException | InvocationTargetException | NoSuchMethodException | IllegalAccessException e) {
        Assertions.fail("Error when trying to use the RowData to Row conversion. " + "Perhaps you miss flink-table-runtime in your test classpath?", e);
        // For the compiler
        return null;
    }
}
Also used : DataType(org.apache.flink.table.types.DataType) RowData(org.apache.flink.table.data.RowData) GenericArrayData(org.apache.flink.table.data.GenericArrayData) MapData(org.apache.flink.table.data.MapData) IntType(org.apache.flink.table.types.logical.IntType) ArrayType(org.apache.flink.table.types.logical.ArrayType) MapType(org.apache.flink.table.types.logical.MapType) Function(java.util.function.Function) InvocationTargetException(java.lang.reflect.InvocationTargetException) LinkedHashMap(java.util.LinkedHashMap) ArrayData(org.apache.flink.table.data.ArrayData) List(java.util.List) GenericRowData(org.apache.flink.table.data.GenericRowData) LogicalType(org.apache.flink.table.types.logical.LogicalType) GenericMapData(org.apache.flink.table.data.GenericMapData) Assertions(org.junit.jupiter.api.Assertions) Row(org.apache.flink.types.Row) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) Method(java.lang.reflect.Method) MultisetType(org.apache.flink.table.types.logical.MultisetType) LogicalTypeChecks(org.apache.flink.table.types.logical.utils.LogicalTypeChecks)
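
If flink-table-runtime is available at compile time, the reflective lookup above can be replaced by direct calls; a minimal sketch assuming the same DataStructureConverters API:

// Direct, non-reflective equivalent of the reflective lookup above.
DataStructureConverter<Object, Object> converter =
    DataStructureConverters.getConverter(dataType);
converter.open(Thread.currentThread().getContextClassLoader());
Row external = (Row) converter.toExternalOrNull(rowData);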

Aggregations

DataType (org.apache.flink.table.types.DataType): 260
Test (org.junit.Test): 72
RowType (org.apache.flink.table.types.logical.RowType): 59
LogicalType (org.apache.flink.table.types.logical.LogicalType): 58
RowData (org.apache.flink.table.data.RowData): 54
List (java.util.List): 38
FieldsDataType (org.apache.flink.table.types.FieldsDataType): 32
ValidationException (org.apache.flink.table.api.ValidationException): 31
ArrayList (java.util.ArrayList): 29
Collectors (java.util.stream.Collectors): 24
AtomicDataType (org.apache.flink.table.types.AtomicDataType): 24
Map (java.util.Map): 23
Internal (org.apache.flink.annotation.Internal): 23
TableException (org.apache.flink.table.api.TableException): 23
HashMap (java.util.HashMap): 22
GenericRowData (org.apache.flink.table.data.GenericRowData): 22
Row (org.apache.flink.types.Row): 22
TableSchema (org.apache.flink.table.api.TableSchema): 20
TypeConversions.fromLogicalToDataType (org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType): 19
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 18