Use of org.apache.flink.table.types.DataType in project flink by apache.
The class DescriptorProperties, method putTableSchema.
/**
 * Adds a table schema under the given key.
 */
public void putTableSchema(String key, TableSchema schema) {
    checkNotNull(key);
    checkNotNull(schema);
    final String[] fieldNames = schema.getFieldNames();
    final DataType[] fieldTypes = schema.getFieldDataTypes();
    // Computed-column expressions; null for physical and metadata columns.
    final String[] fieldExpressions = schema.getTableColumns().stream()
        .map(column -> {
            if (column instanceof ComputedColumn) {
                return ((ComputedColumn) column).getExpression();
            }
            return null;
        })
        .toArray(String[]::new);
    // Metadata alias (falling back to the column name) for metadata columns; null otherwise.
    final String[] fieldMetadata = schema.getTableColumns().stream()
        .map(column -> {
            if (column instanceof MetadataColumn) {
                return ((MetadataColumn) column).getMetadataAlias().orElse(column.getName());
            }
            return null;
        })
        .toArray(String[]::new);
    // Virtual flag for metadata columns; null otherwise.
    final String[] fieldVirtual = schema.getTableColumns().stream()
        .map(column -> {
            if (column instanceof MetadataColumn) {
                return Boolean.toString(((MetadataColumn) column).isVirtual());
            }
            return null;
        })
        .toArray(String[]::new);
    // One row of property values per column; optional values may be null.
    final List<List<String>> values = new ArrayList<>();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        values.add(Arrays.asList(
            fieldNames[i],
            fieldTypes[i].getLogicalType().asSerializableString(),
            fieldExpressions[i],
            fieldMetadata[i],
            fieldVirtual[i]));
    }
    putIndexedOptionalProperties(key, Arrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL), values);
    if (!schema.getWatermarkSpecs().isEmpty()) {
        final List<List<String>> watermarkValues = new ArrayList<>();
        for (WatermarkSpec spec : schema.getWatermarkSpecs()) {
            watermarkValues.add(Arrays.asList(
                spec.getRowtimeAttribute(),
                spec.getWatermarkExpr(),
                spec.getWatermarkExprOutputType().getLogicalType().asSerializableString()));
        }
        putIndexedFixedProperties(
            key + '.' + WATERMARK,
            Arrays.asList(WATERMARK_ROWTIME, WATERMARK_STRATEGY_EXPR, WATERMARK_STRATEGY_DATA_TYPE),
            watermarkValues);
    }
    schema.getPrimaryKey().ifPresent(pk -> {
        putString(key + '.' + PRIMARY_KEY_NAME, pk.getName());
        putString(key + '.' + PRIMARY_KEY_COLUMNS, String.join(",", pk.getColumns()));
    });
}
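A minimal usage sketch (not part of the Flink sources; the flattened property names are an assumption based on the NAME and DATA_TYPE constants above): build a simple physical schema, serialize it under the key "schema", and inspect the resulting key/value pairs.

TableSchema schema = TableSchema.builder()
    .field("id", DataTypes.BIGINT())
    .field("name", DataTypes.STRING())
    .build();

DescriptorProperties properties = new DescriptorProperties();
properties.putTableSchema("schema", schema);

// Assumed flattened layout produced by putIndexedOptionalProperties:
//   schema.0.name      -> id
//   schema.0.data-type -> BIGINT
//   schema.1.name      -> name
//   schema.1.data-type -> VARCHAR(2147483647)
properties.asMap().forEach((k, v) -> System.out.println(k + " -> " + v));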
Use of org.apache.flink.table.types.DataType in project flink by apache.
The class DataTypeUtilsTest, method testExpandDistinctType.
@Test
public void testExpandDistinctType() {
    FieldsDataType dataType = (FieldsDataType) ROW(
        FIELD("f0", INT()), FIELD("f1", STRING()),
        FIELD("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
        FIELD("f3", TIMESTAMP(3)));
    LogicalType originalLogicalType = dataType.getLogicalType();
    DistinctType distinctLogicalType = DistinctType.newBuilder(
            ObjectIdentifier.of("catalog", "database", "type"), originalLogicalType)
        .build();
    DataType distinctDataType = new FieldsDataType(distinctLogicalType, dataType.getChildren());
    ResolvedSchema schema = DataTypeUtils.expandCompositeTypeToSchema(distinctDataType);
    assertThat(schema).isEqualTo(ResolvedSchema.of(
        Column.physical("f0", INT()), Column.physical("f1", STRING()),
        Column.physical("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
        Column.physical("f3", TIMESTAMP(3).bridgedTo(LocalDateTime.class))));
}
Use of org.apache.flink.table.types.DataType in project flink by apache.
The class DataTypeUtilsTest, method testExpandStructuredType.
@Test
public void testExpandStructuredType() {
    StructuredType logicalType = StructuredType.newBuilder(ObjectIdentifier.of("catalog", "database", "type"))
        .attributes(Arrays.asList(
            new StructuredType.StructuredAttribute("f0", DataTypes.INT().getLogicalType()),
            new StructuredType.StructuredAttribute("f1", DataTypes.STRING().getLogicalType()),
            new StructuredType.StructuredAttribute("f2", DataTypes.TIMESTAMP(5).getLogicalType()),
            new StructuredType.StructuredAttribute("f3", DataTypes.TIMESTAMP(3).getLogicalType())))
        .build();
    List<DataType> dataTypes = Arrays.asList(
        DataTypes.INT(), DataTypes.STRING(), DataTypes.TIMESTAMP(5).bridgedTo(Timestamp.class), DataTypes.TIMESTAMP(3));
    FieldsDataType dataType = new FieldsDataType(logicalType, dataTypes);
    ResolvedSchema schema = DataTypeUtils.expandCompositeTypeToSchema(dataType);
    assertThat(schema).isEqualTo(ResolvedSchema.of(
        Column.physical("f0", INT()), Column.physical("f1", STRING()),
        Column.physical("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
        Column.physical("f3", TIMESTAMP(3).bridgedTo(LocalDateTime.class))));
}
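Both tests above exercise DataTypeUtils.expandCompositeTypeToSchema through a wrapper around a row type (DistinctType, StructuredType). For contrast, a minimal sketch of the base case, assuming only the DataTypes, Column, and ResolvedSchema APIs already used above: a plain ROW expands to one physical column per field.

DataType rowType = DataTypes.ROW(
    DataTypes.FIELD("id", DataTypes.BIGINT()),
    DataTypes.FIELD("name", DataTypes.STRING()));

ResolvedSchema expanded = DataTypeUtils.expandCompositeTypeToSchema(rowType);
// Expected (assumed default bridging): two physical columns, id BIGINT and name STRING
assertThat(expanded).isEqualTo(ResolvedSchema.of(
    Column.physical("id", DataTypes.BIGINT()),
    Column.physical("name", DataTypes.STRING())));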
Use of org.apache.flink.table.types.DataType in project flink by apache.
The class TypeMappingUtilsTest, method testCheckPhysicalLogicalTypeCompatible.
@Test
public void testCheckPhysicalLogicalTypeCompatible() {
    TableSchema tableSchema = TableSchema.builder()
        .field("a", DataTypes.VARCHAR(2))
        .field("b", DataTypes.DECIMAL(20, 2))
        .build();
    TableSink tableSink = new TestTableSink(tableSchema);
    LegacyTypeInformationType legacyDataType =
        (LegacyTypeInformationType) tableSink.getConsumedDataType().getLogicalType();
    TypeInformation legacyTypeInfo = ((TupleTypeInfo) legacyDataType.getTypeInformation()).getTypeAt(1);
    DataType physicalType = TypeConversions.fromLegacyInfoToDataType(legacyTypeInfo);
    ResolvedSchema physicSchema = DataTypeUtils.expandCompositeTypeToSchema(physicalType);
    DataType[] logicalDataTypes = tableSchema.getFieldDataTypes();
    List<DataType> physicalDataTypes = physicSchema.getColumnDataTypes();
    // Every logical field of the schema must be compatible with the corresponding physical sink field.
    for (int i = 0; i < logicalDataTypes.length; i++) {
        TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
            physicalDataTypes.get(i).getLogicalType(),
            logicalDataTypes[i].getLogicalType(),
            "physicalField",
            "logicalField",
            false);
    }
}
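The test passes as long as no call throws. A minimal negative sketch (assumed behavior, not part of the original test): a clearly incompatible pair such as BOOLEAN vs. TIMESTAMP(3) should be rejected with a ValidationException.

assertThatThrownBy(() ->
        TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
            DataTypes.BOOLEAN().getLogicalType(),    // physical field type
            DataTypes.TIMESTAMP(3).getLogicalType(), // declared logical field type
            "physicalField",
            "logicalField",
            false))
    .isInstanceOf(ValidationException.class);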
Use of org.apache.flink.table.types.DataType in project flink by apache.
The class InternalDataUtils, method resolveToExternalOrNull.
static Function<RowData, Row> resolveToExternalOrNull(DataType dataType) {
    try {
        // Create the converter reflectively, so there is no compile-time dependency on flink-table-runtime
        Method getConverter = Class.forName("org.apache.flink.table.data.conversion.DataStructureConverters")
            .getMethod("getConverter", DataType.class);
        Object converter = getConverter.invoke(null, dataType);
        // Open the converter
        converter.getClass()
            .getMethod("open", ClassLoader.class)
            .invoke(converter, Thread.currentThread().getContextClassLoader());
        Method toExternalOrNull = converter.getClass().getMethod("toExternalOrNull", Object.class);
        // Return the lambda that invokes the converter
        return rowData -> {
            try {
                return (Row) toExternalOrNull.invoke(converter, rowData);
            } catch (IllegalAccessException | InvocationTargetException e) {
                Assertions.fail(
                    "Something went wrong when trying to use the DataStructureConverter from flink-table-runtime", e);
                // For the compiler
                return null;
            }
        };
    } catch (ClassNotFoundException | InvocationTargetException | NoSuchMethodException | IllegalAccessException e) {
        Assertions.fail(
            "Error when trying to use the RowData to Row conversion. Perhaps flink-table-runtime is missing from your test classpath?", e);
        // For the compiler
        return null;
    }
}
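A hypothetical usage sketch (field names and values assumed): obtain the converter function for a row type, then map an internal GenericRowData to its external Row representation.

DataType rowType = DataTypes.ROW(
    DataTypes.FIELD("id", DataTypes.BIGINT()),
    DataTypes.FIELD("name", DataTypes.STRING()));

Function<RowData, Row> toRow = resolveToExternalOrNull(rowType);
RowData internal = GenericRowData.of(42L, StringData.fromString("flink"));
Row external = toRow.apply(internal); // equivalent to Row.of(42L, "flink")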