Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class AvroSchemaConverterTest, method testDataTypeToSchemaToDataTypeNonNullable.
/**
* Tests converting a non-nullable data type to an Avro schema and back.
*/
@Test
public void testDataTypeToSchemaToDataTypeNonNullable() {
    DataType dataType =
            DataTypes.ROW(
                            DataTypes.FIELD("f_boolean", DataTypes.BOOLEAN().notNull()),
                            // tinyint and smallint all convert to int
                            DataTypes.FIELD("f_int", DataTypes.INT().notNull()),
                            DataTypes.FIELD("f_bigint", DataTypes.BIGINT().notNull()),
                            DataTypes.FIELD("f_float", DataTypes.FLOAT().notNull()),
                            DataTypes.FIELD("f_double", DataTypes.DOUBLE().notNull()),
                            // char converts to string
                            DataTypes.FIELD("f_string", DataTypes.STRING().notNull()),
                            // binary converts to bytes
                            DataTypes.FIELD("f_varbinary", DataTypes.BYTES().notNull()),
                            DataTypes.FIELD("f_timestamp", DataTypes.TIMESTAMP(3).notNull()),
                            DataTypes.FIELD("f_date", DataTypes.DATE().notNull()),
                            DataTypes.FIELD("f_time", DataTypes.TIME(3).notNull()),
                            DataTypes.FIELD("f_decimal", DataTypes.DECIMAL(10, 0).notNull()),
                            DataTypes.FIELD(
                                    "f_row",
                                    DataTypes.ROW(
                                                    DataTypes.FIELD("f0", DataTypes.INT().notNull()),
                                                    DataTypes.FIELD("f1", DataTypes.TIMESTAMP(3).notNull()))
                                            .notNull()),
                            // map key is always not null
                            DataTypes.FIELD(
                                    "f_map",
                                    DataTypes.MAP(DataTypes.STRING().notNull(), DataTypes.INT().notNull())
                                            .notNull()),
                            DataTypes.FIELD("f_array", DataTypes.ARRAY(DataTypes.INT().notNull()).notNull()))
                    .notNull();
    Schema schema = AvroSchemaConverter.convertToSchema(dataType.getLogicalType());
    DataType converted = AvroSchemaConverter.convertToDataType(schema.toString());
    assertEquals(dataType, converted);
}
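For orientation, here is a minimal, self-contained sketch of the same round trip with a single-field row. It uses the same AvroSchemaConverter entry points as the test above; the class name and field name are illustrative only.

import org.apache.avro.Schema;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class AvroRoundTripSketch {
    public static void main(String[] args) {
        // A non-nullable single-field row; nullable fields would come back
        // from Avro as unions with "null" instead.
        DataType original =
                DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT().notNull())).notNull();
        Schema schema = AvroSchemaConverter.convertToSchema(original.getLogicalType());
        DataType restored = AvroSchemaConverter.convertToDataType(schema.toString());
        System.out.println(original.equals(restored)); // expected: true
    }
}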
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class AvroSchemaConverterTest, method testSchemaToDataTypeToSchemaNonNullable.
/**
* Tests converting a non-nullable Avro schema to a data type and back.
*/
@Test
public void testSchemaToDataTypeToSchemaNonNullable() {
    String schemaStr =
            "{\n"
                    + "  \"type\" : \"record\",\n"
                    + "  \"name\" : \"record\",\n"
                    + "  \"fields\" : [ {\n"
                    + "    \"name\" : \"f_boolean\",\n"
                    + "    \"type\" : \"boolean\"\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_int\",\n"
                    + "    \"type\" : \"int\"\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_bigint\",\n"
                    + "    \"type\" : \"long\"\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_float\",\n"
                    + "    \"type\" : \"float\"\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_double\",\n"
                    + "    \"type\" : \"double\"\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_string\",\n"
                    + "    \"type\" : \"string\"\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_varbinary\",\n"
                    + "    \"type\" : \"bytes\"\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_timestamp\",\n"
                    + "    \"type\" : {\n"
                    + "      \"type\" : \"long\",\n"
                    + "      \"logicalType\" : \"timestamp-millis\"\n"
                    + "    }\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_date\",\n"
                    + "    \"type\" : {\n"
                    + "      \"type\" : \"int\",\n"
                    + "      \"logicalType\" : \"date\"\n"
                    + "    }\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_time\",\n"
                    + "    \"type\" : {\n"
                    + "      \"type\" : \"int\",\n"
                    + "      \"logicalType\" : \"time-millis\"\n"
                    + "    }\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_decimal\",\n"
                    + "    \"type\" : {\n"
                    + "      \"type\" : \"bytes\",\n"
                    + "      \"logicalType\" : \"decimal\",\n"
                    + "      \"precision\" : 10,\n"
                    + "      \"scale\" : 0\n"
                    + "    }\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_row\",\n"
                    + "    \"type\" : {\n"
                    + "      \"type\" : \"record\",\n"
                    + "      \"name\" : \"record_f_row\",\n"
                    + "      \"fields\" : [ {\n"
                    + "        \"name\" : \"f0\",\n"
                    + "        \"type\" : \"int\"\n"
                    + "      }, {\n"
                    + "        \"name\" : \"f1\",\n"
                    + "        \"type\" : {\n"
                    + "          \"type\" : \"long\",\n"
                    + "          \"logicalType\" : \"timestamp-millis\"\n"
                    + "        }\n"
                    + "      } ]\n"
                    + "    }\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_map\",\n"
                    + "    \"type\" : {\n"
                    + "      \"type\" : \"map\",\n"
                    + "      \"values\" : \"int\"\n"
                    + "    }\n"
                    + "  }, {\n"
                    + "    \"name\" : \"f_array\",\n"
                    + "    \"type\" : {\n"
                    + "      \"type\" : \"array\",\n"
                    + "      \"items\" : \"int\"\n"
                    + "    }\n"
                    + "  } ]\n"
                    + "}";
    DataType dataType = AvroSchemaConverter.convertToDataType(schemaStr);
    Schema schema = AvroSchemaConverter.convertToSchema(dataType.getLogicalType());
    assertEquals(new Schema.Parser().parse(schemaStr), schema);
}
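Going schema-first works the same way in miniature. Note that the expected schema above names the top-level record "record" and the nested row "record_f_row", which matches what convertToSchema generates by default; the sketch below relies on that default naming, which is an assumption worth verifying against your Flink version.

// Schema-first round trip in miniature. The record name "record" is assumed
// to match AvroSchemaConverter's default output name; if it differed, the
// final comparison would fail even though the field structure matches.
String miniSchema =
        "{ \"type\" : \"record\", \"name\" : \"record\","
                + " \"fields\" : [ { \"name\" : \"f0\", \"type\" : \"int\" } ] }";
DataType dataType = AvroSchemaConverter.convertToDataType(miniSchema);
Schema schema = AvroSchemaConverter.convertToSchema(dataType.getLogicalType());
// Parsed schemas compare structurally, so JSON formatting is irrelevant.
System.out.println(new Schema.Parser().parse(miniSchema).equals(schema)); // expected: true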
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class CsvFormatFactory, method createEncodingFormat.
@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    CsvCommons.validateFormatOptions(formatOptions);
    return new EncodingFormat<SerializationSchema<RowData>>() {
        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            final CsvRowDataSerializationSchema.Builder schemaBuilder =
                    new CsvRowDataSerializationSchema.Builder(rowType);
            configureSerializationSchema(formatOptions, schemaBuilder);
            return schemaBuilder.build();
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
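As a usage sketch, the runtime encoder above can be exercised by building the CSV serialization schema for a concrete row type, which is exactly what createRuntimeEncoder does internally. The field names, values, and expected output below are illustrative assumptions, not taken from the factory code.

// Minimal usage sketch, mirroring what createRuntimeEncoder builds.
// Assumes org.apache.flink.table.data.{GenericRowData, RowData, StringData}.
RowType rowType =
        (RowType)
                DataTypes.ROW(
                                DataTypes.FIELD("name", DataTypes.STRING()),
                                DataTypes.FIELD("age", DataTypes.INT()))
                        .getLogicalType();
SerializationSchema<RowData> serializer =
        new CsvRowDataSerializationSchema.Builder(rowType).build();
RowData row = GenericRowData.of(StringData.fromString("Alice"), 42);
byte[] csvBytes = serializer.serialize(row); // roughly "Alice,42" plus a line separator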
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class CsvRowDataSerDeSchemaTest, method testDeserializationWithTypesMismatch.
@Test
public void testDeserializationWithTypesMismatch() {
    DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", INT()));
    RowType rowType = (RowType) dataType.getLogicalType();
    CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
            new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType));
    String data = "Test,1,Test";
    String errorMessage = "Fail to deserialize at field: f2.";
    try {
        deserialize(deserSchemaBuilder, data);
        fail("expecting exception message:" + errorMessage);
    } catch (Throwable t) {
        assertThat(t, FlinkMatchers.containsMessage(errorMessage));
    }
}
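The deserialize(...) helper called here is not part of this excerpt. A plausible minimal stand-in, assuming the builder is simply built and handed the raw UTF-8 bytes, could look like this:

// Hypothetical stand-in for the deserialize(...) helper used by these tests;
// the real helper in the Flink test class may differ.
private static RowData deserialize(
        CsvRowDataDeserializationSchema.Builder builder, String csvLine) throws Exception {
    CsvRowDataDeserializationSchema schema = builder.build();
    return schema.deserialize(csvLine.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}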
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class CsvRowDataSerDeSchemaTest, method testDeserialization.
@SuppressWarnings("unchecked")
private Row testDeserialization(
        boolean allowParsingErrors, boolean allowComments, String string) throws Exception {
    DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", STRING()));
    RowType rowType = (RowType) dataType.getLogicalType();
    CsvRowDataDeserializationSchema.Builder deserSchemaBuilder =
            new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType))
                    .setIgnoreParseErrors(allowParsingErrors)
                    .setAllowComments(allowComments);
    RowData deserializedRow = deserialize(deserSchemaBuilder, string);
    return (Row) DataFormatConverters.getConverterForDataType(dataType).toExternal(deserializedRow);
}
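For illustration, a call to the helper above might look like the following; the input line and expected result are assumptions based on the declared row type, not assertions copied from the test class.

// Illustrative call: a well-formed line parses into the external Row type.
Row row = testDeserialization(false, false, "Test,12,Hello");
// Expected external representation: Row.of("Test", 12, "Hello")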