Search in sources :

Example 16 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class AvroSchemaConverterTest, method testDataTypeToSchemaToDataTypeNonNullable.

/**
 * Round-trip test: converts a fully non-nullable {@link DataType} to an Avro
 * schema and back, asserting the result equals the original data type.
 */
@Test
public void testDataTypeToSchemaToDataTypeNonNullable() {
    DataType originalType =
            DataTypes.ROW(
                            DataTypes.FIELD("f_boolean", DataTypes.BOOLEAN().notNull()),
                            // tinyint and smallint all convert to int
                            DataTypes.FIELD("f_int", DataTypes.INT().notNull()),
                            DataTypes.FIELD("f_bigint", DataTypes.BIGINT().notNull()),
                            DataTypes.FIELD("f_float", DataTypes.FLOAT().notNull()),
                            DataTypes.FIELD("f_double", DataTypes.DOUBLE().notNull()),
                            // char converts to string
                            DataTypes.FIELD("f_string", DataTypes.STRING().notNull()),
                            // binary converts to bytes
                            DataTypes.FIELD("f_varbinary", DataTypes.BYTES().notNull()),
                            DataTypes.FIELD("f_timestamp", DataTypes.TIMESTAMP(3).notNull()),
                            DataTypes.FIELD("f_date", DataTypes.DATE().notNull()),
                            DataTypes.FIELD("f_time", DataTypes.TIME(3).notNull()),
                            DataTypes.FIELD("f_decimal", DataTypes.DECIMAL(10, 0).notNull()),
                            DataTypes.FIELD(
                                    "f_row",
                                    DataTypes.ROW(
                                                    DataTypes.FIELD("f0", DataTypes.INT().notNull()),
                                                    DataTypes.FIELD("f1", DataTypes.TIMESTAMP(3).notNull()))
                                            .notNull()),
                            // map key is always not null
                            DataTypes.FIELD(
                                    "f_map",
                                    DataTypes.MAP(DataTypes.STRING().notNull(), DataTypes.INT().notNull())
                                            .notNull()),
                            DataTypes.FIELD(
                                    "f_array", DataTypes.ARRAY(DataTypes.INT().notNull()).notNull()))
                    .notNull();
    // DataType -> Avro Schema -> DataType; the round trip must be lossless.
    Schema avroSchema = AvroSchemaConverter.convertToSchema(originalType.getLogicalType());
    DataType roundTrippedType = AvroSchemaConverter.convertToDataType(avroSchema.toString());
    assertEquals(originalType, roundTrippedType);
}
Also used : Schema(org.apache.avro.Schema) TableSchema(org.apache.flink.table.api.TableSchema) DataType(org.apache.flink.table.types.DataType) AtomicDataType(org.apache.flink.table.types.AtomicDataType) Test(org.junit.Test)

Example 17 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class AvroSchemaConverterTest, method testSchemaToDataTypeToSchemaNonNullable.

/**
 * Test convert non-nullable Avro schema to data type then converts back.
 *
 * <p>Round trip in the opposite direction of the data-type test: an Avro schema
 * string is parsed into a Flink {@code DataType}, converted back to an Avro
 * {@code Schema}, and the result must equal the parsed original.
 */
@Test
public void testSchemaToDataTypeToSchemaNonNullable() {
    // Avro record with one field per supported type; no field type is a union
    // with "null", i.e. every field is non-nullable.
    String schemaStr = "{\n" + "  \"type\" : \"record\",\n" + "  \"name\" : \"record\",\n" + "  \"fields\" : [ {\n" + "    \"name\" : \"f_boolean\",\n" + "    \"type\" : \"boolean\"\n" + "  }, {\n" + "    \"name\" : \"f_int\",\n" + "    \"type\" : \"int\"\n" + "  }, {\n" + "    \"name\" : \"f_bigint\",\n" + "    \"type\" : \"long\"\n" + "  }, {\n" + "    \"name\" : \"f_float\",\n" + "    \"type\" : \"float\"\n" + "  }, {\n" + "    \"name\" : \"f_double\",\n" + "    \"type\" : \"double\"\n" + "  }, {\n" + "    \"name\" : \"f_string\",\n" + "    \"type\" : \"string\"\n" + "  }, {\n" + "    \"name\" : \"f_varbinary\",\n" + "    \"type\" : \"bytes\"\n" + "  }, {\n" + "    \"name\" : \"f_timestamp\",\n" + "    \"type\" : {\n" + "      \"type\" : \"long\",\n" + "      \"logicalType\" : \"timestamp-millis\"\n" + "    }\n" + "  }, {\n" + "    \"name\" : \"f_date\",\n" + "    \"type\" : {\n" + "      \"type\" : \"int\",\n" + "      \"logicalType\" : \"date\"\n" + "    }\n" + "  }, {\n" + "    \"name\" : \"f_time\",\n" + "    \"type\" : {\n" + "      \"type\" : \"int\",\n" + "      \"logicalType\" : \"time-millis\"\n" + "    }\n" + "  }, {\n" + "    \"name\" : \"f_decimal\",\n" + "    \"type\" : {\n" + "      \"type\" : \"bytes\",\n" + "      \"logicalType\" : \"decimal\",\n" + "      \"precision\" : 10,\n" + "      \"scale\" : 0\n" + "    }\n" + "  }, {\n" + "    \"name\" : \"f_row\",\n" + "    \"type\" : {\n" + "      \"type\" : \"record\",\n" + "      \"name\" : \"record_f_row\",\n" + "      \"fields\" : [ {\n" + "        \"name\" : \"f0\",\n" + "        \"type\" : \"int\"\n" + "      }, {\n" + "        \"name\" : \"f1\",\n" + "        \"type\" : {\n" + "          \"type\" : \"long\",\n" + "          \"logicalType\" : \"timestamp-millis\"\n" + "        }\n" + "      } ]\n" + "    }\n" + "  }, {\n" + "    \"name\" : \"f_map\",\n" + "    \"type\" : {\n" + "      \"type\" : \"map\",\n" + "      \"values\" : \"int\"\n" + "    }\n" + "  }, {\n" + "    \"name\" : \"f_array\",\n" + "    \"type\" : {\n" + "      \"type\" : \"array\",\n" + "      \"items\" : \"int\"\n" + "    }\n" + "  } ]\n" + "}";
    // Schema string -> Flink DataType.
    DataType dataType = AvroSchemaConverter.convertToDataType(schemaStr);
    // Flink DataType -> Avro Schema.
    Schema schema = AvroSchemaConverter.convertToSchema(dataType.getLogicalType());
    // The regenerated schema must equal the parsed original.
    assertEquals(new Schema.Parser().parse(schemaStr), schema);
}
Also used : Schema(org.apache.avro.Schema) TableSchema(org.apache.flink.table.api.TableSchema) DataType(org.apache.flink.table.types.DataType) AtomicDataType(org.apache.flink.table.types.AtomicDataType) Test(org.junit.Test)

Example 18 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class CsvFormatFactory, method createEncodingFormat.

/**
 * Creates the CSV encoding format after validating the supplied format options.
 *
 * @param context the dynamic table factory context (unused here; required by the interface)
 * @param formatOptions the user-provided options for the CSV format
 * @return an insert-only encoding format that builds a CSV serialization schema
 */
@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    // Validate the options up front so misconfiguration fails at factory time.
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    CsvCommons.validateFormatOptions(formatOptions);
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public ChangelogMode getChangelogMode() {
            // This format emits an insert-only changelog.
            return ChangelogMode.insertOnly();
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType consumedRowType = (RowType) consumedDataType.getLogicalType();
            final CsvRowDataSerializationSchema.Builder builder =
                    new CsvRowDataSerializationSchema.Builder(consumedRowType);
            // Apply the validated format options to the serialization schema.
            configureSerializationSchema(formatOptions, builder);
            return builder.build();
        }
    };
}
Also used : EncodingFormat(org.apache.flink.table.connector.format.EncodingFormat) RowData(org.apache.flink.table.data.RowData) DataType(org.apache.flink.table.types.DataType) RowType(org.apache.flink.table.types.logical.RowType)

Example 19 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class CsvRowDataSerDeSchemaTest, method testDeserializationWithTypesMismatch.

/**
 * Verifies that deserializing a row whose third column is declared INT but
 * carries a non-numeric value fails with an error naming the offending field.
 *
 * <p>Bug fix: previously {@code fail(...)} was called inside the {@code try}
 * block, so its {@link AssertionError} was caught by {@code catch (Throwable)}
 * — and since the fail message contains {@code errorMessage}, the
 * {@code containsMessage} assertion passed, making the test pass vacuously
 * when no exception was thrown. The capture-then-assert pattern below fails
 * properly in that case.
 */
@Test
public void testDeserializationWithTypesMismatch() {
    DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", INT()));
    RowType rowType = (RowType) dataType.getLogicalType();
    CsvRowDataDeserializationSchema.Builder deserSchemaBuilder = new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType));
    // "Test" is not parseable as the declared INT type of f2.
    String data = "Test,1,Test";
    String errorMessage = "Fail to deserialize at field: f2.";
    Throwable thrown = null;
    try {
        deserialize(deserSchemaBuilder, data);
    } catch (Throwable t) {
        thrown = t;
    }
    if (thrown == null) {
        // fail() is now outside the try, so its AssertionError propagates.
        fail("expecting exception message:" + errorMessage);
    }
    assertThat(thrown, FlinkMatchers.containsMessage(errorMessage));
}
Also used : DataType(org.apache.flink.table.types.DataType) RowType(org.apache.flink.table.types.logical.RowType) StringData.fromString(org.apache.flink.table.data.StringData.fromString) Test(org.junit.Test)

Example 20 with DataType

use of org.apache.flink.table.types.DataType in project flink by apache.

In class CsvRowDataSerDeSchemaTest, method testDeserialization.

/**
 * Deserializes {@code string} against a (STRING, INT, STRING) row schema and
 * converts the internal result to an external {@link Row}.
 *
 * @param allowParsingErrors whether the deserializer ignores parse errors
 * @param allowComments whether comment lines are allowed in the input
 * @param string the raw CSV input
 * @return the deserialized row in external representation
 * @throws Exception if deserialization fails
 */
@SuppressWarnings("unchecked")
private Row testDeserialization(boolean allowParsingErrors, boolean allowComments, String string) throws Exception {
    final DataType rowDataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", STRING()));
    final RowType logicalRowType = (RowType) rowDataType.getLogicalType();
    final CsvRowDataDeserializationSchema.Builder schemaBuilder =
            new CsvRowDataDeserializationSchema.Builder(logicalRowType, InternalTypeInfo.of(logicalRowType))
                    .setIgnoreParseErrors(allowParsingErrors)
                    .setAllowComments(allowComments);
    final RowData internalRow = deserialize(schemaBuilder, string);
    // Convert from the internal RowData representation to the external Row type.
    return (Row) DataFormatConverters.getConverterForDataType(rowDataType).toExternal(internalRow);
}
Also used : GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) DataType(org.apache.flink.table.types.DataType) RowType(org.apache.flink.table.types.logical.RowType) Row(org.apache.flink.types.Row)

Aggregations

DataType (org.apache.flink.table.types.DataType)260 Test (org.junit.Test)72 RowType (org.apache.flink.table.types.logical.RowType)59 LogicalType (org.apache.flink.table.types.logical.LogicalType)58 RowData (org.apache.flink.table.data.RowData)54 List (java.util.List)38 FieldsDataType (org.apache.flink.table.types.FieldsDataType)32 ValidationException (org.apache.flink.table.api.ValidationException)31 ArrayList (java.util.ArrayList)29 Collectors (java.util.stream.Collectors)24 AtomicDataType (org.apache.flink.table.types.AtomicDataType)24 Map (java.util.Map)23 Internal (org.apache.flink.annotation.Internal)23 TableException (org.apache.flink.table.api.TableException)23 HashMap (java.util.HashMap)22 GenericRowData (org.apache.flink.table.data.GenericRowData)22 Row (org.apache.flink.types.Row)22 TableSchema (org.apache.flink.table.api.TableSchema)20 TypeConversions.fromLogicalToDataType (org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType)19 ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema)18