Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class HiveSimpleUDF, method inferReturnType.
@Override
protected DataType inferReturnType() throws UDFArgumentException {
    // Map the Flink argument types to Hive TypeInfos so that Hive's
    // method resolver can pick the matching evaluate() overload.
    List<TypeInfo> argTypeInfo = new ArrayList<>();
    for (DataType argType : argTypes) {
        argTypeInfo.add(HiveTypeUtil.toHiveTypeInfo(argType, false));
    }
    Method evalMethod =
            hiveFunctionWrapper.createFunction().getResolver().getEvalMethod(argTypeInfo);
    // Derive the return type from the resolved method's generic return type
    // and convert it back into a Flink DataType.
    return HiveTypeUtil.toFlinkType(
            ObjectInspectorFactory.getReflectionObjectInspector(
                    evalMethod.getGenericReturnType(),
                    ObjectInspectorFactory.ObjectInspectorOptions.JAVA));
}
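
Both directions of that mapping are available on HiveTypeUtil. A minimal sketch of the round trip, assuming flink-connector-hive and the Hive serde classes are on the classpath; the class name is hypothetical:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.hive.util.HiveTypeUtil;
import org.apache.flink.table.types.DataType;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

public class HiveTypeRoundTrip {
    public static void main(String[] args) {
        DataType flinkType = DataTypes.DECIMAL(10, 2);
        // checkPrecision = false mirrors the call in inferReturnType above
        TypeInfo hiveType = HiveTypeUtil.toHiveTypeInfo(flinkType, false);
        System.out.println(hiveType.getTypeName()); // decimal(10,2)
        // converting back yields the equivalent Flink type
        DataType roundTripped = HiveTypeUtil.toFlinkType(hiveType);
        System.out.println(roundTripped);
    }
}
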
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class AvroRowDataDeSerializationSchemaTest, method testSerializeDeserialize.
@Test
public void testSerializeDeserialize() throws Exception {
    final DataType dataType =
            ROW(
                    FIELD("bool", BOOLEAN()),
                    FIELD("tinyint", TINYINT()),
                    FIELD("smallint", SMALLINT()),
                    FIELD("int", INT()),
                    FIELD("bigint", BIGINT()),
                    FIELD("float", FLOAT()),
                    FIELD("double", DOUBLE()),
                    FIELD("name", STRING()),
                    FIELD("bytes", BYTES()),
                    FIELD("decimal", DECIMAL(19, 6)),
                    FIELD("doubles", ARRAY(DOUBLE())),
                    FIELD("time", TIME(0)),
                    FIELD("date", DATE()),
                    FIELD("timestamp3", TIMESTAMP(3)),
                    FIELD("timestamp3_2", TIMESTAMP(3)),
                    FIELD("map", MAP(STRING(), BIGINT())),
                    FIELD("map2map", MAP(STRING(), MAP(STRING(), INT()))),
                    FIELD("map2array", MAP(STRING(), ARRAY(INT()))),
                    FIELD("nullEntryMap", MAP(STRING(), STRING())))
                    .notNull();
    final RowType rowType = (RowType) dataType.getLogicalType();
    final Schema schema = AvroSchemaConverter.convertToSchema(rowType);
    // Populate a generic Avro record; positions follow the field order declared above.
    final GenericRecord record = new GenericData.Record(schema);
    record.put(0, true);
    record.put(1, (int) Byte.MAX_VALUE);
    record.put(2, (int) Short.MAX_VALUE);
    record.put(3, 33);
    record.put(4, 44L);
    record.put(5, 12.34F);
    record.put(6, 23.45);
    record.put(7, "hello avro");
    record.put(8, ByteBuffer.wrap(new byte[] { 1, 2, 4, 5, 6, 7, 8, 12 }));
    record.put(9, ByteBuffer.wrap(BigDecimal.valueOf(123456789, 6).unscaledValue().toByteArray()));
    List<Double> doubles = new ArrayList<>();
    doubles.add(1.2);
    doubles.add(3.4);
    doubles.add(567.8901);
    record.put(10, doubles);
    record.put(11, 18397);
    record.put(12, 10087);
    record.put(13, 1589530213123L);
    record.put(14, 1589530213122L);
    Map<String, Long> map = new HashMap<>();
    map.put("flink", 12L);
    map.put("avro", 23L);
    record.put(15, map);
    Map<String, Map<String, Integer>> map2map = new HashMap<>();
    Map<String, Integer> innerMap = new HashMap<>();
    innerMap.put("inner_key1", 123);
    innerMap.put("inner_key2", 234);
    map2map.put("outer_key", innerMap);
    record.put(16, map2map);
    List<Integer> list1 = Arrays.asList(1, 2, 3, 4, 5, 6);
    List<Integer> list2 = Arrays.asList(11, 22, 33, 44, 55);
    Map<String, List<Integer>> map2list = new HashMap<>();
    map2list.put("list1", list1);
    map2list.put("list2", list2);
    record.put(17, map2list);
    Map<String, String> map2 = new HashMap<>();
    map2.put("key1", null);
    record.put(18, map2);
    AvroRowDataSerializationSchema serializationSchema = createSerializationSchema(dataType);
    AvroRowDataDeserializationSchema deserializationSchema = createDeserializationSchema(dataType);
    // Write the record with a plain Avro binary encoder, then check that
    // deserializing and re-serializing through the Flink schemas is lossless.
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    GenericDatumWriter<IndexedRecord> datumWriter = new GenericDatumWriter<>(schema);
    Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
    datumWriter.write(record, encoder);
    encoder.flush();
    byte[] input = byteArrayOutputStream.toByteArray();
    RowData rowData = deserializationSchema.deserialize(input);
    byte[] output = serializationSchema.serialize(rowData);
    assertArrayEquals(input, output);
}
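
The createSerializationSchema/createDeserializationSchema helpers are local to the test class and not shown above. A plausible reconstruction using flink-avro's public constructors:

static AvroRowDataSerializationSchema createSerializationSchema(DataType dataType) throws Exception {
    final RowType rowType = (RowType) dataType.getLogicalType();
    final AvroRowDataSerializationSchema serializationSchema =
            new AvroRowDataSerializationSchema(rowType);
    // a null initialization context is sufficient for the default Avro setup
    serializationSchema.open(null);
    return serializationSchema;
}

static AvroRowDataDeserializationSchema createDeserializationSchema(DataType dataType) throws Exception {
    final RowType rowType = (RowType) dataType.getLogicalType();
    final AvroRowDataDeserializationSchema deserializationSchema =
            new AvroRowDataDeserializationSchema(rowType, InternalTypeInfo.of(rowType));
    deserializationSchema.open(null);
    return deserializationSchema;
}

InternalTypeInfo.of(rowType) (from org.apache.flink.table.runtime.typeutils) supplies the TypeInformation<RowData> that the deserialization schema reports as its produced type.
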
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class CsvFileFormatFactory, method createEncodingFormat.
@Override
public EncodingFormat<BulkWriter.Factory<RowData>> createEncodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    return new EncodingFormat<BulkWriter.Factory<RowData>>() {
        @Override
        public BulkWriter.Factory<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType physicalDataType) {
            final RowType rowType = (RowType) physicalDataType.getLogicalType();
            final CsvSchema schema = buildCsvSchema(rowType, formatOptions);
            final RowDataToCsvConverter converter =
                    RowDataToCsvConverters.createRowConverter(rowType);
            final CsvMapper mapper = new CsvMapper();
            final ObjectNode container = mapper.createObjectNode();
            final RowDataToCsvConverter.RowDataToCsvFormatConverterContext converterContext =
                    new RowDataToCsvConverter.RowDataToCsvFormatConverterContext(mapper, container);
            return out -> CsvBulkWriter.forSchema(mapper, schema, converter, converterContext, out);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
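
For context, a short sketch of how this factory is reached in practice: a filesystem table declared with 'format' = 'csv' makes the planner discover CsvFileFormatFactory and invoke createEncodingFormat, which then builds one runtime encoder per sink row type. The table name and path below are placeholders; flink-csv must be on the classpath.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CsvFileSinkExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // 'format' = 'csv' selects the encoding format created above
        tEnv.executeSql(
                "CREATE TABLE csv_sink (id BIGINT, name STRING) WITH ("
                        + " 'connector' = 'filesystem',"
                        + " 'path' = '/tmp/csv-out',"
                        + " 'format' = 'csv')");
        tEnv.executeSql("INSERT INTO csv_sink VALUES (1, 'flink')");
    }
}

The anonymous EncodingFormat reports ChangelogMode.insertOnly() because a plain CSV file cannot represent retractions or updates.
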
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class AvroSchemaConverterTest, method validateUserSchema.
private void validateUserSchema(DataType actual) {
    final DataType address =
            DataTypes.ROW(
                    DataTypes.FIELD("num", DataTypes.INT().notNull()),
                    DataTypes.FIELD("street", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("city", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("state", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("zip", DataTypes.STRING().notNull()));
    final DataType user =
            DataTypes.ROW(
                    DataTypes.FIELD("name", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("favorite_number", DataTypes.INT()),
                    DataTypes.FIELD("favorite_color", DataTypes.STRING()),
                    DataTypes.FIELD("type_long_test", DataTypes.BIGINT()),
                    DataTypes.FIELD("type_double_test", DataTypes.DOUBLE().notNull()),
                    DataTypes.FIELD("type_null_test", DataTypes.NULL()),
                    DataTypes.FIELD("type_bool_test", DataTypes.BOOLEAN().notNull()),
                    DataTypes.FIELD("type_array_string", DataTypes.ARRAY(DataTypes.STRING().notNull()).notNull()),
                    DataTypes.FIELD("type_array_boolean", DataTypes.ARRAY(DataTypes.BOOLEAN().notNull()).notNull()),
                    DataTypes.FIELD("type_nullable_array", DataTypes.ARRAY(DataTypes.STRING().notNull())),
                    DataTypes.FIELD("type_enum", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("type_map", DataTypes.MAP(DataTypes.STRING().notNull(), DataTypes.BIGINT().notNull()).notNull()),
                    DataTypes.FIELD("type_fixed", DataTypes.VARBINARY(16)),
                    DataTypes.FIELD("type_union", new AtomicDataType(new TypeInformationRawType<>(false, Types.GENERIC(Object.class)), Object.class)),
                    DataTypes.FIELD("type_nested", address),
                    DataTypes.FIELD("type_bytes", DataTypes.BYTES().notNull()),
                    DataTypes.FIELD("type_date", DataTypes.DATE().notNull()),
                    DataTypes.FIELD("type_time_millis", DataTypes.TIME(3).notNull()),
                    DataTypes.FIELD("type_time_micros", DataTypes.TIME(6).notNull()),
                    DataTypes.FIELD("type_timestamp_millis", DataTypes.TIMESTAMP(3).notNull()),
                    DataTypes.FIELD("type_timestamp_micros", DataTypes.TIMESTAMP(6).notNull()),
                    DataTypes.FIELD("type_decimal_bytes", DataTypes.DECIMAL(4, 2).notNull()),
                    DataTypes.FIELD("type_decimal_fixed", DataTypes.DECIMAL(4, 2).notNull()))
                    .notNull();
    assertEquals(user, actual);
}
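
A small standalone sketch (not from the test) of the conversion this method validates: AvroSchemaConverter.convertToDataType maps an Avro record onto a Flink ROW, turning non-union Avro fields into NOT NULL Flink fields. The two-field schema is a simplified stand-in for the generated User schema used by the test.

import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.table.types.DataType;

public class AvroToDataTypeSketch {
    public static void main(String[] args) {
        String schemaStr =
                "{\"type\":\"record\",\"name\":\"Address\",\"fields\":["
                        + "{\"name\":\"num\",\"type\":\"int\"},"
                        + "{\"name\":\"street\",\"type\":\"string\"}]}";
        DataType dataType = AvroSchemaConverter.convertToDataType(schemaStr);
        // prints roughly: ROW<`num` INT NOT NULL, `street` STRING NOT NULL> NOT NULL
        System.out.println(dataType);
    }
}
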
Use of org.apache.flink.table.types.DataType in project flink by apache.
From the class AvroSchemaConverterTest, method testSchemaToDataTypeToSchemaNullable.
/**
 * Tests converting a nullable Avro schema to a data type and then back to a schema.
 */
@Test
public void testSchemaToDataTypeToSchemaNullable() {
    String schemaStr =
            "{\n" + " \"type\" : \"record\",\n" + " \"name\" : \"record\",\n" + " \"fields\" : [ {\n"
                    + " \"name\" : \"f_null\",\n" + " \"type\" : \"null\",\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_boolean\",\n" + " \"type\" : [ \"null\", \"boolean\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_int\",\n" + " \"type\" : [ \"null\", \"int\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_bigint\",\n" + " \"type\" : [ \"null\", \"long\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_float\",\n" + " \"type\" : [ \"null\", \"float\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_double\",\n" + " \"type\" : [ \"null\", \"double\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_string\",\n" + " \"type\" : [ \"null\", \"string\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_varbinary\",\n" + " \"type\" : [ \"null\", \"bytes\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_timestamp\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"long\",\n" + " \"logicalType\" : \"timestamp-millis\"\n" + " } ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_date\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"int\",\n" + " \"logicalType\" : \"date\"\n" + " } ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_time\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"int\",\n" + " \"logicalType\" : \"time-millis\"\n" + " } ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_decimal\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"bytes\",\n" + " \"logicalType\" : \"decimal\",\n" + " \"precision\" : 10,\n" + " \"scale\" : 0\n" + " } ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_row\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"record\",\n" + " \"name\" : \"record_f_row\",\n" + " \"fields\" : [ {\n"
                    + " \"name\" : \"f0\",\n" + " \"type\" : [ \"null\", \"int\" ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f1\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"long\",\n" + " \"logicalType\" : \"timestamp-millis\"\n" + " } ],\n" + " \"default\" : null\n" + " } ]\n" + " } ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_map\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"map\",\n" + " \"values\" : [ \"null\", \"int\" ]\n" + " } ],\n" + " \"default\" : null\n"
                    + " }, {\n" + " \"name\" : \"f_array\",\n" + " \"type\" : [ \"null\", {\n" + " \"type\" : \"array\",\n" + " \"items\" : [ \"null\", \"int\" ]\n" + " } ],\n" + " \"default\" : null\n"
                    + " } ]\n" + "}";
    DataType dataType = AvroSchemaConverter.convertToDataType(schemaStr);
    Schema schema = AvroSchemaConverter.convertToSchema(dataType.getLogicalType());
    assertEquals(new Schema.Parser().parse(schemaStr), schema);
}
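
The same round trip can be exercised starting from the Flink side; the sketch below (hand-built type, hypothetical class name) shows why every field in the schema string above is a [ "null", ... ] union: nullable Flink fields, the DataTypes default, map to nullable Avro unions with a null default.

import org.apache.avro.Schema;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class NullableRoundTripSketch {
    public static void main(String[] args) {
        DataType rowType =
                DataTypes.ROW(
                                DataTypes.FIELD("f_int", DataTypes.INT()),
                                DataTypes.FIELD("f_string", DataTypes.STRING()))
                        .notNull();
        Schema schema = AvroSchemaConverter.convertToSchema(rowType.getLogicalType());
        System.out.println(schema.toString(true)); // fields appear as [ "null", ... ] unions
        // parsing the generated schema back restores the same data type
        DataType back = AvroSchemaConverter.convertToDataType(schema.toString());
        System.out.println(back);
    }
}
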