Example usage of org.apache.flink.table.types.logical.RowType in the Apache Flink project.
From the class AvroSchemaConverterTest, method testInvalidTimeTypeAvroSchemaConversion:
/**
 * Verifies that converting a schema containing a TIME field with precision 6 is rejected,
 * because Avro's time-millis type only supports precision up to 3.
 */
@Test
public void testInvalidTimeTypeAvroSchemaConversion() {
    final RowType invalidRowType =
            (RowType)
                    TableSchema.builder()
                            .field("a", DataTypes.STRING())
                            .field("b", DataTypes.TIME(6)) // unsupported precision
                            .build()
                            .toRowDataType()
                            .getLogicalType();
    final String expectedMessage =
            "Avro does not support TIME type with precision: 6, it only supports precision less than 3.";
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(expectedMessage);
    AvroSchemaConverter.convertToSchema(invalidRowType);
}
Example usage of org.apache.flink.table.types.logical.RowType in the Apache Flink project.
From the class CsvFormatFactory, method createEncodingFormat:
/**
 * Creates the CSV encoding format. Format options are validated eagerly so that
 * misconfiguration fails at planning time rather than at runtime.
 */
@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    // Fail fast on missing or inconsistent format options.
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    CsvCommons.validateFormatOptions(formatOptions);
    return new EncodingFormat<SerializationSchema<RowData>>() {
        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType physicalRowType = (RowType) consumedDataType.getLogicalType();
            final CsvRowDataSerializationSchema.Builder builder =
                    new CsvRowDataSerializationSchema.Builder(physicalRowType);
            configureSerializationSchema(formatOptions, builder);
            return builder.build();
        }

        @Override
        public ChangelogMode getChangelogMode() {
            // CSV cannot represent updates or deletions, so only INSERT rows are accepted.
            return ChangelogMode.insertOnly();
        }
    };
}
Example usage of org.apache.flink.table.types.logical.RowType in the Apache Flink project.
From the class CsvToRowDataConverters, method createRowConverter:
/**
 * Creates a converter that maps a Jackson node onto a {@link GenericRowData} matching the
 * given row type. An empty node converts to {@code null}; otherwise the node's arity is
 * validated (subject to {@code ignoreParseErrors}) before the per-field converters run.
 *
 * @param rowType the logical row type describing field names and types
 * @param isTopLevel whether this row is the outermost record; Jackson only supports
 *     name-based field lookup at the top level, nested rows are accessed by index
 */
public CsvToRowDataConverter createRowConverter(RowType rowType, boolean isTopLevel) {
    // One null-aware converter per field, in declaration order.
    final CsvToRowDataConverter[] converters =
            rowType.getFields().stream()
                    .map(RowType.RowField::getType)
                    .map(this::createNullableConverter)
                    .toArray(CsvToRowDataConverter[]::new);
    final String[] names = rowType.getFieldNames().toArray(new String[0]);
    final int fieldCount = names.length;
    return jsonNode -> {
        final int nodeSize = jsonNode.size();
        if (nodeSize == 0) {
            // An empty node carries no data: the whole row becomes null.
            return null;
        }
        validateArity(fieldCount, nodeSize, ignoreParseErrors);
        final GenericRowData row = new GenericRowData(fieldCount);
        for (int pos = 0; pos < fieldCount; pos++) {
            // Jackson only supports mapping by name in the first level.
            final JsonNode child = isTopLevel ? jsonNode.get(names[pos]) : jsonNode.get(pos);
            try {
                row.setField(pos, child == null ? null : converters[pos].convert(child));
            } catch (Throwable t) {
                // Wrap with the failing field name so the error is actionable.
                throw new RuntimeException(
                        String.format("Fail to deserialize at field: %s.", names[pos]), t);
            }
        }
        return row;
    };
}
Example usage of org.apache.flink.table.types.logical.RowType in the Apache Flink project.
From the class CsvRowDataSerDeSchemaTest, method testDeserializationWithTypesMismatch:
/**
 * Ensures that a value/type mismatch (the string "Test" in INT column f2) produces an
 * exception whose message names the offending field.
 */
@Test
public void testDeserializationWithTypesMismatch() {
    final DataType dataType = ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", INT()));
    final RowType rowType = (RowType) dataType.getLogicalType();
    final CsvRowDataDeserializationSchema.Builder builder =
            new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType));
    final String data = "Test,1,Test";
    final String errorMessage = "Fail to deserialize at field: f2.";
    try {
        deserialize(builder, data);
        fail("expecting exception message:" + errorMessage);
    } catch (Throwable t) {
        // The cause chain must mention the field that failed to parse.
        assertThat(t, FlinkMatchers.containsMessage(errorMessage));
    }
}
Example usage of org.apache.flink.table.types.logical.RowType in the Apache Flink project.
From the class CsvRowDataSerDeSchemaTest, method testDeserialization:
/**
 * Deserializes a single CSV line with the given error/comment handling and converts the
 * resulting internal {@code RowData} back to an external {@code Row} for assertions.
 *
 * @param allowParsingErrors whether parse errors are silently ignored
 * @param allowComments whether comment lines are permitted in the input
 * @param string the raw CSV line to deserialize
 */
@SuppressWarnings("unchecked")
private Row testDeserialization(
        boolean allowParsingErrors, boolean allowComments, String string) throws Exception {
    final DataType dataType =
            ROW(FIELD("f0", STRING()), FIELD("f1", INT()), FIELD("f2", STRING()));
    final RowType rowType = (RowType) dataType.getLogicalType();
    final CsvRowDataDeserializationSchema.Builder builder =
            new CsvRowDataDeserializationSchema.Builder(rowType, InternalTypeInfo.of(rowType))
                    .setIgnoreParseErrors(allowParsingErrors)
                    .setAllowComments(allowComments);
    final RowData internalRow = deserialize(builder, string);
    // Convert back to the external Row representation so tests can compare plain values.
    return (Row) DataFormatConverters.getConverterForDataType(dataType).toExternal(internalRow);
}
Aggregations