Use of org.apache.inlong.sort.formats.common.MapFormatInfo in project incubator-inlong by apache.
From the class TableFormatUtils, the method deriveFormatInfo.
/**
 * Derive the format information for the given type.
 *
 * @param logicalType The type whose format is derived.
 * @return The format information for the given type.
 */
public static FormatInfo deriveFormatInfo(LogicalType logicalType) {
    if (logicalType instanceof VarCharType) {
        return StringFormatInfo.INSTANCE;
    } else if (logicalType instanceof BooleanType) {
        return BooleanFormatInfo.INSTANCE;
    } else if (logicalType instanceof TinyIntType) {
        return ByteFormatInfo.INSTANCE;
    } else if (logicalType instanceof SmallIntType) {
        return ShortFormatInfo.INSTANCE;
    } else if (logicalType instanceof IntType) {
        return IntFormatInfo.INSTANCE;
    } else if (logicalType instanceof BigIntType) {
        return LongFormatInfo.INSTANCE;
    } else if (logicalType instanceof FloatType) {
        return FloatFormatInfo.INSTANCE;
    } else if (logicalType instanceof DoubleType) {
        return DoubleFormatInfo.INSTANCE;
    } else if (logicalType instanceof DecimalType) {
        return DecimalFormatInfo.INSTANCE;
    } else if (logicalType instanceof DateType) {
        return new DateFormatInfo();
    } else if (logicalType instanceof TimeType) {
        return new TimeFormatInfo();
    } else if (logicalType instanceof TimestampType) {
        return new TimestampFormatInfo();
    } else if (logicalType instanceof LocalZonedTimestampType) {
        return new LocalZonedTimestampFormatInfo();
    } else if (logicalType instanceof ArrayType) {
        ArrayType arrayType = (ArrayType) logicalType;
        LogicalType elementType = arrayType.getElementType();
        FormatInfo elementFormatInfo = deriveFormatInfo(elementType);
        return new ArrayFormatInfo(elementFormatInfo);
    } else if (logicalType instanceof MapType) {
        MapType mapType = (MapType) logicalType;
        LogicalType keyType = mapType.getKeyType();
        LogicalType valueType = mapType.getValueType();
        FormatInfo keyFormatInfo = deriveFormatInfo(keyType);
        FormatInfo valueFormatInfo = deriveFormatInfo(valueType);
        return new MapFormatInfo(keyFormatInfo, valueFormatInfo);
    } else if (logicalType instanceof RowType) {
        RowType rowType = (RowType) logicalType;
        List<RowType.RowField> rowFields = rowType.getFields();
        String[] fieldNames = new String[rowFields.size()];
        FormatInfo[] fieldFormatInfos = new FormatInfo[rowFields.size()];
        for (int i = 0; i < rowFields.size(); ++i) {
            RowType.RowField rowField = rowFields.get(i);
            fieldNames[i] = rowField.getName();
            fieldFormatInfos[i] = deriveFormatInfo(rowField.getType());
        }
        return new RowFormatInfo(fieldNames, fieldFormatInfos);
    } else if (logicalType instanceof BinaryType) {
        return BinaryFormatInfo.INSTANCE;
    } else if (logicalType instanceof NullType) {
        return NullFormatInfo.INSTANCE;
    } else {
        throw new UnsupportedOperationException();
    }
}
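For illustration, a minimal usage sketch (not taken from the project) of the method above: deriving the FormatInfo for a nested Flink MAP type. It assumes the Flink logical type classes and TableFormatUtils are imported; the method name below is made up for the example.

// Sketch only: derive the FormatInfo for MAP<VARCHAR, ARRAY<INT>>.
public static FormatInfo deriveMapExample() {
    LogicalType mapType = new MapType(new VarCharType(), new ArrayType(new IntType()));
    // Expected result: MapFormatInfo(StringFormatInfo, ArrayFormatInfo(IntFormatInfo))
    return TableFormatUtils.deriveFormatInfo(mapType);
}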
Use of org.apache.inlong.sort.formats.common.MapFormatInfo in project incubator-inlong by apache.
From the class DebeziumDeserializationSchemaBuilder, the method getProducedFieldInfos.
public static FieldInfo[] getProducedFieldInfos(FieldInfo[] physicalFieldInfos) {
    List<FieldInfo> results = new ArrayList<>();
    results.add(new FieldInfo("metadata",
            new MapFormatInfo(StringFormatInfo.INSTANCE, StringFormatInfo.INSTANCE)));
    results.addAll(Arrays.asList(physicalFieldInfos));
    return results.toArray(new FieldInfo[0]);
}
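A hedged illustration of the effect of getProducedFieldInfos: the "metadata" map field is prepended to whatever physical fields are passed in. The field names below are invented for the example.

// Sketch only: two hypothetical physical fields yield three produced fields,
// with the "metadata" field (MAP<STRING, STRING>) at index 0.
FieldInfo[] physicalFields = new FieldInfo[] {
        new FieldInfo("id", LongFormatInfo.INSTANCE),
        new FieldInfo("name", StringFormatInfo.INSTANCE)
};
FieldInfo[] producedFields = DebeziumDeserializationSchemaBuilder.getProducedFieldInfos(physicalFields);
// producedFields.length == 3; producedFields[0] is the "metadata" field.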
Use of org.apache.inlong.sort.formats.common.MapFormatInfo in project incubator-inlong by apache.
From the class RowToJsonKafkaSinkTest, the method prepareData.
@Override
protected void prepareData() throws IOException, ClassNotFoundException {
    topic = "test_kafka_row_to_json";
    serializationSchema = SerializationSchemaFactory.build(
            new FieldInfo[] {
                    new FieldInfo("f1", new StringFormatInfo()),
                    new FieldInfo("f2", new MapFormatInfo(new StringFormatInfo(), new DoubleFormatInfo())),
                    new FieldInfo("f3", new ArrayFormatInfo(new IntFormatInfo()))
            },
            new JsonSerializationInfo());
    prepareTestRows();
}
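For context, a hedged sketch of one row that would fit the schema registered above (f1: STRING, f2: MAP<STRING, DOUBLE>, f3: ARRAY<INT>); the actual rows are built in prepareTestRows(), which is not part of this excerpt, and the JSON shown is only an approximation.

// Sketch only: a row compatible with the f1/f2/f3 field infos above
// (assumes java.util.HashMap/Map and org.apache.flink.types.Row are imported).
Map<String, Double> f2Value = new HashMap<>();
f2Value.put("score", 9.5);
Row row = Row.of("record-1", f2Value, new Integer[] { 1, 2, 3 });
// A JSON serialization of this row would look roughly like:
// {"f1":"record-1","f2":{"score":9.5},"f3":[1,2,3]}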
Use of org.apache.inlong.sort.formats.common.MapFormatInfo in project incubator-inlong by apache.
From the class CommonUtilsTest, the method testBuildAvroRecordSchemaInJsonForRecursiveFields.
@Test
public void testBuildAvroRecordSchemaInJsonForRecursiveFields() throws IOException {
    FieldInfo[] testFieldInfos = new FieldInfo[] {
            new FieldInfo("f1", new ArrayFormatInfo(new MapFormatInfo(new StringFormatInfo(), new ArrayFormatInfo(new ArrayFormatInfo(new ShortFormatInfo()))))),
            new FieldInfo("f2", new MapFormatInfo(new StringFormatInfo(), new MapFormatInfo(new StringFormatInfo(), new RowFormatInfo(new String[] { "f21", "f22" }, new FormatInfo[] { new IntFormatInfo(), new ArrayFormatInfo(new ByteFormatInfo()) })))),
            new FieldInfo("f3", new RowFormatInfo(new String[] { "f31", "f32" }, new FormatInfo[] { new ArrayFormatInfo(new StringFormatInfo()), new RowFormatInfo(new String[] { "f321", "f322" }, new FormatInfo[] { new ArrayFormatInfo(new IntFormatInfo()), new MapFormatInfo(new StringFormatInfo(), new ArrayFormatInfo(new ByteFormatInfo())) }) }))
    };
JsonNode expectedJsonNode = objectMapper.readTree("{\n" + " \"type\":\"record\",\n" + " \"name\":\"record\",\n" + " \"fields\":[\n" + " {\n" + " \"name\":\"f1\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"array\",\n" + " \"items\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"map\",\n" + " \"values\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"array\",\n" + " \"items\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"array\",\n" + " \"items\":[\n" + " \"null\",\n" + " \"int\"\n" + " ]\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " },\n" + " {\n" + " \"name\":\"f2\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"map\",\n" + " \"values\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"map\",\n" + " \"values\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"record\",\n" + " \"name\":\"record_f2\",\n" + " \"fields\":[\n" + " {\n" + " \"name\":\"f21\",\n" + " \"type\":[\n" + " \"null\",\n" + " \"int\"\n" + " ],\n" + " \"default\":null\n" + " },\n" + " {\n" + " \"name\":\"f22\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"array\",\n" + " \"items\":[\n" + " \"null\",\n" + " \"int\"\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " },\n" + " {\n" + " \"name\":\"f3\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"record\",\n" + " \"name\":\"record_f3\",\n" + " \"fields\":[\n" + " {\n" + " \"name\":\"f31\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"array\",\n" + " \"items\":[\n" + " \"null\",\n" + " \"string\"\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " },\n" + " {\n" + " \"name\":\"f32\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"record\",\n" + " \"name\":\"record_f3_f32\",\n" + " \"fields\":[\n" + " {\n" + " \"name\":\"f321\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"array\",\n" + " \"items\":[\n" + " \"null\",\n" + " \"int\"\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " },\n" + " {\n" + " \"name\":\"f322\",\n" + " \"type\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"map\",\n" + " \"values\":[\n" + " \"null\",\n" + " {\n" + " \"type\":\"array\",\n" + " \"items\":[\n" + " \"null\",\n" + " \"int\"\n" + " ]\n" + " }\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " }\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " }\n" + " ]\n" + " }\n" + " ],\n" + " \"default\":null\n" + " }\n" + " ]\n" + "}");
    String actualJson = buildAvroRecordSchemaInJson(testFieldInfos);
    JsonNode actualJsonNode = objectMapper.readTree(actualJson);
    assertEquals(expectedJsonNode, actualJsonNode);
}
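The pattern in the expected schema above also shows up on much smaller inputs; the following sketch is extrapolated from the test and is not part of the project: a single MAP<STRING, INT> field should come out as a nullable Avro map whose values are nullable as well.

// Sketch only: a single map field, extrapolated from the expected schema above.
FieldInfo[] singleField = new FieldInfo[] {
        new FieldInfo("f1", new MapFormatInfo(new StringFormatInfo(), new IntFormatInfo()))
};
String json = buildAvroRecordSchemaInJson(singleField);
// Expected to contain, roughly:
// {"name":"f1","type":["null",{"type":"map","values":["null","int"]}],"default":null}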
Use of org.apache.inlong.sort.formats.common.MapFormatInfo in project incubator-inlong by apache.
From the class RowToAvroKafkaSinkTest, the method prepareData.
@Override
protected void prepareData() throws IOException, ClassNotFoundException {
    fieldInfos = new FieldInfo[] {
            new FieldInfo("f1", new StringFormatInfo()),
            new FieldInfo("f2", new IntFormatInfo()),
            new FieldInfo("f3", new NullFormatInfo()),
            new FieldInfo("f4", new BinaryFormatInfo()),
            new FieldInfo("f5", new MapFormatInfo(new StringFormatInfo(), new RowFormatInfo(new String[] { "f51", "f52" }, new FormatInfo[] { new IntFormatInfo(), new ArrayFormatInfo(new DoubleFormatInfo()) })))
    };
    topic = "test_kafka_row_to_avro";
    serializationSchema = SerializationSchemaFactory.build(fieldInfos, new AvroSerializationInfo());
    prepareTestRows();
}
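A hedged sketch of one row matching the field layout above: f3 (NullFormatInfo) becomes a null value, f4 (BinaryFormatInfo) raw bytes, and f5 a map from strings to nested rows. The actual test rows come from prepareTestRows(), which is not shown in this excerpt.

// Sketch only: a row compatible with the f1..f5 field infos above
// (assumes java.util.HashMap/Map and org.apache.flink.types.Row are imported).
Map<String, Row> f5Value = new HashMap<>();
f5Value.put("key", Row.of(1, new Double[] { 1.0, 2.0 }));
Row row = Row.of("str", 42, null, new byte[] { 0x01, 0x02 }, f5Value);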