Use of org.apache.inlong.sort.formats.common.StringFormatInfo in project incubator-inlong by Apache.
From the class CommonUtilsTest, method testBuildAvroRecordSchemaInJsonForNormalFields:
@Test
public void testBuildAvroRecordSchemaInJsonForNormalFields() throws IOException {
    FieldInfo[] testFieldInfos = new FieldInfo[] {
            new FieldInfo("f1", new StringFormatInfo()),
            new FieldInfo("f2", new ByteFormatInfo())
    };
    JsonNode expectedJsonNode = objectMapper.readTree(
            "{\n"
            + " \"type\":\"record\",\n"
            + " \"name\":\"record\",\n"
            + " \"fields\":[\n"
            + " {\n"
            + " \"name\":\"f1\",\n"
            + " \"type\":[\n"
            + " \"null\",\n"
            + " \"string\"\n"
            + " ],\n"
            + " \"default\":null\n"
            + " },\n"
            + " {\n"
            + " \"name\":\"f2\",\n"
            + " \"type\":[\n"
            + " \"null\",\n"
            + " \"int\"\n"
            + " ],\n"
            + " \"default\":null\n"
            + " }\n"
            + " ]\n"
            + "}");
    String actualJson = buildAvroRecordSchemaInJson(testFieldInfos);
    JsonNode actualJsonNode = objectMapper.readTree(actualJson);
    assertEquals(expectedJsonNode, actualJsonNode);
}
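As an aside, the generated string could also be handed to Avro's own schema parser to confirm that it is a well-formed record schema; a minimal sketch, assuming org.apache.avro is available on the test classpath (this check is not part of the original test):

    // Hypothetical extra assertion: let Avro itself validate the generated schema string.
    org.apache.avro.Schema parsedSchema = new org.apache.avro.Schema.Parser().parse(actualJson);
    assertEquals(org.apache.avro.Schema.Type.RECORD, parsedSchema.getType());
    assertEquals(2, parsedSchema.getFields().size());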
From the class KafkaSinkInfoTest, method init:
@Override
public void init() {
    expectedObject = new KafkaSinkInfo(
            new FieldInfo[] { new FieldInfo("field1", new StringFormatInfo()) },
            "testAddress",
            "testTopic",
            new JsonSerializationInfo());
    expectedJson = "{\n"
            + " \"type\":\"kafka\",\n"
            + " \"fields\":[\n"
            + " {\n"
            + " \"type\":\"base\",\n"
            + " \"name\":\"field1\",\n"
            + " \"format_info\":{\n"
            + " \"type\":\"string\"\n"
            + " }\n"
            + " }\n"
            + " ],\n"
            + " \"address\":\"testAddress\",\n"
            + " \"topic\":\"testTopic\",\n"
            + " \"serialization_info\":{\n"
            + " \"type\":\"json\"\n"
            + " }\n"
            + "}";
    equalObj1 = expectedObject;
    equalObj2 = new KafkaSinkInfo(
            new FieldInfo[] { new FieldInfo("field1", new StringFormatInfo()) },
            "testAddress",
            "testTopic",
            new JsonSerializationInfo());
    unequalObj = new KafkaSinkInfo(
            new FieldInfo[] { new FieldInfo("field1", new StringFormatInfo()) },
            "testAddress",
            "testTopic2",
            new JsonSerializationInfo());
}
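The expectedObject/expectedJson pair suggests that the shared base test performs a JSON round trip; a minimal sketch of what such a check could look like with Jackson, assuming KafkaSinkInfo is Jackson-serializable (an illustration, not code from the project):

    // Illustrative round trip: serialize the sink info and compare it structurally with the expected JSON.
    ObjectMapper objectMapper = new ObjectMapper();
    String serialized = objectMapper.writeValueAsString(expectedObject);
    assertEquals(objectMapper.readTree(expectedJson), objectMapper.readTree(serialized));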
From the class FieldMappingTransformerTest, method testTransform:
@Test
public void testTransform() throws Exception {
    final FieldInfo fieldInfo = new FieldInfo("id", new LongFormatInfo());
    final FieldInfo extraFieldInfo = new FieldInfo("not_important", new StringFormatInfo());
    final SourceInfo sourceInfo = new TestingSourceInfo(new FieldInfo[] { extraFieldInfo, fieldInfo });
    final SinkInfo sinkInfo = new TestingSinkInfo(new FieldInfo[] { extraFieldInfo, fieldInfo });
    final long dataFlowId = 1L;
    final DataFlowInfo dataFlowInfo = new DataFlowInfo(dataFlowId, sourceInfo, sinkInfo);
    final FieldMappingTransformer transformer = new FieldMappingTransformer(new Configuration());
    transformer.addDataFlow(dataFlowInfo);
    // should be 4 fields (2 origin fields + time + attr)
    final Row sourceRow = new Row(2 + SOURCE_FIELD_SKIP_STEP);
    sourceRow.setField(0, System.currentTimeMillis());
    sourceRow.setField(1, "attr");
    sourceRow.setField(2, "not important");
    sourceRow.setField(3, 9527L);
    final Record sourceRecord = new Record(dataFlowId, System.currentTimeMillis(), sourceRow);
    final Record sinkRecord = transformer.transform(sourceRecord);
    assertEquals(dataFlowId, sinkRecord.getDataflowId());
    assertEquals(2, sinkRecord.getRow().getArity());
    assertEquals("not important", sinkRecord.getRow().getField(0));
    assertEquals(9527L, sinkRecord.getRow().getField(1));
}
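The source row above is laid out as event time, attributes, and then the declared fields, which is what SOURCE_FIELD_SKIP_STEP accounts for. A hypothetical helper that builds such a row, assuming the skip step covers exactly the time and attribute columns as the inline comment indicates (the helper is illustrative, not project code):

    // Hypothetical helper: prepend the time and attr columns that the transformer skips.
    private static Row buildSourceRow(Object... fields) {
        Row row = new Row(fields.length + SOURCE_FIELD_SKIP_STEP);
        row.setField(0, System.currentTimeMillis()); // event time
        row.setField(1, "attr"); // attribute string
        for (int i = 0; i < fields.length; i++) {
            row.setField(i + SOURCE_FIELD_SKIP_STEP, fields[i]);
        }
        return row;
    }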
From the class RecordTransformerTest, method testTransformation:
@Test
public void testTransformation() throws Exception {
    final int bufferSize = 1024;
    final RecordTransformer transformer = new RecordTransformer(bufferSize);
    final FieldInfo field1 = new FieldInfo("field1", new LongFormatInfo());
    final FieldInfo field2 = new FieldInfo("field2", new StringFormatInfo());
    final TestingSinkInfo sinkInfo = new TestingSinkInfo(new FieldInfo[] { field1, field2 });
    final DataFlowInfo dataFlowInfo = new DataFlowInfo(1L, new EmptySourceInfo(), sinkInfo);
    transformer.addDataFlow(dataFlowInfo);
    final Row row = new Row(2);
    row.setField(0, 1024L);
    row.setField(1, "9527");
    final Record record = new Record(1L, System.currentTimeMillis(), row);
    final Record transformed = transformer.toRecord(transformer.toSerializedRecord(record));
    assertEquals(record, transformed);
    // check the buffers
    assertEquals(0, transformer.getDataInputDeserializer().available());
    assertEquals(0, transformer.getDataOutputSerializer().length());
    assertEquals(bufferSize, transformer.getDataOutputSerializer().getSharedBuffer().length);
    transformer.removeDataFlow(dataFlowInfo);
    assertEquals(0, transformer.getRowSerializers().size());
}
From the class RecordTransformerTest, method testRecordAndSerializerFieldNotMatch:
@Test
public void testRecordAndSerializerFieldNotMatch() throws Exception {
    final int bufferSize = 1024;
    final RecordTransformer transformer = new RecordTransformer(bufferSize);
    final FieldInfo field1 = new FieldInfo("field1", new LongFormatInfo());
    final FieldInfo field2 = new FieldInfo("field2", new StringFormatInfo());
    final TestingSinkInfo sinkInfo = new TestingSinkInfo(new FieldInfo[] { field1, field2 });
    final DataFlowInfo dataFlowInfo = new DataFlowInfo(1L, new EmptySourceInfo(), sinkInfo);
    transformer.addDataFlow(dataFlowInfo);
    final Row row = new Row(2);
    row.setField(0, 1024L);
    row.setField(1, 2048);
    final Record record = new Record(1L, System.currentTimeMillis(), row);
    try {
        // field2 is declared as a string but the row carries an Integer, so serialization must fail
        transformer.toSerializedRecord(record);
        Assert.fail();
    } catch (Exception expected) {
    }
    final FieldInfo newField1 = new FieldInfo("field1", new LongFormatInfo());
    final FieldInfo newField2 = new FieldInfo("field2", new IntFormatInfo());
    final TestingSinkInfo newSinkInfo = new TestingSinkInfo(new FieldInfo[] { newField1, newField2 });
    final DataFlowInfo newDataFlowInfo = new DataFlowInfo(1L, new EmptySourceInfo(), newSinkInfo);
    transformer.updateDataFlow(newDataFlowInfo);
    SerializedRecord serializedRecord = transformer.toSerializedRecord(record);
    transformer.updateDataFlow(dataFlowInfo);
    try {
        // the bytes were written with the int-typed flow, so reading them back with the string-typed flow must fail
        transformer.toRecord(serializedRecord);
        Assert.fail();
    } catch (Exception expected) {
    }
}