use of org.apache.flink.table.data.StringData in project flink by apache.
the class JsonRowDataSerDeSchemaTest method testSerializationMapNullKey.
@Test
public void testSerializationMapNullKey() throws Exception {
    RowType rowType =
            (RowType) ROW(FIELD("nestedMap", MAP(STRING(), MAP(STRING(), INT())))).getLogicalType();
    // test data
    // use LinkedHashMap to preserve entry order
    Map<StringData, Integer> map = new LinkedHashMap<>();
    map.put(StringData.fromString("no-null key"), 1);
    map.put(StringData.fromString(null), 2);
    GenericMapData mapData = new GenericMapData(map);
    Map<StringData, GenericMapData> nestedMap = new LinkedHashMap<>();
    nestedMap.put(StringData.fromString("no-null key"), mapData);
    nestedMap.put(StringData.fromString(null), mapData);
    GenericMapData nestedMapData = new GenericMapData(nestedMap);
    GenericRowData rowData = new GenericRowData(1);
    rowData.setField(0, nestedMapData);
    JsonRowDataSerializationSchema serializationSchema1 =
            new JsonRowDataSerializationSchema(
                    rowType, TimestampFormat.SQL, JsonFormatOptions.MapNullKeyMode.FAIL, "null", true);
    // expected message for serializationSchema1
    String errorMessage1 =
            "JSON format doesn't support to serialize map data with null keys."
                    + " You can drop null key entries or encode null in literals by specifying map-null-key.mode option.";
    JsonRowDataSerializationSchema serializationSchema2 =
            new JsonRowDataSerializationSchema(
                    rowType, TimestampFormat.SQL, JsonFormatOptions.MapNullKeyMode.DROP, "null", true);
    // expected result for serializationSchema2
    String expectResult2 = "{\"nestedMap\":{\"no-null key\":{\"no-null key\":1}}}";
    JsonRowDataSerializationSchema serializationSchema3 =
            new JsonRowDataSerializationSchema(
                    rowType, TimestampFormat.SQL, JsonFormatOptions.MapNullKeyMode.LITERAL, "nullKey", true);
    // expected result for serializationSchema3
    String expectResult3 =
            "{\"nestedMap\":{\"no-null key\":{\"no-null key\":1,\"nullKey\":2},\"nullKey\":{\"no-null key\":1,\"nullKey\":2}}}";
    try {
        // FAIL mode: serialization throws on a null map key
        serializationSchema1.serialize(rowData);
        Assert.fail("expecting exception message: " + errorMessage1);
    } catch (Throwable t) {
        assertThat(t, FlinkMatchers.containsMessage(errorMessage1));
    }
    // DROP mode: null-key entries are silently dropped
    byte[] actual2 = serializationSchema2.serialize(rowData);
    assertEquals(expectResult2, new String(actual2));
    // LITERAL mode: null keys are replaced with the configured literal
    byte[] actual3 = serializationSchema3.serialize(rowData);
    assertEquals(expectResult3, new String(actual3));
}
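The same constructor can be exercised without the nesting. A minimal sketch, assuming the same imports and the JsonRowDataSerializationSchema constructor used above (the field name "m" and the literal "nullKey" are illustrative):

RowType type = (RowType) ROW(FIELD("m", MAP(STRING(), INT()))).getLogicalType();
Map<StringData, Integer> m = new LinkedHashMap<>();
m.put(StringData.fromString("k"), 1);
m.put(StringData.fromString(null), 2); // fromString(null) yields the null key the test relies on
GenericRowData row = new GenericRowData(1);
row.setField(0, new GenericMapData(m));
JsonRowDataSerializationSchema schema =
        new JsonRowDataSerializationSchema(
                type, TimestampFormat.SQL, JsonFormatOptions.MapNullKeyMode.LITERAL, "nullKey", true);
// expected output: {"m":{"k":1,"nullKey":2}}
System.out.println(new String(schema.serialize(row)));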
use of org.apache.flink.table.data.StringData in project flink by apache.
the class OrcBulkRowDataWriterTest method readList.
/**
 * Reads a ListColumnVector with the specified schema {@literal array<struct<_col2_col0:string>>}.
 */
private static ArrayData readList(ListColumnVector listVector, int row) {
    int offset = (int) listVector.offsets[row];
    StructColumnVector structChild = (StructColumnVector) listVector.child;
    BytesColumnVector valueChild = (BytesColumnVector) structChild.fields[0];
    StringData value1 = readStringData(valueChild, offset);
    GenericRowData arrayValue1 = new GenericRowData(1);
    arrayValue1.setField(0, value1);
    StringData value2 = readStringData(valueChild, offset + 1);
    GenericRowData arrayValue2 = new GenericRowData(1);
    arrayValue2.setField(0, value2);
    return new GenericArrayData(new Object[] {arrayValue1, arrayValue2});
}
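The readStringData helper lives elsewhere in OrcBulkRowDataWriterTest. A plausible sketch of it, built only on the public BytesColumnVector fields (vector, start, length); the body is an assumption, not the project's actual code:

private static StringData readStringData(BytesColumnVector vector, int row) {
    // Copy this row's bytes out of the vector's shared buffer and wrap them in StringData.
    byte[] bytes = new byte[vector.length[row]];
    System.arraycopy(vector.vector[row], vector.start[row], bytes, 0, vector.length[row]);
    return StringData.fromBytes(bytes);
}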
use of org.apache.flink.table.data.StringData in project flink by apache.
the class ArrowReaderWriterTest method getTestData.
@Override
public RowData[] getTestData() {
    RowData row1 =
            StreamRecordUtils.row(
                    (byte) 1, (short) 2, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
                    DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
                    TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
                    TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
                    TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
                    TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
                    new GenericArrayData(new StringData[] {StringData.fromString("hello"), StringData.fromString("中文"), null}),
                    GenericRowData.of(1, StringData.fromString("hello"),
                            new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                            TimestampData.fromEpochMillis(3600000),
                            GenericRowData.of(1, StringData.fromString("hello"))));
    BinaryRowData row2 =
            StreamRecordUtils.binaryrow(
                    (byte) 1, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
                    DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
                    Tuple2.of(new GenericArrayData(new String[] {null, null, null}), new ArrayDataSerializer(new VarCharType())),
                    Tuple2.of(GenericRowData.of(1, null,
                                    new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                                    null, GenericRowData.of(1, StringData.fromString("hello"))),
                            new RowDataSerializer(rowFieldType)));
    RowData row3 =
            StreamRecordUtils.row(
                    null, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
                    DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
                    TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
                    TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
                    TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
                    TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
                    new GenericArrayData(new String[] {null, null, null}),
                    GenericRowData.of(1, null,
                            new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                            null, null));
    BinaryRowData row4 =
            StreamRecordUtils.binaryrow(
                    (byte) 1, null, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
                    DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
                    Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
                    Tuple2.of(new GenericArrayData(new StringData[] {StringData.fromString("hello"), StringData.fromString("中文"), null}),
                            new ArrayDataSerializer(new VarCharType())),
                    Tuple2.of(GenericRowData.of(1, null,
                                    new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                                    null, null),
                            new RowDataSerializer(rowFieldType)));
    RowData row5 = StreamRecordUtils.row(new Object[fieldTypes.size()]);
    BinaryRowData row6 = StreamRecordUtils.binaryrow(new Object[fieldTypes.size()]);
    return new RowData[] {row1, row2, row3, row4, row5, row6};
}
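A minimal standalone sketch of the StringData array pattern these rows use; it relies only on the ArrayData accessors from flink-table-common, and the variable names are illustrative:

GenericArrayData names =
        new GenericArrayData(
                new StringData[] {StringData.fromString("hello"), StringData.fromString("中文"), null});
StringData first = names.getString(0);   // "hello"
boolean thirdIsNull = names.isNullAt(2); // true: null elements stay null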
use of org.apache.flink.table.data.StringData in project flink by apache.
the class CanalJsonSerializationSchema method serialize.
@Override
public byte[] serialize(RowData row) {
    try {
        StringData opType = rowKind2String(row.getRowKind());
        ArrayData arrayData = new GenericArrayData(new RowData[] {row});
        reuse.setField(0, arrayData);
        reuse.setField(1, opType);
        return jsonSerializer.serialize(reuse);
    } catch (Throwable t) {
        throw new RuntimeException("Could not serialize row '" + row + "'.", t);
    }
}
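The rowKind2String helper is defined in the same class. A hedged sketch of the mapping Canal-style output needs (insert-like kinds surface as INSERT, retraction-like kinds as DELETE); the exact body is an assumption:

private static StringData rowKind2String(RowKind rowKind) {
    switch (rowKind) {
        case INSERT:
        case UPDATE_AFTER:
            return StringData.fromString("INSERT"); // upserts surface as Canal INSERTs
        case UPDATE_BEFORE:
        case DELETE:
            return StringData.fromString("DELETE"); // retractions surface as Canal DELETEs
        default:
            throw new UnsupportedOperationException(
                    "Unsupported operation '" + rowKind + "' for row kind.");
    }
}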
use of org.apache.flink.table.data.StringData in project flink by apache.
the class JsonObjectAggFunction method getValue.
@Override
public String getValue(Accumulator acc) {
    final ObjectNode rootNode = createObjectNode();
    try {
        for (final StringData key : acc.map.keys()) {
            final StringData value = acc.map.get(key);
            final JsonNode valueNode =
                    value == null
                            ? NULL_NODE
                            : getNodeFactory().rawValueNode(new RawValue(value.toString()));
            rootNode.set(key.toString(), valueNode);
        }
    } catch (Exception e) {
        throw new TableException("The accumulator state could not be serialized.", e);
    }
    return serializeJson(rootNode);
}
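The rawValueNode call is what lets pre-serialized JSON values pass through unescaped. A minimal sketch with plain Jackson (Flink uses a shaded copy, but the node-factory API is the same; the keys and payloads are illustrative):

ObjectNode root = JsonNodeFactory.instance.objectNode();
root.set("k1", JsonNodeFactory.instance.rawValueNode(new RawValue("{\"nested\":1}")));
root.set("k2", JsonNodeFactory.instance.nullNode());
// root.toString() -> {"k1":{"nested":1},"k2":null}  (the raw value is spliced in verbatim,
// not re-escaped as a string)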