Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.
From the class OrcBulkRowDataWriterTest, method readList.
/**
 * Reads a ListColumnVector with the specified schema
 * {@literal array<struct<_col2_col0:string>>}.
 */
private static ArrayData readList(ListColumnVector listVector, int row) {
    // Start offset of this row's entries within the child vector.
    int offset = (int) listVector.offsets[row];
    StructColumnVector structChild = (StructColumnVector) listVector.child;
    BytesColumnVector valueChild = (BytesColumnVector) structChild.fields[0];
    // This helper assumes each list holds exactly two struct elements, matching the test data.
    StringData value1 = readStringData(valueChild, offset);
    GenericRowData arrayValue1 = new GenericRowData(1);
    arrayValue1.setField(0, value1);
    StringData value2 = readStringData(valueChild, offset + 1);
    GenericRowData arrayValue2 = new GenericRowData(1);
    arrayValue2.setField(0, value2);
    return new GenericArrayData(new Object[] {arrayValue1, arrayValue2});
}
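For reference, the GenericArrayData built above can be read back through the plain ArrayData accessors. A minimal sketch of such a check, assuming a listVector in scope and row index 0 (the assertion and values are illustrative, not taken from the actual test):

// Illustrative read-back of the array returned by readList.
ArrayData list = readList(listVector, 0);
assert list.size() == 2;
// Each element is a one-field struct, so it comes back as a RowData of arity 1.
RowData firstStruct = list.getRow(0, 1);
StringData firstValue = firstStruct.getString(0); // the string stored in _col2_col0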
Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.
From the class ArrowReaderWriterTest, method getTestData.
@Override
public RowData[] getTestData() {
    RowData row1 = StreamRecordUtils.row(
            (byte) 1, (short) 2, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            new GenericArrayData(new StringData[] {
                    StringData.fromString("hello"), StringData.fromString("中文"), null}),
            GenericRowData.of(1, StringData.fromString("hello"),
                    new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                    TimestampData.fromEpochMillis(3600000),
                    GenericRowData.of(1, StringData.fromString("hello"))));
    BinaryRowData row2 = StreamRecordUtils.binaryrow(
            (byte) 1, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(new GenericArrayData(new String[] {null, null, null}),
                    new ArrayDataSerializer(new VarCharType())),
            Tuple2.of(GenericRowData.of(1, null,
                            new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                            null, GenericRowData.of(1, StringData.fromString("hello"))),
                    new RowDataSerializer(rowFieldType)));
    RowData row3 = StreamRecordUtils.row(
            null, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000),
            TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000),
            new GenericArrayData(new String[] {null, null, null}),
            GenericRowData.of(1, null,
                    new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                    null, null));
    BinaryRowData row4 = StreamRecordUtils.binaryrow(
            (byte) 1, null, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
            DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
            Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
            Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
            Tuple2.of(new GenericArrayData(new StringData[] {
                            StringData.fromString("hello"), StringData.fromString("中文"), null}),
                    new ArrayDataSerializer(new VarCharType())),
            Tuple2.of(GenericRowData.of(1, null,
                            new GenericArrayData(new StringData[] {StringData.fromString("hello")}),
                            null, null),
                    new RowDataSerializer(rowFieldType)));
    RowData row5 = StreamRecordUtils.row(new Object[fieldTypes.size()]);
    BinaryRowData row6 = StreamRecordUtils.binaryrow(new Object[fieldTypes.size()]);
    return new RowData[] {row1, row2, row3, row4, row5, row6};
}
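Besides wrapping Object[] values as above, GenericArrayData also has constructors for primitive arrays, which is convenient when a column holds no nulls. A minimal sketch with illustrative values:

// A primitive array is stored as-is, while nullable element types need an object array.
ArrayData ints = new GenericArrayData(new int[] {1, 2, 3});
ArrayData strings = new GenericArrayData(new StringData[] {StringData.fromString("hello"), null});
int first = ints.getInt(0);                  // 1
boolean secondIsNull = strings.isNullAt(1);  // true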
Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.
From the class PostgresRowConverter, method createPostgresArrayConverter.
private JdbcDeserializationConverter createPostgresArrayConverter(ArrayType arrayType) {
    // PG's bytea[] arrives as PGobject elements rather than primitive byte arrays,
    // so each element must be unwrapped before it is deserialized.
    if (arrayType.getElementType().is(LogicalTypeFamily.BINARY_STRING)) {
        final Class<?> elementClass =
                LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
        final JdbcDeserializationConverter elementConverter =
                createNullableInternalConverter(arrayType.getElementType());
        return val -> {
            PgArray pgArray = (PgArray) val;
            Object[] in = (Object[]) pgArray.getArray();
            final Object[] array = (Object[]) Array.newInstance(elementClass, in.length);
            for (int i = 0; i < in.length; i++) {
                array[i] = elementConverter.deserialize(((PGobject) in[i]).getValue().getBytes());
            }
            return new GenericArrayData(array);
        };
    } else {
        final Class<?> elementClass =
                LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
        final JdbcDeserializationConverter elementConverter =
                createNullableInternalConverter(arrayType.getElementType());
        return val -> {
            PgArray pgArray = (PgArray) val;
            Object[] in = (Object[]) pgArray.getArray();
            final Object[] array = (Object[]) Array.newInstance(elementClass, in.length);
            for (int i = 0; i < in.length; i++) {
                array[i] = elementConverter.deserialize(in[i]);
            }
            return new GenericArrayData(array);
        };
    }
}
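Either branch produces a GenericArrayData whose elements use Flink's internal conversion classes; for the bytea[] branch that means byte[] elements. A minimal sketch of the resulting shape (the sample bytes are illustrative):

// Illustrative shape of a converted bytea[] column: byte[] elements read via getBinary.
ArrayData byteaArray = new GenericArrayData(new byte[][] {"a".getBytes(), "b".getBytes()});
byte[] first = byteaArray.getBinary(0); // the bytes of "a"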
Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.
From the class CanalJsonSerializationSchema, method serialize.
@Override
public byte[] serialize(RowData row) {
    try {
        StringData opType = rowKind2String(row.getRowKind());
        // Canal-JSON wraps each changelog row in a single-element "data" array.
        ArrayData arrayData = new GenericArrayData(new RowData[] {row});
        reuse.setField(0, arrayData); // the "data" field
        reuse.setField(1, opType);    // the "type" field
        return jsonSerializer.serialize(reuse);
    } catch (Throwable t) {
        throw new RuntimeException("Could not serialize row '" + row + "'.", t);
    }
}
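The reuse row is the Canal-JSON envelope: field 0 carries the "data" array and field 1 the operation type. A minimal sketch of preparing such an envelope, where the two-field arity and the someRow variable are assumptions based on the serialize method above:

// Hypothetical envelope setup; 'someRow' stands in for any changelog RowData.
GenericRowData reuse = new GenericRowData(2);
reuse.setField(0, new GenericArrayData(new RowData[] {someRow})); // Canal "data" array
reuse.setField(1, StringData.fromString("INSERT"));               // Canal "type" field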
Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.
From the class AvroToRowDataConverters, method createArrayConverter.
private static AvroToRowDataConverter createArrayConverter(ArrayType arrayType) {
    final AvroToRowDataConverter elementConverter =
            createNullableConverter(arrayType.getElementType());
    final Class<?> elementClass =
            LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
    return avroObject -> {
        final List<?> list = (List<?>) avroObject;
        final int length = list.size();
        // The backing array uses the element's internal conversion class so that
        // readers of the GenericArrayData see the expected internal data structures.
        final Object[] array = (Object[]) Array.newInstance(elementClass, length);
        for (int i = 0; i < length; ++i) {
            array[i] = elementConverter.convert(list.get(i));
        }
        return new GenericArrayData(array);
    };
}
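A minimal usage sketch, assuming the converter is built inside AvroToRowDataConverters for an ARRAY<INT> logical type (the invocation below is illustrative, since createArrayConverter is private):

// Hypothetical invocation for ARRAY<INT>: elementClass resolves to Integer,
// so the converter fills an Integer[] and wraps it in GenericArrayData.
AvroToRowDataConverter converter = createArrayConverter(new ArrayType(new IntType()));
ArrayData result = (ArrayData) converter.convert(Arrays.asList(1, 2, null));
int first = result.getInt(0);            // 1
boolean lastIsNull = result.isNullAt(2); // true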