Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testBinary.
@Test
public void testBinary() {
    BinaryRowData row = new BinaryRowData(2);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    byte[] bytes1 = new byte[] {1, -1, 5};
    byte[] bytes2 = new byte[] {1, -1, 5, 5, 1, 5, 1, 5};
    writer.writeBinary(0, bytes1);
    writer.writeBinary(1, bytes2);
    writer.complete();
    Assert.assertArrayEquals(bytes1, row.getBinary(0));
    Assert.assertArrayEquals(bytes2, row.getBinary(1));
}
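Variable-length string fields follow the same write-then-complete pattern as binary fields. A minimal sketch, not taken from the Flink test suite, assuming org.apache.flink.table.data.StringData is imported alongside the classes above:

// hedged sketch: writing and reading back a string field with the same writer API
BinaryRowData stringRow = new BinaryRowData(1);
BinaryRowWriter stringWriter = new BinaryRowWriter(stringRow);
stringWriter.writeString(0, StringData.fromString("hello"));
stringWriter.complete();
Assert.assertEquals(StringData.fromString("hello"), stringRow.getString(0));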
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testGenericObject.
@Test
public void testGenericObject() throws Exception {
    GenericTypeInfo<MyObj> info = new GenericTypeInfo<>(MyObj.class);
    TypeSerializer<MyObj> genericSerializer = info.createSerializer(new ExecutionConfig());
    RawValueDataSerializer<MyObj> binarySerializer =
            new RawValueDataSerializer<>(genericSerializer);
    BinaryRowData row = new BinaryRowData(4);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeInt(0, 0);
    RawValueData<MyObj> myObj1 = RawValueData.fromObject(new MyObj(0, 1));
    writer.writeRawValue(1, myObj1, binarySerializer);
    RawValueData<MyObj> myObj2 = RawValueData.fromObject(new MyObj(123, 5.0));
    writer.writeRawValue(2, myObj2, binarySerializer);
    RawValueData<MyObj> myObj3 = RawValueData.fromObject(new MyObj(1, 1));
    writer.writeRawValue(3, myObj3, binarySerializer);
    writer.complete();
    assertTestGenericObjectRow(row, genericSerializer);
    // round-trip the row through pages backed by several variable-length memory segments
    BinaryRowDataSerializer serializer = new BinaryRowDataSerializer(4);
    MemorySegment[] memorySegments = new MemorySegment[3];
    ArrayList<MemorySegment> memorySegmentList = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        memorySegments[i] = MemorySegmentFactory.wrap(new byte[64]);
        memorySegmentList.add(memorySegments[i]);
    }
    RandomAccessOutputView out = new RandomAccessOutputView(memorySegments, 64);
    serializer.serializeToPages(row, out);
    BinaryRowData mapRow = serializer.createInstance();
    mapRow = serializer.mapFromPages(mapRow, new RandomAccessInputView(memorySegmentList, 64));
    assertTestGenericObjectRow(mapRow, genericSerializer);
}
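The paged round trip above goes through RandomAccessOutputView and RandomAccessInputView; for a single row, the plain TypeSerializer API is often sufficient. A minimal sketch, assuming org.apache.flink.core.memory.DataOutputSerializer and DataInputDeserializer are imported, reusing the row and helper from the test above:

// hedged sketch (not part of the original test): non-paged serializer round trip
BinaryRowDataSerializer rowSerializer = new BinaryRowDataSerializer(4);
DataOutputSerializer output = new DataOutputSerializer(256);
rowSerializer.serialize(row, output);
BinaryRowData copy =
        rowSerializer.deserialize(new DataInputDeserializer(output.getCopyOfBuffer()));
assertTestGenericObjectRow(copy, genericSerializer);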
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testGenericMap.
@Test
public void testGenericMap() {
    Map<Object, Object> javaMap = new HashMap<>();
    javaMap.put(6, fromString("6"));
    javaMap.put(5, fromString("5"));
    javaMap.put(666, fromString("666"));
    javaMap.put(0, null);
    GenericMapData genericMap = new GenericMapData(javaMap);
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter rowWriter = new BinaryRowWriter(row);
    MapDataSerializer serializer =
            new MapDataSerializer(
                    DataTypes.INT().getLogicalType(), DataTypes.STRING().getLogicalType());
    rowWriter.writeMap(0, genericMap, serializer);
    rowWriter.complete();
    Map<Object, Object> map =
            convertToJavaMap(
                    row.getMap(0),
                    DataTypes.INT().getLogicalType(),
                    DataTypes.STRING().getLogicalType());
    assertEquals(fromString("6"), map.get(6));
    assertEquals(fromString("5"), map.get(5));
    assertEquals(fromString("666"), map.get(666));
    assertTrue(map.containsKey(0));
    assertNull(map.get(0));
}
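convertToJavaMap is a helper defined inside BinaryRowDataTest; the same result can be obtained by walking the MapData's key and value arrays directly. A minimal sketch, assuming INT keys and STRING values as above and the standard MapData/ArrayData accessors:

// hedged sketch: manual traversal of the MapData returned by row.getMap(0)
MapData mapData = row.getMap(0);
ArrayData keys = mapData.keyArray();
ArrayData values = mapData.valueArray();
Map<Object, Object> result = new HashMap<>();
for (int i = 0; i < mapData.size(); i++) {
    result.put(keys.getInt(i), values.isNullAt(i) ? null : values.getString(i));
}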
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testSerializeVariousSize.
@Test
public void testSerializeVariousSize() throws IOException {
    // in this test, we are going to start serializing from the i-th byte (i in 0...`segSize`)
    // and the size of the row we're going to serialize is j bytes
    // (j in `rowFixLength` to the maximum length we can write)
    int segSize = 64;
    int segTotalNumber = 3;
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    Random random = new Random();
    byte[] bytes = new byte[1024];
    random.nextBytes(bytes);
    writer.writeBinary(0, bytes);
    writer.complete();
    MemorySegment[] memorySegments = new MemorySegment[segTotalNumber];
    Map<MemorySegment, Integer> msIndex = new HashMap<>();
    for (int i = 0; i < segTotalNumber; i++) {
        memorySegments[i] = MemorySegmentFactory.wrap(new byte[segSize]);
        msIndex.put(memorySegments[i], i);
    }
    BinaryRowDataSerializer serializer = new BinaryRowDataSerializer(1);
    int rowSizeInt = 4;
    // note that as there is only one field in the row, the fixed-length part is 16 bytes
    // (header + 1 field)
    int rowFixLength = 16;
    for (int i = 0; i < segSize; i++) {
        // this is the maximum row size we can serialize
        // if we are going to serialize from the i-th byte of the input view
        int maxRowSize = (segSize * segTotalNumber) - i - rowSizeInt;
        if (segSize - i < rowFixLength + rowSizeInt) {
            // oops, we can't write the whole fixed-length part in the first segment
            // because the remaining space is too small, so we have to start serializing from
            // the second segment.
            // when serializing, we need to first write the length of the row,
            // then write the fixed-length part of the row.
            maxRowSize -= segSize - i;
        }
        for (int j = rowFixLength; j < maxRowSize; j++) {
            // ok, now we're going to serialize a row of j bytes
            testSerialize(row, memorySegments, msIndex, serializer, i, j);
        }
    }
}
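The 16-byte figure in the comment comes from the row layout: one 8-byte word for the row-kind header and null bits, followed by one 8-byte slot per field. A short sketch of the arithmetic behind the loop bounds, assuming BinaryRowData.calculateFixPartSizeInBytes is the public helper that computes the fixed-length part:

// hedged sketch of the sizes the loop bounds above rely on
int fixPart = BinaryRowData.calculateFixPartSizeInBytes(1); // 8 (header + null bits) + 8 * 1 field = 16
// with segSize = 64, segTotalNumber = 3 and a 4-byte length prefix (rowSizeInt):
// starting at offset i = 0, the largest serializable row is 64 * 3 - 0 - 4 = 188 bytes;
// starting at i = 50, the 20 bytes of prefix plus fixed part no longer fit in the first
// segment, so serialization skips to the next segment and the bound drops to
// 188 - 50 - (64 - 50) = 124 bytes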
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testHeader.
@Test
public void testHeader() {
    BinaryRowData row = new BinaryRowData(2);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeInt(0, 10);
    writer.setNullAt(1);
    writer.writeRowKind(RowKind.UPDATE_BEFORE);
    writer.complete();
    BinaryRowData newRow = row.copy();
    assertEquals(row, newRow);
    assertEquals(RowKind.UPDATE_BEFORE, newRow.getRowKind());
    newRow.setRowKind(RowKind.DELETE);
    assertEquals(RowKind.DELETE, newRow.getRowKind());
}
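A BinaryRowWriter can also be reused for the next record instead of allocating a new writer. A minimal sketch of that pattern, assuming the writer's reset() call clears the previous contents before the row is written again:

// hedged sketch: reuse the same writer and backing row for another record
writer.reset();
writer.writeRowKind(RowKind.INSERT);
writer.writeInt(0, 20);
writer.setNullAt(1);
writer.complete();
assertEquals(RowKind.INSERT, row.getRowKind());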