
Example 11 with BinaryRowWriter

Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.

In class BinaryRowDataTest, method testBinary:

@Test
public void testBinary() {
    BinaryRowData row = new BinaryRowData(2);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    byte[] bytes1 = new byte[] { 1, -1, 5 };
    byte[] bytes2 = new byte[] { 1, -1, 5, 5, 1, 5, 1, 5 };
    writer.writeBinary(0, bytes1);
    writer.writeBinary(1, bytes2);
    writer.complete();
    Assert.assertArrayEquals(bytes1, row.getBinary(0));
    Assert.assertArrayEquals(bytes2, row.getBinary(1));
}
Also used: BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter), Test (org.junit.Test)
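
The two byte arrays above exercise both storage paths of BinaryRowWriter: a binary value of at most 7 bytes is packed into the field's 8-byte slot in the fixed-length part, while longer values are appended to the variable-length section. Below is a minimal sketch (not part of BinaryRowDataTest, test name is illustrative) that checks the resulting row size, assuming the usual BinaryRowData layout of 8 bytes of header/null bits plus 8 bytes per field.

@Test
public void testBinarySizeSketch() {
    BinaryRowData row = new BinaryRowData(2);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    // 3 bytes: short enough to be stored inline in the 8-byte slot of field 0.
    writer.writeBinary(0, new byte[] { 1, -1, 5 });
    // 8 bytes: too long for inline storage, appended to the variable-length part.
    writer.writeBinary(1, new byte[] { 1, -1, 5, 5, 1, 5, 1, 5 });
    writer.complete();
    // Assumed layout: 24-byte fixed-length part (8 + 2 * 8) plus 8 bytes of variable-length data.
    Assert.assertEquals(32, row.getSizeInBytes());
}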

Example 12 with BinaryRowWriter

Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.

In class BinaryRowDataTest, method testGenericObject:

@Test
public void testGenericObject() throws Exception {
    GenericTypeInfo<MyObj> info = new GenericTypeInfo<>(MyObj.class);
    TypeSerializer<MyObj> genericSerializer = info.createSerializer(new ExecutionConfig());
    RawValueDataSerializer<MyObj> binarySerializer = new RawValueDataSerializer<>(genericSerializer);
    BinaryRowData row = new BinaryRowData(4);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeInt(0, 0);
    RawValueData<MyObj> myObj1 = RawValueData.fromObject(new MyObj(0, 1));
    writer.writeRawValue(1, myObj1, binarySerializer);
    RawValueData<MyObj> myObj2 = RawValueData.fromObject(new MyObj(123, 5.0));
    writer.writeRawValue(2, myObj2, binarySerializer);
    RawValueData<MyObj> myObj3 = RawValueData.fromObject(new MyObj(1, 1));
    writer.writeRawValue(3, myObj3, binarySerializer);
    writer.complete();
    assertTestGenericObjectRow(row, genericSerializer);
    // Serialize the row to multiple variable-length memory segments (pages) and map it back from them.
    BinaryRowDataSerializer serializer = new BinaryRowDataSerializer(4);
    MemorySegment[] memorySegments = new MemorySegment[3];
    ArrayList<MemorySegment> memorySegmentList = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        memorySegments[i] = MemorySegmentFactory.wrap(new byte[64]);
        memorySegmentList.add(memorySegments[i]);
    }
    RandomAccessOutputView out = new RandomAccessOutputView(memorySegments, 64);
    serializer.serializeToPages(row, out);
    BinaryRowData mapRow = serializer.createInstance();
    mapRow = serializer.mapFromPages(mapRow, new RandomAccessInputView(memorySegmentList, 64));
    assertTestGenericObjectRow(mapRow, genericSerializer);
}
Also used: RandomAccessInputView (org.apache.flink.runtime.io.disk.RandomAccessInputView), ArrayList (java.util.ArrayList), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), RandomAccessOutputView (org.apache.flink.runtime.io.disk.RandomAccessOutputView), GenericTypeInfo (org.apache.flink.api.java.typeutils.GenericTypeInfo), MemorySegment (org.apache.flink.core.memory.MemorySegment), RawValueDataSerializer (org.apache.flink.table.runtime.typeutils.RawValueDataSerializer), BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter), MyObj (org.apache.flink.table.data.util.DataFormatTestUtil.MyObj), BinaryRowDataSerializer (org.apache.flink.table.runtime.typeutils.BinaryRowDataSerializer), Test (org.junit.Test)
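
assertTestGenericObjectRow is a private helper of BinaryRowDataTest and is not reproduced on this page. As a hedged sketch of what such a check can do, the raw-value fields can be read back by position and materialized with the same serializer that wrote them, reusing row and genericSerializer from the example above:

// Sketch only, not the helper's actual body.
Assert.assertEquals(0, row.getInt(0));
RawValueData<MyObj> rawValue1 = row.getRawValue(1);
// Materialize the lazily-serialized object; it corresponds to the MyObj(0, 1) written above.
MyObj restored1 = rawValue1.toObject(genericSerializer);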

Example 13 with BinaryRowWriter

Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.

In class BinaryRowDataTest, method testGenericMap:

@Test
public void testGenericMap() {
    Map<Object, Object> javaMap = new HashMap<>();
    javaMap.put(6, fromString("6"));
    javaMap.put(5, fromString("5"));
    javaMap.put(666, fromString("666"));
    javaMap.put(0, null);
    GenericMapData genericMap = new GenericMapData(javaMap);
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter rowWriter = new BinaryRowWriter(row);
    MapDataSerializer serializer = new MapDataSerializer(DataTypes.INT().getLogicalType(), DataTypes.STRING().getLogicalType());
    rowWriter.writeMap(0, genericMap, serializer);
    rowWriter.complete();
    Map<Object, Object> map = convertToJavaMap(row.getMap(0), DataTypes.INT().getLogicalType(), DataTypes.STRING().getLogicalType());
    assertEquals(fromString("6"), map.get(6));
    assertEquals(fromString("5"), map.get(5));
    assertEquals(fromString("666"), map.get(666));
    assertTrue(map.containsKey(0));
    assertNull(map.get(0));
}
Also used: HashMap (java.util.HashMap), MapDataSerializer (org.apache.flink.table.runtime.typeutils.MapDataSerializer), BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter), Test (org.junit.Test)
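
convertToJavaMap is a test helper; the same checks can be made on the MapData directly, which exposes its keys and values as two parallel ArrayData instances (MapData, ArrayData and StringData live in org.apache.flink.table.data). A hedged sketch, reusing row from the example above:

// Sketch only, not part of the original test: iterate the binary map directly.
MapData mapData = row.getMap(0);
ArrayData keys = mapData.keyArray();
ArrayData values = mapData.valueArray();
for (int i = 0; i < mapData.size(); i++) {
    int key = keys.getInt(i);
    // Key 0 was written with a null value above, so its entry yields value == null.
    StringData value = values.isNullAt(i) ? null : values.getString(i);
}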

Example 14 with BinaryRowWriter

Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.

In class BinaryRowDataTest, method testSerializeVariousSize:

@Test
public void testSerializeVariousSize() throws IOException {
    // in this test, we are going to start serializing from the i-th byte (i in 0...`segSize`)
    // and the size of the row we're going to serialize is j bytes
    // (j in `rowFixLength` to the maximum length we can write)
    int segSize = 64;
    int segTotalNumber = 3;
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    Random random = new Random();
    byte[] bytes = new byte[1024];
    random.nextBytes(bytes);
    writer.writeBinary(0, bytes);
    writer.complete();
    MemorySegment[] memorySegments = new MemorySegment[segTotalNumber];
    Map<MemorySegment, Integer> msIndex = new HashMap<>();
    for (int i = 0; i < segTotalNumber; i++) {
        memorySegments[i] = MemorySegmentFactory.wrap(new byte[segSize]);
        msIndex.put(memorySegments[i], i);
    }
    BinaryRowDataSerializer serializer = new BinaryRowDataSerializer(1);
    int rowSizeInt = 4;
    // note that as there is only one field in the row, the fixed-length part is 16 bytes
    // (header + 1 field)
    int rowFixLength = 16;
    for (int i = 0; i < segSize; i++) {
        // this is the maximum row size we can serialize
        // if we are going to serialize from the i-th byte of the input view
        int maxRowSize = (segSize * segTotalNumber) - i - rowSizeInt;
        if (segSize - i < rowFixLength + rowSizeInt) {
            // oops, we can't write the whole fixed-length part in the first segment
            // because the remaining space is too small, so we have to start serializing from
            // the second segment.
            // when serializing, we need to first write the length of the row,
            // then write the fixed-length part of the row.
            maxRowSize -= segSize - i;
        }
        for (int j = rowFixLength; j < maxRowSize; j++) {
            // ok, now we're going to serialize a row of j bytes
            testSerialize(row, memorySegments, msIndex, serializer, i, j);
        }
    }
}
Also used: Random (java.util.Random), HashMap (java.util.HashMap), BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter), MemorySegment (org.apache.flink.core.memory.MemorySegment), BinaryRowDataSerializer (org.apache.flink.table.runtime.typeutils.BinaryRowDataSerializer), Test (org.junit.Test)
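
The hard-coded rowFixLength = 16 and rowSizeInt = 4 follow from the row layout and the serializer: the fixed-length part is 8 bytes of header/null bits plus 8 bytes per field, and BinaryRowDataSerializer writes each row's length as a 4-byte int before the row bytes when serializing to pages. A hedged sketch of deriving the first number instead of hard-coding it, assuming the static layout helper on BinaryRowData:

// Sketch only, not part of the original test.
int fixLength = BinaryRowData.calculateFixPartSizeInBytes(1);
// 8 bytes of header/null bits + 1 field * 8 bytes = 16.
Assert.assertEquals(16, fixLength);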

Example 15 with BinaryRowWriter

Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.

In class BinaryRowDataTest, method testHeader:

@Test
public void testHeader() {
    BinaryRowData row = new BinaryRowData(2);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeInt(0, 10);
    writer.setNullAt(1);
    writer.writeRowKind(RowKind.UPDATE_BEFORE);
    writer.complete();
    BinaryRowData newRow = row.copy();
    assertEquals(row, newRow);
    assertEquals(RowKind.UPDATE_BEFORE, newRow.getRowKind());
    newRow.setRowKind(RowKind.DELETE);
    assertEquals(RowKind.DELETE, newRow.getRowKind());
}
Also used: BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter), Test (org.junit.Test)
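
The row kind is kept in the header byte of the fixed-length part, so copy() carries it over and it can be changed in place without rewriting the row. A short hedged sketch continuing the example above with the original row:

// Sketch only, not part of the original test: the copy's new kind does not affect the original.
assertEquals(RowKind.UPDATE_BEFORE, row.getRowKind());
row.setRowKind(RowKind.INSERT);
assertEquals(RowKind.INSERT, row.getRowKind());
assertEquals(10, row.getInt(0));
assertTrue(row.isNullAt(1));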

Aggregations

BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter): 54 usages
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 53 usages
Test (org.junit.Test): 32 usages
Random (java.util.Random): 6 usages
BinaryArrayData (org.apache.flink.table.data.binary.BinaryArrayData): 6 usages
BinaryRowDataSerializer (org.apache.flink.table.runtime.typeutils.BinaryRowDataSerializer): 6 usages
RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer): 6 usages
MemorySegment (org.apache.flink.core.memory.MemorySegment): 5 usages
BinaryArrayWriter (org.apache.flink.table.data.writer.BinaryArrayWriter): 5 usages
ArrayDataSerializer (org.apache.flink.table.runtime.typeutils.ArrayDataSerializer): 5 usages
ArrayList (java.util.ArrayList): 4 usages
GenericRowData (org.apache.flink.table.data.GenericRowData): 4 usages
MapDataSerializer (org.apache.flink.table.runtime.typeutils.MapDataSerializer): 4 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 4 usages
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 3 usages
StringData.fromString (org.apache.flink.table.data.StringData.fromString): 3 usages
TimestampData (org.apache.flink.table.data.TimestampData): 3 usages
RawValueDataSerializer (org.apache.flink.table.runtime.typeutils.RawValueDataSerializer): 3 usages
BigDecimal (java.math.BigDecimal): 2 usages
HashMap (java.util.HashMap): 2 usages