Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
From the class BinaryRowDataTest, method testNestedRowWithBinaryRowEquals:
@Test
public void testNestedRowWithBinaryRowEquals() {
    // Outer row: an INT at position 0 and a nested two-field row at position 1.
    BinaryRowData nestedBinaryRow = new BinaryRowData(2);
    {
        BinaryRowWriter writer = new BinaryRowWriter(nestedBinaryRow);
        writer.writeInt(0, 42);
        LogicalType innerType =
                DataTypes.ROW(
                                DataTypes.FIELD("f0", DataTypes.STRING()),
                                DataTypes.FIELD("f1", DataTypes.DOUBLE()))
                        .getLogicalType();
        RowDataSerializer innerSerializer =
                (RowDataSerializer) (TypeSerializer<?>) InternalSerializers.create(innerType);
        writer.writeRow(1, GenericRowData.of(StringData.fromString("Test"), 12.345), innerSerializer);
        writer.complete();
    }
    // The same two-field row, written directly as a top-level BinaryRowData.
    BinaryRowData innerBinaryRow = new BinaryRowData(2);
    {
        BinaryRowWriter writer = new BinaryRowWriter(innerBinaryRow);
        writer.writeString(0, StringData.fromString("Test"));
        writer.writeDouble(1, 12.345);
        writer.complete();
    }
    assertEquals(innerBinaryRow, nestedBinaryRow.getRow(1, 2));
    assertEquals(nestedBinaryRow.getRow(1, 2), innerBinaryRow);
}
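Once writeRow has placed the nested row at position 1, it can be read back field by field through getRow. A minimal sketch of that read path, assuming it is appended inside the test method above (the assertion values simply mirror what was written):

// Read the nested fields back out of the outer binary row.
assertEquals(42, nestedBinaryRow.getInt(0));
RowData nested = nestedBinaryRow.getRow(1, 2); // position 1, two nested fields
assertEquals("Test", nested.getString(0).toString());
assertEquals(12.345, nested.getDouble(1), 0.0);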
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
From the class BinaryArrayDataTest, method testMap:
@Test
public void testMap() {
    // Key array: three INT elements, 4 bytes each in the fixed-length part.
    BinaryArrayData array1 = new BinaryArrayData();
    BinaryArrayWriter writer1 = new BinaryArrayWriter(array1, 3, 4);
    writer1.writeInt(0, 6);
    writer1.writeInt(1, 5);
    writer1.writeInt(2, 666);
    writer1.complete();
    // Value array: three STRING elements, 8 bytes each in the fixed-length part.
    BinaryArrayData array2 = new BinaryArrayData();
    BinaryArrayWriter writer2 = new BinaryArrayWriter(array2, 3, 8);
    writer2.writeString(0, fromString("6"));
    writer2.writeString(1, fromString("5"));
    writer2.writeString(2, fromString("666"));
    writer2.complete();
    BinaryMapData binaryMap = BinaryMapData.valueOf(array1, array2);
    // Write the map into a one-field row, then read it back and compare.
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter rowWriter = new BinaryRowWriter(row);
    rowWriter.writeMap(
            0,
            binaryMap,
            new MapDataSerializer(DataTypes.INT().getLogicalType(), DataTypes.INT().getLogicalType()));
    rowWriter.complete();
    BinaryMapData map = (BinaryMapData) row.getMap(0);
    BinaryArrayData key = map.keyArray();
    BinaryArrayData value = map.valueArray();
    assertEquals(binaryMap, map);
    assertEquals(array1, key);
    assertEquals(array2, value);
    assertEquals(key.getInt(1), 5);
    assertEquals(value.getString(1), fromString("5"));
}
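The third argument of the BinaryArrayWriter constructor is the size of each element's slot in the fixed-length part: 4 bytes for the INT keys above, 8 bytes for the STRING values (variable-length data is referenced from an 8-byte slot). A hedged sketch of deriving that size from the element type instead of hard-coding it, assuming the static helper BinaryArrayData.calculateFixLengthPartSize is available on the classpath:

// Size the fixed-length slots from the element's LogicalType rather than hard-coding 4 or 8.
LogicalType elementType = DataTypes.STRING().getLogicalType();
int elementSize = BinaryArrayData.calculateFixLengthPartSize(elementType);
BinaryArrayData array = new BinaryArrayData();
BinaryArrayWriter writer = new BinaryArrayWriter(array, 3, elementSize);
writer.writeString(0, fromString("6"));
writer.writeString(1, fromString("5"));
writer.writeString(2, fromString("666"));
writer.complete();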
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
From the class SumHashAggTestOperator, method open:
@Override
public void open() throws Exception {
    super.open();
    aggregateMap = new BytesHashMap(getOwner(), getMemoryManager(), memorySize, keyTypes, aggBufferTypes);
    currentKey = new BinaryRowData(1);
    currentKeyWriter = new BinaryRowWriter(currentKey);
    // Empty aggregate buffer: a single null field, used as the initial value for a new key.
    emptyAggBuffer = new BinaryRowData(1);
    BinaryRowWriter emptyAggBufferWriter = new BinaryRowWriter(emptyAggBuffer);
    emptyAggBufferWriter.reset();
    emptyAggBufferWriter.setNullAt(0);
    emptyAggBufferWriter.complete();
}
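The writer created in open() is meant to be reused for every input record: reset() clears the null bits and the variable-length cursor, the key field is written, and complete() finalizes the row before it is handed to the hash map. A hedged sketch of that per-record pattern; keyIsNull and keyValue are illustrative placeholders, not fields of the operator:

// Per-record reuse of the key writer: reset, write the key, complete.
currentKeyWriter.reset();
if (keyIsNull) {
    currentKeyWriter.setNullAt(0); // a null key is recorded via the null bit, not a value
} else {
    currentKeyWriter.writeInt(0, keyValue);
}
currentKeyWriter.complete();
// currentKey now holds the lookup key for aggregateMap.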
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
From the class BinaryHashTableTest, method testRepeatBuildJoin:
@Test
public void testRepeatBuildJoin() throws Exception {
    final int numKeys = 500;
    final int probeValsPerKey = 1;
    MemoryManager memManager = MemoryManagerBuilder.newBuilder().setMemorySize(40 * PAGE_SIZE).build();
    // Build side: 500 copies of the same (1, 1) row.
    MutableObjectIterator<BinaryRowData> buildInput = new MutableObjectIterator<BinaryRowData>() {

        int cnt = 0;

        @Override
        public BinaryRowData next(BinaryRowData reuse) throws IOException {
            return next();
        }

        @Override
        public BinaryRowData next() throws IOException {
            cnt++;
            if (cnt > numKeys) {
                return null;
            }
            BinaryRowData row = new BinaryRowData(2);
            BinaryRowWriter writer = new BinaryRowWriter(row);
            writer.writeInt(0, 1);
            writer.writeInt(1, 1);
            writer.complete();
            return row;
        }
    };
    MutableObjectIterator<BinaryRowData> probeInput = new UniformBinaryRowGenerator(numKeys, probeValsPerKey, true);
    final BinaryHashTable table = new BinaryHashTable(conf, new Object(), buildSideSerializer, probeSideSerializer, new MyProjection(), new MyProjection(), memManager, 40 * PAGE_SIZE, ioManager, 24, 200000, true, HashJoinType.INNER, null, false, new boolean[] { true }, true);
    int numRecordsInJoinResult = join(table, buildInput, probeInput, true);
    Assert.assertEquals("Wrong number of records in join result.", 1, numRecordsInJoinResult);
    table.close();
    table.free();
}
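The anonymous iterator above writes a fresh (1, 1) row on every call, so the build side consists of 500 identical rows. If the same pattern is needed in more than one test, a small helper such as the following sketch keeps the BinaryRowWriter boilerplate in one place; the method name and placement are illustrative, not part of Flink's test utilities:

// Sketch: an iterator that emits `count` copies of a constant two-int row.
static MutableObjectIterator<BinaryRowData> constantRows(int count, int key, int value) {
    return new MutableObjectIterator<BinaryRowData>() {

        int emitted = 0;

        @Override
        public BinaryRowData next(BinaryRowData reuse) {
            return next();
        }

        @Override
        public BinaryRowData next() {
            if (emitted++ >= count) {
                return null;
            }
            BinaryRowData row = new BinaryRowData(2);
            BinaryRowWriter writer = new BinaryRowWriter(row);
            writer.writeInt(0, key);
            writer.writeInt(1, value);
            writer.complete();
            return row;
        }
    };
}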
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
From the class DataFormatTestUtil, method get24BytesBinaryRow:
/** Gets a binary row that is 24 bytes long. */
public static BinaryRowData get24BytesBinaryRow() {
    // header (8 bytes) + 2 strings stored in the fixed-length part (8 bytes each)
    BinaryRowData row = new BinaryRowData(2);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeString(0, StringData.fromString(RandomStringUtils.randomNumeric(2)));
    writer.writeString(1, StringData.fromString(RandomStringUtils.randomNumeric(2)));
    writer.complete();
    return row;
}
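The row is exactly 24 bytes because each two-character string fits the compact short-string layout used by the binary writers: values of at most 7 bytes are stored inline in their 8-byte fixed-length slot, so no variable-length section is appended. A small sketch of checking that from a test, assuming the usual JUnit assertions are imported:

// Verify the 24-byte layout: 8-byte header + 2 fields * 8 bytes, strings stored inline.
BinaryRowData row = DataFormatTestUtil.get24BytesBinaryRow();
assertEquals(2, row.getArity());
assertEquals(24, row.getSizeInBytes());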