Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testZeroOutPaddingString: verifies that the writer zeroes out padding bytes left over from an earlier, longer write, so the same logical content always produces the same hash code.
@Test
public void testZeroOutPaddingString() {
    Random random = new Random();
    byte[] bytes = new byte[1024];
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.reset();
    random.nextBytes(bytes);
    writer.writeBinary(0, bytes);
    writer.reset();
    writer.writeString(0, fromString("wahahah"));
    writer.complete();
    int hash1 = row.hashCode();
    writer.reset();
    random.nextBytes(bytes);
    writer.writeBinary(0, bytes);
    writer.reset();
    writer.writeString(0, fromString("wahahah"));
    writer.complete();
    int hash2 = row.hashCode();
    assertEquals(hash1, hash2);
}
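For reference, a minimal sketch of the same reset/write/complete lifecycle outside a test, assuming Flink's table runtime is on the classpath; it uses StringData.fromString explicitly instead of the static import in the snippet above, and the class name BinaryRowWriterLifecycleSketch is made up for illustration.

import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;

public class BinaryRowWriterLifecycleSketch {
    public static void main(String[] args) {
        // One-field row; the writer can be reused across reset()/complete() cycles.
        BinaryRowData row = new BinaryRowData(1);
        BinaryRowWriter writer = new BinaryRowWriter(row);

        writer.reset();                                   // start a new record
        writer.writeString(0, StringData.fromString("wahahah"));
        writer.complete();                                // finalize the row's size in bytes

        // Values are read back through the RowData accessors.
        System.out.println(row.getString(0));             // prints: wahahah
    }
}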
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testSingleSegmentBinaryRowHashCode: checks that a row and its copy hash identically (stability) and that hash codes are well distributed over a large number of distinct rows.
@Test
public void testSingleSegmentBinaryRowHashCode() {
    final Random rnd = new Random(System.currentTimeMillis());
    // test hash stabilization
    BinaryRowData row = new BinaryRowData(13);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    for (int i = 0; i < 99; i++) {
        writer.reset();
        writer.writeString(0, fromString("" + rnd.nextInt()));
        writer.writeString(3, fromString("01234567"));
        writer.writeString(5, fromString("012345678"));
        writer.writeString(9, fromString("啦啦啦啦啦我是快乐的粉刷匠"));
        writer.writeBoolean(1, true);
        writer.writeByte(2, (byte) 99);
        writer.writeDouble(6, 87.1d);
        writer.writeFloat(7, 26.1f);
        writer.writeInt(8, 88);
        writer.writeLong(10, 284);
        writer.writeShort(11, (short) 292);
        writer.setNullAt(12);
        writer.complete();
        BinaryRowData copy = row.copy();
        assertEquals(row.hashCode(), copy.hashCode());
    }
    // test hash distribution
    int count = 999999;
    Set<Integer> hashCodes = new HashSet<>(count);
    for (int i = 0; i < count; i++) {
        row.setInt(8, i);
        hashCodes.add(row.hashCode());
    }
    assertEquals(count, hashCodes.size());
    hashCodes.clear();
    row = new BinaryRowData(1);
    writer = new BinaryRowWriter(row);
    for (int i = 0; i < count; i++) {
        writer.reset();
        writer.writeString(0, fromString("啦啦啦啦啦我是快乐的粉刷匠" + i));
        writer.complete();
        hashCodes.add(row.hashCode());
    }
    Assert.assertTrue(hashCodes.size() > count * 0.997);
}
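The stability and distribution checks above are what allow BinaryRowData to serve as a key in hash-based collections. A minimal sketch under that assumption; the two-field layout, the sample values, and the class name BinaryRowHashKeySketch are invented for illustration, and copy() detaches each stored row from the writer's reusable backing row.

import java.util.HashSet;
import java.util.Set;

import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;

public class BinaryRowHashKeySketch {

    // Writes (key, value) into the reusable row and returns a detached copy that is safe to store.
    private static BinaryRowData buildRow(BinaryRowWriter writer, BinaryRowData reuse, String key, int value) {
        writer.reset();
        writer.writeString(0, StringData.fromString(key));
        writer.writeInt(1, value);
        writer.complete();
        return reuse.copy();
    }

    public static void main(String[] args) {
        BinaryRowData reuse = new BinaryRowData(2);
        BinaryRowWriter writer = new BinaryRowWriter(reuse);

        Set<BinaryRowData> distinct = new HashSet<>();
        distinct.add(buildRow(writer, reuse, "a", 1));
        distinct.add(buildRow(writer, reuse, "b", 2));
        distinct.add(buildRow(writer, reuse, "a", 1)); // same content, same hashCode() and equals()

        System.out.println(distinct.size()); // prints 2
    }
}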
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testWriter: writes a mix of fixed-length and variable-length fields, verifies the contents, and then re-points the row at the same bytes split across two memory segments.
@Test
public void testWriter() {
    int arity = 13;
    BinaryRowData row = new BinaryRowData(arity);
    BinaryRowWriter writer = new BinaryRowWriter(row, 20);
    writer.writeString(0, fromString("1"));
    writer.writeString(3, fromString("1234567"));
    writer.writeString(5, fromString("12345678"));
    writer.writeString(9, fromString("啦啦啦啦啦我是快乐的粉刷匠"));
    writer.writeBoolean(1, true);
    writer.writeByte(2, (byte) 99);
    writer.writeDouble(6, 87.1d);
    writer.writeFloat(7, 26.1f);
    writer.writeInt(8, 88);
    writer.writeLong(10, 284);
    writer.writeShort(11, (short) 292);
    writer.setNullAt(12);
    writer.complete();
    assertTestWriterRow(row);
    assertTestWriterRow(row.copy());
    // test copy from var segments.
    int subSize = row.getFixedLengthPartSize() + 10;
    MemorySegment subMs1 = MemorySegmentFactory.wrap(new byte[subSize]);
    MemorySegment subMs2 = MemorySegmentFactory.wrap(new byte[subSize]);
    row.getSegments()[0].copyTo(0, subMs1, 0, subSize);
    row.getSegments()[0].copyTo(subSize, subMs2, 0, row.getSizeInBytes() - subSize);
    BinaryRowData toCopy = new BinaryRowData(arity);
    toCopy.pointTo(new MemorySegment[] { subMs1, subMs2 }, 0, row.getSizeInBytes());
    assertEquals(row, toCopy);
    assertTestWriterRow(toCopy);
    assertTestWriterRow(toCopy.copy(new BinaryRowData(arity)));
}
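The segment-splitting part of testWriter works because a binary row is laid out as a fixed-length part (null/header word plus one 8-byte slot per field) followed by a variable-length region, so the bytes can be cut anywhere and re-pointed. A small sketch of that layout, with the expected sizes stated as assumptions in the comments; the class name BinaryRowLayoutSketch is made up.

import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;

public class BinaryRowLayoutSketch {
    public static void main(String[] args) {
        BinaryRowData row = new BinaryRowData(2);
        BinaryRowWriter writer = new BinaryRowWriter(row);

        writer.reset();
        writer.writeInt(0, 42);
        // Strings longer than 7 bytes cannot be inlined into the 8-byte field slot,
        // so they are appended to the variable-length region after the fixed part.
        writer.writeString(1, StringData.fromString("a string longer than seven bytes"));
        writer.complete();

        // Expected to be 24 for arity 2 (8-byte header word + 2 * 8-byte slots),
        // while getSizeInBytes() also counts the appended string bytes.
        System.out.println(row.getFixedLengthPartSize());
        System.out.println(row.getSizeInBytes());
    }
}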
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testDateAndTimeAsGenericObject: stores java.sql and java.time date/time values as raw values with explicit serializers and reads them back.
@Test
public void testDateAndTimeAsGenericObject() {
    BinaryRowData row = new BinaryRowData(7);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    LocalDate localDate = LocalDate.of(2019, 7, 16);
    LocalTime localTime = LocalTime.of(17, 31);
    LocalDateTime localDateTime = LocalDateTime.of(localDate, localTime);
    writer.writeInt(0, 0);
    writer.writeRawValue(1, RawValueData.fromObject(new Date(123)), new RawValueDataSerializer<>(SqlDateSerializer.INSTANCE));
    writer.writeRawValue(2, RawValueData.fromObject(new Time(456)), new RawValueDataSerializer<>(SqlTimeSerializer.INSTANCE));
    writer.writeRawValue(3, RawValueData.fromObject(new Timestamp(789)), new RawValueDataSerializer<>(SqlTimestampSerializer.INSTANCE));
    writer.writeRawValue(4, RawValueData.fromObject(localDate), new RawValueDataSerializer<>(LocalDateSerializer.INSTANCE));
    writer.writeRawValue(5, RawValueData.fromObject(localTime), new RawValueDataSerializer<>(LocalTimeSerializer.INSTANCE));
    writer.writeRawValue(6, RawValueData.fromObject(localDateTime), new RawValueDataSerializer<>(LocalDateTimeSerializer.INSTANCE));
    writer.complete();
    assertEquals(new Date(123), row.<Date>getRawValue(1).toObject(SqlDateSerializer.INSTANCE));
    assertEquals(new Time(456), row.<Time>getRawValue(2).toObject(SqlTimeSerializer.INSTANCE));
    assertEquals(new Timestamp(789), row.<Timestamp>getRawValue(3).toObject(SqlTimestampSerializer.INSTANCE));
    assertEquals(localDate, row.<LocalDate>getRawValue(4).toObject(LocalDateSerializer.INSTANCE));
    assertEquals(localTime, row.<LocalTime>getRawValue(5).toObject(LocalTimeSerializer.INSTANCE));
    assertEquals(localDateTime, row.<LocalDateTime>getRawValue(6).toObject(LocalDateTimeSerializer.INSTANCE));
}
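The snippet above stores each object as a raw value with an explicit serializer. For the SQL TIMESTAMP type, BinaryRowWriter also offers a native writeTimestamp path; the sketch below assumes millisecond precision (3), and the class name TimestampFieldSketch is made up.

import java.time.LocalDateTime;

import org.apache.flink.table.data.TimestampData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;

public class TimestampFieldSketch {
    public static void main(String[] args) {
        BinaryRowData row = new BinaryRowData(1);
        BinaryRowWriter writer = new BinaryRowWriter(row);

        writer.reset();
        // With precision <= 3 the millisecond value fits the fixed 8-byte slot; no serializer is needed.
        writer.writeTimestamp(0, TimestampData.fromLocalDateTime(LocalDateTime.of(2019, 7, 16, 17, 31)), 3);
        writer.complete();

        System.out.println(row.getTimestamp(0, 3).toLocalDateTime()); // prints: 2019-07-16T17:31
    }
}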
Use of org.apache.flink.table.data.writer.BinaryRowWriter in project flink by apache.
Class BinaryRowDataTest, method testZeroOutPaddingGeneric: repeats the zero-out-padding check, this time with a generic object written through RawValueDataSerializer instead of a string.
@Test
public void testZeroOutPaddingGeneric() {
    GenericTypeInfo<MyObj> info = new GenericTypeInfo<>(MyObj.class);
    TypeSerializer<MyObj> genericSerializer = info.createSerializer(new ExecutionConfig());
    Random random = new Random();
    byte[] bytes = new byte[1024];
    BinaryRowData row = new BinaryRowData(1);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    // randomize the bytes written in the first pass
    writer.reset();
    random.nextBytes(bytes);
    writer.writeBinary(0, bytes);
    writer.reset();
    writer.writeRawValue(0, RawValueData.fromObject(new MyObj(0, 1)), new RawValueDataSerializer<>(genericSerializer));
    writer.complete();
    int hash1 = row.hashCode();
    writer.reset();
    random.nextBytes(bytes);
    writer.writeBinary(0, bytes);
    writer.reset();
    writer.writeRawValue(0, RawValueData.fromObject(new MyObj(0, 1)), new RawValueDataSerializer<>(genericSerializer));
    writer.complete();
    int hash2 = row.hashCode();
    assertEquals(hash1, hash2);
}