Use of org.apache.flink.table.data.binary.BinaryArrayData in project flink by apache.
The class MapDataSerializer, method serialize.
@Override
public void serialize(MapData map, DataOutputView target) throws IOException {
    BinaryMapData binaryMap = toBinaryMap(map);
    final int size = binaryMap.size();
    target.writeInt(size);
    BinaryArrayData keyArray = binaryMap.keyArray();
    BinaryArrayData valueArray = binaryMap.valueArray();
    for (int i = 0; i < size; i++) {
        if (keyArray.isNullAt(i)) {
            throw new IllegalArgumentException("The key of BinaryMapData must not be null.");
        }
        Object key = keyGetter.getElementOrNull(keyArray, i);
        keyTypeSerializer.serialize(key, target);
        if (valueArray.isNullAt(i)) {
            target.writeBoolean(true);
        } else {
            target.writeBoolean(false);
            Object value = valueGetter.getElementOrNull(valueArray, i);
            valueTypeSerializer.serialize(value, target);
        }
    }
}
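The loop above writes the entry count, then each non-null key followed by a value-null flag and, when present, the value. Below is a minimal sketch of the matching read side, assuming only the format shown here; it is not Flink's actual MapDataSerializer#deserialize (which rebuilds a BinaryMapData) and uses GenericMapData from org.apache.flink.table.data plus DataInputView from org.apache.flink.core.memory purely for illustration.

private static MapData readMapSketch(
        DataInputView source,
        TypeSerializer<Object> keySerializer,
        TypeSerializer<Object> valueSerializer) throws IOException {
    // Mirrors serialize(): entry count, then per entry a key, a value-null flag, and the value.
    final int size = source.readInt();
    java.util.HashMap<Object, Object> entries = new java.util.HashMap<>(size);
    for (int i = 0; i < size; i++) {
        Object key = keySerializer.deserialize(source);      // keys are written unconditionally
        boolean valueIsNull = source.readBoolean();           // flag written by serialize()
        entries.put(key, valueIsNull ? null : valueSerializer.deserialize(source));
    }
    return new GenericMapData(entries);                       // generic map, not a BinaryMapData
}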
Use of org.apache.flink.table.data.binary.BinaryArrayData in project flink by apache.
The class RowDataTest, method before.
@Before
public void before() {
    str = StringData.fromString("haha");
    generic = RawValueData.fromObject("haha");
    genericSerializer = new RawValueDataSerializer<>(StringSerializer.INSTANCE);
    decimal1 = DecimalData.fromUnscaledLong(10, 5, 0);
    decimal2 = DecimalData.fromBigDecimal(new BigDecimal(11), 20, 0);
    array = new BinaryArrayData();
    {
        BinaryArrayWriter arrayWriter = new BinaryArrayWriter(array, 2, 4);
        arrayWriter.writeInt(0, 15);
        arrayWriter.writeInt(1, 16);
        arrayWriter.complete();
    }
    map = BinaryMapData.valueOf(array, array);
    underRow = new BinaryRowData(2);
    {
        BinaryRowWriter writer = new BinaryRowWriter(underRow);
        writer.writeInt(0, 15);
        writer.writeInt(1, 16);
        writer.complete();
    }
    bytes = new byte[] {1, 5, 6};
    timestamp1 = TimestampData.fromEpochMillis(123L);
    timestamp2 = TimestampData.fromLocalDateTime(LocalDateTime.of(1969, 1, 1, 0, 0, 0, 123456789));
}
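As orientation, a minimal sketch of what these fixtures hold once written, using only the accessors on BinaryArrayData, BinaryMapData and BinaryRowData that the snippets on this page already rely on; the local variable names are illustrative.

// Reading back the fixtures built in before(); the values follow from the writer calls above.
int a0 = array.getInt(0);                      // 15
int a1 = array.getInt(1);                      // 16
int firstKey = map.keyArray().getInt(0);       // 15, keys and values share the same array
int firstValue = map.valueArray().getInt(0);   // 15
int r0 = underRow.getInt(0);                   // 15
int r1 = underRow.getInt(1);                   // 16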
Use of org.apache.flink.table.data.binary.BinaryArrayData in project flink by apache.
The class SortCodeGeneratorTest, method value1.
private Object value1(LogicalType type, Random rnd) {
    switch (type.getTypeRoot()) {
        case BOOLEAN:
            return false;
        case TINYINT:
            return Byte.MIN_VALUE;
        case SMALLINT:
            return Short.MIN_VALUE;
        case INTEGER:
            return Integer.MIN_VALUE;
        case BIGINT:
            return Long.MIN_VALUE;
        case FLOAT:
            return Float.MIN_VALUE;
        case DOUBLE:
            return Double.MIN_VALUE;
        case VARCHAR:
            return StringData.fromString("");
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            return DecimalData.fromBigDecimal(
                    new BigDecimal(Integer.MIN_VALUE), decimalType.getPrecision(), decimalType.getScale());
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            return TimestampData.fromEpochMillis(Long.MIN_VALUE);
        case ARRAY:
            byte[] bytes = new byte[rnd.nextInt(7) + 1];
            rnd.nextBytes(bytes);
            BinaryArrayData array = BinaryArrayData.fromPrimitiveArray(bytes);
            for (int i = 0; i < bytes.length; i++) {
                array.setNullByte(i);
            }
            return array;
        case VARBINARY:
            byte[] bytes2 = new byte[rnd.nextInt(7) + 1];
            rnd.nextBytes(bytes2);
            return bytes2;
        case ROW:
            return GenericRowData.of(new Object[] {null});
        case RAW:
            return RawValueData.fromObject(rnd.nextInt());
        default:
            throw new RuntimeException("Not support!");
    }
}
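A short usage sketch, assuming value1 is called from within the same test class; IntType, ArrayType and TinyIntType are Flink logical types picked here for illustration.

Random rnd = new Random();
Object smallestInt = value1(new IntType(), rnd);                       // Integer.MIN_VALUE
Object smallestArray = value1(new ArrayType(new TinyIntType()), rnd);  // all-null BinaryArrayData
BinaryArrayData arr = (BinaryArrayData) smallestArray;
boolean firstElementIsNull = arr.isNullAt(0);                          // true: every slot was nulled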
Use of org.apache.flink.table.data.binary.BinaryArrayData in project flink by apache.
The class BinaryArrayDataTest, method testTimestampData.
@Test
public void testTimestampData() {
    BinaryArrayData array = new BinaryArrayData();
    BinaryArrayWriter writer = new BinaryArrayWriter(array, 2, 8);

    // 1. compact
    {
        final int precision = 3;
        writer.reset();
        writer.writeTimestamp(0, TimestampData.fromEpochMillis(123L), precision);
        writer.setNullAt(1);
        writer.complete();

        assertEquals("1970-01-01T00:00:00.123", array.getTimestamp(0, 3).toString());
        assertTrue(array.isNullAt(1));

        array.setTimestamp(0, TimestampData.fromEpochMillis(-123L), precision);
        assertEquals("1969-12-31T23:59:59.877", array.getTimestamp(0, 3).toString());
    }

    // 2. not compact
    {
        final int precision = 9;
        TimestampData timestamp1 =
                TimestampData.fromLocalDateTime(LocalDateTime.of(1970, 1, 1, 0, 0, 0, 123456789));
        TimestampData timestamp2 =
                TimestampData.fromTimestamp(Timestamp.valueOf("1969-01-01 00:00:00.123456789"));
        writer.reset();
        writer.writeTimestamp(0, timestamp1, precision);
        writer.writeTimestamp(1, null, precision);
        writer.complete();

        assertEquals("1970-01-01T00:00:00.123456789", array.getTimestamp(0, precision).toString());
        assertTrue(array.isNullAt(1));

        array.setTimestamp(0, timestamp2, precision);
        assertEquals("1969-01-01T00:00:00.123456789", array.getTimestamp(0, precision).toString());
    }
}
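The split into a "compact" and a "not compact" block mirrors how TimestampData itself is modelled. A minimal sketch of that distinction, assuming the public TimestampData accessors isCompact, getMillisecond and getNanoOfMillisecond.

// Up to precision 3 a timestamp is just epoch milliseconds; beyond that a
// nanosecond-of-millisecond remainder has to be carried as well.
TimestampData compact = TimestampData.fromEpochMillis(123L);
boolean millisOnly = TimestampData.isCompact(3);                  // true
long millis = compact.getMillisecond();                           // 123

TimestampData precise = TimestampData.fromLocalDateTime(
        LocalDateTime.of(1970, 1, 1, 0, 0, 0, 123456789));
boolean needsNanos = !TimestampData.isCompact(9);                 // true
int nanoOfMilli = precise.getNanoOfMillisecond();                 // 456789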
Use of org.apache.flink.table.data.binary.BinaryArrayData in project flink by apache.
The class BinaryArrayDataTest, method testToArray.
@Test
public void testToArray() {
    BinaryArrayData array = new BinaryArrayData();
    BinaryArrayWriter writer = new BinaryArrayWriter(array, 3, 2);
    writer.writeShort(0, (short) 5);
    writer.writeShort(1, (short) 10);
    writer.writeShort(2, (short) 15);
    writer.complete();

    short[] shorts = array.toShortArray();
    assertEquals(5, shorts[0]);
    assertEquals(10, shorts[1]);
    assertEquals(15, shorts[2]);

    // splitBytes is a test helper that spreads the serialized bytes across several
    // MemorySegments; pointTo then re-reads the same data through that multi-segment view.
    MemorySegment[] segments = splitBytes(writer.getSegments().getArray(), 3);
    array.pointTo(segments, 3, array.getSizeInBytes());

    assertEquals(5, array.getShort(0));
    assertEquals(10, array.getShort(1));
    assertEquals(15, array.getShort(2));

    short[] shorts2 = array.toShortArray();
    assertEquals(5, shorts2[0]);
    assertEquals(10, shorts2[1]);
    assertEquals(15, shorts2[2]);
}
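For primitive element types the element-by-element writer is optional. A minimal sketch, assuming a short[] overload of fromPrimitiveArray analogous to the byte[] overload used in the SortCodeGeneratorTest example above.

short[] input = {5, 10, 15};
BinaryArrayData fromPrimitive = BinaryArrayData.fromPrimitiveArray(input); // assumed overload
short last = fromPrimitive.getShort(2);        // 15
short[] copy = fromPrimitive.toShortArray();   // {5, 10, 15}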