Example usage of org.apache.flink.table.data.MapData in the Apache Flink project,
taken from the createMapConverter method of the RowDataToAvroConverters class.
/**
 * Creates a converter that turns a Flink {@link MapData} into a {@link java.util.Map}
 * matching an Avro map schema.
 *
 * <p>Avro map keys are always strings, so keys are read via {@code getString(i)} and
 * stringified; values are converted element-by-element with a converter derived from the
 * map's value type.
 *
 * @param type the logical MAP (or MULTISET) type to convert from
 * @return a serializable converter producing a {@code Map<Object, Object>}
 */
private static RowDataToAvroConverter createMapConverter(LogicalType type) {
LogicalType valueType = extractValueTypeToAvroMap(type);
final ArrayData.ElementGetter valueGetter = ArrayData.createElementGetter(valueType);
final RowDataToAvroConverter valueConverter = createConverter(valueType);
return new RowDataToAvroConverter() {
private static final long serialVersionUID = 1L;
@Override
public Object convert(Schema schema, Object object) {
final Schema valueSchema = schema.getValueType();
final MapData mapData = (MapData) object;
final ArrayData keyArray = mapData.keyArray();
final ArrayData valueArray = mapData.valueArray();
// Hoist the size: avoid re-invoking mapData.size() on every loop iteration.
final int numEntries = mapData.size();
// Presize for the 0.75 default load factor; new HashMap<>(numEntries) would
// still trigger a rehash once numEntries * 0.75 entries had been inserted.
final Map<Object, Object> map = new HashMap<>((int) (numEntries / 0.75f) + 1);
for (int i = 0; i < numEntries; ++i) {
// Avro map keys are strings by specification.
final String key = keyArray.getString(i).toString();
final Object value = valueConverter.convert(valueSchema, valueGetter.getElementOrNull(valueArray, i));
map.put(key, value);
}
return map;
}
};
}
Example usage of org.apache.flink.table.data.MapData in the Apache Flink project,
taken from the setColumn method of the RowDataVectorizer class.
/**
 * Writes the map stored at {@code columnId} of {@code row} into the ORC
 * {@code mapColumnVector} at position {@code rowId}, appending the map's keys and
 * values to the child vectors via recursive {@code setColumn} calls.
 */
private static void setColumn(int rowId, MapColumnVector mapColumnVector, LogicalType type, RowData row, int columnId) {
final MapData map = row.getMap(columnId);
final MapType mapType = (MapType) type;
final ArrayData keys = map.keyArray();
final ArrayData values = map.valueArray();
final int entryCount = map.size();

// Record this row's slice of the child vectors, then grow the child count.
mapColumnVector.lengths[rowId] = entryCount;
mapColumnVector.offsets[rowId] = mapColumnVector.childCount;
mapColumnVector.childCount += entryCount;

// Grow the child vectors; existing data only needs preserving when this row
// does not start at offset 0 (i.e. earlier rows already wrote entries).
final boolean preserveData = mapColumnVector.offsets[rowId] != 0;
mapColumnVector.keys.ensureSize(mapColumnVector.childCount, preserveData);
mapColumnVector.values.ensureSize(mapColumnVector.childCount, preserveData);

// View the key/value arrays as rows so each entry can be written recursively.
final RowData keyRows = convert(keys, mapType.getKeyType());
final RowData valueRows = convert(values, mapType.getValueType());
final int startOffset = (int) mapColumnVector.offsets[rowId];
for (int i = 0; i < entryCount; i++) {
setColumn(startOffset + i, mapColumnVector.keys, mapType.getKeyType(), keyRows, i);
setColumn(startOffset + i, mapColumnVector.values, mapType.getValueType(), valueRows, i);
}
}
Example usage of org.apache.flink.table.data.MapData in the Apache Flink project,
taken from the getTestData method of the MapDataSerializerTest class.
/**
 * Supplies two fixtures for the serializer tests: a {@code GenericMapData} backed by a
 * plain {@code HashMap}, and a {@code BinaryMapData} whose second value is null.
 */
@Override
protected MapData[] getTestData() {
// Generic map fixture: { 1L -> -100.1f }.
Map<Object, Object> genericMap = new HashMap<>();
genericMap.put(1L, -100.1F);

// Binary map fixture: keys [10L, 20L] mapped to values [10.2f, null].
BinaryArrayData keys = BinaryArrayData.fromPrimitiveArray(new long[] { 10L, 20L });
BinaryArrayData values = new BinaryArrayData();
// 2 elements, 4 bytes each (float width).
BinaryArrayWriter valueWriter = new BinaryArrayWriter(values, 2, 4);
BinaryArrayWriter.NullSetter nullSetter = BinaryArrayWriter.createNullSetter(FLOAT);
valueWriter.writeFloat(0, 10.2F);
nullSetter.setNull(valueWriter, 1);
valueWriter.complete();

return new MapData[] { new GenericMapData(genericMap), BinaryMapData.valueOf(keys, values) };
}
Aggregations