Example usage of org.apache.flink.table.runtime.operators.sort.IntNormalizedKeyComputer in the Apache Flink project.
Taken from the class SumHashAggTestOperator, method processElement.
/**
 * Processes one input row: projects the (nullable) int group key from field 0, looks up — or
 * lazily creates — the per-key aggregate buffer in {@code aggregateMap}, and folds the long in
 * field 1 into the running sum stored in slot 0 of that buffer.
 *
 * <p>When the hash map runs out of memory on append, the in-memory records are sorted and
 * spilled via {@code BufferedKVExternalSorter}, the map is reset, and the append is retried
 * once. A second failure is fatal.
 *
 * @param element the incoming stream record; field 0 is the group key, field 1 the value to sum
 * @throws Exception propagated from the spill path (sorter construction / sortAndSpill)
 */
@Override
public void processElement(StreamRecord<RowData> element) throws Exception {
    RowData in1 = element.getValue();

    // Project the group key (input field 0, may be null) into currentKey via its writer.
    currentKeyWriter.reset();
    if (in1.isNullAt(0)) {
        currentKeyWriter.setNullAt(0);
    } else {
        currentKeyWriter.writeInt(0, in1.getInt(0));
    }
    currentKeyWriter.complete();

    // Look up the aggregate buffer for the current group key.
    BytesMap.LookupInfo<BinaryRowData, BinaryRowData> lookupInfo = aggregateMap.lookup(currentKey);
    BinaryRowData currentAggBuffer = lookupInfo.getValue();
    if (!lookupInfo.isFound()) {
        try {
            // First occurrence of this key: append an empty aggregate buffer for it.
            currentAggBuffer = aggregateMap.append(lookupInfo, emptyAggBuffer);
        } catch (EOFException ignored) {
            // Expected condition, not an error: the map is full. Spill it to the external
            // sorter, reset, and retry — so the exception itself carries no useful state.
            if (sorter == null) {
                sorter = new BufferedKVExternalSorter(
                        getIOManager(),
                        new BinaryRowDataSerializer(keyTypes.length),
                        new BinaryRowDataSerializer(aggBufferTypes.length),
                        new IntNormalizedKeyComputer(),
                        new IntRecordComparator(),
                        getMemoryManager().getPageSize(),
                        getConf());
            }
            // Sort in-memory records and spill them, reusing the map's bucket area as the
            // spill-time memory pool.
            sorter.sortAndSpill(
                    aggregateMap.getRecordAreaMemorySegments(),
                    aggregateMap.getNumElements(),
                    new BytesHashMapSpillMemorySegmentPool(
                            aggregateMap.getBucketAreaMemorySegments()));

            // Reset the (now empty) map and retry the append for the current key.
            aggregateMap.reset();
            lookupInfo = aggregateMap.lookup(currentKey);
            try {
                currentAggBuffer = aggregateMap.append(lookupInfo, emptyAggBuffer);
            } catch (EOFException e) {
                // Even a freshly reset map cannot hold a single entry — fatal. Chain the
                // original exception so diagnostics are not lost (OutOfMemoryError has no
                // (String, Throwable) constructor, hence initCause).
                OutOfMemoryError oom = new OutOfMemoryError("BytesHashMap Out of Memory.");
                oom.initCause(e);
                throw oom;
            }
        }
    }

    // Accumulate sum(field 1), skipping null inputs; a null slot 0 in the buffer means
    // "no value seen yet" for this key.
    if (!in1.isNullAt(1)) {
        long sumInput = in1.getLong(1);
        if (currentAggBuffer.isNullAt(0)) {
            currentAggBuffer.setLong(0, sumInput);
        } else {
            currentAggBuffer.setLong(0, sumInput + currentAggBuffer.getLong(0));
        }
    }
}
Aggregations