Use of org.elasticsearch.common.util.LongObjectPagedHashMap in the elastic/elasticsearch project:
the doReduce method of the InternalGeoHashGrid class.
@Override
public InternalGeoHashGrid doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
    LongObjectPagedHashMap<List<Bucket>> buckets = null;
    for (InternalAggregation aggregation : aggregations) {
        InternalGeoHashGrid grid = (InternalGeoHashGrid) aggregation;
        if (buckets == null) {
            buckets = new LongObjectPagedHashMap<>(grid.buckets.size(), reduceContext.bigArrays());
        }
        // Group the buckets of every shard-level aggregation by their geohash cell.
        for (Bucket bucket : grid.buckets) {
            List<Bucket> existingBuckets = buckets.get(bucket.geohashAsLong);
            if (existingBuckets == null) {
                existingBuckets = new ArrayList<>(aggregations.size());
                buckets.put(bucket.geohashAsLong, existingBuckets);
            }
            existingBuckets.add(bucket);
        }
    }
    // On the final reduce, keep only the top `requiredSize` cells; otherwise keep them all.
    final int size = Math.toIntExact(reduceContext.isFinalReduce() == false ? buckets.size() : Math.min(requiredSize, buckets.size()));
    BucketPriorityQueue ordered = new BucketPriorityQueue(size);
    for (LongObjectPagedHashMap.Cursor<List<Bucket>> cursor : buckets) {
        List<Bucket> sameCellBuckets = cursor.value;
        ordered.insertWithOverflow(sameCellBuckets.get(0).reduce(sameCellBuckets, reduceContext));
    }
    buckets.close();
    // Drain the priority queue into an array ordered from highest to lowest rank.
    Bucket[] list = new Bucket[ordered.size()];
    for (int i = ordered.size() - 1; i >= 0; i--) {
        list[i] = ordered.pop();
    }
    return new InternalGeoHashGrid(getName(), requiredSize, Arrays.asList(list), pipelineAggregators(), getMetaData());
}
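
Below is a minimal, self-contained sketch (not from the Elasticsearch source) of the LongObjectPagedHashMap API that the method above relies on: the map is constructed with an expected size and a BigArrays instance, values are grouped under long keys via get/put, entries are iterated with cursors, and close() releases the backing pages. The class name LongObjectPagedHashMapExample and the use of BigArrays.NON_RECYCLING_INSTANCE are assumptions made for illustration; in server code the BigArrays instance typically comes from the context, as in reduceContext.bigArrays() above.

import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongObjectPagedHashMap;

public class LongObjectPagedHashMapExample {
    public static void main(String[] args) {
        // Expected size of 16 entries; NON_RECYCLING_INSTANCE avoids needing a node-level BigArrays.
        LongObjectPagedHashMap<List<String>> map =
            new LongObjectPagedHashMap<>(16, BigArrays.NON_RECYCLING_INSTANCE);

        // Group values under a long key, mirroring the geohashAsLong grouping above.
        long key = 42L;
        List<String> values = map.get(key);
        if (values == null) {
            values = new ArrayList<>();
            map.put(key, values);
        }
        values.add("bucket");

        // Iterate all entries through cursors exposing the key and the stored value.
        for (LongObjectPagedHashMap.Cursor<List<String>> cursor : map) {
            System.out.println(cursor.key + " -> " + cursor.value);
        }

        // Release the pages obtained from BigArrays once the map is no longer needed.
        map.close();
    }
}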