Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer in the Apache Hive project: the estimateRowSize method of the GroupByOperator class.
/**
 * Computes the fixed-size portion of each hash-table row (keys plus
 * aggregation buffers) and records which evaluators can estimate their
 * own size.
 *
 * @throws HiveException if an aggregation buffer cannot be created
 */
private void estimateRowSize() throws HiveException {
  // Entries of unknown size (String, Struct, ...) are assumed to take
  // 256 bytes; a reference costs 64 bytes of overhead.
  fixedRowSize = javaHashEntryOverHead;

  // Account for every grouping key; getSize also tracks variable-length keys.
  ArrayList<ExprNodeDesc> keys = conf.getKeys();
  for (int keyIndex = 0; keyIndex < keys.size(); keyIndex++) {
    fixedRowSize += getSize(keyIndex, keys.get(keyIndex).getTypeInfo());
  }

  // Walk the aggregation evaluators, summing the fixed-length fields of
  // their buffers and tracking the variable-length ones. Evaluators whose
  // buffers implement their own size estimation are flagged instead.
  estimableAggregationEvaluators = new boolean[aggregationEvaluators.length];
  for (int evalIndex = 0; evalIndex < aggregationEvaluators.length; evalIndex++) {
    fixedRowSize += javaObjectOverHead;
    AggregationBuffer buffer = aggregationEvaluators[evalIndex].getNewAggregationBuffer();
    if (GenericUDAFEvaluator.isEstimable(buffer)) {
      // Buffer reports its own size at runtime; no field walk needed.
      estimableAggregationEvaluators[evalIndex] = true;
    } else {
      // Otherwise, reflect over the buffer's instance fields and sum them.
      Field[] fields = ObjectInspectorUtils.getDeclaredNonStaticFields(buffer.getClass());
      for (Field field : fields) {
        fixedRowSize += getSize(evalIndex, field.getType(), field);
      }
    }
  }
}
Related topic: Aggregations.