Example usage of com.linkedin.pinot.core.plan.AggregationFunctionInitializer in the Pinot project by LinkedIn.
From the class AggregationFunctionUtils, method getAggregationFunctionContexts:
/**
 * Builds one {@link AggregationFunctionContext} per {@link AggregationInfo} and, when segment
 * metadata is supplied, visits each underlying aggregation function with an
 * {@link AggregationFunctionInitializer} so it can bind to segment-level metadata.
 *
 * @param aggregationInfos Aggregation infos extracted from the broker request
 * @param segmentMetadata Segment metadata used to initialize the functions, or {@code null} to skip initialization
 * @return Array of aggregation function contexts, one per aggregation info, in request order
 */
@Nonnull
public static AggregationFunctionContext[] getAggregationFunctionContexts(@Nonnull List<AggregationInfo> aggregationInfos, @Nullable SegmentMetadata segmentMetadata) {
  int numFunctions = aggregationInfos.size();
  AggregationFunctionContext[] functionContexts = new AggregationFunctionContext[numFunctions];
  int index = 0;
  for (AggregationInfo aggregationInfo : aggregationInfos) {
    functionContexts[index++] = AggregationFunctionContext.instantiate(aggregationInfo);
  }
  if (segmentMetadata != null) {
    AggregationFunctionInitializer initializer = new AggregationFunctionInitializer(segmentMetadata);
    for (int i = 0; i < numFunctions; i++) {
      functionContexts[i].getAggregationFunction().accept(initializer);
    }
  }
  return functionContexts;
}
Example usage of com.linkedin.pinot.core.plan.AggregationFunctionInitializer in the Pinot project by LinkedIn.
From the class TransformGroupByTest, method executeGroupByQuery:
/**
 * Helper method that compiles the given group-by query and executes it against the index
 * segment, returning the raw group-by result.
 *
 * @param indexSegment Segment to run the query on
 * @param query PQL group-by query to execute
 * @return Group by result produced by the aggregation group-by operator
 */
private AggregationGroupByResult executeGroupByQuery(IndexSegment indexSegment, String query) {
  int totalDocs = indexSegment.getSegmentMetadata().getTotalDocs();

  // Build the projection chain: match-all filter -> doc-id set -> projection over all columns.
  Operator filterOperator = new MatchEntireSegmentOperator(totalDocs);
  BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(filterOperator, totalDocs, NUM_ROWS);
  Map<String, BaseOperator> dataSourceMap = buildDataSourceMap(indexSegment.getSegmentMetadata().getSchema());
  MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);

  // Compile the query, then create and initialize one context per aggregation.
  BrokerRequest brokerRequest = new Pql2Compiler().compileToBrokerRequest(query);
  List<AggregationInfo> aggregationInfos = brokerRequest.getAggregationsInfo();
  AggregationFunctionContext[] functionContexts = new AggregationFunctionContext[aggregationInfos.size()];
  AggregationFunctionInitializer initializer =
      new AggregationFunctionInitializer(indexSegment.getSegmentMetadata());
  int index = 0;
  for (AggregationInfo aggregationInfo : aggregationInfos) {
    AggregationFunctionContext functionContext = AggregationFunctionContext.instantiate(aggregationInfo);
    functionContext.getAggregationFunction().accept(initializer);
    functionContexts[index++] = functionContext;
  }

  // Wire the transform and group-by operators, execute, and extract the result.
  GroupBy groupBy = brokerRequest.getGroupBy();
  Set<String> expressions = new HashSet<>(groupBy.getExpressions());
  TransformExpressionOperator transformOperator = new TransformExpressionOperator(projectionOperator,
      TransformPlanNode.buildTransformExpressionTrees(expressions));
  AggregationGroupByOperator groupByOperator =
      new AggregationGroupByOperator(functionContexts, groupBy, Integer.MAX_VALUE, transformOperator, NUM_ROWS);
  IntermediateResultsBlock resultsBlock = (IntermediateResultsBlock) groupByOperator.nextBlock();
  return resultsBlock.getAggregationGroupByResult();
}
Example usage of com.linkedin.pinot.core.plan.AggregationFunctionInitializer in the Pinot project by LinkedIn.
From the class DefaultAggregationExecutorTest, method testAggregation:
/**
 * Runs the 'sum', 'min' &amp; 'max' aggregation functions through the DefaultAggregationExecutor
 * and asserts that every result matches an independently computed expected value.
 */
@Test
void testAggregation() {
  // Expose every column of the segment as a data source for the projection operator.
  Map<String, BaseOperator> dataSourceMap = new HashMap<>();
  for (String columnName : _indexSegment.getColumnNames()) {
    dataSourceMap.put(columnName, _indexSegment.getDataSource(columnName));
  }

  // Match-all filter -> doc-id set -> projection -> identity transform over the whole segment.
  int totalRawDocs = _indexSegment.getSegmentMetadata().getTotalRawDocs();
  MatchEntireSegmentOperator matchAllOperator = new MatchEntireSegmentOperator(totalRawDocs);
  BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(matchAllOperator, totalRawDocs, 10000);
  MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);
  TransformExpressionOperator transformOperator =
      new TransformExpressionOperator(projectionOperator, Collections.<TransformExpressionTree>emptyList());
  TransformBlock transformBlock = (TransformBlock) transformOperator.nextBlock();

  // Create and initialize one aggregation function context per configured aggregation.
  AggregationFunctionContext[] functionContexts = new AggregationFunctionContext[_aggregationInfoList.size()];
  AggregationFunctionInitializer initializer =
      new AggregationFunctionInitializer(_indexSegment.getSegmentMetadata());
  for (int i = 0; i < functionContexts.length; i++) {
    functionContexts[i] = AggregationFunctionContext.instantiate(_aggregationInfoList.get(i));
    functionContexts[i].getAggregationFunction().accept(initializer);
  }

  // Execute the full aggregation lifecycle over the single transform block.
  AggregationExecutor aggregationExecutor = new DefaultAggregationExecutor(functionContexts);
  aggregationExecutor.init();
  aggregationExecutor.aggregate(transformBlock);
  aggregationExecutor.finish();

  // Verify each aggregation output against the locally computed expectation.
  List<Object> results = aggregationExecutor.getResult();
  for (int i = 0; i < results.size(); i++) {
    double actual = (double) results.get(i);
    double expected = computeAggregation(AGGREGATION_FUNCTIONS[i], _inputData[i]);
    Assert.assertEquals(actual, expected, "Aggregation mis-match for function " + AGGREGATION_FUNCTIONS[i] + ", Expected: " + expected + " Actual: " + actual);
  }
}
Aggregations