Use of com.linkedin.pinot.core.operator.blocks.IntermediateResultsBlock in project pinot by LinkedIn.
Class AggregationGroupByOperator, method getNextBlock:
@Override
public Block getNextBlock() {
  int numDocsScanned = 0;

  // Perform aggregation group-by on all the blocks.
  GroupByExecutor groupByExecutor =
      new DefaultGroupByExecutor(_aggregationFunctionContexts, _groupBy, _numGroupsLimit);
  groupByExecutor.init();
  TransformBlock transformBlock;
  while ((transformBlock = (TransformBlock) _transformOperator.nextBlock()) != null) {
    numDocsScanned += transformBlock.getNumDocs();
    groupByExecutor.process(transformBlock);
  }
  groupByExecutor.finish();

  // Create execution statistics.
  long numEntriesScannedInFilter = _transformOperator.getExecutionStatistics().getNumEntriesScannedInFilter();
  long numEntriesScannedPostFilter = numDocsScanned * _transformOperator.getNumProjectionColumns();
  _executionStatistics =
      new ExecutionStatistics(numDocsScanned, numEntriesScannedInFilter, numEntriesScannedPostFilter, _numTotalRawDocs);

  // Build the intermediate result block from the aggregation group-by result produced by the executor.
  return new IntermediateResultsBlock(_aggregationFunctionContexts, groupByExecutor.getResult());
}
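For orientation, here is a minimal caller-side sketch (not taken from the Pinot source) of how the returned block and the statistics recorded above might be consumed. The variable aggregationGroupByOperator stands for an already-constructed AggregationGroupByOperator (as in the test below); the operator is invoked through nextBlock(), and reading the statistics back assumes the operator exposes the _executionStatistics it builds via a getExecutionStatistics() accessor, with getter names on ExecutionStatistics mirroring its constructor arguments.

// Sketch only: drive the operator and read back the execution statistics.
// getExecutionStatistics() and the getters on ExecutionStatistics are assumptions
// about this Pinot version; verify against the actual classes.
IntermediateResultsBlock resultsBlock = (IntermediateResultsBlock) aggregationGroupByOperator.nextBlock();
ExecutionStatistics statistics = aggregationGroupByOperator.getExecutionStatistics();
System.out.println("numDocsScanned=" + statistics.getNumDocsScanned()
    + ", numEntriesScannedPostFilter=" + statistics.getNumEntriesScannedPostFilter());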
Use of com.linkedin.pinot.core.operator.blocks.IntermediateResultsBlock in project pinot by LinkedIn.
Class TransformGroupByTest, method executeGroupByQuery:
/**
 * Helper method that executes the group-by query on the given index segment and returns the group-by result.
 *
 * @param indexSegment Index segment to query
 * @param query Query to execute
 * @return Group-by result
 */
private AggregationGroupByResult executeGroupByQuery(IndexSegment indexSegment, String query) {
  Operator filterOperator = new MatchEntireSegmentOperator(indexSegment.getSegmentMetadata().getTotalDocs());
  final BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(filterOperator, indexSegment.getSegmentMetadata().getTotalDocs(), NUM_ROWS);
  final Map<String, BaseOperator> dataSourceMap = buildDataSourceMap(indexSegment.getSegmentMetadata().getSchema());
  final MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);

  Pql2Compiler compiler = new Pql2Compiler();
  BrokerRequest brokerRequest = compiler.compileToBrokerRequest(query);
  List<AggregationInfo> aggregationsInfo = brokerRequest.getAggregationsInfo();
  int numAggFunctions = aggregationsInfo.size();

  AggregationFunctionContext[] aggrFuncContextArray = new AggregationFunctionContext[numAggFunctions];
  AggregationFunctionInitializer aggFuncInitializer =
      new AggregationFunctionInitializer(indexSegment.getSegmentMetadata());
  for (int i = 0; i < numAggFunctions; i++) {
    AggregationInfo aggregationInfo = aggregationsInfo.get(i);
    aggrFuncContextArray[i] = AggregationFunctionContext.instantiate(aggregationInfo);
    aggrFuncContextArray[i].getAggregationFunction().accept(aggFuncInitializer);
  }

  GroupBy groupBy = brokerRequest.getGroupBy();
  Set<String> expressions = new HashSet<>(groupBy.getExpressions());
  TransformExpressionOperator transformOperator =
      new TransformExpressionOperator(projectionOperator, TransformPlanNode.buildTransformExpressionTrees(expressions));

  AggregationGroupByOperator groupByOperator =
      new AggregationGroupByOperator(aggrFuncContextArray, groupBy, Integer.MAX_VALUE, transformOperator, NUM_ROWS);

  IntermediateResultsBlock block = (IntermediateResultsBlock) groupByOperator.nextBlock();
  return block.getAggregationGroupByResult();
}
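The AggregationGroupByResult returned above can then be iterated to check per-group values. A minimal sketch follows, assuming AggregationGroupByResult exposes getGroupKeyIterator() and getResultForKey(groupKey, index) and that GroupKeyGenerator.GroupKey provides a string form of the key; the query string and column names are invented for illustration, so confirm the exact accessors against this Pinot version.

// Sketch only: walk the groups produced by executeGroupByQuery(). The accessors used here
// (getGroupKeyIterator, getResultForKey, getStringKey) and the query columns are assumptions.
AggregationGroupByResult groupByResult =
    executeGroupByQuery(indexSegment, "SELECT SUM(m1) FROM testTable GROUP BY d1");
Iterator<GroupKeyGenerator.GroupKey> groupKeyIterator = groupByResult.getGroupKeyIterator();
while (groupKeyIterator.hasNext()) {
  GroupKeyGenerator.GroupKey groupKey = groupKeyIterator.next();
  // Index 0 refers to the first (and here only) aggregation function in the query.
  Object aggregatedValue = groupByResult.getResultForKey(groupKey, 0);
  System.out.println(groupKey.getStringKey() + " -> " + aggregatedValue);
}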