Use of com.linkedin.pinot.core.common.Operator in project pinot by linkedin.
From the class BaseSumStarTreeIndexTest, method computeSumUsingRawDocs.
/**
 * Helper method to compute the sums by scanning the raw documents (as opposed to the star-tree index).
 *
 * @param segment Index segment to compute the sums on
 * @param metricNames Names of the metric columns to sum
 * @param brokerRequest Broker request carrying the filter and optional group-by
 * @return Map from group-by key to the array of sums, one entry per metric
 */
private Map<String, double[]> computeSumUsingRawDocs(IndexSegment segment, List<String> metricNames,
    BrokerRequest brokerRequest) {
  // Build and run the filter plan to get an iterator over the matching raw doc ids.
  FilterPlanNode planNode = new FilterPlanNode(segment, brokerRequest);
  Operator rawOperator = planNode.run();
  BlockDocIdIterator rawDocIdIterator = rawOperator.nextBlock().getBlockDocIdSet().iterator();

  // Pick up the group-by columns only if the request has both aggregations and a group-by.
  List<String> groupByColumns = Collections.emptyList();
  if (brokerRequest.isSetAggregationsInfo() && brokerRequest.isSetGroupBy()) {
    groupByColumns = brokerRequest.getGroupBy().getColumns();
  }
  return computeSum(segment, rawDocIdIterator, metricNames, groupByColumns);
}
Use of com.linkedin.pinot.core.common.Operator in project pinot by linkedin.
From the class AndOperator, method nextFilterBlock.
@Override
public BaseFilterBlock nextFilterBlock(BlockId blockId) {
  // Collect the filtered doc id set from each child filter operator.
  List<FilterBlockDocIdSet> blockDocIdSets = new ArrayList<>();
  for (Operator operator : operators) {
    Block block = operator.nextBlock();
    FilterBlockDocIdSet blockDocIdSet = (FilterBlockDocIdSet) block.getBlockDocIdSet();
    blockDocIdSets.add(blockDocIdSet);
  }
  // The AndBlock combines (ANDs) the child doc id sets.
  andBlock = new AndBlock(blockDocIdSets);
  return andBlock;
}
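For intuition, the AND semantics over the collected doc id sets amount to intersecting sorted doc id streams. A conceptual sketch of that intersection (not Pinot's AndBlock/AndBlockDocIdSet implementation; the intersect name is made up), assuming each child iterator yields ascending doc ids and Constants.EOF when exhausted:

// Conceptual sketch: merge-intersect two sorted doc id streams, as an AND filter would.
private List<Integer> intersect(BlockDocIdIterator left, BlockDocIdIterator right) {
  List<Integer> result = new ArrayList<>();
  int leftDocId = left.next();
  int rightDocId = right.next();
  while (leftDocId != Constants.EOF && rightDocId != Constants.EOF) {
    if (leftDocId == rightDocId) {
      result.add(leftDocId);
      leftDocId = left.next();
      rightDocId = right.next();
    } else if (leftDocId < rightDocId) {
      leftDocId = left.next();
    } else {
      rightDocId = right.next();
    }
  }
  return result;
}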
Use of com.linkedin.pinot.core.common.Operator in project pinot by linkedin.
From the class TransformGroupByTest, method executeGroupByQuery.
/**
 * Helper method that executes the group by query on the index and returns the group by result.
 *
 * @param indexSegment Index segment to run the query on
 * @param query Query to execute
 * @return Group by result
 */
private AggregationGroupByResult executeGroupByQuery(IndexSegment indexSegment, String query) {
  // Match all documents in the segment, then project the required columns.
  Operator filterOperator = new MatchEntireSegmentOperator(indexSegment.getSegmentMetadata().getTotalDocs());
  final BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(filterOperator, indexSegment.getSegmentMetadata().getTotalDocs(), NUM_ROWS);
  final Map<String, BaseOperator> dataSourceMap = buildDataSourceMap(indexSegment.getSegmentMetadata().getSchema());
  final MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);

  // Compile the PQL query and set up one aggregation function context per aggregation.
  Pql2Compiler compiler = new Pql2Compiler();
  BrokerRequest brokerRequest = compiler.compileToBrokerRequest(query);
  List<AggregationInfo> aggregationsInfo = brokerRequest.getAggregationsInfo();
  int numAggFunctions = aggregationsInfo.size();
  AggregationFunctionContext[] aggrFuncContextArray = new AggregationFunctionContext[numAggFunctions];
  AggregationFunctionInitializer aggFuncInitializer =
      new AggregationFunctionInitializer(indexSegment.getSegmentMetadata());
  for (int i = 0; i < numAggFunctions; i++) {
    AggregationInfo aggregationInfo = aggregationsInfo.get(i);
    aggrFuncContextArray[i] = AggregationFunctionContext.instantiate(aggregationInfo);
    aggrFuncContextArray[i].getAggregationFunction().accept(aggFuncInitializer);
  }

  // Build the transform operator for the group-by expressions and run the group-by aggregation.
  GroupBy groupBy = brokerRequest.getGroupBy();
  Set<String> expressions = new HashSet<>(groupBy.getExpressions());
  TransformExpressionOperator transformOperator = new TransformExpressionOperator(projectionOperator,
      TransformPlanNode.buildTransformExpressionTrees(expressions));
  AggregationGroupByOperator groupByOperator =
      new AggregationGroupByOperator(aggrFuncContextArray, groupBy, Integer.MAX_VALUE, transformOperator, NUM_ROWS);
  IntermediateResultsBlock block = (IntermediateResultsBlock) groupByOperator.nextBlock();
  return block.getAggregationGroupByResult();
}
Use of com.linkedin.pinot.core.common.Operator in project pinot by linkedin.
From the class TransformExpressionOperatorTest, method evaluateExpression.
/**
* Helper method to evaluate one expression using the TransformOperator.
* @param expression Expression to evaluate
* @return Result of evaluation
*/
private double[] evaluateExpression(String expression) {
  // Match all documents and project the columns needed by the expression.
  Operator filterOperator = new MatchEntireSegmentOperator(_indexSegment.getSegmentMetadata().getTotalDocs());
  final BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(filterOperator, _indexSegment.getSegmentMetadata().getTotalDocs(), NUM_ROWS);
  final Map<String, BaseOperator> dataSourceMap = buildDataSourceMap(_indexSegment.getSegmentMetadata().getSchema());
  final MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);

  // Compile the expression into a transform expression tree and wrap the projection
  // with a transform operator that evaluates it.
  Pql2Compiler compiler = new Pql2Compiler();
  List<TransformExpressionTree> expressionTrees = new ArrayList<>(1);
  expressionTrees.add(compiler.compileToExpressionTree(expression));
  TransformExpressionOperator transformOperator =
      new TransformExpressionOperator(projectionOperator, expressionTrees);

  // Pull one block and read the evaluated values for the expression as doubles.
  transformOperator.open();
  TransformBlock transformBlock = (TransformBlock) transformOperator.getNextBlock();
  BlockValSet blockValueSet = transformBlock.getBlockValueSet(expression);
  double[] actual = blockValueSet.getDoubleValuesSV();
  transformOperator.close();
  return actual;
}
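In a test, the returned array would be compared element-wise against values computed directly from the raw columns. A minimal sketch of that check, assuming TestNG asserts and an arbitrarily chosen tolerance (assertExpressionResult is a made-up name, not part of the actual test):

// Sketch only: 'expected' is assumed to have been computed by the caller from the
// raw column values for the same expression.
private void assertExpressionResult(double[] actual, double[] expected) {
  Assert.assertEquals(actual.length, expected.length);
  for (int i = 0; i < actual.length; i++) {
    Assert.assertEquals(actual[i], expected[i], 1e-5);
  }
}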