Use of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class TransformGroupByTest, method executeGroupByQuery:
/**
 * Runs the given group-by query against the supplied index segment and returns
 * the resulting aggregation group-by result.
 *
 * @param indexSegment Segment to execute the query against
 * @param query Group-by query to execute
 * @return Aggregation group-by result produced by the query
 */
private AggregationGroupByResult executeGroupByQuery(IndexSegment indexSegment, String query) {
  int totalDocs = indexSegment.getSegmentMetadata().getTotalDocs();

  // Scan the whole segment: match-all filter feeding the doc-id set and projection operators.
  Operator matchAllOperator = new MatchEntireSegmentOperator(totalDocs);
  final BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(matchAllOperator, totalDocs, NUM_ROWS);
  final Map<String, BaseOperator> dataSources = buildDataSourceMap(indexSegment.getSegmentMetadata().getSchema());
  final MProjectionOperator projectionOperator = new MProjectionOperator(dataSources, docIdSetOperator);

  // Compile the query and build one initialized aggregation function context per aggregation.
  BrokerRequest brokerRequest = new Pql2Compiler().compileToBrokerRequest(query);
  List<AggregationInfo> aggregationInfos = brokerRequest.getAggregationsInfo();
  AggregationFunctionContext[] functionContexts = new AggregationFunctionContext[aggregationInfos.size()];
  AggregationFunctionInitializer functionInitializer =
      new AggregationFunctionInitializer(indexSegment.getSegmentMetadata());
  int contextIndex = 0;
  for (AggregationInfo aggregationInfo : aggregationInfos) {
    AggregationFunctionContext functionContext = AggregationFunctionContext.instantiate(aggregationInfo);
    functionContext.getAggregationFunction().accept(functionInitializer);
    functionContexts[contextIndex++] = functionContext;
  }

  // Wire the transform and group-by operators, then pull the single result block.
  GroupBy groupBy = brokerRequest.getGroupBy();
  Set<String> groupByExpressions = new HashSet<>(groupBy.getExpressions());
  TransformExpressionOperator transformOperator = new TransformExpressionOperator(projectionOperator,
      TransformPlanNode.buildTransformExpressionTrees(groupByExpressions));
  AggregationGroupByOperator groupByOperator =
      new AggregationGroupByOperator(functionContexts, groupBy, Integer.MAX_VALUE, transformOperator, NUM_ROWS);

  IntermediateResultsBlock resultsBlock = (IntermediateResultsBlock) groupByOperator.nextBlock();
  return resultsBlock.getAggregationGroupByResult();
}
Use of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class TransformExpressionOperatorTest, method evaluateExpression:
/**
 * Helper method to evaluate one expression using the TransformOperator.
 *
 * <p>The operator is closed in a {@code finally} block so it is released even
 * if evaluation throws (the original code leaked it on exception).
 *
 * @param expression Expression to evaluate
 * @return Result of evaluation, one double per document
 */
private double[] evaluateExpression(String expression) {
  int totalDocs = _indexSegment.getSegmentMetadata().getTotalDocs();

  // Match-all filter over the whole segment, feeding the projection operator.
  Operator filterOperator = new MatchEntireSegmentOperator(totalDocs);
  final BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(filterOperator, totalDocs, NUM_ROWS);
  final Map<String, BaseOperator> dataSourceMap = buildDataSourceMap(_indexSegment.getSegmentMetadata().getSchema());
  final MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);

  // Compile the single expression into a transform expression tree.
  Pql2Compiler compiler = new Pql2Compiler();
  List<TransformExpressionTree> expressionTrees = new ArrayList<>(1);
  expressionTrees.add(compiler.compileToExpressionTree(expression));

  TransformExpressionOperator transformOperator =
      new TransformExpressionOperator(projectionOperator, expressionTrees);
  transformOperator.open();
  try {
    TransformBlock transformBlock = (TransformBlock) transformOperator.getNextBlock();
    BlockValSet blockValueSet = transformBlock.getBlockValueSet(expression);
    return blockValueSet.getDoubleValuesSV();
  } finally {
    // Always release the operator, even when evaluation throws.
    transformOperator.close();
  }
}
Use of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class TransformExpressionParserTest, method testGroupBy:
/**
 * This test ensures that both column names and expressions can be parsed and populated into
 * broker request correctly.
 *
 * <p>Size assertions are checked up front so the element-wise loops cannot pass
 * vacuously when the parser returns empty or truncated lists (the original code
 * only compared as many elements as were actually returned).
 */
@Test
public void testGroupBy() {
  String[] expectedGroupByColumns = new String[] { "col1", "col2", "col3" };
  String[] expectedGroupByExpressions = new String[] { "udf1(col2)", "ud2(udf3(col1, col2), col3)" };
  String query = "select col1 from myTable group by " + StringUtil.join(",", expectedGroupByColumns) + ","
      + StringUtil.join(",", expectedGroupByExpressions);

  Pql2Compiler compiler = new Pql2Compiler();
  BrokerRequest brokerRequest = compiler.compileToBrokerRequest(query);

  // Plain columns must come back exactly, in order.
  List<String> actualGroupByColumns = brokerRequest.getGroupBy().getColumns();
  Assert.assertEquals(actualGroupByColumns.size(), expectedGroupByColumns.length);
  for (int i = 0; i < actualGroupByColumns.size(); i++) {
    Assert.assertEquals(actualGroupByColumns.get(i), expectedGroupByColumns[i]);
  }

  // Group by expression list contains the columns first, then the expressions.
  List<String> actualGroupByExpressions = brokerRequest.getGroupBy().getExpressions();
  Assert.assertEquals(actualGroupByExpressions.size(),
      expectedGroupByColumns.length + expectedGroupByExpressions.length);
  for (int i = 0; i < expectedGroupByColumns.length; i++) {
    Assert.assertEquals(actualGroupByExpressions.get(i), expectedGroupByColumns[i]);
  }
  for (int i = 0; i < expectedGroupByExpressions.length; i++) {
    int expressionIndex = i + expectedGroupByColumns.length;
    // The compiler strips whitespace from expressions, so compare against the stripped form.
    Assert.assertEquals(actualGroupByExpressions.get(expressionIndex),
        expectedGroupByExpressions[i].replaceAll("\\s", ""));
  }
}
Use of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class BaseSumStarTreeIndexTest, method testHardCodedQueries:
/**
 * Runs each hard-coded query against the segment and verifies that sums computed
 * from raw documents match sums computed from star-tree aggregated documents.
 *
 * <p>Fixes: the size assertion now passes (actual, expected) to match TestNG's
 * argument order (the original had them swapped, producing misleading failure
 * messages), and the loop-invariant {@code Pql2Compiler} is constructed once
 * outside the loop.
 *
 * @param segment Segment to test against
 * @param schema Schema of the segment (used to enumerate all metric columns)
 */
protected void testHardCodedQueries(IndexSegment segment, Schema schema) {
  // Test against all metric columns, instead of just the aggregation column in the query.
  List<String> metricNames = schema.getMetricNames();
  SegmentMetadata segmentMetadata = segment.getSegmentMetadata();
  // Compiler is stateless per query; build it once rather than per iteration.
  Pql2Compiler compiler = new Pql2Compiler();
  for (int i = 0; i < _hardCodedQueries.length; i++) {
    BrokerRequest brokerRequest = compiler.compileToBrokerRequest(_hardCodedQueries[i]);
    FilterQueryTree filterQueryTree = RequestUtils.generateFilterQueryTree(brokerRequest);
    // Every hard-coded query is expected to be answerable from the star-tree index.
    Assert.assertTrue(RequestUtils.isFitForStarTreeIndex(segmentMetadata, filterQueryTree, brokerRequest));

    Map<String, double[]> expectedResult = computeSumUsingRawDocs(segment, metricNames, brokerRequest);
    Map<String, double[]> actualResult = computeSumUsingAggregatedDocs(segment, metricNames, brokerRequest);

    // TestNG argument order is (actual, expected, message).
    Assert.assertEquals(actualResult.size(), expectedResult.size(), "Mis-match in number of groups");
    for (Map.Entry<String, double[]> entry : expectedResult.entrySet()) {
      String expectedKey = entry.getKey();
      Assert.assertTrue(actualResult.containsKey(expectedKey));

      double[] expectedSums = entry.getValue();
      double[] actualSums = actualResult.get(expectedKey);
      for (int j = 0; j < expectedSums.length; j++) {
        Assert.assertEquals(actualSums[j], expectedSums[j],
            "Mis-match sum for key '" + expectedKey + "', Metric: " + metricNames.get(j) + ", Random Seed: "
                + _randomSeed);
      }
    }
  }
}
Aggregations