Usage of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class StarTreeQueryGenerator, method main:
/**
 * Given a star tree segments directory and a number of queries, generate star tree queries
 * and verify each one is fit for the star tree index.
 * Usage: StarTreeQueryGenerator starTreeSegmentsDirectory numQueries
 *
 * @param args arguments: [0] path to the star tree segments directory, [1] number of queries.
 * @throws Exception if segment loading or query compilation fails.
 */
public static void main(String[] args) throws Exception {
if (args.length != 2) {
System.err.println("Usage: StarTreeQueryGenerator starTreeSegmentsDirectory numQueries");
return;
}
// Get segment metadata for the first segment to get table name and verify query is fit for star tree.
File segmentsDir = new File(args[0]);
Preconditions.checkState(segmentsDir.exists(), "Segments directory does not exist: %s", segmentsDir);
Preconditions.checkState(segmentsDir.isDirectory(), "Path is not a directory: %s", segmentsDir);
File[] segments = segmentsDir.listFiles();
Preconditions.checkNotNull(segments);
// Guard against an empty directory before dereferencing the first segment.
Preconditions.checkState(segments.length > 0, "No segments found in directory: %s", segmentsDir);
File segment = segments[0];
IndexSegment indexSegment = Loaders.IndexSegment.load(segment, ReadMode.heap);
SegmentMetadata segmentMetadata = indexSegment.getSegmentMetadata();
String tableName = segmentMetadata.getTableName();
// Set up star tree query generator.
int numQueries = Integer.parseInt(args[1]);
SegmentInfoProvider infoProvider = new SegmentInfoProvider(args[0]);
StarTreeQueryGenerator generator = new StarTreeQueryGenerator(tableName, infoProvider.getSingleValueDimensionColumns(), infoProvider.getMetricColumns(), infoProvider.getSingleValueDimensionValuesMap());
// One compiler instance is reused for all generated queries.
Pql2Compiler compiler = new Pql2Compiler();
for (int i = 0; i < numQueries; i++) {
String query = generator.nextQuery();
System.out.println(query);
// Verify that query is fit for star tree.
BrokerRequest brokerRequest = compiler.compileToBrokerRequest(query);
Preconditions.checkState(RequestUtils.isFitForStarTreeIndex(segmentMetadata, RequestUtils.generateFilterQueryTree(brokerRequest), brokerRequest));
}
}
Usage of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class RangeMergeOptimizerTest, method setup:
@BeforeClass
public void setup() {
// Build the fixtures shared by every test in this class: the request builder,
// the range-merge optimizer under test, and a PQL compiler.
_builder = new FilterQueryOptimizerRequest.FilterQueryOptimizerRequestBuilder();
_optimizer = new RangeMergeOptimizer();
_compiler = new Pql2Compiler();
}
Usage of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class BaseHllStarTreeIndexTest, method testHardCodedQueries:
/**
 * Runs each hard-coded query against both raw and star-tree aggregated documents and
 * verifies that the resulting HLL estimates match for every group, using only the
 * metrics derived from {@code columnsToDeriveHllFields}.
 *
 * @param segment index segment to run the queries against.
 * @param schema segment schema (used for logging dimensions and metrics).
 * @throws Exception if query compilation or result computation fails.
 */
void testHardCodedQueries(IndexSegment segment, Schema schema) throws Exception {
// only use metric corresponding to columnsToDeriveHllFields
List<String> metricNames = new ArrayList<>();
for (String column : columnsToDeriveHllFields) {
metricNames.add(column + HLL_CONFIG.getHllDeriveColumnSuffix());
}
SegmentMetadata segmentMetadata = segment.getSegmentMetadata();
LOGGER.info("[Schema] Dim: {} Metric: {}", schema.getDimensionNames(), schema.getMetricNames());
// Create the compiler once instead of re-instantiating it on every loop iteration.
Pql2Compiler compiler = new Pql2Compiler();
for (int i = 0; i < _hardCodedQueries.length; i++) {
BrokerRequest brokerRequest = compiler.compileToBrokerRequest(_hardCodedQueries[i]);
FilterQueryTree filterQueryTree = RequestUtils.generateFilterQueryTree(brokerRequest);
Assert.assertTrue(RequestUtils.isFitForStarTreeIndex(segmentMetadata, filterQueryTree, brokerRequest));
// Group -> Projected values of each group
Map<String, long[]> expectedResult = computeHllUsingRawDocs(segment, metricNames, brokerRequest);
Map<String, long[]> actualResult = computeHllUsingAggregatedDocs(segment, metricNames, brokerRequest);
Assert.assertEquals(expectedResult.size(), actualResult.size(), "Mis-match in number of groups");
for (Map.Entry<String, long[]> entry : expectedResult.entrySet()) {
String expectedKey = entry.getKey();
Assert.assertTrue(actualResult.containsKey(expectedKey));
long[] expectedSums = entry.getValue();
long[] actualSums = actualResult.get(expectedKey);
for (int j = 0; j < expectedSums.length; j++) {
LOGGER.info("actual hll: {} ", actualSums[j]);
LOGGER.info("expected hll: {} ", expectedSums[j]);
Assert.assertEquals(actualSums[j], expectedSums[j], "Mis-match hll for key '" + expectedKey + "', Metric: " + metricNames.get(j) + ", Random Seed: " + _randomSeed);
}
}
}
}
Usage of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class TransformExpressionTreeTest, method test:
/**
 * This test validates an expression tree built by {@link TransformExpressionTree#buildTree(AstNode)}
 */
@Test
public void test() {
TransformFunctionFactory.init(new String[] { TransformFunctionFactoryTest.foo.class.getName(), TransformFunctionFactoryTest.bar.class.getName() });
// Compile "foo(bar('a', foo(b, 'c', d)), e)" and walk the resulting tree level by level.
String expression = "foo(bar('a', foo(b, 'c', d)), e)";
TransformExpressionTree tree = new Pql2Compiler().compileToExpressionTree(expression);
// Root level: foo(..., e)
TransformFunction rootFunction = TransformFunctionFactory.get(tree.getTransformName());
Assert.assertEquals(rootFunction.getName(), "foo");
List<TransformExpressionTree> level1 = tree.getChildren();
Assert.assertEquals(level1.size(), 2);
Assert.assertEquals(level1.get(1).toString(), "e");
// First child: bar('a', foo(b, 'c', d))
TransformExpressionTree barNode = level1.get(0);
Assert.assertEquals(barNode.getTransformName(), "bar");
List<TransformExpressionTree> level2 = barNode.getChildren();
Assert.assertEquals(level2.size(), 2);
Assert.assertEquals(level2.get(0).toString(), "a");
Assert.assertEquals(level2.get(1).getTransformName(), "foo");
// Innermost call: foo(b, 'c', d)
List<TransformExpressionTree> level3 = level2.get(1).getChildren();
Assert.assertEquals(level3.get(0).toString(), "b");
Assert.assertEquals(level3.get(1).toString(), "c");
Assert.assertEquals(level3.get(2).toString(), "d");
}
Usage of com.linkedin.pinot.pql.parsers.Pql2Compiler in the pinot project by LinkedIn.
From the class ScanBasedQueryProcessor, method processQuery:
/**
 * Compiles and executes the given PQL query against all segments, merging the per-segment
 * result tables and, when the request is an aggregation spanning multiple segments,
 * applying a final cross-segment aggregation.
 *
 * @param query PQL query string.
 * @return query response wrapping the merged (possibly null) result table.
 * @throws Exception if query compilation or segment processing fails.
 */
public QueryResponse processQuery(String query) throws Exception {
long startTimeInMillis = System.currentTimeMillis();
Pql2Compiler pql2Compiler = new Pql2Compiler();
BrokerRequest brokerRequest = pql2Compiler.compileToBrokerRequest(query);
ResultTable results = null;
Aggregation aggregation = null;
List<AggregationInfo> aggregationsInfo = brokerRequest.getAggregationsInfo();
if (aggregationsInfo != null) {
GroupBy groupBy = brokerRequest.getGroupBy();
// groupByColumns is only needed inside this branch; scope it accordingly.
List<String> groupByColumns = brokerRequest.isSetGroupBy() ? groupBy.getColumns() : null;
// Default top-N of 10 when the query has no GROUP BY clause.
long topN = (groupByColumns != null) ? groupBy.getTopN() : 10;
// Reuse the already-fetched aggregationsInfo instead of calling the getter again.
aggregation = new Aggregation(aggregationsInfo, groupByColumns, topN);
}
int numDocsScanned = 0;
int totalDocs = 0;
int numSegments = 0;
LOGGER.info("Processing Query: {}", query);
List<ResultTable> resultTables = processSegments(query, brokerRequest);
for (ResultTable segmentResults : resultTables) {
numDocsScanned += segmentResults.getNumDocsScanned();
totalDocs += segmentResults.getTotalDocs();
++numSegments;
// Append each segment's rows onto the accumulated result table.
results = (results == null) ? segmentResults : results.append(segmentResults);
}
// A final aggregation pass is only required when more than one segment produced rows.
if (aggregation != null && numSegments > 1 && numDocsScanned > 0) {
results = aggregation.aggregate(results);
}
if (results != null) {
results.setNumDocsScanned(numDocsScanned);
results.setTotalDocs(totalDocs);
long totalUsedMs = System.currentTimeMillis() - startTimeInMillis;
results.setProcessingTime(totalUsedMs);
results.seal();
}
return new QueryResponse(results);
}
Aggregations