Use of com.linkedin.pinot.common.request.BrokerRequest in project pinot by linkedin.
In class Pql2CompilerTest, method testQuotedStrings:
@Test
public void testQuotedStrings() {
  Pql2Compiler compiler = new Pql2Compiler();

  // Two single quotes inside a single-quoted string un-escape to one single quote
  BrokerRequest brokerRequest =
      compiler.compileToBrokerRequest("select * from vegetables where origin = 'Martha''s Vineyard'");
  Assert.assertEquals(brokerRequest.getFilterQuery().getValue().get(0), "Martha's Vineyard");

  // Double quotes inside a single-quoted string pass through unchanged
  brokerRequest =
      compiler.compileToBrokerRequest("select * from vegetables where origin = 'Martha\"\"s Vineyard'");
  Assert.assertEquals(brokerRequest.getFilterQuery().getValue().get(0), "Martha\"\"s Vineyard");

  // Two double quotes inside a double-quoted string un-escape to one double quote
  brokerRequest =
      compiler.compileToBrokerRequest("select * from vegetables where origin = \"Martha\"\"s Vineyard\"");
  Assert.assertEquals(brokerRequest.getFilterQuery().getValue().get(0), "Martha\"s Vineyard");

  // Single quotes inside a double-quoted string pass through unchanged
  brokerRequest =
      compiler.compileToBrokerRequest("select * from vegetables where origin = \"Martha''s Vineyard\"");
  Assert.assertEquals(brokerRequest.getFilterQuery().getValue().get(0), "Martha''s Vineyard");
}
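The same un-escaping rules can also be checked through the filter query tree rather than the raw FilterQuery, which scales better once the WHERE clause nests. A minimal sketch, assuming the package paths and the FilterQueryTree accessors (getColumn, getOperator, getValue) from the pinot source tree:

import com.linkedin.pinot.common.request.BrokerRequest;
import com.linkedin.pinot.common.utils.request.FilterQueryTree;
import com.linkedin.pinot.common.utils.request.RequestUtils;
import com.linkedin.pinot.pql.parsers.Pql2Compiler;

public class QuotedStringInspection {
  public static void main(String[] args) {
    Pql2Compiler compiler = new Pql2Compiler();
    BrokerRequest request =
        compiler.compileToBrokerRequest("select * from vegetables where origin = 'Martha''s Vineyard'");
    // For a single equality predicate the tree is a leaf whose value list holds the un-escaped literal.
    FilterQueryTree tree = RequestUtils.generateFilterQueryTree(request);
    System.out.println(tree.getColumn() + " " + tree.getOperator() + " " + tree.getValue());
  }
}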
Use of com.linkedin.pinot.common.request.BrokerRequest in project pinot by linkedin.
In class BrokerRequestSerializationTest, method testSerialization:
@Test
public static void testSerialization() throws TException {
  BrokerRequest req = new BrokerRequest();

  // Populate query type
  QueryType type = new QueryType();
  type.setHasAggregation(true);
  type.setHasFilter(true);
  type.setHasSelection(true);
  type.setHasGroup_by(true);
  req.setQueryType(type);

  // Populate query source
  QuerySource s = new QuerySource();
  s.setTableName("dummy");
  req.setQuerySource(s);
  req.setDuration("dummy");
  req.setTimeInterval("dummy");

  // Populate group-by
  GroupBy groupBy = new GroupBy();
  List<String> columns = new ArrayList<String>();
  columns.add("dummy1");
  columns.add("dummy2");
  groupBy.setColumns(columns);
  groupBy.setTopN(100);
  req.setGroupBy(groupBy);

  // Populate selections
  Selection sel = new Selection();
  sel.setSize(1);
  SelectionSort s2 = new SelectionSort();
  s2.setColumn("dummy1");
  s2.setIsAsc(true);
  sel.addToSelectionSortSequence(s2);
  sel.addToSelectionColumns("dummy1");
  req.setSelections(sel);

  // Populate filter query: q1 is the root (AND) with q2 as its nested filter
  FilterQuery q1 = new FilterQuery();
  q1.setId(1);
  q1.setColumn("dummy1");
  q1.addToValue("dummy1");
  q1.addToNestedFilterQueryIds(2);
  q1.setOperator(FilterOperator.AND);
  FilterQuery q2 = new FilterQuery();
  q2.setId(2);
  q2.setColumn("dummy2");
  q2.addToValue("dummy2");
  q2.setOperator(FilterOperator.AND);
  FilterQueryMap map = new FilterQueryMap();
  map.putToFilterQueryMap(1, q1);
  map.putToFilterQueryMap(2, q2);
  req.setFilterQuery(q1);
  req.setFilterSubQueryMap(map);

  // Populate aggregations
  AggregationInfo agg = new AggregationInfo();
  agg.setAggregationType("dummy1");
  agg.putToAggregationParams("key1", "dummy1");
  req.addToAggregationsInfo(agg);

  // Serialize with both the default binary protocol and the compact Thrift protocol
  TSerializer normalSerializer = new TSerializer();
  TSerializer compactSerializer = new TSerializer(new TCompactProtocol.Factory());
  normalSerializer.serialize(req);
  compactSerializer.serialize(req);

  // int numRequests = 100000;
  // TimerContext t = MetricsHelper.startTimer();
  // TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());
  // //TSerializer serializer = new TSerializer();
  // //Compact : Size 183 , Serialization Latency : 0.03361ms
  // // Normal : Size 385 , Serialization Latency : 0.01144ms
  //
  // for (int i = 0; i < numRequests; i++) {
  //   try {
  //     serializer.serialize(req);
  //     //System.out.println(s3.length);
  //     //break;
  //   } catch (TException e) {
  //     e.printStackTrace();
  //   }
  // }
  // t.stop();
  // System.out.println("Latency is :" + (t.getLatencyMs() / (float) numRequests));
}
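The test above only verifies that both serializers run without throwing; neither output is inspected. A natural extension, not part of the original test, is a compact-protocol round trip followed by an equality check. A minimal sketch using only standard libthrift API (the helper class name is an invention):

import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TCompactProtocol;
import com.linkedin.pinot.common.request.BrokerRequest;

public class BrokerRequestRoundTrip {
  // Serializes the request with the compact protocol and deserializes it back into a fresh object.
  static BrokerRequest roundTrip(BrokerRequest original) throws TException {
    byte[] bytes = new TSerializer(new TCompactProtocol.Factory()).serialize(original);
    BrokerRequest copy = new BrokerRequest();
    new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, bytes);
    return copy;
  }
}

Since Thrift-generated classes implement structural equals(), appending Assert.assertEquals(BrokerRequestRoundTrip.roundTrip(req), req) at the end of testSerialization would catch any field lost in transit.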
Use of com.linkedin.pinot.common.request.BrokerRequest in project pinot by linkedin.
In class BaseHllStarTreeIndexTest, method testHardCodedQueries:
void testHardCodedQueries(IndexSegment segment, Schema schema) throws Exception {
  // Only use the metrics corresponding to columnsToDeriveHllFields
  List<String> metricNames = new ArrayList<>();
  for (String column : columnsToDeriveHllFields) {
    metricNames.add(column + HLL_CONFIG.getHllDeriveColumnSuffix());
  }
  SegmentMetadata segmentMetadata = segment.getSegmentMetadata();
  LOGGER.info("[Schema] Dim: {} Metric: {}", schema.getDimensionNames(), schema.getMetricNames());

  for (int i = 0; i < _hardCodedQueries.length; i++) {
    Pql2Compiler compiler = new Pql2Compiler();
    BrokerRequest brokerRequest = compiler.compileToBrokerRequest(_hardCodedQueries[i]);
    FilterQueryTree filterQueryTree = RequestUtils.generateFilterQueryTree(brokerRequest);
    Assert.assertTrue(RequestUtils.isFitForStarTreeIndex(segmentMetadata, filterQueryTree, brokerRequest));

    // Group -> projected values of each group
    Map<String, long[]> expectedResult = computeHllUsingRawDocs(segment, metricNames, brokerRequest);
    Map<String, long[]> actualResult = computeHllUsingAggregatedDocs(segment, metricNames, brokerRequest);
    Assert.assertEquals(expectedResult.size(), actualResult.size(), "Mis-match in number of groups");
    for (Map.Entry<String, long[]> entry : expectedResult.entrySet()) {
      String expectedKey = entry.getKey();
      Assert.assertTrue(actualResult.containsKey(expectedKey));
      long[] expectedSums = entry.getValue();
      long[] actualSums = actualResult.get(expectedKey);
      for (int j = 0; j < expectedSums.length; j++) {
        LOGGER.info("actual hll: {}", actualSums[j]);
        LOGGER.info("expected hll: {}", expectedSums[j]);
        Assert.assertEquals(actualSums[j], expectedSums[j],
            "Mis-match hll for key '" + expectedKey + "', Metric: " + metricNames.get(j) + ", Random Seed: " + _randomSeed);
      }
    }
  }
}
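The queries themselves live in the _hardCodedQueries field elsewhere in the test class and are not shown here. Purely as an illustration of the kind of query the star-tree fitness check above admits, a hypothetical definition might look as follows (table, column, and value names are invented):

// Hypothetical example only; the real _hardCodedQueries array is defined in BaseHllStarTreeIndexTest.
private static final String[] _hardCodedQueries = new String[] {
    "SELECT fasthll(column17_hll) FROM testTable WHERE column1 = 'value1' GROUP BY column11",
    "SELECT fasthll(column17_hll) FROM testTable GROUP BY column12"
};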
Use of com.linkedin.pinot.common.request.BrokerRequest in project pinot by linkedin.
In class BaseQueriesTest, method getBrokerResponseForQuery:
/**
* Run query on multiple index segments.
* <p>Use this to test the whole flow from server to broker.
* <p>The result should be equivalent to querying 4 identical index segments.
*
* @param query PQL query.
* @return broker response.
*/
protected BrokerResponseNative getBrokerResponseForQuery(String query) {
  BrokerRequest brokerRequest = COMPILER.compileToBrokerRequest(query);

  // Server side: build and execute the inter-segment plan, then collect the instance response
  Plan plan = PLAN_MAKER.makeInterSegmentPlan(getSegmentDataManagers(), brokerRequest, EXECUTOR_SERVICE, 10_000);
  plan.execute();
  DataTable instanceResponse = plan.getInstanceResponse();

  // Broker side: reduce the same data table reported by two simulated server instances
  BrokerReduceService brokerReduceService = new BrokerReduceService();
  Map<ServerInstance, DataTable> dataTableMap = new HashMap<>();
  dataTableMap.put(new ServerInstance("localhost:0000"), instanceResponse);
  dataTableMap.put(new ServerInstance("localhost:1111"), instanceResponse);
  return brokerReduceService.reduceOnDataTable(brokerRequest, dataTableMap);
}
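A concrete subclass would call this helper straight from a test method. A minimal sketch (the table name, the expected-count constant, and the BrokerResponseNative getter are assumptions, not taken from the original class):

// Hypothetical usage inside a subclass of BaseQueriesTest; names and counts are placeholders.
@Test
public void testCountStar() {
  BrokerResponseNative response = getBrokerResponseForQuery("SELECT COUNT(*) FROM testTable");
  // Per the javadoc above, the reduced result should look as if 4 identical segments were queried.
  Assert.assertEquals(response.getNumDocsScanned(), 4L * EXPECTED_DOCS_PER_SEGMENT);
}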
Use of com.linkedin.pinot.common.request.BrokerRequest in project pinot by linkedin.
In class FilterTreeOptimizationTest, method testQuery:
/**
* Helper method to perform the actual testing for the given query.
* <ul>
* <li> Constructs the operator tree with and without alwaysFalse optimizations. </li>
* <li> Compares that all docIds filtered by the two operators are identical. </li>
* </ul>
* @param query Query to run.
*/
private void testQuery(String query) {
  BrokerRequest brokerRequest = _compiler.compileToBrokerRequest(query);
  FilterQueryTree filterQueryTree = RequestUtils.generateFilterQueryTree(brokerRequest);

  // Build the physical filter operator tree without and with the alwaysFalse optimization
  BaseFilterOperator expectedOperator = FilterPlanNode.constructPhysicalOperator(filterQueryTree, _indexSegment, false);
  BaseFilterOperator actualOperator = FilterPlanNode.constructPhysicalOperator(filterQueryTree, _indexSegment, true);

  // Both operators must produce exactly the same docIds, block by block
  BaseFilterBlock expectedBlock;
  while ((expectedBlock = expectedOperator.getNextBlock()) != null) {
    BaseFilterBlock actualBlock = actualOperator.getNextBlock();
    Assert.assertNotNull(actualBlock);
    final BlockDocIdSet expectedDocIdSet = expectedBlock.getBlockDocIdSet();
    final BlockDocIdIterator expectedIterator = expectedDocIdSet.iterator();
    final BlockDocIdSet actualDocIdSet = actualBlock.getBlockDocIdSet();
    final BlockDocIdIterator actualIterator = actualDocIdSet.iterator();
    int expectedDocId;
    int actualDocId;
    while (((expectedDocId = expectedIterator.next()) != Constants.EOF)
        && ((actualDocId = actualIterator.next()) != Constants.EOF)) {
      Assert.assertEquals(actualDocId, expectedDocId);
    }

    // After the loop, both iterators must return EOF
    Assert.assertTrue(expectedIterator.next() == Constants.EOF);
    Assert.assertTrue(actualIterator.next() == Constants.EOF);
  }
}
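A driver for this helper would typically sweep a batch of queries that exercise the predicates the alwaysFalse optimization can rewrite. A short sketch with invented column and value names:

// Hypothetical driver; the real FilterTreeOptimizationTest builds its queries from the generated segment.
@Test
public void testFilterOptimization() {
  String[] queries = new String[] {
      "SELECT COUNT(*) FROM testTable WHERE dim0 = 'absentValue'",               // leaf that matches nothing
      "SELECT COUNT(*) FROM testTable WHERE dim0 = 'v0' AND dim1 = 'v1'",        // AND of matching predicates
      "SELECT COUNT(*) FROM testTable WHERE dim0 = 'v0' OR dim1 = 'absentValue'" // OR with one empty branch
  };
  for (String query : queries) {
    testQuery(query);
  }
}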