Example usage of com.linkedin.pinot.common.request.FilterQueryMap in the Pinot project (by LinkedIn): class BrokerRequestHandler, method attachTimeBoundary.
/**
 * Attaches a time-boundary RANGE filter to a broker request issued against a hybrid table,
 * so that offline and realtime servers each answer for a disjoint time span.
 * <p>If no usable time boundary exists (missing info, column, or value), the request is left
 * untouched and a warning is logged.
 *
 * @param hybridTableName hybrid table name.
 * @param brokerRequest original broker request.
 * @param isOfflineRequest flag for offline/realtime request.
 */
private void attachTimeBoundary(@Nonnull String hybridTableName, @Nonnull BrokerRequest brokerRequest,
    boolean isOfflineRequest) {
  TimeBoundaryInfo boundary = _timeBoundaryService.getTimeBoundaryInfoFor(
      TableNameBuilder.OFFLINE_TABLE_NAME_BUILDER.forTable(hybridTableName));
  boolean boundaryUsable =
      boundary != null && boundary.getTimeColumn() != null && boundary.getTimeValue() != null;
  if (!boundaryUsable) {
    LOGGER.warn("No time boundary attached for table: {}", hybridTableName);
    return;
  }

  // Build the RANGE filter: offline covers everything strictly before the boundary value,
  // realtime covers the boundary value and everything after it.
  String boundaryValue = boundary.getTimeValue();
  String range = isOfflineRequest ? "(*\t\t" + boundaryValue + ")" : "[" + boundaryValue + "\t\t*)";
  FilterQuery rangeFilter = new FilterQuery();
  rangeFilter.setOperator(FilterOperator.RANGE);
  rangeFilter.setColumn(boundary.getTimeColumn());
  rangeFilter.setNestedFilterQueryIds(new ArrayList<Integer>());
  List<String> rangeValues = new ArrayList<>();
  rangeValues.add(range);
  rangeFilter.setValue(rangeValues);
  // Negative id so the synthetic filter cannot collide with ids assigned by the parser.
  rangeFilter.setId(-1);

  FilterQuery existingFilter = brokerRequest.getFilterQuery();
  if (existingFilter == null) {
    // No filter on the request yet: the range filter becomes the root filter.
    FilterQueryMap subQueryMap = new FilterQueryMap();
    subQueryMap.putToFilterQueryMap(rangeFilter.getId(), rangeFilter);
    brokerRequest.setFilterQuery(rangeFilter);
    brokerRequest.setFilterSubQueryMap(subQueryMap);
  } else {
    // Combine the existing filter with the range filter under a new AND root.
    FilterQuery andFilter = new FilterQuery();
    andFilter.setOperator(FilterOperator.AND);
    List<Integer> childIds = new ArrayList<>();
    childIds.add(existingFilter.getId());
    childIds.add(rangeFilter.getId());
    andFilter.setNestedFilterQueryIds(childIds);
    andFilter.setId(-2);
    FilterQueryMap subQueryMap = brokerRequest.getFilterSubQueryMap();
    subQueryMap.putToFilterQueryMap(rangeFilter.getId(), rangeFilter);
    subQueryMap.putToFilterQueryMap(andFilter.getId(), andFilter);
    brokerRequest.setFilterQuery(andFilter);
    brokerRequest.setFilterSubQueryMap(subQueryMap);
  }
}
Example usage of com.linkedin.pinot.common.request.FilterQueryMap in the Pinot project (by LinkedIn): class RequestUtils, method generateFilterFromTree.
/**
 * Generates a Thrift-compliant FilterQuery hierarchy from the parsed filter query tree and
 * populates it into the broker request (both the root filter and the id-to-filter sub-query map).
 *
 * @param filterQueryTree parsed filter tree to convert.
 * @param request broker request to populate.
 */
public static void generateFilterFromTree(FilterQueryTree filterQueryTree, BrokerRequest request) {
  // Traversal fills the map with every nested filter; the root itself is added afterwards.
  Map<Integer, FilterQuery> idToQuery = new HashMap<>();
  FilterQuery rootQuery = traverseFilterQueryAndPopulateMap(filterQueryTree, idToQuery);
  idToQuery.put(rootQuery.getId(), rootQuery);

  FilterQueryMap subQueryMap = new FilterQueryMap();
  subQueryMap.setFilterQueryMap(idToQuery);
  request.setFilterQuery(rootQuery);
  request.setFilterSubQueryMap(subQueryMap);
}
Example usage of com.linkedin.pinot.common.request.FilterQueryMap in the Pinot project (by LinkedIn): class BrokerRequestSerializationTest, method testSerialization.
/**
 * Builds a fully-populated BrokerRequest (query type, source, group-by, selections, a nested
 * filter tree, and aggregations) and verifies it serializes with both the default Thrift binary
 * protocol and the compact protocol.
 *
 * @throws TException if Thrift serialization fails.
 */
@Test
public static void testSerialization() throws TException {
  BrokerRequest req = new BrokerRequest();

  // Populate query type.
  QueryType type = new QueryType();
  type.setHasAggregation(true);
  type.setHasFilter(true);
  type.setHasSelection(true);
  type.setHasGroup_by(true);
  req.setQueryType(type);

  // Populate query source.
  QuerySource s = new QuerySource();
  s.setTableName("dummy");
  req.setQuerySource(s);
  req.setDuration("dummy");
  req.setTimeInterval("dummy");

  // Populate group-by.
  GroupBy groupBy = new GroupBy();
  List<String> columns = new ArrayList<String>();
  columns.add("dummy1");
  columns.add("dummy2");
  groupBy.setColumns(columns);
  groupBy.setTopN(100);
  req.setGroupBy(groupBy);

  // Populate selections.
  Selection sel = new Selection();
  sel.setSize(1);
  SelectionSort s2 = new SelectionSort();
  s2.setColumn("dummy1");
  s2.setIsAsc(true);
  sel.addToSelectionSortSequence(s2);
  sel.addToSelectionColumns("dummy1");
  req.setSelections(sel);

  // Populate filter query: q1 is the root AND node with q2 nested under it.
  FilterQuery q1 = new FilterQuery();
  q1.setId(1);
  q1.setColumn("dummy1");
  q1.addToValue("dummy1");
  q1.addToNestedFilterQueryIds(2);
  q1.setOperator(FilterOperator.AND);
  FilterQuery q2 = new FilterQuery();
  q2.setId(2);
  q2.setColumn("dummy2");
  q2.addToValue("dummy2");
  q2.setOperator(FilterOperator.AND);
  FilterQueryMap map = new FilterQueryMap();
  map.putToFilterQueryMap(1, q1);
  map.putToFilterQueryMap(2, q2);
  req.setFilterQuery(q1);
  req.setFilterSubQueryMap(map);

  // Populate aggregations.
  AggregationInfo agg = new AggregationInfo();
  agg.setAggregationType("dummy1");
  agg.putToAggregationParams("key1", "dummy1");
  req.addToAggregationsInfo(agg);

  // Serialize with both protocols and verify the output is non-trivial; previously the
  // results were discarded, so the test could not detect a broken serializer.
  TSerializer normalSerializer = new TSerializer();
  TSerializer compactSerializer = new TSerializer(new TCompactProtocol.Factory());
  byte[] normalBytes = normalSerializer.serialize(req);
  byte[] compactBytes = compactSerializer.serialize(req);
  if (normalBytes == null || normalBytes.length == 0) {
    throw new IllegalStateException("Binary-protocol serialization produced no bytes");
  }
  if (compactBytes == null || compactBytes.length == 0) {
    throw new IllegalStateException("Compact-protocol serialization produced no bytes");
  }
}
Aggregations