use of com.linkedin.pinot.common.request.InstanceRequest in project pinot by linkedin.
the class QueryExceptionTest method testSingleQuery.
@Test
public void testSingleQuery() throws Exception {
  String query = "select count(*) from testTable where column1='24516187'";
  LOGGER.info("running : " + query);
  final Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  // Compile the PQL string into a BrokerRequest and route it to a single segment.
  final BrokerRequest brokerRequest = REQUEST_COMPILER.compileToBrokerRequest(query);
  InstanceRequest instanceRequest = new InstanceRequest(1, brokerRequest);
  instanceRequest.setSearchSegments(new ArrayList<String>());
  instanceRequest.getSearchSegments().add(segmentName);
  // Execute the request on the server side and collect the per-instance DataTable.
  QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
  final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(queryRequest, queryRunners);
  instanceResponseMap.clear();
  instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
  // Reduce the single instance response into a broker response.
  final BrokerResponseNative brokerResponse = REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
  LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
}
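The same compile, execute, and reduce pipeline is repeated in each of these tests. A minimal sketch of a helper that factors it out is shown below; executeAndReduce is a hypothetical name, not part of the Pinot code base, and it assumes the test class's existing imports and fixtures (REQUEST_COMPILER, QUERY_EXECUTOR, REDUCE_SERVICE, queryRunners) are in scope.
// Hypothetical helper for these tests; not part of the Pinot code base.
private BrokerResponseNative executeAndReduce(String pql, long requestId, String segment) throws Exception {
  // Compile the PQL into a BrokerRequest and target a single segment.
  BrokerRequest brokerRequest = REQUEST_COMPILER.compileToBrokerRequest(pql);
  InstanceRequest instanceRequest = new InstanceRequest(requestId, brokerRequest);
  instanceRequest.setSearchSegments(new ArrayList<String>());
  instanceRequest.getSearchSegments().add(segment);
  // Execute on the server, then reduce the single instance response on the broker side.
  QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
  DataTable instanceResponse = QUERY_EXECUTOR.processQuery(queryRequest, queryRunners);
  Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
  return REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
}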
use of com.linkedin.pinot.common.request.InstanceRequest in project pinot by linkedin.
the class QueryExceptionTest method testQueryParsingFailedQuery.
@Test
public void testQueryParsingFailedQuery() throws Exception {
  String query = "select sudm(blablaa) from testTable where column1='24516187'";
  LOGGER.info("running : " + query);
  final Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  final BrokerRequest brokerRequest = REQUEST_COMPILER.compileToBrokerRequest(query);
  InstanceRequest instanceRequest = new InstanceRequest(1, brokerRequest);
  instanceRequest.setSearchSegments(new ArrayList<String>());
  instanceRequest.getSearchSegments().add(segmentName);
  QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
  final DataTable instanceResponse = QUERY_EXECUTOR.processQuery(queryRequest, queryRunners);
  instanceResponseMap.clear();
  instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
  final BrokerResponseNative brokerResponse = REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
  LOGGER.info("BrokerResponse is {}", brokerResponse);
  Assert.assertTrue(brokerResponse.getExceptionsSize() > 0);
}
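The point of this test is that a malformed aggregation function ("sudm") surfaces as processing exceptions in the broker response rather than a thrown error. With a helper like the executeAndReduce sketch shown earlier (hypothetical, not part of the project), the same check could be applied to several invalid statements at once; getExceptionsSize() is the accessor already used in the test above.
// Hypothetical extension of the test; assumes the executeAndReduce sketch shown earlier.
@Test
public void testSeveralMalformedQueries() throws Exception {
  String[] badQueries = {
      "select sudm(blablaa) from testTable where column1='24516187'",
      "select sudm(column1) from testTable"
  };
  long requestId = 100;
  for (String pql : badQueries) {
    BrokerResponseNative brokerResponse = executeAndReduce(pql, requestId++, segmentName);
    // A bad aggregation function is reported as a processing exception, not a thrown error.
    Assert.assertTrue(brokerResponse.getExceptionsSize() > 0);
  }
}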
use of com.linkedin.pinot.common.request.InstanceRequest in project pinot by linkedin.
the class RealtimeQueriesSentinelTest method test1.
@Test
public void test1() throws Exception {
  final Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  String query = "select sum('count') from testTable where column13='1540094560' group by column3 top 100 limit 0";
  Map<Object, Double> fromAvro = new HashMap<Object, Double>();
  fromAvro.put(null, 2.0D);
  fromAvro.put("", 1.2469280068E10D);
  fromAvro.put("F", 127.0D);
  fromAvro.put("A", 20.0D);
  fromAvro.put("H", 29.0D);
  final BrokerRequest brokerRequest = REQUEST_COMPILER.compileToBrokerRequest(query);
  InstanceRequest instanceRequest = new InstanceRequest(485, brokerRequest);
  instanceRequest.setSearchSegments(new ArrayList<String>());
  instanceRequest.getSearchSegments().add("testTable_testTable");
  QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
  DataTable instanceResponse = QUERY_EXECUTOR.processQuery(queryRequest, queryRunners);
  instanceResponseMap.clear();
  instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
  BrokerResponseNative brokerResponse = REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
  JSONArray actual = brokerResponse.toJson().getJSONArray("aggregationResults").getJSONObject(0).getJSONArray("groupByResult");
  assertGroupByResults(actual, fromAvro);
}
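The assertGroupByResults helper is not included in this excerpt. A minimal sketch of what such a check could look like is below; it assumes the broker JSON group-by format in which each groupByResult entry carries a "group" array and a "value" field, and the real helper in RealtimeQueriesSentinelTest may differ.
// Hypothetical check; the actual assertGroupByResults in RealtimeQueriesSentinelTest may differ.
private void assertGroupByResults(JSONArray actual, Map<Object, Double> expected) throws Exception {
  for (int i = 0; i < actual.length(); i++) {
    JSONObject entry = actual.getJSONObject(i);
    String group = entry.getJSONArray("group").getString(0);
    double value = Double.parseDouble(entry.get("value").toString());
    Double expectedValue = expected.get(group);
    if (expectedValue != null) {
      // Compare against the value computed directly from the Avro input.
      Assert.assertEquals(value, expectedValue.doubleValue(), 1e-5);
    }
  }
}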
use of com.linkedin.pinot.common.request.InstanceRequest in project pinot by linkedin.
the class RealtimeQueriesSentinelTest method testAggregation.
@Test
public void testAggregation() throws Exception {
  int counter = 0;
  final Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  final List<TestSimpleAggreationQuery> aggCalls = AVRO_QUERY_GENERATOR.giveMeNSimpleAggregationQueries(10000);
  for (final TestSimpleAggreationQuery aggCall : aggCalls) {
    LOGGER.info("running " + counter + " : " + aggCall.pql);
    final BrokerRequest brokerRequest = REQUEST_COMPILER.compileToBrokerRequest(aggCall.pql);
    InstanceRequest instanceRequest = new InstanceRequest(counter++, brokerRequest);
    instanceRequest.setSearchSegments(new ArrayList<String>());
    instanceRequest.getSearchSegments().add("testTable_testTable");
    QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
    DataTable instanceResponse = QUERY_EXECUTOR.processQuery(queryRequest, queryRunners);
    instanceResponseMap.clear();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    final BrokerResponseNative brokerResponse = REDUCE_SERVICE.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
    LOGGER.info("Result from avro is : " + aggCall.result);
    Double actual = Double.parseDouble(brokerResponse.getAggregationResults().get(0).getValue().toString());
    Double expected = aggCall.result;
    try {
      Assert.assertEquals(actual, expected);
    } catch (AssertionError e) {
      System.out.println("********************************");
      System.out.println("query : " + aggCall.pql);
      System.out.println("actual : " + actual);
      System.out.println("expected : " + aggCall.result);
      System.out.println("********************************");
      throw e;
    }
  }
}
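The try/catch around the assertion exists only to print the failing query before rethrowing. TestNG's assertEquals overload with a message argument can attach the same context directly, as in the one-line sketch below (a possible simplification, not the project's code).
// Sketch: carry the failing query in the assertion message instead of print-and-rethrow.
Assert.assertEquals(actual, expected, "query: " + aggCall.pql + ", expected from avro: " + aggCall.result);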
use of com.linkedin.pinot.common.request.InstanceRequest in project pinot by linkedin.
the class ScheduledRequestHandler method serializeDataTable.
static byte[] serializeDataTable(QueryRequest queryRequest, DataTable instanceResponse) {
  byte[] responseByte;
  InstanceRequest instanceRequest = queryRequest.getInstanceRequest();
  ServerMetrics metrics = queryRequest.getServerMetrics();
  TimerContext timerContext = queryRequest.getTimerContext();
  timerContext.startNewPhaseTimer(ServerQueryPhase.RESPONSE_SERIALIZATION);
  long requestId = instanceRequest != null ? instanceRequest.getRequestId() : -1;
  String brokerId = instanceRequest != null ? instanceRequest.getBrokerId() : "null";
  try {
    if (instanceResponse == null) {
      // A missing instance response is serialized as an empty payload.
      LOGGER.warn("Instance response is null for requestId: {}, brokerId: {}", requestId, brokerId);
      responseByte = new byte[0];
    } else {
      responseByte = instanceResponse.toBytes();
    }
  } catch (Exception e) {
    // Serialization failures are metered and reported to the caller as a null payload.
    metrics.addMeteredGlobalValue(ServerMeter.RESPONSE_SERIALIZATION_EXCEPTIONS, 1);
    LOGGER.error("Got exception while serializing response for requestId: {}, brokerId: {}", requestId, brokerId, e);
    responseByte = null;
  }
  timerContext.getPhaseTimer(ServerQueryPhase.RESPONSE_SERIALIZATION).stopAndRecord();
  // Record total query time from arrival through the end of serialization.
  timerContext.startNewPhaseTimerAtNs(ServerQueryPhase.TOTAL_QUERY_TIME, timerContext.getQueryArrivalTimeNs());
  timerContext.getPhaseTimer(ServerQueryPhase.TOTAL_QUERY_TIME).stopAndRecord();
  return responseByte;
}
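The return value encodes three cases: the serialized DataTable, an empty array when the instance response was null, and null when serialization itself failed. The small self-contained sketch below only illustrates how a caller might distinguish those cases; describeSerializedResponse is a hypothetical name and not a method of ScheduledRequestHandler.
// Hypothetical illustration of the three return states of serializeDataTable.
static String describeSerializedResponse(byte[] responseBytes) {
  if (responseBytes == null) {
    // Serialization failed; RESPONSE_SERIALIZATION_EXCEPTIONS was already metered inside serializeDataTable.
    return "serialization failed";
  } else if (responseBytes.length == 0) {
    // The server produced no DataTable for this request.
    return "empty instance response";
  } else {
    return responseBytes.length + " bytes of serialized DataTable";
  }
}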