Use of com.linkedin.pinot.common.utils.DataTable in project pinot by linkedin.
The class IntegrationTest, method testSumQuery.
@Test
public void testSumQuery() {
  BrokerRequest brokerRequest = getSumQuery();
  QuerySource querySource = new QuerySource();
  querySource.setTableName("testTable");
  brokerRequest.setQuerySource(querySource);
  InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
  addTestTableSearchSegmentsToInstanceRequest(instanceRequest);
  try {
    QueryRequest queryRequest = new QueryRequest(instanceRequest, _serverInstance.getServerMetrics());
    DataTable instanceResponse = _queryExecutor.processQuery(queryRequest, queryRunners);
    // System.out.println(instanceResponse.getDouble(0, 0));
    // System.out.println(instanceResponse.getMetadata().get(DataTable.TIME_USED_MS_METADATA_KEY));
  } catch (Exception e) {
    e.printStackTrace();
    throw new RuntimeException(e);
  }
}
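The getSumQuery() and addTestTableSearchSegmentsToInstanceRequest() helpers are defined elsewhere in IntegrationTest. As a rough, hypothetical sketch of what getSumQuery() could look like (the metric column name "met" and the exact builder calls are assumptions, not the project's actual helper; it assumes com.linkedin.pinot.common.request.AggregationInfo and java.util collections are imported):

// Hypothetical sketch only; not the actual IntegrationTest helper.
private BrokerRequest getSumQuery() {
  BrokerRequest brokerRequest = new BrokerRequest();
  // Describe a single "sum" aggregation over an assumed metric column "met".
  AggregationInfo aggregationInfo = new AggregationInfo();
  aggregationInfo.setAggregationType("sum");
  Map<String, String> aggregationParams = new HashMap<String, String>();
  aggregationParams.put("column", "met");
  aggregationInfo.setAggregationParams(aggregationParams);
  List<AggregationInfo> aggregationsInfo = new ArrayList<AggregationInfo>();
  aggregationsInfo.add(aggregationInfo);
  brokerRequest.setAggregationsInfo(aggregationsInfo);
  return brokerRequest;
}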
Use of com.linkedin.pinot.common.utils.DataTable in project pinot by linkedin.
The class BrokerReduceServiceTest, method testSumQuery.
@Test
public void testSumQuery() {
  BrokerRequest brokerRequest = getSumQuery();
  QuerySource querySource = new QuerySource();
  querySource.setTableName("midas");
  brokerRequest.setQuerySource(querySource);
  InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
  instanceRequest.setSearchSegments(new ArrayList<String>());
  for (IndexSegment segment : _indexSegmentList) {
    instanceRequest.addToSearchSegments(segment.getSegmentName());
  }
  Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  try {
    QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
    DataTable instanceResponse1 = _queryExecutor.processQuery(queryRequest, queryRunners);
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse1);
    DataTable instanceResponse2 = _queryExecutor.processQuery(queryRequest, queryRunners);
    instanceResponseMap.put(new ServerInstance("localhost:1111"), instanceResponse2);
    BrokerResponseNative brokerResponse = _reduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
    LOGGER.info("Time used for BrokerResponse is " + brokerResponse.getTimeUsedMs());
  } catch (Exception e) {
    e.printStackTrace();
    // Should never happen
    throw new RuntimeException(e.toString(), e);
  }
}
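Because the same query result is registered under two different ServerInstance keys ("localhost:0000" and "localhost:1111"), the reduced sum is expected to be exactly twice the single-instance value. A hedged follow-up check, not part of the original test, could read the reduced value back out; the function name "sum_met" here is an assumption, and the accessors follow BrokerResponseNative's AggregationResult:

// Illustrative assertion only; "sum_met" is an assumed function name.
AggregationResult sumResult = brokerResponse.getAggregationResults().get(0);
Assert.assertEquals(sumResult.getFunction(), "sum_met");
double reducedSum = Double.parseDouble(sumResult.getValue().toString());
// With identical responses from both server instances, reducedSum should be
// twice the per-instance sum.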
Use of com.linkedin.pinot.common.utils.DataTable in project pinot by linkedin.
The class SelectionOperatorServiceTest, method testCompatibleRowsDataTableTransformation.
@Test
public void testCompatibleRowsDataTableTransformation() throws Exception {
  Collection<Serializable[]> rows = new ArrayList<>(2);
  rows.add(_row1.clone());
  rows.add(_compatibleRow1.clone());
  DataSchema dataSchema = _dataSchema.clone();
  Assert.assertTrue(dataSchema.isTypeCompatibleWith(_compatibleDataSchema));
  dataSchema.upgradeToCover(_compatibleDataSchema);
  Assert.assertEquals(dataSchema, _upgradedDataSchema);
  DataTable dataTable = SelectionOperatorUtils.getDataTableFromRows(rows, dataSchema);
  Serializable[] expectedRow1 = { 0L, 1.0, 2.0, 3.0, "4", new long[] { 5L }, new double[] { 6.0 }, new double[] { 7.0 }, new double[] { 8.0 }, new String[] { "9" } };
  Serializable[] expectedCompatibleRow1 = { 1L, 2.0, 3.0, 4.0, "5", new long[] { 6L }, new double[] { 7.0 }, new double[] { 8.0 }, new double[] { 9.0 }, new String[] { "10" } };
  Assert.assertEquals(SelectionOperatorUtils.extractRowFromDataTable(dataTable, 0), expectedRow1);
  Assert.assertEquals(SelectionOperatorUtils.extractRowFromDataTable(dataTable, 1), expectedCompatibleRow1);
}
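The expected rows show the effect of upgrading the schema before serialization: narrower values from the original rows come back widened to the upgraded column types (for example int to long, float to double) after the round trip through the DataTable. A minimal sketch, not from the test itself, of reading every stored row back out of the dataTable built above, assuming DataTable exposes getNumberOfRows():

// Iterate over all rows serialized by SelectionOperatorUtils.getDataTableFromRows(...).
for (int rowId = 0; rowId < dataTable.getNumberOfRows(); rowId++) {
  Serializable[] row = SelectionOperatorUtils.extractRowFromDataTable(dataTable, rowId);
  // Each value is already widened to the upgraded schema's column type,
  // matching expectedRow1 and expectedCompatibleRow1 above.
}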
Use of com.linkedin.pinot.common.utils.DataTable in project pinot by linkedin.
The class BrokerReduceServiceTest, method testMinQuery.
@Test
public void testMinQuery() {
  BrokerRequest brokerRequest = getMinQuery();
  QuerySource querySource = new QuerySource();
  querySource.setTableName("midas");
  brokerRequest.setQuerySource(querySource);
  InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
  instanceRequest.setSearchSegments(new ArrayList<String>());
  for (IndexSegment segment : _indexSegmentList) {
    instanceRequest.addToSearchSegments(segment.getSegmentName());
  }
  Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  try {
    QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
    DataTable instanceResponse1 = _queryExecutor.processQuery(queryRequest, queryRunners);
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse1);
    DataTable instanceResponse2 = _queryExecutor.processQuery(queryRequest, queryRunners);
    instanceResponseMap.put(new ServerInstance("localhost:1111"), instanceResponse2);
    BrokerResponseNative brokerResponse = (BrokerResponseNative) _reduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.info("BrokerResponse is " + brokerResponse.getAggregationResults().get(0));
    LOGGER.info("Time used for BrokerResponse is " + brokerResponse.getTimeUsedMs());
  } catch (Exception e) {
    e.printStackTrace();
    // Should never happen
    throw new RuntimeException(e.toString(), e);
  }
}
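Unlike the sum case, reducing two identical instance responses leaves a min unchanged, so the broker-level result can be sanity-checked directly against either per-instance value. A hedged illustration, not in the original test, again assuming AggregationResult exposes getValue() as in BrokerResponseNative:

// Illustrative only; reducedMin should equal the per-instance min, since both
// instance responses in the map are identical.
AggregationResult minResult = brokerResponse.getAggregationResults().get(0);
double reducedMin = Double.parseDouble(minResult.getValue().toString());
LOGGER.info("Reduced min is " + reducedMin);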
Use of com.linkedin.pinot.common.utils.DataTable in project pinot by linkedin.
The class BrokerReduceServiceTest, method testCountQuery.
@Test
public void testCountQuery() {
  BrokerRequest brokerRequest = getCountQuery();
  QuerySource querySource = new QuerySource();
  querySource.setTableName("midas");
  brokerRequest.setQuerySource(querySource);
  InstanceRequest instanceRequest = new InstanceRequest(0, brokerRequest);
  instanceRequest.setSearchSegments(new ArrayList<String>());
  for (IndexSegment segment : _indexSegmentList) {
    instanceRequest.addToSearchSegments(segment.getSegmentName());
  }
  Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
  QueryRequest queryRequest = new QueryRequest(instanceRequest, TableDataManagerProvider.getServerMetrics());
  DataTable instanceResponse1 = _queryExecutor.processQuery(queryRequest, queryRunners);
  instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse1);
  DataTable instanceResponse2 = _queryExecutor.processQuery(queryRequest, queryRunners);
  instanceResponseMap.put(new ServerInstance("localhost:1111"), instanceResponse2);
  BrokerResponseNative brokerResponse = _reduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
  AggregationResult aggregationResult = brokerResponse.getAggregationResults().get(0);
  LOGGER.info("BrokerResponse is " + aggregationResult);
  checkAggregationResult(aggregationResult, "count_star", 800004.0);
  LOGGER.info("Time used for BrokerResponse is " + brokerResponse.getTimeUsedMs());
}
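The expected count of 800004.0 is consistent with the reduce step summing the two identical instance responses. checkAggregationResult() is a helper defined elsewhere in BrokerReduceServiceTest; a hypothetical sketch of what it verifies (the accessor names follow BrokerResponseNative's AggregationResult, but the exact implementation is an assumption):

// Hypothetical sketch only; not the project's actual helper.
private void checkAggregationResult(AggregationResult aggregationResult, String expectedFunction, double expectedValue) {
  // Verify the aggregation function name and its numeric value.
  Assert.assertEquals(aggregationResult.getFunction(), expectedFunction);
  Assert.assertEquals(Double.parseDouble(aggregationResult.getValue().toString()), expectedValue);
}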