Use of com.linkedin.pinot.common.request.AggregationInfo in project pinot by LinkedIn.
Class BrokerRequestPreProcessor, method rewriteFastHllColumnName.
/**
* Rewrite 'fasthll' column name.
*
* @param indexSegments list of index segments.
* @param aggregationsInfo list of aggregation info.
*/
/**
 * Rewrites the column name of every 'fasthll' aggregation to the segment's HLL
 * derived column (when one exists), using the first segment's metadata as the
 * reference and verifying that all remaining segments agree with it.
 *
 * @param indexSegments list of index segments.
 * @param aggregationsInfo list of aggregation info.
 */
private static void rewriteFastHllColumnName(List<IndexSegment> indexSegments, List<AggregationInfo> aggregationsInfo) {
  for (AggregationInfo aggregationInfo : aggregationsInfo) {
    // Only 'fasthll' aggregations are subject to the rewrite.
    if (!aggregationInfo.getAggregationType().equalsIgnoreCase("fasthll")) {
      continue;
    }
    String column = aggregationInfo.getAggregationParams().get("column").trim();
    boolean seenFirstSegment = false;
    String referenceSegmentName = null;
    String referenceDerivedColumn = null;
    for (IndexSegment indexSegment : indexSegments) {
      SegmentMetadata segmentMetadata = indexSegment.getSegmentMetadata();
      String derivedColumn = segmentMetadata.getDerivedColumn(column, MetricFieldSpec.DerivedMetricType.HLL);
      if (!seenFirstSegment) {
        // First segment establishes the reference; perform the rewrite from it.
        seenFirstSegment = true;
        referenceSegmentName = segmentMetadata.getName();
        referenceDerivedColumn = derivedColumn;
        if (referenceDerivedColumn != null) {
          aggregationInfo.getAggregationParams().put("column", referenceDerivedColumn);
        }
      } else if (!Objects.equals(referenceDerivedColumn, derivedColumn)) {
        // Every other segment must resolve to the same derived column (or lack thereof).
        throw new RuntimeException("Found inconsistency HLL derived column name. In segment " + referenceSegmentName + ": " + referenceDerivedColumn + "; In segment " + segmentMetadata.getName() + ": " + derivedColumn);
      }
    }
  }
}
Use of com.linkedin.pinot.common.request.AggregationInfo in project pinot by LinkedIn.
Class DefaultAggregationExecutorTest, method testAggregation.
/**
* Runs 'sum', 'min' & 'max' aggregation functions on the DefaultAggregationExecutor.
* Asserts that the aggregation results returned by the executor are as expected.
*/
@Test
void testAggregation() {
  // Build the operator chain: match-all -> docId set -> projection -> transform.
  Map<String, BaseOperator> dataSources = new HashMap<>();
  for (String columnName : _indexSegment.getColumnNames()) {
    dataSources.put(columnName, _indexSegment.getDataSource(columnName));
  }
  int numRawDocs = _indexSegment.getSegmentMetadata().getTotalRawDocs();
  MatchEntireSegmentOperator matchAllOperator = new MatchEntireSegmentOperator(numRawDocs);
  BReusableFilteredDocIdSetOperator docIdOperator =
      new BReusableFilteredDocIdSetOperator(matchAllOperator, numRawDocs, 10000);
  MProjectionOperator projection = new MProjectionOperator(dataSources, docIdOperator);
  TransformExpressionOperator transform =
      new TransformExpressionOperator(projection, Collections.<TransformExpressionTree>emptyList());
  TransformBlock block = (TransformBlock) transform.nextBlock();

  // Instantiate and initialize one aggregation function context per aggregation.
  int numFunctions = _aggregationInfoList.size();
  AggregationFunctionContext[] functionContexts = new AggregationFunctionContext[numFunctions];
  AggregationFunctionInitializer initializer =
      new AggregationFunctionInitializer(_indexSegment.getSegmentMetadata());
  for (int index = 0; index < numFunctions; index++) {
    functionContexts[index] = AggregationFunctionContext.instantiate(_aggregationInfoList.get(index));
    functionContexts[index].getAggregationFunction().accept(initializer);
  }

  // Run the executor over the single transform block and collect results.
  AggregationExecutor executor = new DefaultAggregationExecutor(functionContexts);
  executor.init();
  executor.aggregate(block);
  executor.finish();

  // Each result must match the aggregation computed directly over the input data.
  List<Object> results = executor.getResult();
  for (int index = 0; index < results.size(); index++) {
    double actual = (double) results.get(index);
    double expected = computeAggregation(AGGREGATION_FUNCTIONS[index], _inputData[index]);
    Assert.assertEquals(actual, expected, "Aggregation mis-match for function " + AGGREGATION_FUNCTIONS[index] + ", Expected: " + expected + " Actual: " + actual);
  }
}
Use of com.linkedin.pinot.common.request.AggregationInfo in project pinot by LinkedIn.
Class DefaultAggregationExecutorTest, method init.
/**
* Initializations prior to the test:
* - Build a segment with metric columns (that will be aggregated) containing
* randomly generated data.
*
* @throws Exception
*/
/**
 * Initializations prior to the test:
 * - Build a segment with metric columns (that will be aggregated) containing
 *   randomly generated data.
 * - Build one AggregationInfo per aggregation function, targeting its column.
 *
 * @throws Exception
 */
@BeforeSuite
void init() throws Exception {
  _random = new Random(System.currentTimeMillis());
  _docIdSet = new int[NUM_ROWS];

  // One metric column per aggregation function under test.
  int numColumns = AGGREGATION_FUNCTIONS.length;
  _inputData = new double[numColumns][NUM_ROWS];
  _columns = new String[numColumns];
  setupSegment();

  _aggregationInfoList = new ArrayList<>();
  for (int index = 0; index < _columns.length; index++) {
    Map<String, String> params = new HashMap<>();
    params.put("column", _columns[index]);

    AggregationInfo aggregationInfo = new AggregationInfo();
    aggregationInfo.setAggregationType(AGGREGATION_FUNCTIONS[index]);
    aggregationInfo.setAggregationParams(params);
    _aggregationInfoList.add(aggregationInfo);
  }
}
Use of com.linkedin.pinot.common.request.AggregationInfo in project pinot by LinkedIn.
Class InstanceServerStarter, method getMaxQuery.
/** Builds a broker request with a single 'max' aggregation and the shared filter. */
private static BrokerRequest getMaxQuery() {
  List<AggregationInfo> aggregations = new ArrayList<>();
  aggregations.add(getMaxAggregationInfo());

  BrokerRequest request = new BrokerRequest();
  request.setAggregationsInfo(aggregations);
  request.setFilterQuery(getFilterQuery());
  return request;
}
Use of com.linkedin.pinot.common.request.AggregationInfo in project pinot by LinkedIn.
Class InstanceServerStarter, method getMinQuery.
/** Builds a broker request with a single 'min' aggregation and the shared filter. */
private static BrokerRequest getMinQuery() {
  List<AggregationInfo> aggregations = new ArrayList<>();
  aggregations.add(getMinAggregationInfo());

  BrokerRequest request = new BrokerRequest();
  request.setAggregationsInfo(aggregations);
  request.setFilterQuery(getFilterQuery());
  return request;
}
Aggregations