Use of com.linkedin.pinot.core.operator.blocks.TransformBlock in project pinot by linkedin, in the class AggregationOperator, method getNextBlock:
@Override
public Block getNextBlock() {
  int numDocsScanned = 0;

  // Perform aggregation on all the blocks.
  AggregationExecutor aggregationExecutor = new DefaultAggregationExecutor(_aggregationFunctionContexts);
  aggregationExecutor.init();
  TransformBlock transformBlock;
  while ((transformBlock = (TransformBlock) _transformOperator.nextBlock()) != null) {
    numDocsScanned += transformBlock.getNumDocs();
    aggregationExecutor.aggregate(transformBlock);
  }
  aggregationExecutor.finish();

  // Create execution statistics. Widen to long before multiplying so the entry count
  // cannot overflow int on large segments.
  long numEntriesScannedInFilter = _transformOperator.getExecutionStatistics().getNumEntriesScannedInFilter();
  long numEntriesScannedPostFilter = (long) numDocsScanned * _transformOperator.getNumProjectionColumns();
  _executionStatistics =
      new ExecutionStatistics(numDocsScanned, numEntriesScannedInFilter, numEntriesScannedPostFilter, _numTotalRawDocs);

  // Build the intermediate results block from the aggregation results of the executor.
  return new IntermediateResultsBlock(_aggregationFunctionContexts, aggregationExecutor.getResult(), false);
}
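This method also shows the full AggregationExecutor lifecycle: init() once, one aggregate() call per TransformBlock, then finish() and getResult(). A minimal sketch of how a caller might consume the operator's output; the operator construction is elided, and the result accessor on IntermediateResultsBlock is an assumption rather than something confirmed by this excerpt:

// Hypothetical caller; how aggregationOperator is planned and built is elided.
IntermediateResultsBlock resultsBlock = (IntermediateResultsBlock) aggregationOperator.getNextBlock();
ExecutionStatistics stats = aggregationOperator.getExecutionStatistics(); // counters populated above
List<Object> aggregationResults = resultsBlock.getAggregationResult();    // assumed accessor; one entry per function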
Use of com.linkedin.pinot.core.operator.blocks.TransformBlock in project pinot by linkedin, in the class NoDictionaryGroupKeyGeneratorTest, method testGroupKeyGenerator:
private void testGroupKeyGenerator(String[] groupByColumns, FieldSpec.DataType[] dataTypes) throws Exception {
  // Build the projection operator.
  MatchEntireSegmentOperator matchEntireSegmentOperator = new MatchEntireSegmentOperator(NUM_ROWS);
  BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(matchEntireSegmentOperator, NUM_ROWS, 10000);
  MProjectionOperator projectionOperator = new MProjectionOperator(_dataSourceMap, docIdSetOperator);
  TransformExpressionOperator transformOperator =
      new TransformExpressionOperator(projectionOperator, new ArrayList<TransformExpressionTree>());

  // Iterate over all transform blocks and generate group keys.
  TransformBlock transformBlock;
  int[] docIdToGroupKeys = new int[DocIdSetPlanNode.MAX_DOC_PER_CALL];
  GroupKeyGenerator groupKeyGenerator = null;
  while ((transformBlock = (TransformBlock) transformOperator.nextBlock()) != null) {
    if (groupKeyGenerator == null) {
      // Build the group key generator: the single-column and multi-column cases use different implementations.
      groupKeyGenerator = (groupByColumns.length == 1)
          ? new NoDictionarySingleColumnGroupKeyGenerator(groupByColumns[0], dataTypes[0])
          : new NoDictionaryMultiColumnGroupKeyGenerator(transformBlock, groupByColumns);
    }
    groupKeyGenerator.generateKeysForBlock(transformBlock, docIdToGroupKeys);
  }

  // Assert that the total number of group keys is as expected.
  Assert.assertNotNull(groupKeyGenerator);
  Set<String> expectedGroupKeys = getExpectedGroupKeys(_recordReader, groupByColumns);
  Assert.assertEquals(groupKeyGenerator.getCurrentGroupKeyUpperBound(), expectedGroupKeys.size(),
      "Number of group keys mismatch.");

  // Assert that all group key values are as expected.
  Iterator<GroupKeyGenerator.GroupKey> uniqueGroupKeys = groupKeyGenerator.getUniqueGroupKeys();
  while (uniqueGroupKeys.hasNext()) {
    GroupKeyGenerator.GroupKey groupKey = uniqueGroupKeys.next();
    String actual = groupKey.getStringKey();
    Assert.assertTrue(expectedGroupKeys.contains(actual), "Unexpected group key: " + actual);
  }
}
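The helper getExpectedGroupKeys is referenced above but not part of this excerpt. A plausible sketch, assuming it rescans the raw records and joins the group-by column values with a tab separator (both the separator and the exact RecordReader iteration calls are assumptions):

// Hypothetical reconstruction of the helper; the real implementation is not shown here.
private Set<String> getExpectedGroupKeys(RecordReader recordReader, String[] groupByColumns) throws Exception {
  Set<String> expectedGroupKeys = new HashSet<>();
  recordReader.rewind();
  while (recordReader.hasNext()) {
    GenericRow row = recordReader.next();
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < groupByColumns.length; i++) {
      if (i > 0) {
        builder.append('\t'); // assumed separator between column values
      }
      builder.append(row.getValue(groupByColumns[i]));
    }
    expectedGroupKeys.add(builder.toString());
  }
  return expectedGroupKeys;
}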
Use of com.linkedin.pinot.core.operator.blocks.TransformBlock in project pinot by linkedin, in the class DefaultGroupKeyGeneratorTest, method setup:
@BeforeClass
private void setup() throws Exception {
  GenericRow[] segmentData = new GenericRow[NUM_ROWS];
  int value = _random.nextInt(MAX_STEP_LENGTH);

  // Generate random values for the segment.
  for (int i = 0; i < UNIQUE_ROWS; i++) {
    Map<String, Object> map = new HashMap<>();
    for (String singleValueColumn : SINGLE_VALUE_COLUMNS) {
      map.put(singleValueColumn, value);
      value += 1 + _random.nextInt(MAX_STEP_LENGTH);
    }
    for (String multiValueColumn : MULTI_VALUE_COLUMNS) {
      int numMultiValues = 1 + _random.nextInt(MAX_NUM_MULTI_VALUES);
      Integer[] values = new Integer[numMultiValues];
      for (int k = 0; k < numMultiValues; k++) {
        values[k] = value;
        value += 1 + _random.nextInt(MAX_STEP_LENGTH);
      }
      map.put(multiValueColumn, values);
    }
    GenericRow genericRow = new GenericRow();
    genericRow.init(map);
    segmentData[i] = genericRow;
  }

  // Fill the rest of the segment by repeating the unique rows.
  for (int i = UNIQUE_ROWS; i < NUM_ROWS; i += UNIQUE_ROWS) {
    System.arraycopy(segmentData, 0, segmentData, i, UNIQUE_ROWS);
  }

  // Create an index segment with the random values.
  Schema schema = new Schema();
  for (String singleValueColumn : SINGLE_VALUE_COLUMNS) {
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(singleValueColumn, FieldSpec.DataType.INT, true);
    schema.addField(dimensionFieldSpec);
  }
  for (String multiValueColumn : MULTI_VALUE_COLUMNS) {
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(multiValueColumn, FieldSpec.DataType.INT, false);
    schema.addField(dimensionFieldSpec);
  }
  SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
  FileUtils.deleteQuietly(new File(INDEX_DIR_PATH));
  config.setOutDir(INDEX_DIR_PATH);
  config.setSegmentName(SEGMENT_NAME);
  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  driver.init(config, new TestDataRecordReader(schema, segmentData));
  driver.build();
  IndexSegment indexSegment = Loaders.IndexSegment.load(new File(INDEX_DIR_PATH, SEGMENT_NAME), ReadMode.heap);

  // Collect the data source and first value block for every column of the index segment.
  Map<String, BaseOperator> dataSourceMap = new HashMap<>();
  Map<String, Block> blockMap = new HashMap<>();
  for (String column : indexSegment.getColumnNames()) {
    DataSource dataSource = indexSegment.getDataSource(column);
    dataSourceMap.put(column, dataSource);
    blockMap.put(column, dataSource.getNextBlock());
  }

  // Generate a random test doc id set.
  int num1 = _random.nextInt(50);
  int num2 = num1 + 1 + _random.nextInt(50);
  for (int i = 0; i < 20; i += 2) {
    _testDocIdSet[i] = num1 + 50 * i;
    _testDocIdSet[i + 1] = num2 + 50 * i;
  }

  // Wrap the doc ids and column blocks into a TransformBlock for the tests to consume.
  DataFetcher dataFetcher = new DataFetcher(dataSourceMap);
  DocIdSetBlock docIdSetBlock = new DocIdSetBlock(_testDocIdSet, _testDocIdSet.length);
  ProjectionBlock projectionBlock = new ProjectionBlock(blockMap, new DataBlockCache(dataFetcher), docIdSetBlock);
  _transformBlock = new TransformBlock(projectionBlock, new HashMap<String, BlockValSet>());
}
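The doc id pattern built above is easier to see with concrete numbers. Since the loop index advances by 2 and the offset is 50 * i, consecutive pairs are spaced 100 doc ids apart: if _random happened to yield num1 = 3 and num2 = 40, _testDocIdSet would contain {3, 40, 103, 140, 203, 240, ..., 903, 940}, ten pairs of doc ids with one pair in every stretch of 100 consecutive documents.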
Use of com.linkedin.pinot.core.operator.blocks.TransformBlock in project pinot by linkedin, in the class TransformExpressionOperator, method getNextBlock:
@Override
public Block getNextBlock() {
  ProjectionBlock projectionBlock = _projectionOperator.getNextBlock();
  if (projectionBlock == null) {
    return null;
  }
  // Evaluate the transform expressions (if any) against the projection block; a null map
  // means the TransformBlock simply wraps the projected columns.
  Map<String, BlockValSet> expressionResults =
      (_expressionEvaluator != null) ? _expressionEvaluator.evaluate(projectionBlock) : null;
  return new TransformBlock(projectionBlock, expressionResults);
}
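Callers drive this operator with the same pull loop used in the test snippets above. A minimal sketch of that consumption pattern, where the projection operator construction is elided and processBlock is a hypothetical consumer:

// Pull transform blocks until the operator is exhausted.
TransformExpressionOperator transformOperator =
    new TransformExpressionOperator(projectionOperator, Collections.<TransformExpressionTree>emptyList());
TransformBlock transformBlock;
while ((transformBlock = (TransformBlock) transformOperator.nextBlock()) != null) {
  processBlock(transformBlock); // hypothetical consumer of projected columns and expression results
}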
Use of com.linkedin.pinot.core.operator.blocks.TransformBlock in project pinot by linkedin, in the class DefaultAggregationExecutorTest, method testAggregation:
/**
 * Runs the 'sum', 'min' and 'max' aggregation functions on the DefaultAggregationExecutor and
 * asserts that the aggregation results returned by the executor are as expected.
 */
@Test
public void testAggregation() {
  Map<String, BaseOperator> dataSourceMap = new HashMap<>();
  for (String column : _indexSegment.getColumnNames()) {
    dataSourceMap.put(column, _indexSegment.getDataSource(column));
  }

  // Build the operator chain: match-all filter -> doc id set -> projection -> transform.
  int totalRawDocs = _indexSegment.getSegmentMetadata().getTotalRawDocs();
  MatchEntireSegmentOperator matchEntireSegmentOperator = new MatchEntireSegmentOperator(totalRawDocs);
  BReusableFilteredDocIdSetOperator docIdSetOperator =
      new BReusableFilteredDocIdSetOperator(matchEntireSegmentOperator, totalRawDocs, 10000);
  MProjectionOperator projectionOperator = new MProjectionOperator(dataSourceMap, docIdSetOperator);
  TransformExpressionOperator transformOperator =
      new TransformExpressionOperator(projectionOperator, Collections.<TransformExpressionTree>emptyList());
  TransformBlock transformBlock = (TransformBlock) transformOperator.nextBlock();

  // Create and initialize one aggregation function context per aggregation in the query.
  int numAggFuncs = _aggregationInfoList.size();
  AggregationFunctionContext[] aggrFuncContextArray = new AggregationFunctionContext[numAggFuncs];
  AggregationFunctionInitializer aggFuncInitializer =
      new AggregationFunctionInitializer(_indexSegment.getSegmentMetadata());
  for (int i = 0; i < numAggFuncs; i++) {
    AggregationInfo aggregationInfo = _aggregationInfoList.get(i);
    aggrFuncContextArray[i] = AggregationFunctionContext.instantiate(aggregationInfo);
    aggrFuncContextArray[i].getAggregationFunction().accept(aggFuncInitializer);
  }

  // Run the executor lifecycle and compare against results computed directly on the raw input data.
  AggregationExecutor aggregationExecutor = new DefaultAggregationExecutor(aggrFuncContextArray);
  aggregationExecutor.init();
  aggregationExecutor.aggregate(transformBlock);
  aggregationExecutor.finish();
  List<Object> result = aggregationExecutor.getResult();
  for (int i = 0; i < result.size(); i++) {
    double actual = (double) result.get(i);
    double expected = computeAggregation(AGGREGATION_FUNCTIONS[i], _inputData[i]);
    Assert.assertEquals(actual, expected,
        "Aggregation mismatch for function " + AGGREGATION_FUNCTIONS[i] + ", Expected: " + expected + " Actual: " + actual);
  }
}
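The helper computeAggregation is referenced above but not part of this excerpt; the Javadoc names 'sum', 'min' and 'max' as the functions under test. A minimal sketch, assuming the per-column input data is a double[] and the function names are the lowercase strings "sum", "min" and "max" (both assumptions):

// Hypothetical reconstruction of the helper used in the assertions above.
private double computeAggregation(String functionName, double[] values) {
  switch (functionName) {
    case "sum": {
      double sum = 0.0;
      for (double value : values) {
        sum += value;
      }
      return sum;
    }
    case "min": {
      double min = Double.POSITIVE_INFINITY;
      for (double value : values) {
        min = Math.min(min, value);
      }
      return min;
    }
    case "max": {
      double max = Double.NEGATIVE_INFINITY;
      for (double value : values) {
        max = Math.max(max, value);
      }
      return max;
    }
    default:
      throw new IllegalArgumentException("Unsupported aggregation function: " + functionName);
  }
}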