
Example 1 with TraceRunnable

Use of com.linkedin.pinot.core.util.trace.TraceRunnable in project pinot by linkedin, from the class MCombineGroupByOperator, method combineBlocks.

/**
   * This method combines the result blocks from underlying operators and builds a
   * merged, sorted and trimmed result block.
   * 1. Result blocks from underlying operators are merged concurrently into a
   *   HashMap, with appropriate synchronization. Result blocks themselves are stored
   *   in the specified blocks[].
   *   - The key in this concurrent map is the group-by key, and value is an array of
   *     Objects (one for each aggregation function).
   *   - Synchronization is provided by locking the group-key that is to be modified.
   *
   * 2. The result of the concurrent map is then translated into what is expected by
   *    the broker (List<Map<String, Object>>).
   *
   * 3. This result is then sorted and trimmed as per the 'TOP N' clause in the brokerRequest.
   *
   * @return IntermediateResultBlock containing the final results from combine operation.
   */
private IntermediateResultsBlock combineBlocks() throws InterruptedException {
    int numOperators = _operators.size();
    final CountDownLatch operatorLatch = new CountDownLatch(numOperators);
    final Map<String, Object[]> resultsMap = new ConcurrentHashMap<>();
    final ConcurrentLinkedQueue<ProcessingException> mergedProcessingExceptions = new ConcurrentLinkedQueue<>();
    List<AggregationInfo> aggregationInfos = _brokerRequest.getAggregationsInfo();
    final AggregationFunctionContext[] aggregationFunctionContexts = AggregationFunctionUtils.getAggregationFunctionContexts(aggregationInfos, null);
    final int numAggregationFunctions = aggregationFunctionContexts.length;
    for (int i = 0; i < numOperators; i++) {
        final int index = i;
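        // TraceRunnable keeps the request's trace context available on the pooled worker thread.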
        _executorService.execute(new TraceRunnable() {

            @SuppressWarnings("unchecked")
            @Override
            public void runJob() {
                AggregationGroupByResult aggregationGroupByResult;
                try {
                    IntermediateResultsBlock intermediateResultsBlock = (IntermediateResultsBlock) _operators.get(index).nextBlock();
                    // Merge processing exceptions.
                    List<ProcessingException> processingExceptionsToMerge = intermediateResultsBlock.getProcessingExceptions();
                    if (processingExceptionsToMerge != null) {
                        mergedProcessingExceptions.addAll(processingExceptionsToMerge);
                    }
                    // Merge aggregation group-by result.
                    aggregationGroupByResult = intermediateResultsBlock.getAggregationGroupByResult();
                    if (aggregationGroupByResult != null) {
                        // Iterate over the group-by keys, for each key, update the group-by result in the resultsMap.
                        Iterator<GroupKeyGenerator.GroupKey> groupKeyIterator = aggregationGroupByResult.getGroupKeyIterator();
                        while (groupKeyIterator.hasNext()) {
                            GroupKeyGenerator.GroupKey groupKey = groupKeyIterator.next();
                            String groupKeyString = groupKey.getStringKey();
                            // hashCode() may return a negative value; mask the sign bit to keep the index non-negative.
                            int lockIndex = (groupKeyString.hashCode() & Integer.MAX_VALUE) % NUM_LOCKS;
                            synchronized (LOCKS[lockIndex]) {
                                Object[] results = resultsMap.get(groupKeyString);
                                if (results == null) {
                                    results = new Object[numAggregationFunctions];
                                    for (int j = 0; j < numAggregationFunctions; j++) {
                                        results[j] = aggregationGroupByResult.getResultForKey(groupKey, j);
                                    }
                                    resultsMap.put(groupKeyString, results);
                                } else {
                                    for (int j = 0; j < numAggregationFunctions; j++) {
                                        results[j] = aggregationFunctionContexts[j].getAggregationFunction().merge(results[j], aggregationGroupByResult.getResultForKey(groupKey, j));
                                    }
                                }
                            }
                        }
                    }
                } catch (Exception e) {
                    LOGGER.error("Exception processing CombineGroupBy for index {}, operator {}", index, _operators.get(index).getClass().getName(), e);
                    mergedProcessingExceptions.add(QueryException.getException(QueryException.QUERY_EXECUTION_ERROR, e));
                }
                operatorLatch.countDown();
            }
        });
    }
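    // Block until every operator task has counted down, or give up at the query timeout.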
    boolean opCompleted = operatorLatch.await(_timeOutMs, TimeUnit.MILLISECONDS);
    if (!opCompleted) {
        // If this happens, the broker side should have already timed out; just log the error on the server side.
        LOGGER.error("Timed out while combining group-by results, after {}ms.", _timeOutMs);
        return new IntermediateResultsBlock(new TimeoutException("CombineGroupBy timed out."));
    }
    // Trim the results map.
    AggregationGroupByTrimmingService aggregationGroupByTrimmingService = new AggregationGroupByTrimmingService(aggregationFunctionContexts, (int) _brokerRequest.getGroupBy().getTopN());
    List<Map<String, Object>> trimmedResults = aggregationGroupByTrimmingService.trimIntermediateResultsMap(resultsMap);
    IntermediateResultsBlock mergedBlock = new IntermediateResultsBlock(aggregationFunctionContexts, trimmedResults, true);
    // Set the processing exceptions.
    if (!mergedProcessingExceptions.isEmpty()) {
        mergedBlock.setProcessingExceptions(new ArrayList<>(mergedProcessingExceptions));
    }
    // Set the execution statistics.
    ExecutionStatistics executionStatistics = new ExecutionStatistics();
    for (Operator operator : _operators) {
        ExecutionStatistics executionStatisticsToMerge = operator.getExecutionStatistics();
        if (executionStatisticsToMerge != null) {
            executionStatistics.merge(executionStatisticsToMerge);
        }
    }
    mergedBlock.setNumDocsScanned(executionStatistics.getNumDocsScanned());
    mergedBlock.setNumEntriesScannedInFilter(executionStatistics.getNumEntriesScannedInFilter());
    mergedBlock.setNumEntriesScannedPostFilter(executionStatistics.getNumEntriesScannedPostFilter());
    mergedBlock.setNumTotalRawDocs(executionStatistics.getNumTotalRawDocs());
    return mergedBlock;
}
Also used: Operator (com.linkedin.pinot.core.common.Operator), AggregationGroupByTrimmingService (com.linkedin.pinot.core.query.aggregation.groupby.AggregationGroupByTrimmingService), AggregationGroupByResult (com.linkedin.pinot.core.query.aggregation.groupby.AggregationGroupByResult), TraceRunnable (com.linkedin.pinot.core.util.trace.TraceRunnable), Iterator (java.util.Iterator), ArrayList (java.util.ArrayList), List (java.util.List), AggregationInfo (com.linkedin.pinot.common.request.AggregationInfo), IntermediateResultsBlock (com.linkedin.pinot.core.operator.blocks.IntermediateResultsBlock), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), AggregationFunctionContext (com.linkedin.pinot.core.query.aggregation.AggregationFunctionContext), GroupKeyGenerator (com.linkedin.pinot.core.query.aggregation.groupby.GroupKeyGenerator), ProcessingException (com.linkedin.pinot.common.response.ProcessingException), TimeoutException (java.util.concurrent.TimeoutException), CountDownLatch (java.util.concurrent.CountDownLatch), QueryException (com.linkedin.pinot.common.exception.QueryException), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), Map (java.util.Map)
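The heart of combineBlocks is step 1 of its javadoc: many worker threads folding partial group-by results into one shared map, with lock striping so two workers only contend when their keys hash to the same stripe. A minimal, self-contained sketch of that pattern follows; StripedMerger, NUM_LOCKS, and the long[] SUM-style merge are hypothetical stand-ins, not Pinot code.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Minimal sketch of the lock-striping pattern in combineBlocks. StripedMerger,
// NUM_LOCKS, and the long[] SUM-style merge are hypothetical stand-ins.
public class StripedMerger {
    private static final int NUM_LOCKS = 1000;
    private static final Object[] LOCKS = new Object[NUM_LOCKS];
    static {
        for (int i = 0; i < NUM_LOCKS; i++) {
            LOCKS[i] = new Object();
        }
    }

    private final Map<String, long[]> resultsMap = new ConcurrentHashMap<String, long[]>();

    /** Called concurrently by worker threads; only callers hashing to the same stripe contend. */
    public void merge(String groupKey, long[] partialResults) {
        // hashCode() may be negative; mask the sign bit before taking the modulus.
        int lockIndex = (groupKey.hashCode() & Integer.MAX_VALUE) % NUM_LOCKS;
        synchronized (LOCKS[lockIndex]) {
            long[] results = resultsMap.get(groupKey);
            if (results == null) {
                resultsMap.put(groupKey, partialResults.clone());
            } else {
                for (int i = 0; i < results.length; i++) {
                    results[i] += partialResults[i]; // e.g. a SUM aggregation's merge
                }
            }
        }
    }
}

On Java 8+ the same per-key atomicity is available via ConcurrentHashMap.compute, which locks only the affected bin; explicit stripes as above simply make the lock scope visible and independent of the map implementation.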

Example 2 with TraceRunnable

Use of com.linkedin.pinot.core.util.trace.TraceRunnable in project pinot by linkedin, from the class MCombineOperator, method getNextBlock.

@Override
public Block getNextBlock() {
    final long startTime = System.currentTimeMillis();
    final long queryEndTime = System.currentTimeMillis() + _timeOutMs;
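    // Size the pool: ceil(numOperators / MIN_SEGMENTS_PER_THREAD) groups, clamped to
    // the [MIN_THREADS_PER_QUERY, MAX_THREADS_PER_QUERY] range.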
    int numGroups = Math.max(MIN_THREADS_PER_QUERY, Math.min(MAX_THREADS_PER_QUERY, (_operators.size() + MIN_SEGMENTS_PER_THREAD - 1) / MIN_SEGMENTS_PER_THREAD));
    // Ensure the number of groups does not exceed the number of operators (one per segment).
    numGroups = Math.min(_operators.size(), numGroups);
    final List<List<Operator>> operatorGroups = new ArrayList<List<Operator>>(numGroups);
    for (int i = 0; i < numGroups; i++) {
        operatorGroups.add(new ArrayList<Operator>());
    }
    for (int i = 0; i < _operators.size(); i++) {
        operatorGroups.get(i % numGroups).add(_operators.get(i));
    }
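    // Each group offers exactly one block, so a queue with capacity operatorGroups.size() never rejects an offer.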
    final BlockingQueue<Block> blockingQueue = new ArrayBlockingQueue<>(operatorGroups.size());
    // Submit operators.
    for (final List<Operator> operatorGroup : operatorGroups) {
        _executorService.submit(new TraceRunnable() {

            @Override
            public void runJob() {
                IntermediateResultsBlock mergedBlock = null;
                try {
                    for (Operator operator : operatorGroup) {
                        IntermediateResultsBlock blockToMerge = (IntermediateResultsBlock) operator.nextBlock();
                        if (mergedBlock == null) {
                            mergedBlock = blockToMerge;
                        } else {
                            try {
                                CombineService.mergeTwoBlocks(_brokerRequest, mergedBlock, blockToMerge);
                            } catch (Exception e) {
                                LOGGER.error("Caught exception while merging two blocks (step 1).", e);
                                mergedBlock.addToProcessingExceptions(QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
                            }
                        }
                    }
                } catch (Exception e) {
                    LOGGER.error("Caught exception while executing query.", e);
                    mergedBlock = new IntermediateResultsBlock(e);
                }
                blockingQueue.offer(mergedBlock);
            }
        });
    }
    LOGGER.debug("Submitting operators to be run in parallel and it took:" + (System.currentTimeMillis() - startTime));
    // Submit merger job:
    Future<IntermediateResultsBlock> mergedBlockFuture = _executorService.submit(new TraceCallable<IntermediateResultsBlock>() {

        @Override
        public IntermediateResultsBlock callJob() throws Exception {
            int mergedBlocksNumber = 0;
            IntermediateResultsBlock mergedBlock = null;
            while ((queryEndTime > System.currentTimeMillis()) && (mergedBlocksNumber < operatorGroups.size())) {
                if (mergedBlock == null) {
                    mergedBlock = (IntermediateResultsBlock) blockingQueue.poll(queryEndTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
                    if (mergedBlock != null) {
                        mergedBlocksNumber++;
                    }
                    LOGGER.debug("Got response from operator 0 after: {}", (System.currentTimeMillis() - startTime));
                } else {
                    IntermediateResultsBlock blockToMerge = (IntermediateResultsBlock) blockingQueue.poll(queryEndTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
                    if (blockToMerge != null) {
                        try {
                            LOGGER.debug("Got response from operator {} after: {}", mergedBlocksNumber, (System.currentTimeMillis() - startTime));
                            CombineService.mergeTwoBlocks(_brokerRequest, mergedBlock, blockToMerge);
                            LOGGER.debug("Merged response from operator {} after: {}", mergedBlocksNumber, (System.currentTimeMillis() - startTime));
                        } catch (Exception e) {
                            LOGGER.error("Caught exception while merging two blocks (step 2).", e);
                            mergedBlock.addToProcessingExceptions(QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
                        }
                        mergedBlocksNumber++;
                    }
                }
            }
            return mergedBlock;
        }
    });
    // Get merge results.
    IntermediateResultsBlock mergedBlock;
    try {
        mergedBlock = mergedBlockFuture.get(queryEndTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        LOGGER.error("Caught InterruptedException.", e);
        mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.FUTURE_CALL_ERROR, e));
    } catch (ExecutionException e) {
        LOGGER.error("Caught ExecutionException.", e);
        mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
    } catch (TimeoutException e) {
        LOGGER.error("Caught TimeoutException", e);
        mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.EXECUTION_TIMEOUT_ERROR, e));
    }
    // Update execution statistics.
    ExecutionStatistics executionStatistics = new ExecutionStatistics();
    for (Operator operator : _operators) {
        ExecutionStatistics executionStatisticsToMerge = operator.getExecutionStatistics();
        if (executionStatisticsToMerge != null) {
            executionStatistics.merge(executionStatisticsToMerge);
        }
    }
    mergedBlock.setNumDocsScanned(executionStatistics.getNumDocsScanned());
    mergedBlock.setNumEntriesScannedInFilter(executionStatistics.getNumEntriesScannedInFilter());
    mergedBlock.setNumEntriesScannedPostFilter(executionStatistics.getNumEntriesScannedPostFilter());
    mergedBlock.setNumTotalRawDocs(executionStatistics.getNumTotalRawDocs());
    return mergedBlock;
}
Also used: Operator (com.linkedin.pinot.core.common.Operator), ArrayList (java.util.ArrayList), List (java.util.List), TimeoutException (java.util.concurrent.TimeoutException), QueryException (com.linkedin.pinot.common.exception.QueryException), ExecutionException (java.util.concurrent.ExecutionException), TraceRunnable (com.linkedin.pinot.core.util.trace.TraceRunnable), ArrayBlockingQueue (java.util.concurrent.ArrayBlockingQueue), Block (com.linkedin.pinot.core.common.Block), IntermediateResultsBlock (com.linkedin.pinot.core.operator.blocks.IntermediateResultsBlock)
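Stripped of Pinot specifics, getNextBlock is a scatter-gather with a hard deadline: worker groups each offer one locally merged block onto a bounded queue, and a merger thread drains the queue with poll timeouts computed from the remaining budget. The sketch below shows that shape in isolation; Result, mergeWith, and the empty-result fallback on error are hypothetical, and a commutative merge is assumed.

import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

// Minimal sketch of the scatter-gather pattern in getNextBlock. Result and
// mergeWith are hypothetical stand-ins for IntermediateResultsBlock and
// CombineService.mergeTwoBlocks; a commutative merge is assumed.
public class ScatterGather {

    static class Result {
        final long count;
        Result(long count) {
            this.count = count;
        }
        Result mergeWith(Result other) {
            return new Result(count + other.count);
        }
    }

    static Result run(ExecutorService executor, List<Callable<Result>> tasks, long timeOutMs)
            throws InterruptedException {
        final long deadline = System.currentTimeMillis() + timeOutMs;
        final BlockingQueue<Result> queue = new ArrayBlockingQueue<Result>(tasks.size());
        // Scatter: each task computes a partial result and deposits it on the queue.
        for (final Callable<Result> task : tasks) {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        queue.offer(task.call());
                    } catch (Exception e) {
                        queue.offer(new Result(0)); // degrade instead of starving the merger
                    }
                }
            });
        }
        // Gather: merge partials until all arrive or the deadline passes.
        Result merged = null;
        int received = 0;
        while (received < tasks.size() && System.currentTimeMillis() < deadline) {
            Result next = queue.poll(deadline - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
            if (next == null) {
                break; // timed out waiting for a straggler
            }
            merged = (merged == null) ? next : merged.mergeWith(next);
            received++;
        }
        return merged; // may be null if nothing arrived before the deadline
    }
}

Sizing the queue to tasks.size() mirrors the ArrayBlockingQueue above: every producer can deposit its result without blocking, even after the consumer has given up.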

Aggregations

QueryException (com.linkedin.pinot.common.exception.QueryException): 2
Operator (com.linkedin.pinot.core.common.Operator): 2
IntermediateResultsBlock (com.linkedin.pinot.core.operator.blocks.IntermediateResultsBlock): 2
TraceRunnable (com.linkedin.pinot.core.util.trace.TraceRunnable): 2
ArrayList (java.util.ArrayList): 2
List (java.util.List): 2
TimeoutException (java.util.concurrent.TimeoutException): 2
AggregationInfo (com.linkedin.pinot.common.request.AggregationInfo): 1
ProcessingException (com.linkedin.pinot.common.response.ProcessingException): 1
Block (com.linkedin.pinot.core.common.Block): 1
AggregationFunctionContext (com.linkedin.pinot.core.query.aggregation.AggregationFunctionContext): 1
AggregationGroupByResult (com.linkedin.pinot.core.query.aggregation.groupby.AggregationGroupByResult): 1
AggregationGroupByTrimmingService (com.linkedin.pinot.core.query.aggregation.groupby.AggregationGroupByTrimmingService): 1
GroupKeyGenerator (com.linkedin.pinot.core.query.aggregation.groupby.GroupKeyGenerator): 1
Iterator (java.util.Iterator): 1
Map (java.util.Map): 1
ArrayBlockingQueue (java.util.concurrent.ArrayBlockingQueue): 1
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 1
CountDownLatch (java.util.concurrent.CountDownLatch): 1