Example 1 with GroupByQuery

Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io, from the class StringColumnAggregationTest, method testGroupBy:

@Test
public void testGroupBy() {
    GroupByQuery query = new GroupByQuery.Builder()
        .setDataSource("test")
        .setGranularity(Granularities.ALL)
        .setInterval("1970/2050")
        .setAggregatorSpecs(
            new DoubleSumAggregatorFactory("singleDoubleSum", singleValue),
            new DoubleSumAggregatorFactory("multiDoubleSum", multiValue),
            new DoubleMaxAggregatorFactory("singleDoubleMax", singleValue),
            new DoubleMaxAggregatorFactory("multiDoubleMax", multiValue),
            new DoubleMinAggregatorFactory("singleDoubleMin", singleValue),
            new DoubleMinAggregatorFactory("multiDoubleMin", multiValue),
            new FloatSumAggregatorFactory("singleFloatSum", singleValue),
            new FloatSumAggregatorFactory("multiFloatSum", multiValue),
            new FloatMaxAggregatorFactory("singleFloatMax", singleValue),
            new FloatMaxAggregatorFactory("multiFloatMax", multiValue),
            new FloatMinAggregatorFactory("singleFloatMin", singleValue),
            new FloatMinAggregatorFactory("multiFloatMin", multiValue),
            new LongSumAggregatorFactory("singleLongSum", singleValue),
            new LongSumAggregatorFactory("multiLongSum", multiValue),
            new LongMaxAggregatorFactory("singleLongMax", singleValue),
            new LongMaxAggregatorFactory("multiLongMax", multiValue),
            new LongMinAggregatorFactory("singleLongMin", singleValue),
            new LongMinAggregatorFactory("multiLongMin", multiValue),
            new LongSumAggregatorFactory("count", "count"))
        .build();
    Sequence<ResultRow> seq = aggregationTestHelper.runQueryOnSegmentsObjs(segments, query);
    Row result = Iterables.getOnlyElement(seq.toList()).toMapBasedRow(query);
    Assert.assertEquals(numRows, result.getMetric("count").longValue());
    Assert.assertEquals(singleValueSum, result.getMetric("singleDoubleSum").doubleValue(), 0.0001d);
    Assert.assertEquals(multiValueSum, result.getMetric("multiDoubleSum").doubleValue(), 0.0001d);
    Assert.assertEquals(singleValueMax, result.getMetric("singleDoubleMax").doubleValue(), 0.0001d);
    Assert.assertEquals(multiValueMax, result.getMetric("multiDoubleMax").doubleValue(), 0.0001d);
    Assert.assertEquals(singleValueMin, result.getMetric("singleDoubleMin").doubleValue(), 0.0001d);
    Assert.assertEquals(multiValueMin, result.getMetric("multiDoubleMin").doubleValue(), 0.0001d);
    Assert.assertEquals(singleValueSum, result.getMetric("singleFloatSum").floatValue(), 0.0001f);
    Assert.assertEquals(multiValueSum, result.getMetric("multiFloatSum").floatValue(), 0.0001f);
    Assert.assertEquals(singleValueMax, result.getMetric("singleFloatMax").floatValue(), 0.0001f);
    Assert.assertEquals(multiValueMax, result.getMetric("multiFloatMax").floatValue(), 0.0001f);
    Assert.assertEquals(singleValueMin, result.getMetric("singleFloatMin").floatValue(), 0.0001f);
    Assert.assertEquals(multiValueMin, result.getMetric("multiFloatMin").floatValue(), 0.0001f);
    Assert.assertEquals((long) singleValueSum, result.getMetric("singleLongSum").longValue());
    Assert.assertEquals((long) multiValueSum, result.getMetric("multiLongSum").longValue());
    Assert.assertEquals((long) singleValueMax, result.getMetric("singleLongMax").longValue());
    Assert.assertEquals((long) multiValueMax, result.getMetric("multiLongMax").longValue());
    Assert.assertEquals((long) singleValueMin, result.getMetric("singleLongMin").longValue());
    Assert.assertEquals((long) multiValueMin, result.getMetric("multiLongMin").longValue());
}
Also used: ResultRow(org.apache.druid.query.groupby.ResultRow) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) Row(org.apache.druid.data.input.Row) InputRow(org.apache.druid.data.input.InputRow) Test(org.junit.Test)
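
The test above exercises every numeric aggregator at once, which obscures the core builder pattern. Below is a minimal sketch of the same GroupByQuery.Builder usage in isolation; the class name and the dimension column "dim" are hypothetical, while the datasource name and interval string follow the test above.

import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;

public class GroupByQuerySketch {
    // Builds the simplest useful groupBy: one dimension, one count aggregator.
    static GroupByQuery simpleCountByDim() {
        return GroupByQuery.builder()
            .setDataSource("test")                                    // datasource to scan
            .setInterval("1970/2050")                                 // time range as an ISO-8601 interval string
            .setGranularity(Granularities.ALL)                        // one time bucket spanning the whole interval
            .setDimensions(new DefaultDimensionSpec("dim", "dim"))    // group on column "dim" (hypothetical)
            .setAggregatorSpecs(new CountAggregatorFactory("count"))  // count rows per group
            .build();
    }
}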

Example 2 with GroupByQuery

Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io, from the class VectorGroupByEngineIteratorTest, method testCreateOneGrouperAndCloseItWhenClose:

@Test
public void testCreateOneGrouperAndCloseItWhenClose() throws IOException {
    final Interval interval = TestIndex.DATA_INTERVAL;
    final AggregatorFactory factory = new DoubleSumAggregatorFactory("index", "index");
    final GroupByQuery query = GroupByQuery.builder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
        .setInterval(interval)
        .setDimensions(new DefaultDimensionSpec("market", null, null))
        .setAggregatorSpecs(factory)
        .build();
    final StorageAdapter storageAdapter = new QueryableIndexStorageAdapter(TestIndex.getMMappedTestIndex());
    final ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[4096]);
    final VectorCursor cursor = storageAdapter.makeVectorCursor(
        Filters.toFilter(query.getDimFilter()),
        interval,
        query.getVirtualColumns(),
        false,
        QueryContexts.getVectorSize(query),
        null);
    final List<GroupByVectorColumnSelector> dimensions = query
        .getDimensions()
        .stream()
        .map(dimensionSpec -> ColumnProcessors.makeVectorProcessor(
            dimensionSpec,
            GroupByVectorColumnProcessorFactory.instance(),
            cursor.getColumnSelectorFactory()))
        .collect(Collectors.toList());
    final MutableObject<VectorGrouper> grouperCaptor = new MutableObject<>();
    final VectorGroupByEngineIterator iterator = new VectorGroupByEngineIterator(
        query,
        new GroupByQueryConfig(),
        storageAdapter,
        cursor,
        interval,
        dimensions,
        byteBuffer,
        null) {

        @Override
        VectorGrouper makeGrouper() {
            grouperCaptor.setValue(Mockito.spy(super.makeGrouper()));
            return grouperCaptor.getValue();
        }
    };
    iterator.close();
    Mockito.verify(grouperCaptor.getValue()).close();
}
Also used: VectorGroupByEngineIterator(org.apache.druid.query.groupby.epinephelinae.vector.VectorGroupByEngine.VectorGroupByEngineIterator) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) QueryableIndexStorageAdapter(org.apache.druid.segment.QueryableIndexStorageAdapter) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IOException(java.io.IOException) StorageAdapter(org.apache.druid.segment.StorageAdapter) Collectors(java.util.stream.Collectors) QueryContexts(org.apache.druid.query.QueryContexts) ColumnProcessors(org.apache.druid.segment.ColumnProcessors) ByteBuffer(java.nio.ByteBuffer) TestIndex(org.apache.druid.segment.TestIndex) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) VectorCursor(org.apache.druid.segment.vector.VectorCursor) Mockito(org.mockito.Mockito) Interval(org.joda.time.Interval) List(java.util.List) QueryRunnerTestHelper(org.apache.druid.query.QueryRunnerTestHelper) VectorGrouper(org.apache.druid.query.groupby.epinephelinae.VectorGrouper) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) MutableObject(org.apache.commons.lang3.mutable.MutableObject) Filters(org.apache.druid.segment.filter.Filters)
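
The interesting move in this test is the overridden factory method: an anonymous subclass wraps whatever super.makeGrouper() returns in a Mockito spy, so the test can later verify that closing the iterator closed the grouper. Below is a stripped-down, Druid-agnostic sketch of that idiom; ResourceHolder and Resource are hypothetical stand-ins for VectorGroupByEngineIterator and VectorGrouper.

import org.apache.commons.lang3.mutable.MutableObject;
import org.mockito.Mockito;

public class SpyOnFactoryMethodSketch {

    static class Resource implements AutoCloseable {
        @Override
        public void close() { /* release buffers, etc. */ }
    }

    static class ResourceHolder implements AutoCloseable {
        Resource makeResource() {       // the factory method the test overrides
            return new Resource();
        }

        @Override
        public void close() {
            makeResource().close();     // simplified; the real iterator caches its grouper
        }
    }

    public static void main(String[] args) {
        final MutableObject<Resource> captor = new MutableObject<>();
        ResourceHolder holder = new ResourceHolder() {
            @Override
            Resource makeResource() {
                captor.setValue(Mockito.spy(super.makeResource()));  // keep a handle to the spy
                return captor.getValue();
            }
        };
        holder.close();
        Mockito.verify(captor.getValue()).close();                   // proves cleanup reached the resource
    }
}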

Example 3 with GroupByQuery

Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io, from the class CachingClusteredClientTest, method testQueryCaching:

@SuppressWarnings("unchecked")
public void testQueryCaching(
        final QueryRunner runner,
        final int numTimesToQuery,
        boolean expectBySegment,
        final Query query,
        // does this assume query intervals must be ordered?
        Object... args) {
    final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
    final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
    parseResults(queryIntervals, expectedResults, args);
    for (int i = 0; i < queryIntervals.size(); ++i) {
        List<Object> mocks = new ArrayList<>();
        mocks.add(serverView);
        final Interval actualQueryInterval = new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
        final List<Map<DruidServer, ServerExpectations>> serverExpectationList = populateTimeline(queryIntervals, expectedResults, i, mocks);
        List<Capture> queryCaptures = new ArrayList<>();
        final Map<DruidServer, ServerExpectations> finalExpectation = serverExpectationList.get(serverExpectationList.size() - 1);
        for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
            DruidServer server = entry.getKey();
            ServerExpectations expectations = entry.getValue();
            EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).once();
            final Capture<? extends QueryPlus> capture = Capture.newInstance();
            final Capture<? extends ResponseContext> context = Capture.newInstance();
            queryCaptures.add(capture);
            QueryRunner queryable = expectations.getQueryRunner();
            if (query instanceof TimeseriesQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<TimeseriesResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                        .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results))
                        .once();
            } else if (query instanceof TopNQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<TopNResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                        .andReturn(toQueryableTopNResults(segmentIds, intervals, results))
                        .once();
            } else if (query instanceof SearchQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<SearchResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                        .andReturn(toQueryableSearchResults(segmentIds, intervals, results))
                        .once();
            } else if (query instanceof GroupByQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<ResultRow>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                        .andReturn(toQueryableGroupByResults((GroupByQuery) query, segmentIds, intervals, results))
                        .once();
            } else if (query instanceof TimeBoundaryQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<TimeBoundaryResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                        .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results))
                        .once();
            } else {
                throw new ISE("Unknown query type[%s]", query.getClass());
            }
        }
        final int expectedResultsRangeStart;
        final int expectedResultsRangeEnd;
        if (query instanceof TimeBoundaryQuery) {
            expectedResultsRangeStart = i;
            expectedResultsRangeEnd = i + 1;
        } else {
            expectedResultsRangeStart = 0;
            expectedResultsRangeEnd = i + 1;
        }
        runWithMocks(new Runnable() {

            @Override
            public void run() {
                for (int i = 0; i < numTimesToQuery; ++i) {
                    TestHelper.assertExpectedResults(
                        new MergeIterable(
                            query instanceof GroupByQuery
                                ? ((GroupByQuery) query).getResultOrdering()
                                : Comparators.naturalNullsFirst(),
                            FunctionalIterable
                                .create(new RangeIterable(expectedResultsRangeStart, expectedResultsRangeEnd))
                                .transformCat(new Function<Integer, Iterable<Iterable<Result<Object>>>>() {

                        @Override
                        public Iterable<Iterable<Result<Object>>> apply(@Nullable Integer input) {
                            List<Iterable<Result<Object>>> retVal = new ArrayList<>();
                            final Map<DruidServer, ServerExpectations> exps = serverExpectationList.get(input);
                            for (ServerExpectations expectations : exps.values()) {
                                for (ServerExpectation expectation : expectations) {
                                    retVal.add(expectation.getResults());
                                }
                            }
                            return retVal;
                        }
                                })),
                        runner.run(
                            QueryPlus.wrap(query.withQuerySegmentSpec(
                                new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval)))),
                            initializeResponseContext()));
                    if (queryCompletedCallback != null) {
                        queryCompletedCallback.run();
                    }
                }
            }
        }, mocks.toArray());
        // verify that every query was sent down with the expected 'bySegment' context flag
        for (Capture queryCapture : queryCaptures) {
            QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue();
            Query capturedQuery = capturedQueryPlus.getQuery();
            if (expectBySegment) {
                Assert.assertEquals(true, capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY));
            } else {
                Assert.assertTrue(
                    capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY) == null
                        || capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY).equals(false));
            }
        }
    }
}
Also used: TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) TopNQuery(org.apache.druid.query.topn.TopNQuery) Query(org.apache.druid.query.Query) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) SearchQuery(org.apache.druid.query.search.SearchQuery) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) ArrayList(java.util.ArrayList) Capture(org.easymock.Capture) Result(org.apache.druid.query.Result) MergeIterable(org.apache.druid.java.util.common.guava.MergeIterable) TimeBoundaryResultValue(org.apache.druid.query.timeboundary.TimeBoundaryResultValue) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) ResultRow(org.apache.druid.query.groupby.ResultRow) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) Map(java.util.Map) TreeMap(java.util.TreeMap) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) Nullable(javax.annotation.Nullable) Interval(org.joda.time.Interval) FunctionalIterable(org.apache.druid.java.util.common.guava.FunctionalIterable) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) SearchResultValue(org.apache.druid.query.search.SearchResultValue) HashPartitionFunction(org.apache.druid.timeline.partition.HashPartitionFunction) Function(com.google.common.base.Function) HashFunction(com.google.common.hash.HashFunction) ISE(org.apache.druid.java.util.common.ISE) QueryPlus(org.apache.druid.query.QueryPlus) SegmentId(org.apache.druid.timeline.SegmentId) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner)
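
The mock wiring above repeats one EasyMock idiom per query type: expect a run() call, capture its QueryPlus argument, and assert on the captured value afterwards. Below is a condensed sketch of just that capture pattern, using raw types as the test does; the helper class, method name, and the empty return sequence are illustrative.

import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.context.ResponseContext;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Assert;

public class CapturePatternSketch {
    // Verifies that running a query through a mocked QueryRunner passes the query along unchanged.
    @SuppressWarnings("unchecked")
    static void assertQueryReachesRunner(Query query, ResponseContext responseContext) {
        QueryRunner queryable = EasyMock.createMock(QueryRunner.class);
        Capture<QueryPlus> capture = Capture.newInstance();
        EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.anyObject()))
                .andReturn(Sequences.empty())                         // no results needed for this check
                .once();
        EasyMock.replay(queryable);

        queryable.run(QueryPlus.wrap(query), responseContext);        // exercise the mock once
        EasyMock.verify(queryable);
        Assert.assertSame(query, capture.getValue().getQuery());      // inspect what was actually sent
    }
}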

Example 4 with GroupByQuery

Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io, from the class DruidQuery, method computeQuery:

/**
 * Return this query as some kind of Druid query. The returned query will be one of {@link TopNQuery},
 * {@link TimeseriesQuery}, {@link GroupByQuery}, or {@link ScanQuery}.
 *
 * @return Druid query
 */
private Query computeQuery(final QueryFeatureInspector queryFeatureInspector) {
    if (dataSource instanceof QueryDataSource) {
        // If there is a subquery, then we prefer the outer query to be a groupBy if possible, since this potentially
        // enables more efficient execution. (The groupBy query toolchest can handle some subqueries by itself, without
        // requiring the Broker to inline results.)
        final GroupByQuery outerQuery = toGroupByQuery(queryFeatureInspector);
        if (outerQuery != null) {
            return outerQuery;
        }
    }
    final TimeseriesQuery tsQuery = toTimeseriesQuery(queryFeatureInspector);
    if (tsQuery != null) {
        return tsQuery;
    }
    final TopNQuery topNQuery = toTopNQuery(queryFeatureInspector);
    if (topNQuery != null) {
        return topNQuery;
    }
    final GroupByQuery groupByQuery = toGroupByQuery(queryFeatureInspector);
    if (groupByQuery != null) {
        return groupByQuery;
    }
    final ScanQuery scanQuery = toScanQuery(queryFeatureInspector);
    if (scanQuery != null) {
        return scanQuery;
    }
    throw new CannotBuildQueryException("Cannot convert query parts into an actual query");
}
Also used: GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) QueryDataSource(org.apache.druid.query.QueryDataSource) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) TopNQuery(org.apache.druid.query.topn.TopNQuery) ScanQuery(org.apache.druid.query.scan.ScanQuery)
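
computeQuery is a plain fallback chain: each to*Query translator returns null when it cannot represent the query, and the first non-null result wins. The same shape, reduced to a generic sketch with illustrative names:

import java.util.List;
import java.util.function.Function;

public class FallbackChainSketch {
    // Tries each translator in priority order; the first non-null result wins.
    static <I, O> O translateFirst(I input, List<Function<I, O>> translators) {
        for (Function<I, O> translator : translators) {
            O result = translator.apply(input);   // null means "cannot handle this input"
            if (result != null) {
                return result;
            }
        }
        throw new IllegalStateException("no translator could handle the input");
    }
}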

Example 5 with GroupByQuery

Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io, from the class DruidQuery, method toGroupByQuery:

/**
 * Return this query as a GroupBy query, or null if this query is not compatible with GroupBy.
 *
 * @return query or null
 */
@Nullable
private GroupByQuery toGroupByQuery(final QueryFeatureInspector queryFeatureInspector) {
    if (grouping == null) {
        return null;
    }
    if (sorting != null && sorting.getOffsetLimit().hasLimit() && sorting.getOffsetLimit().getLimit() <= 0) {
        // Cannot handle zero or negative limits.
        return null;
    }
    final Pair<DataSource, Filtration> dataSourceFiltrationPair = getFiltration(dataSource, filter, virtualColumnRegistry);
    final DataSource newDataSource = dataSourceFiltrationPair.lhs;
    final Filtration filtration = dataSourceFiltrationPair.rhs;
    final DimFilterHavingSpec havingSpec;
    if (grouping.getHavingFilter() != null) {
        havingSpec = new DimFilterHavingSpec(
            Filtration.create(grouping.getHavingFilter())
                .optimizeFilterOnly(grouping.getOutputRowSignature())
                .getDimFilter(),
            true);
    } else {
        havingSpec = null;
    }
    final List<PostAggregator> postAggregators = new ArrayList<>(grouping.getPostAggregators());
    if (sorting != null && sorting.getProjection() != null) {
        postAggregators.addAll(sorting.getProjection().getPostAggregators());
    }
    GroupByQuery query = new GroupByQuery(
        newDataSource,
        filtration.getQuerySegmentSpec(),
        getVirtualColumns(true),
        filtration.getDimFilter(),
        Granularities.ALL,
        grouping.getDimensionSpecs(),
        grouping.getAggregatorFactories(),
        postAggregators,
        havingSpec,
        Optional.ofNullable(sorting).orElse(Sorting.none()).limitSpec(),
        grouping.getSubtotals().toSubtotalsSpec(grouping.getDimensionSpecs()),
        ImmutableSortedMap.copyOf(plannerContext.getQueryContext()));
    // We don't apply timestamp computation optimization yet when limit is pushed down. Maybe someday.
    if (query.getLimitSpec() instanceof DefaultLimitSpec && query.isApplyLimitPushDown()) {
        return query;
    }
    Map<String, Object> theContext = new HashMap<>();
    Granularity queryGranularity = null;
    // Note: part of the query planning logic here is handled in GroupByStrategyV2.
    if (!grouping.getDimensions().isEmpty()) {
        for (DimensionExpression dimensionExpression : grouping.getDimensions()) {
            Granularity granularity = Expressions.toQueryGranularity(dimensionExpression.getDruidExpression(), plannerContext.getExprMacroTable());
            if (granularity == null) {
                continue;
            }
            if (queryGranularity != null) {
                // group by more than one timestamp_floor
                // e.g.: group by timestamp_floor(__time to DAY), timestamp_floor(__time to HOUR)
                queryGranularity = null;
                break;
            }
            queryGranularity = granularity;
            int timestampDimensionIndexInDimensions = grouping.getDimensions().indexOf(dimensionExpression);
            // these settings will only affect the innermost query sent to the downstream compute nodes
            theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, dimensionExpression.getOutputName());
            theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_INDEX, timestampDimensionIndexInDimensions);
            theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_GRANULARITY, queryGranularity);
        }
    }
    if (queryGranularity == null) {
        return query;
    }
    return query.withOverriddenContext(theContext);
}
Also used: DimFilterHavingSpec(org.apache.druid.query.groupby.having.DimFilterHavingSpec) Filtration(org.apache.druid.sql.calcite.filtration.Filtration) PostAggregator(org.apache.druid.query.aggregation.PostAggregator) DefaultLimitSpec(org.apache.druid.query.groupby.orderby.DefaultLimitSpec) HashMap(java.util.HashMap) IntArrayList(it.unimi.dsi.fastutil.ints.IntArrayList) ArrayList(java.util.ArrayList) DimensionExpression(org.apache.druid.sql.calcite.aggregation.DimensionExpression) Granularity(org.apache.druid.java.util.common.granularity.Granularity) DataSource(org.apache.druid.query.DataSource) QueryDataSource(org.apache.druid.query.QueryDataSource) JoinDataSource(org.apache.druid.query.JoinDataSource) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) Nullable(javax.annotation.Nullable)
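
Note that the final step rebuilds the query rather than mutating it: withOverriddenContext returns a copy of the GroupByQuery with the extra context keys merged in. A minimal sketch of that call; the wrapper class and the outputName parameter are hypothetical.

import com.google.common.collect.ImmutableMap;
import org.apache.druid.query.groupby.GroupByQuery;

public class ContextOverrideSketch {
    // Returns a copy of the query carrying the timestamp-result hint; the input query is untouched.
    static GroupByQuery withTimestampHint(GroupByQuery query, String outputName) {
        return query.withOverriddenContext(
            ImmutableMap.of(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, outputName));
    }
}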

Aggregations

GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 95 usages
Test (org.junit.Test): 68 usages
ResultRow (org.apache.druid.query.groupby.ResultRow): 57 usages
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 49 usages
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 37 usages
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 37 usages
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 37 usages
List (java.util.List): 21 usages
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 21 usages
LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec): 20 usages
QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment): 20 usages
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 17 usages
ArrayList (java.util.ArrayList): 16 usages
ExpressionVirtualColumn (org.apache.druid.segment.virtual.ExpressionVirtualColumn): 15 usages
Collectors (java.util.stream.Collectors): 13 usages
QueryDataSource (org.apache.druid.query.QueryDataSource): 13 usages
GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig): 13 usages
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 12 usages
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 12 usages
MapBasedRow (org.apache.druid.data.input.MapBasedRow): 11 usages