Example 6 with ResultRow

use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.

the class ArrayDoubleGroupByColumnSelectorStrategyTest method testAddingInDictionary.

@Test
public void testAddingInDictionary() {
    // `strategy` and `buffer1` are fields of the enclosing test class (not shown in this excerpt).
    ColumnValueSelector columnValueSelector = Mockito.mock(ColumnValueSelector.class);
    Mockito.when(columnValueSelector.getObject()).thenReturn(ImmutableList.of(4.0, 2.0));
    // getOnlyValue interns the array value in the strategy's dictionary and returns its id (3 here).
    Assert.assertEquals(3, strategy.getOnlyValue(columnValueSelector));
    GroupByColumnSelectorPlus groupByColumnSelectorPlus = Mockito.mock(GroupByColumnSelectorPlus.class);
    Mockito.when(groupByColumnSelectorPlus.getResultRowPosition()).thenReturn(0);
    ResultRow row = ResultRow.create(1);
    // Write the dictionary id into the grouping-key buffer, then decode it back into the result row.
    buffer1.putInt(3);
    strategy.processValueFromGroupingKey(groupByColumnSelectorPlus, buffer1, row, 0);
    Assert.assertEquals(new ComparableList(ImmutableList.of(4.0, 2.0)), row.get(0));
}
Also used : ResultRow(org.apache.druid.query.groupby.ResultRow) ComparableList(org.apache.druid.segment.data.ComparableList) ColumnValueSelector(org.apache.druid.segment.ColumnValueSelector) Test(org.junit.Test)
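
ResultRow stores values positionally: each value is addressed by an integer offset derived from the query's result row signature, which is why the test writes and reads position 0. Below is a minimal stand-in class, not the Druid implementation, that mirrors the create/set/get contract the test exercises:

import java.util.Arrays;

// Minimal stand-in mirroring ResultRow's positional create/set/get contract.
// The real class is org.apache.druid.query.groupby.ResultRow; this sketch only
// illustrates the access pattern used in the test above.
final class PositionalRowSketch {
    private final Object[] row;

    private PositionalRowSketch(int size) {
        this.row = new Object[size];
    }

    static PositionalRowSketch create(int size) {
        return new PositionalRowSketch(size);
    }

    void set(int position, Object value) {
        row[position] = value;
    }

    Object get(int position) {
        return row[position];
    }

    public static void main(String[] args) {
        PositionalRowSketch row = PositionalRowSketch.create(1);
        row.set(0, Arrays.asList(4.0, 2.0));
        System.out.println(row.get(0)); // [4.0, 2.0]
    }
}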

Example 7 with ResultRow

use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.

the class CachingClusteredClientTest method testQueryCaching.

@SuppressWarnings("unchecked")
public void testQueryCaching(
    final QueryRunner runner,
    final int numTimesToQuery,
    boolean expectBySegment,
    final Query query,
    // does this assume query intervals must be ordered?
    Object... args
) {
    final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
    final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
    parseResults(queryIntervals, expectedResults, args);
    for (int i = 0; i < queryIntervals.size(); ++i) {
        List<Object> mocks = new ArrayList<>();
        mocks.add(serverView);
        // Each iteration widens the query interval to span from the start of the first
        // configured interval to the end of the i-th one.
        final Interval actualQueryInterval = new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
        final List<Map<DruidServer, ServerExpectations>> serverExpectationList = populateTimeline(queryIntervals, expectedResults, i, mocks);
        List<Capture> queryCaptures = new ArrayList<>();
        final Map<DruidServer, ServerExpectations> finalExpectation = serverExpectationList.get(serverExpectationList.size() - 1);
        for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
            DruidServer server = entry.getKey();
            ServerExpectations expectations = entry.getValue();
            EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).once();
            final Capture<? extends QueryPlus> capture = Capture.newInstance();
            final Capture<? extends ResponseContext> context = Capture.newInstance();
            queryCaptures.add(capture);
            QueryRunner queryable = expectations.getQueryRunner();
            if (query instanceof TimeseriesQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<TimeseriesResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)).once();
            } else if (query instanceof TopNQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<TopNResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTopNResults(segmentIds, intervals, results)).once();
            } else if (query instanceof SearchQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<SearchResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableSearchResults(segmentIds, intervals, results)).once();
            } else if (query instanceof GroupByQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<ResultRow>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableGroupByResults((GroupByQuery) query, segmentIds, intervals, results)).once();
            } else if (query instanceof TimeBoundaryQuery) {
                List<SegmentId> segmentIds = new ArrayList<>();
                List<Interval> intervals = new ArrayList<>();
                List<Iterable<Result<TimeBoundaryResultValue>>> results = new ArrayList<>();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)).once();
            } else {
                throw new ISE("Unknown query type[%s]", query.getClass());
            }
        }
        // For TimeBoundaryQuery only the i-th interval's expected results are asserted;
        // all other query types assert results accumulated from intervals 0..i.
        final int expectedResultsRangeStart;
        final int expectedResultsRangeEnd;
        if (query instanceof TimeBoundaryQuery) {
            expectedResultsRangeStart = i;
            expectedResultsRangeEnd = i + 1;
        } else {
            expectedResultsRangeStart = 0;
            expectedResultsRangeEnd = i + 1;
        }
        runWithMocks(new Runnable() {

            @Override
            public void run() {
                for (int i = 0; i < numTimesToQuery; ++i) {
                    TestHelper.assertExpectedResults(new MergeIterable(query instanceof GroupByQuery ? ((GroupByQuery) query).getResultOrdering() : Comparators.naturalNullsFirst(), FunctionalIterable.create(new RangeIterable(expectedResultsRangeStart, expectedResultsRangeEnd)).transformCat(new Function<Integer, Iterable<Iterable<Result<Object>>>>() {

                        @Override
                        public Iterable<Iterable<Result<Object>>> apply(@Nullable Integer input) {
                            List<Iterable<Result<Object>>> retVal = new ArrayList<>();
                            final Map<DruidServer, ServerExpectations> exps = serverExpectationList.get(input);
                            for (ServerExpectations expectations : exps.values()) {
                                for (ServerExpectation expectation : expectations) {
                                    retVal.add(expectation.getResults());
                                }
                            }
                            return retVal;
                        }
                    })), runner.run(QueryPlus.wrap(query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval)))), initializeResponseContext()));
                    if (queryCompletedCallback != null) {
                        queryCompletedCallback.run();
                    }
                }
            }
        }, mocks.toArray());
        // verify the 'bySegment' context flag on every captured query matches expectations
        for (Capture queryCapture : queryCaptures) {
            QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue();
            Query capturedQuery = capturedQueryPlus.getQuery();
            if (expectBySegment) {
                Assert.assertEquals(true, capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY));
            } else {
                Assert.assertTrue(capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY) == null || capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY).equals(false));
            }
        }
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) TopNQuery(org.apache.druid.query.topn.TopNQuery) Query(org.apache.druid.query.Query) TimeBoundaryQuery(org.apache.druid.query.timeboundary.TimeBoundaryQuery) SearchQuery(org.apache.druid.query.search.SearchQuery) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) ArrayList(java.util.ArrayList) Capture(org.easymock.Capture) Result(org.apache.druid.query.Result) MergeIterable(org.apache.druid.java.util.common.guava.MergeIterable) TimeBoundaryResultValue(org.apache.druid.query.timeboundary.TimeBoundaryResultValue) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) ResultRow(org.apache.druid.query.groupby.ResultRow) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) Map(java.util.Map) TreeMap(java.util.TreeMap) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) Nullable(javax.annotation.Nullable) Interval(org.joda.time.Interval) FunctionalIterable(org.apache.druid.java.util.common.guava.FunctionalIterable) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) SearchResultValue(org.apache.druid.query.search.SearchResultValue) HashPartitionFunction(org.apache.druid.timeline.partition.HashPartitionFunction) Function(com.google.common.base.Function) HashFunction(com.google.common.hash.HashFunction) ISE(org.apache.druid.java.util.common.ISE) QueryPlus(org.apache.druid.query.QueryPlus) SegmentId(org.apache.druid.timeline.SegmentId) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner)
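
The mock wiring above hinges on EasyMock's Capture: each expected QueryRunner.run call records the QueryPlus it actually received, and the captures are inspected after the run to assert on the bySegment context flag. A self-contained sketch of that record-then-inspect pattern, with an invented Greeter interface standing in for QueryRunner:

import org.easymock.Capture;
import org.easymock.EasyMock;

public class CaptureSketch {
    // Hypothetical collaborator, standing in for QueryRunner.
    interface Greeter {
        String greet(String name);
    }

    public static void main(String[] args) {
        Greeter greeter = EasyMock.createMock(Greeter.class);

        // Record phase: expect one call and capture whatever argument arrives.
        Capture<String> captured = Capture.newInstance();
        EasyMock.expect(greeter.greet(EasyMock.capture(captured))).andReturn("hi").once();
        EasyMock.replay(greeter);

        // Exercise phase: the argument is stored in `captured` as a side effect.
        greeter.greet("druid");
        EasyMock.verify(greeter);

        // Inspect phase: assert on what the code under test actually passed.
        System.out.println(captured.getValue()); // prints "druid"
    }
}

In testQueryCaching the same three phases appear, just spread out: captures are created and registered per server, the query runs inside runWithMocks, and the final loop over queryCaptures reads them back.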

Example 8 with ResultRow

use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.

the class CachingClusteredClientTest method makeGroupByResults.

private Iterable<ResultRow> makeGroupByResults(GroupByQuery query, Object... objects) {
    List<ResultRow> retVal = new ArrayList<>();
    int index = 0;
    // objects arrive as alternating (DateTime timestamp, Map<String, Object> values) pairs
    while (index < objects.length) {
        final DateTime timestamp = (DateTime) objects[index++];
        final Map<String, Object> rowMap = (Map<String, Object>) objects[index++];
        final ResultRow row = ResultRow.create(query.getResultRowSizeWithoutPostAggregators());
        if (query.getResultRowHasTimestamp()) {
            row.set(0, timestamp.getMillis());
        }
        for (Map.Entry<String, Object> entry : rowMap.entrySet()) {
            final int position = query.getResultRowSignature().indexOf(entry.getKey());
            row.set(position, entry.getValue());
        }
        retVal.add(row);
    }
    return retVal;
}
Also used : ResultRow(org.apache.druid.query.groupby.ResultRow) ArrayList(java.util.ArrayList) Map(java.util.Map) TreeMap(java.util.TreeMap) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) DateTime(org.joda.time.DateTime)
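
The helper consumes its varargs as alternating (timestamp, values-map) pairs, two slots per result row. A self-contained stand-in, with Druid types replaced by plain Java so it compiles on its own, showing the same consumption pattern:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Stand-in for makeGroupByResults: consumes varargs as alternating
// (timestamp, values-map) pairs, producing one row per pair.
final class PairArgsSketch {
    record Row(long timestampMillis, Map<String, Object> values) {}

    @SuppressWarnings("unchecked")
    static List<Row> makeRows(Object... objects) {
        List<Row> rows = new ArrayList<>();
        int index = 0;
        while (index < objects.length) {
            long timestamp = (Long) objects[index++];                             // first slot of the pair
            Map<String, Object> values = (Map<String, Object>) objects[index++];  // second slot of the pair
            rows.add(new Row(timestamp, values));
        }
        return rows;
    }

    public static void main(String[] args) {
        List<Row> rows = makeRows(
            0L, Map.of("dim", "a", "rows", 3),
            86_400_000L, Map.of("dim", "b", "rows", 5)
        );
        System.out.println(rows.size()); // 2
    }
}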

Example 9 with ResultRow

use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.

the class DoubleMeanAggregationTest method testVectorAggretatorUsingGroupByQueryOnDoubleColumn.

@Test
@Parameters(method = "doVectorize")
public void testVectorAggretatorUsingGroupByQueryOnDoubleColumn(boolean doVectorize) throws Exception {
    GroupByQuery query = new GroupByQuery.Builder()
        .setDataSource("test")
        .setGranularity(Granularities.ALL)
        .setInterval("1970/2050")
        .setAggregatorSpecs(new DoubleMeanAggregatorFactory("meanOnDouble", SimpleTestIndex.DOUBLE_COL))
        .setContext(Collections.singletonMap(QueryContexts.VECTORIZE_KEY, doVectorize))
        .build();
    // do json serialization and deserialization of query to ensure there are no serde issues
    ObjectMapper jsonMapper = groupByQueryTestHelper.getObjectMapper();
    query = (GroupByQuery) jsonMapper.readValue(jsonMapper.writeValueAsString(query), Query.class);
    Sequence<ResultRow> seq = groupByQueryTestHelper.runQueryOnSegmentsObjs(segments, query);
    Row result = Iterables.getOnlyElement(seq.toList()).toMapBasedRow(query);
    Assert.assertEquals(6.2d, result.getMetric("meanOnDouble").doubleValue(), 0.0001d);
}
Also used : ResultRow(org.apache.druid.query.groupby.ResultRow) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) Row(org.apache.druid.data.input.Row) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Parameters(junitparams.Parameters) Test(org.junit.Test)
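
The write-then-read step in this test is a common guard against serde regressions: any field the configured ObjectMapper cannot round-trip surfaces as a test failure instead of a production bug. A minimal sketch of the same pattern, using a plain map and a fresh Jackson mapper rather than Druid's configured one:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class SerdeRoundTripSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        Map<String, Object> original = Map.of("granularity", "all", "limit", 10);

        // Serialize, then deserialize, and compare: a round-trip mismatch
        // signals missing serialization wiring or a lossy serializer.
        String json = mapper.writeValueAsString(original);
        Map<String, Object> roundTripped = mapper.readValue(json, Map.class);

        System.out.println(original.equals(roundTripped)); // true
    }
}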

Example 10 with ResultRow

use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.

the class HyperUniquesAggregationTest method testIngestAndQuery.

@Test
public void testIngestAndQuery() throws Exception {
    try (final AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(Collections.singletonList(new AggregatorsModule()), config, tempFolder)) {
        String metricSpec = "[{" + "\"type\": \"hyperUnique\"," + "\"name\": \"index_hll\"," + "\"fieldName\": \"market\"" + "}]";
        String parseSpec = "{" + "\"type\" : \"string\"," + "\"parseSpec\" : {" + "    \"format\" : \"tsv\"," + "    \"timestampSpec\" : {" + "        \"column\" : \"timestamp\"," + "        \"format\" : \"auto\"" + "}," + "    \"dimensionsSpec\" : {" + "        \"dimensions\": []," + "        \"dimensionExclusions\" : []," + "        \"spatialDimensions\" : []" + "    }," + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]" + "  }" + "}";
        String query = "{" + "\"queryType\": \"groupBy\"," + "\"dataSource\": \"test_datasource\"," + "\"granularity\": \"ALL\"," + "\"dimensions\": []," + "\"aggregations\": [" + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }" + "]," + "\"postAggregations\": [" + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }" + "]," + "\"intervals\": [ \"1970/2050\" ]" + "}";
        Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()), parseSpec, metricSpec, 0, Granularities.NONE, 50000, query);
        final ResultRow resultRow = seq.toList().get(0);
        Assert.assertEquals("index_hll", 3.0, ((Number) resultRow.get(0)).floatValue(), 0.1);
        Assert.assertEquals("index_unique_count", 3.0, ((Number) resultRow.get(1)).floatValue(), 0.1);
    }
}
Also used : AggregatorsModule(org.apache.druid.jackson.AggregatorsModule) ResultRow(org.apache.druid.query.groupby.ResultRow) AggregationTestHelper(org.apache.druid.query.aggregation.AggregationTestHelper) File(java.io.File) Test(org.junit.Test) GroupByQueryRunnerTest(org.apache.druid.query.groupby.GroupByQueryRunnerTest)
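
For readability, the groupBy query assembled from string fragments above corresponds to this JSON:

{
  "queryType": "groupBy",
  "dataSource": "test_datasource",
  "granularity": "ALL",
  "dimensions": [],
  "aggregations": [
    { "type": "hyperUnique", "name": "index_hll", "fieldName": "index_hll" }
  ],
  "postAggregations": [
    { "type": "hyperUniqueCardinality", "name": "index_unique_count", "fieldName": "index_hll" }
  ],
  "intervals": [ "1970/2050" ]
}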

Aggregations

ResultRow (org.apache.druid.query.groupby.ResultRow): 129
Test (org.junit.Test): 81
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 65
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 59
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 58
File (java.io.File): 39
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 37
QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment): 34
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 24
Benchmark (org.openjdk.jmh.annotations.Benchmark): 21
BenchmarkMode (org.openjdk.jmh.annotations.BenchmarkMode): 21
OutputTimeUnit (org.openjdk.jmh.annotations.OutputTimeUnit): 21
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 20
LegacySegmentSpec (org.apache.druid.query.spec.LegacySegmentSpec): 18
List (java.util.List): 17
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 15
ArrayList (java.util.ArrayList): 14
GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig): 14
ExpressionVirtualColumn (org.apache.druid.segment.virtual.ExpressionVirtualColumn): 13
ByteBuffer (java.nio.ByteBuffer): 12