Example 26 with TimeseriesResultValue

Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.

From the class SketchAggregationTestWithSimpleData, the method testSimpleDataIngestAndTimeseriesQuery:

@Test
public void testSimpleDataIngestAndTimeseriesQuery() throws Exception {
    AggregationTestHelper timeseriesQueryAggregationTestHelper = AggregationTestHelper.createTimeseriesQueryAggregationTestHelper(sm.getJacksonModules(), tempFolder);
    Sequence seq = timeseriesQueryAggregationTestHelper.runQueryOnSegments(ImmutableList.of(s1, s2), readFileFromClasspathAsString("timeseries_query.json"));
    Result<TimeseriesResultValue> result = (Result<TimeseriesResultValue>) Iterables.getOnlyElement(Sequences.toList(seq, Lists.newArrayList()));
    Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp());
    Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketch_count"), 0.01);
    Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchEstimatePostAgg"), 0.01);
    Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
    Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
    Assert.assertEquals(0.0, result.getValue().getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
    Assert.assertEquals(0.0, result.getValue().getDoubleMetric("non_existing_col_validation"), 0.01);
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) AggregationTestHelper(io.druid.query.aggregation.AggregationTestHelper) Sequence(io.druid.java.util.common.guava.Sequence) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)
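
For orientation, the single row asserted above can be rebuilt by hand, which makes it clear that TimeseriesResultValue is just a typed wrapper around a metric-name-to-value map. A minimal sketch follows; the timestamp and metric names are copied from the assertions, the wrapping class is hypothetical, and only a subset of the metrics is shown.

import com.google.common.collect.ImmutableMap;
import io.druid.query.Result;
import io.druid.query.timeseries.TimeseriesResultValue;
import org.joda.time.DateTime;

public class TimeseriesResultValueSketch {
    public static void main(String[] args) {
        // TimeseriesResultValue wraps a Map<String, Object> of metric name -> value.
        Result<TimeseriesResultValue> expected = new Result<>(
                new DateTime("2014-10-20T00:00:00.000Z"),
                new TimeseriesResultValue(ImmutableMap.<String, Object>of(
                        "sketch_count", 50.0,
                        "sketchEstimatePostAgg", 50.0,
                        "sketchAnotBPostAggEstimate", 0.0)));
        // getDoubleMetric looks the name up in that map, which is what the assertions above exercise.
        System.out.println(expected.getTimestamp() + " sketch_count=" + expected.getValue().getDoubleMetric("sketch_count"));
    }
}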

Example 27 with TimeseriesResultValue

Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.

From the class CachingClusteredClientTest, the method testQueryCaching:

@SuppressWarnings("unchecked")
public void testQueryCaching(final QueryRunner runner, final int numTimesToQuery, boolean expectBySegment, final Query query, // does this assume query intervals must be ordered?
Object... args) {
    final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
    final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
    parseResults(queryIntervals, expectedResults, args);
    for (int i = 0; i < queryIntervals.size(); ++i) {
        List<Object> mocks = Lists.newArrayList();
        mocks.add(serverView);
        final Interval actualQueryInterval = new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
        final List<Map<DruidServer, ServerExpectations>> serverExpectationList = populateTimeline(queryIntervals, expectedResults, i, mocks);
        List<Capture> queryCaptures = Lists.newArrayList();
        final Map<DruidServer, ServerExpectations> finalExpectation = serverExpectationList.get(serverExpectationList.size() - 1);
        for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
            DruidServer server = entry.getKey();
            ServerExpectations expectations = entry.getValue();
            EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).once();
            final Capture<? extends Query> capture = new Capture();
            final Capture<? extends Map> context = new Capture();
            queryCaptures.add(capture);
            QueryRunner queryable = expectations.getQueryRunner();
            if (query instanceof TimeseriesQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<TimeseriesResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)).once();
            } else if (query instanceof TopNQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<TopNResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTopNResults(segmentIds, intervals, results)).once();
            } else if (query instanceof SearchQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<SearchResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableSearchResults(segmentIds, intervals, results)).once();
            } else if (query instanceof SelectQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<SelectResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableSelectResults(segmentIds, intervals, results)).once();
            } else if (query instanceof GroupByQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Row>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableGroupByResults(segmentIds, intervals, results)).once();
            } else if (query instanceof TimeBoundaryQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<TimeBoundaryResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)).once();
            } else {
                throw new ISE("Unknown query type[%s]", query.getClass());
            }
        }
        final int expectedResultsRangeStart;
        final int expectedResultsRangeEnd;
        if (query instanceof TimeBoundaryQuery) {
            expectedResultsRangeStart = i;
            expectedResultsRangeEnd = i + 1;
        } else {
            expectedResultsRangeStart = 0;
            expectedResultsRangeEnd = i + 1;
        }
        runWithMocks(new Runnable() {

            @Override
            public void run() {
                HashMap<String, List> context = new HashMap<String, List>();
                for (int i = 0; i < numTimesToQuery; ++i) {
                    TestHelper.assertExpectedResults(new MergeIterable<>(Ordering.<Result<Object>>natural().nullsFirst(), FunctionalIterable.create(new RangeIterable(expectedResultsRangeStart, expectedResultsRangeEnd)).transformCat(new Function<Integer, Iterable<Iterable<Result<Object>>>>() {

                        @Override
                        public Iterable<Iterable<Result<Object>>> apply(@Nullable Integer input) {
                            List<Iterable<Result<Object>>> retVal = Lists.newArrayList();
                            final Map<DruidServer, ServerExpectations> exps = serverExpectationList.get(input);
                            for (ServerExpectations expectations : exps.values()) {
                                for (ServerExpectation expectation : expectations) {
                                    retVal.add(expectation.getResults());
                                }
                            }
                            return retVal;
                        }
                    })), runner.run(query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval))), context));
                    if (queryCompletedCallback != null) {
                        queryCompletedCallback.run();
                    }
                }
            }
        }, mocks.toArray());
        // make sure all the queries were sent down as 'bySegment'
        for (Capture queryCapture : queryCaptures) {
            Query capturedQuery = (Query) queryCapture.getValue();
            if (expectBySegment) {
                Assert.assertEquals(true, capturedQuery.getContextValue("bySegment"));
            } else {
                Assert.assertTrue(capturedQuery.getContextValue("bySegment") == null || capturedQuery.getContextValue("bySegment").equals(false));
            }
        }
    }
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) MergeIterable(io.druid.java.util.common.guava.MergeIterable) FunctionalIterable(io.druid.java.util.common.guava.FunctionalIterable) SelectQuery(io.druid.query.select.SelectQuery) GroupByQuery(io.druid.query.groupby.GroupByQuery) SearchQuery(io.druid.query.search.search.SearchQuery) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) TopNQuery(io.druid.query.topn.TopNQuery) TimeBoundaryQuery(io.druid.query.timeboundary.TimeBoundaryQuery) HashMap(java.util.HashMap) MultipleIntervalSegmentSpec(io.druid.query.spec.MultipleIntervalSegmentSpec) TimeBoundaryQuery(io.druid.query.timeboundary.TimeBoundaryQuery) Capture(org.easymock.Capture) Result(io.druid.query.Result) SearchResultValue(io.druid.query.search.SearchResultValue) Function(com.google.common.base.Function) HashFunction(com.google.common.hash.HashFunction) GroupByQuery(io.druid.query.groupby.GroupByQuery) MergeIterable(io.druid.java.util.common.guava.MergeIterable) TopNQuery(io.druid.query.topn.TopNQuery) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) ISE(io.druid.java.util.common.ISE) SearchQuery(io.druid.query.search.search.SearchQuery) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) QueryableDruidServer(io.druid.client.selector.QueryableDruidServer) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) SelectQuery(io.druid.query.select.SelectQuery) Map(java.util.Map) TreeMap(java.util.TreeMap) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) Nullable(javax.annotation.Nullable) Interval(org.joda.time.Interval)
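
The final loop above checks that every query captured on its way to the mocked servers carries the "bySegment" context flag. A minimal sketch of how such a flag is set and read on a query follows; the data source, interval, and aggregator are illustrative placeholders, and the Granularities import path is an assumption for this era of Druid.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
// Granularities is assumed to live under io.druid.java.util.common.granularity in this Druid version.
import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.Druids;
import io.druid.query.Query;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.timeseries.TimeseriesQuery;

public class BySegmentContextSketch {
    public static void main(String[] args) {
        // An illustrative timeseries query; names and intervals are placeholders.
        TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                .dataSource("testing")
                .granularity(Granularities.ALL)
                .intervals("2011-01-01/2011-01-02")
                .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
                .build();
        // Attach the flag the way the caching layer is expected to before pushing the query down.
        Query bySegmentQuery = query.withOverriddenContext(ImmutableMap.<String, Object>of("bySegment", true));
        // This is the value the captured queries are checked for in the loop above.
        System.out.println(bySegmentQuery.getContextValue("bySegment"));
    }
}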

Example 28 with TimeseriesResultValue

Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.

From the class CachingQueryRunnerTest, the method testTimeseries:

@Test
public void testTimeseries() throws Exception {
    for (boolean descending : new boolean[] { false, true }) {
        TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource(QueryRunnerTestHelper.dataSource).granularity(QueryRunnerTestHelper.dayGran).intervals(QueryRunnerTestHelper.firstToThird).aggregators(Arrays.<AggregatorFactory>asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index"), QueryRunnerTestHelper.qualityUniques)).descending(descending).build();
        Result row1 = new Result(new DateTime("2011-04-01"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9)));
        Result row2 = new Result<>(new DateTime("2011-04-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9)));
        List<Result> expectedResults;
        if (descending) {
            expectedResults = Lists.newArrayList(row2, row1);
        } else {
            expectedResults = Lists.newArrayList(row1, row2);
        }
        QueryToolChest toolChest = new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator());
        testCloseAndPopulate(expectedResults, expectedResults, query, toolChest);
        testUseCache(expectedResults, query, toolChest);
    }
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) TopNQueryQueryToolChest(io.druid.query.topn.TopNQueryQueryToolChest) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryToolChest(io.druid.query.QueryToolChest) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) Test(org.junit.Test)

Example 29 with TimeseriesResultValue

Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.

From the class KafkaIndexTaskTest, the method countEvents:

public long countEvents(final Task task) throws Exception {
    // Do a query.
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource(DATA_SCHEMA.getDataSource()).aggregators(ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("rows", "rows"))).granularity(Granularities.ALL).intervals("0000/3000").build();
    ArrayList<Result<TimeseriesResultValue>> results = Sequences.toList(task.getQueryRunner(query).run(query, ImmutableMap.<String, Object>of()), Lists.<Result<TimeseriesResultValue>>newArrayList());
    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric("rows");
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) Result(io.druid.query.Result)
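
countEvents above reads only the first result, which is safe because the query uses Granularities.ALL and therefore yields at most one time bucket. With a finer granularity, each bucket would come back as its own Result<TimeseriesResultValue>, so the more general form sums across all of them. A hedged sketch; the helper class is hypothetical and not part of KafkaIndexTaskTest.

import io.druid.query.Result;
import io.druid.query.timeseries.TimeseriesResultValue;
import java.util.List;

public class RowCountSketch {
    // Hypothetical helper: sum the "rows" metric across every time bucket in the result list.
    static long sumRows(List<Result<TimeseriesResultValue>> results) {
        long total = 0;
        for (Result<TimeseriesResultValue> result : results) {
            total += result.getValue().getLongMetric("rows");
        }
        return total;
    }
}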

Example 30 with TimeseriesResultValue

Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.

From the class RetryQueryRunnerTest, the method testRunWithMissingSegments:

@Test
public void testRunWithMissingSegments() throws Exception {
    Map<String, Object> context = new MapMaker().makeMap();
    context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
    RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query query, Map context) {
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
            return Sequences.empty();
        }
    }, (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()), new RetryQueryRunnerConfig() {

        @Override
        public int getNumTries() {
            return 0;
        }

        @Override
        public boolean isReturnPartialResults() {
            return true;
        }
    }, jsonMapper);
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
    Assert.assertTrue("Should have one entry in the list of missing segments", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 1);
    Assert.assertTrue("Should return an empty sequence as a result", ((List) actualResults).size() == 0);
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) MapMaker(com.google.common.collect.MapMaker) Sequence(io.druid.java.util.common.guava.Sequence) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) List(java.util.List) Map(java.util.Map) Interval(org.joda.time.Interval) Test(org.junit.Test)
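
The test above pins getNumTries() to 0, so the missing segment is recorded but never retried. For contrast, a minimal sketch of a config that does retry; the values are illustrative, and the failure behaviour when partial results are disallowed is an assumption rather than something shown on this page.

import io.druid.query.RetryQueryRunnerConfig;

public class RetryConfigSketch {
    // Allow up to two retry passes when segments are reported missing; with partial results
    // disallowed, the runner is expected to fail rather than silently drop data (assumed behaviour).
    static final RetryQueryRunnerConfig RETRY_TWICE = new RetryQueryRunnerConfig() {

        @Override
        public int getNumTries() {
            return 2;
        }

        @Override
        public boolean isReturnPartialResults() {
            return false;
        }
    };
}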

Aggregations

TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue)49 Result (io.druid.query.Result)40 TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery)34 Test (org.junit.Test)34 DateTime (org.joda.time.DateTime)29 Interval (org.joda.time.Interval)24 QueryRunner (io.druid.query.QueryRunner)21 TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest)20 CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory)18 FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner)17 AggregatorFactory (io.druid.query.aggregation.AggregatorFactory)17 LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory)17 HashMap (java.util.HashMap)17 TimeseriesQueryEngine (io.druid.query.timeseries.TimeseriesQueryEngine)13 TimeseriesQueryRunnerFactory (io.druid.query.timeseries.TimeseriesQueryRunnerFactory)12 Sequence (io.druid.java.util.common.guava.Sequence)9 SpatialDimFilter (io.druid.query.filter.SpatialDimFilter)9 IOException (java.io.IOException)7 List (java.util.List)7 Map (java.util.Map)7