
Example 81 with Result

Use of org.apache.druid.query.Result in project druid by druid-io.

From the class BackgroundCachePopulatorTest, the method testWrap:

/**
 * Method: wrap(final Sequence<T> sequence, final Function<T, CacheType> cacheFn, final Cache cache, final Cache.NamedKey cacheKey)
 */
@Test
public void testWrap() {
    String cacheId = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(Intervals.of("2011/2012"), "version", 0);
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(cacheId, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    Sequence res = this.backgroundCachePopulator.wrap(
            this.baseRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty()),
            (value) -> cacheStrategy.prepareForSegmentLevelCache().apply(value),
            cache,
            cacheKey
    );
    Assert.assertFalse("sequence must not be closed", closable.isClosed());
    Assert.assertNull("cache must be empty", cache.get(cacheKey));
    List results = res.toList();
    Assert.assertTrue(closable.isClosed());
    List<Result> expectedRes = makeTopNResults(false, OBJECTS);
    Assert.assertEquals(expectedRes.toString(), results.toString());
    Assert.assertEquals(5, results.size());
}
Also used : SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ArrayList(java.util.ArrayList) List(java.util.List) Sequence(org.apache.druid.java.util.common.guava.Sequence) CacheStrategy(org.apache.druid.query.CacheStrategy) Result(org.apache.druid.query.Result) Test(org.junit.Test)
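The detail this test pins down is that BackgroundCachePopulator populates the cache lazily: before the wrapped Sequence is iterated the cache entry is absent, and only consuming the sequence (res.toList()) triggers the background write. A minimal sketch of the same pattern with explicit generics, assuming a CachePopulator named populator and a base Sequence named baseSequence (both stand-ins for the fields set up in this test):

// Sketch only: populator and baseSequence are assumed stand-ins for
// this.backgroundCachePopulator and this.baseRunner.run(...) above.
Sequence<Result<TopNResultValue>> wrapped = populator.wrap(
        baseSequence,
        value -> cacheStrategy.prepareForSegmentLevelCache().apply(value), // serialize each Result for the cache
        cache,
        cacheKey
);
// Nothing is cached yet; the background population runs as the sequence is consumed.
List<Result<TopNResultValue>> results = wrapped.toList();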

Example 82 with Result

Use of org.apache.druid.query.Result in project druid by druid-io.

From the class BackgroundCachePopulatorTest, the method makeTopNResults:

private List<Result> makeTopNResults(boolean cachedResults, Object... objects) {
    List<Result> retVal = new ArrayList<>();
    int index = 0;
    while (index < objects.length) {
        DateTime timestamp = (DateTime) objects[index++];
        List<Map<String, Object>> values = new ArrayList<>();
        while (index < objects.length && !(objects[index] instanceof DateTime)) {
            if (objects.length - index < 3) {
                throw new ISE("expect 3 values for each entry in the top list, had %d values left.", objects.length - index);
            }
            final double imps = ((Number) objects[index + 2]).doubleValue();
            final double rows = ((Number) objects[index + 1]).doubleValue();
            if (cachedResults) {
                values.add(ImmutableMap.of("top_dim", objects[index], "rows", rows, "imps", imps, "impers", imps));
            } else {
                values.add(ImmutableMap.of("top_dim", objects[index], "rows", rows, "imps", imps, "impers", imps, "avg_imps_per_row", imps / rows));
            }
            index += 3;
        }
        retVal.add(new Result<>(timestamp, new TopNResultValue(values)));
    }
    return retVal;
}
Also used : TopNResultValue(org.apache.druid.query.topn.TopNResultValue) ArrayList(java.util.ArrayList) ISE(org.apache.druid.java.util.common.ISE) ConcurrentMap(java.util.concurrent.ConcurrentMap) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) DateTime(org.joda.time.DateTime) Result(org.apache.druid.query.Result)
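makeTopNResults expects its varargs laid out as a flat stream: a DateTime opens a new Result, and every following (top_dim, rows, imps) triple becomes one map inside that Result's TopNResultValue, until the next DateTime starts the next Result. A hypothetical input illustrating that layout (the values are made up for illustration and are not the test's actual OBJECTS constant; DateTimes.of is the Druid helper used elsewhere in these examples):

// Two timestamps, each followed by two (dim, rows, imps) triples.
Object[] objects = new Object[] {
        DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993,
        DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990
};
List<Result> results = makeTopNResults(false, objects);
// results.size() == 2; each TopNResultValue holds two maps with top_dim, rows,
// imps, impers and, because cachedResults is false, avg_imps_per_row as well.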

Example 83 with Result

Use of org.apache.druid.query.Result in project druid by druid-io.

From the class BackgroundCachePopulatorTest, the method testWrapOnFailure:

@Test
public void testWrapOnFailure() {
    String cacheId = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(Intervals.of("2011/2012"), "version", 0);
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(cacheId, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    Sequence res = this.backgroundCachePopulator.wrap(this.baseRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty()), (value) -> {
        throw new RuntimeException("Error");
    }, cache, cacheKey);
    Assert.assertFalse("sequence must not be closed", closable.isClosed());
    Assert.assertNull("cache must be empty", cache.get(cacheKey));
    List results = res.toList();
    Assert.assertTrue(closable.isClosed());
    List<Result> expectedRes = makeTopNResults(false, OBJECTS);
    Assert.assertEquals(expectedRes.toString(), results.toString());
    Assert.assertEquals(5, results.size());
}
Also used : SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) ArrayList(java.util.ArrayList) List(java.util.List) Sequence(org.apache.druid.java.util.common.guava.Sequence) CacheStrategy(org.apache.druid.query.CacheStrategy) Result(org.apache.druid.query.Result) Test(org.junit.Test)

Example 84 with Result

Use of org.apache.druid.query.Result in project druid by druid-io.

From the class SpatialFilterBonusTest, the method testSpatialQueryFilteredAggregator:

@Test
public void testSpatialQueryFilteredAggregator() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity(Granularities.DAY)
            .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
            .aggregators(Arrays.asList(
                    new CountAggregatorFactory("rows"),
                    new FilteredAggregatorFactory(
                            new LongSumAggregatorFactory("valFiltered", "val"),
                            new SpatialDimFilter(
                                    "dim.geo",
                                    new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f })
                            )
                    ),
                    new LongSumAggregatorFactory("val", "val")
            ))
            .build();
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
            new Result<>(DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue(
                    ImmutableMap.<String, Object>builder().put("rows", 4995L).put("val", 12497502L).put("valFiltered", 17L).build())),
            new Result<>(DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue(
                    ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 29L).put("valFiltered", 29L).build())),
            new Result<>(DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue(
                    ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).put("valFiltered", 13L).build())),
            new Result<>(DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue(
                    ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 91L).put("valFiltered", 91L).build())),
            new Result<>(DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue(
                    ImmutableMap.<String, Object>builder().put("rows", 2L).put("val", 548L).put("valFiltered", 47L).build()))
    );
    try {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
        QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
        TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) IOException(java.io.IOException) RectangularBound(org.apache.druid.collections.spatial.search.RectangularBound) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) SpatialDimFilter(org.apache.druid.query.filter.SpatialDimFilter) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) Test(org.junit.Test)
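The interesting piece of the query above is the filtered aggregator: a plain LongSumAggregatorFactory wrapped in a FilteredAggregatorFactory so that only rows whose "dim.geo" point falls inside the rectangle contribute to "valFiltered", while the unfiltered "val" sum sees every row. Pulled out on its own for readability (same values as the test; the local variable name is just for illustration):

// Filtered long-sum: sums "val" only for rows inside the rectangle (0,0)-(9,9).
AggregatorFactory valFiltered = new FilteredAggregatorFactory(
        new LongSumAggregatorFactory("valFiltered", "val"),
        new SpatialDimFilter(
                "dim.geo",
                new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f })
        )
);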

Example 85 with Result

Use of org.apache.druid.query.Result in project druid by druid-io.

From the class SpatialFilterBonusTest, the method testSpatialQuery:

@Test
public void testSpatialQuery() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity(Granularities.ALL)
            .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
            .filters(new SpatialDimFilter("dim.geo", new RadiusBound(new float[] { 0.0f, 0.0f }, 5)))
            .aggregators(Arrays.asList(
                    new CountAggregatorFactory("rows"),
                    new LongSumAggregatorFactory("val", "val")
            ))
            .build();
    List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
            new Result<TimeseriesResultValue>(DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue(
                    ImmutableMap.<String, Object>builder().put("rows", 3L).put("val", 59L).build())));
    try {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
        QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
        TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) IOException(java.io.IOException) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) SpatialDimFilter(org.apache.druid.query.filter.SpatialDimFilter) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) RadiusBound(org.apache.druid.collections.spatial.search.RadiusBound) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) Test(org.junit.Test)
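Taken together, these two tests exercise both spatial bounds that appear in this file: a RadiusBound used as a query-level filter here, and a RectangularBound used inside the filtered aggregator in the previous example. Side by side, with the same values as above (variable names are just for illustration):

// Circle of radius 5 around the origin (this test's query filter) ...
SpatialDimFilter withinRadius =
        new SpatialDimFilter("dim.geo", new RadiusBound(new float[] { 0.0f, 0.0f }, 5));
// ... and the rectangle from (0,0) to (9,9) used by the filtered aggregator above.
SpatialDimFilter withinRectangle =
        new SpatialDimFilter("dim.geo", new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f }));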

Aggregations

Result (org.apache.druid.query.Result) 252
Test (org.junit.Test) 181
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest) 143
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory) 46
TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue) 44
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery) 41
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory) 40
QueryRunner (org.apache.druid.query.QueryRunner) 37
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec) 36
DoubleMaxAggregatorFactory (org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) 34
DoubleMinAggregatorFactory (org.apache.druid.query.aggregation.DoubleMinAggregatorFactory) 34
ArrayList (java.util.ArrayList) 33
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner) 32
ExtractionDimensionSpec (org.apache.druid.query.dimension.ExtractionDimensionSpec) 32
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter) 31
TopNResultValue (org.apache.druid.query.topn.TopNResultValue) 28
DateTime (org.joda.time.DateTime) 28
Benchmark (org.openjdk.jmh.annotations.Benchmark) 28
BenchmarkMode (org.openjdk.jmh.annotations.BenchmarkMode) 28
OutputTimeUnit (org.openjdk.jmh.annotations.OutputTimeUnit) 28