
Example 56 with QueryRunner

Use of io.druid.query.QueryRunner in project druid by druid-io.

In class GroupByQueryRunnerTest, method testMergeResults:

@Test
public void testMergeResults() {
    GroupByQuery.Builder builder = GroupByQuery.builder()
            .setDataSource(QueryRunnerTestHelper.dataSource)
            .setInterval("2011-04-02/2011-04-04")
            .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
            .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
            .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
    final GroupByQuery fullQuery = builder.build();
    final GroupByQuery allGranQuery = builder.copy().setGranularity(Granularities.ALL).build();
    QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<Row>() {

        @Override
        public Sequence<Row> run(Query<Row> query, Map<String, Object> responseContext) {
            // simulate two daily segments
            final Query query1 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))));
            final Query query2 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))));
            return new MergeSequence(query.getResultOrdering(), Sequences.simple(Arrays.asList(runner.run(query1, responseContext), runner.run(query2, responseContext))));
        }
    });
    List<Row> expectedResults = Arrays.asList(
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L)
    );
    Map<String, Object> context = Maps.newHashMap();
    TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged");
    List<Row> allGranExpectedResults = Arrays.asList(
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "automotive", "rows", 2L, "idx", 269L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "business", "rows", 2L, "idx", 217L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 2L, "idx", 319L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "health", "rows", 2L, "idx", 216L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 2L, "idx", 221L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "premium", "rows", 6L, "idx", 4416L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "technology", "rows", 2L, "idx", 177L),
            GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L)
    );
    TestHelper.assertExpectedObjects(allGranExpectedResults, mergedRunner.run(allGranQuery, context), "merged");
}
Also used : Query(io.druid.query.Query) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) PeriodGranularity(io.druid.java.util.common.granularity.PeriodGranularity) Period(org.joda.time.Period) MultipleIntervalSegmentSpec(io.druid.query.spec.MultipleIntervalSegmentSpec) Sequence(io.druid.java.util.common.guava.Sequence) MergeSequence(io.druid.java.util.common.guava.MergeSequence) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) Row(io.druid.data.input.Row) Interval(org.joda.time.Interval) Test(org.junit.Test)
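
The merged runner above returns a lazy Sequence; rows only materialize when the sequence is consumed. As a rough illustration (not part of the original test), the result could be drained into a list with Sequences.toList, reusing the mergedRunner and fullQuery objects built in the test:

    // Illustrative sketch only: drain the lazy Sequence produced by the merged runner.
    // mergedRunner and fullQuery are assumed to be the objects built in the test above.
    Map<String, Object> responseContext = Maps.newHashMap();
    Sequence<Row> sequence = mergedRunner.run(fullQuery, responseContext);
    List<Row> rows = Sequences.toList(sequence, Lists.<Row>newArrayList());
    for (Row row : rows) {
        // each Row carries the "alias" dimension plus the "rows" and "idx" aggregations
        System.out.println(row);
    }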

Example 57 with QueryRunner

Use of io.druid.query.QueryRunner in project druid by druid-io.

In class GroupByQueryRunnerTest, method testBySegmentResultsWithAllFiltersWithExtractionFns:

@Test
public void testBySegmentResultsWithAllFiltersWithExtractionFns() {
    int segmentCount = 32;
    Result<BySegmentResultValue> singleSegmentResult = new Result<BySegmentResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new BySegmentResultValueClass(
                    Arrays.asList(GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L)),
                    "testSegment",
                    new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
            )
    );
    List<Result> bySegmentResults = Lists.newArrayList();
    for (int i = 0; i < segmentCount; i++) {
        bySegmentResults.add(singleSegmentResult);
    }
    String extractionJsFn = "function(str) { return 'super-' + str; }";
    String jsFn = "function(x) { return(x === 'super-mezzanine') }";
    ExtractionFn extractionFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getEnabledInstance());
    List<DimFilter> superFilterList = new ArrayList<>();
    superFilterList.add(new SelectorDimFilter("quality", "super-mezzanine", extractionFn));
    superFilterList.add(new InDimFilter("quality", Arrays.asList("not-super-mezzanine", "FOOBAR", "super-mezzanine"), extractionFn));
    superFilterList.add(new BoundDimFilter("quality", "super-mezzanine", "super-mezzanine", false, false, true, extractionFn, StringComparators.ALPHANUMERIC));
    superFilterList.add(new RegexDimFilter("quality", "super-mezzanine", extractionFn));
    superFilterList.add(new SearchQueryDimFilter("quality", new ContainsSearchQuerySpec("super-mezzanine", true), extractionFn));
    superFilterList.add(new JavaScriptDimFilter("quality", jsFn, extractionFn, JavaScriptConfig.getEnabledInstance()));
    DimFilter superFilter = new AndDimFilter(superFilterList);
    GroupByQuery.Builder builder = GroupByQuery.builder()
            .setDataSource(QueryRunnerTestHelper.dataSource)
            .setInterval("2011-04-02/2011-04-04")
            .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
            .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
            .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
            .setDimFilter(superFilter)
            .setContext(ImmutableMap.<String, Object>of("bySegment", true));
    final GroupByQuery fullQuery = builder.build();
    QueryToolChest toolChest = factory.getToolchest();
    List<QueryRunner<Row>> singleSegmentRunners = Lists.newArrayList();
    for (int i = 0; i < segmentCount; i++) {
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }
    ExecutorService exec = Executors.newCachedThreadPool();
    // merge the per-segment runners on the executor that is shut down at the end of the test
    QueryRunner theRunner = toolChest.postMergeQueryDecoration(
            new FinalizeResultsQueryRunner<>(
                    toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
                    toolChest
            )
    );
    TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(fullQuery, Maps.newHashMap()), "");
    exec.shutdownNow();
}
Also used : BoundDimFilter(io.druid.query.filter.BoundDimFilter) ContainsSearchQuerySpec(io.druid.query.search.search.ContainsSearchQuerySpec) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) PeriodGranularity(io.druid.java.util.common.granularity.PeriodGranularity) QueryToolChest(io.druid.query.QueryToolChest) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) SelectorDimFilter(io.druid.query.filter.SelectorDimFilter) InDimFilter(io.druid.query.filter.InDimFilter) BySegmentResultValue(io.druid.query.BySegmentResultValue) RegexDimFilter(io.druid.query.filter.RegexDimFilter) AndDimFilter(io.druid.query.filter.AndDimFilter) JavaScriptExtractionFn(io.druid.query.extraction.JavaScriptExtractionFn) BySegmentResultValueClass(io.druid.query.BySegmentResultValueClass) Period(org.joda.time.Period) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) LookupExtractionFn(io.druid.query.lookup.LookupExtractionFn) StrlenExtractionFn(io.druid.query.extraction.StrlenExtractionFn) CascadeExtractionFn(io.druid.query.extraction.CascadeExtractionFn) TimeFormatExtractionFn(io.druid.query.extraction.TimeFormatExtractionFn) ExtractionFn(io.druid.query.extraction.ExtractionFn) RegexDimExtractionFn(io.druid.query.extraction.RegexDimExtractionFn) DimExtractionFn(io.druid.query.extraction.DimExtractionFn) ExecutorService(java.util.concurrent.ExecutorService) SearchQueryDimFilter(io.druid.query.filter.SearchQueryDimFilter) JavaScriptDimFilter(io.druid.query.filter.JavaScriptDimFilter) ExtractionDimFilter(io.druid.query.filter.ExtractionDimFilter) OrDimFilter(io.druid.query.filter.OrDimFilter) DimFilter(io.druid.query.filter.DimFilter) Interval(org.joda.time.Interval) Test(org.junit.Test)
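
With "bySegment" set to true in the query context, every per-segment result is wrapped in a Result<BySegmentResultValue> like singleSegmentResult above. A minimal sketch (not from the original test) of unwrapping one of those results; it assumes the getSegmentId(), getInterval() and getResults() accessors on io.druid.query.BySegmentResultValue:

    // Illustrative sketch only: unwrap one "bySegment" result built in the test above.
    Result<BySegmentResultValue> segmentResult = bySegmentResults.get(0);
    BySegmentResultValue segmentValue = segmentResult.getValue();
    System.out.println("segment " + segmentValue.getSegmentId() + " over " + segmentValue.getInterval());
    for (Object row : segmentValue.getResults()) {
        // each entry is a grouped Row for that segment (here: the single "mezzanine" row)
        System.out.println(row);
    }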

Example 58 with QueryRunner

Use of io.druid.query.QueryRunner in project druid by druid-io.

In class SegmentAnalyzerTest, method getSegmentAnalysises:

/**
   * *Awesome* method name auto-generated by IntelliJ!  I love IntelliJ!
   *
   * @param index    the segment to analyze
   * @param analyses the analysis types to request from the segment metadata query
   *
   * @return the SegmentAnalysis results returned for the given segment
   */
private List<SegmentAnalysis> getSegmentAnalysises(Segment index, EnumSet<SegmentMetadataQuery.AnalysisType> analyses) {
    final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
            (QueryRunnerFactory) new SegmentMetadataQueryRunnerFactory(
                    new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()),
                    QueryRunnerTestHelper.NOOP_QUERYWATCHER
            ),
            index,
            null
    );
    final SegmentMetadataQuery query = new SegmentMetadataQuery(new LegacyDataSource("test"), QuerySegmentSpecs.create("2011/2012"), null, null, null, analyses, false, false);
    HashMap<String, Object> context = new HashMap<String, Object>();
    return Sequences.toList(query.run(runner, context), Lists.<SegmentAnalysis>newArrayList());
}
Also used : HashMap(java.util.HashMap) SegmentMetadataQuery(io.druid.query.metadata.metadata.SegmentMetadataQuery) LegacyDataSource(io.druid.query.LegacyDataSource) QueryRunner(io.druid.query.QueryRunner)
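
A hypothetical usage of this helper (not part of the original class), assuming a Segment fixture named segment is in scope and that only the cardinality and size analyses are wanted:

    // Illustrative sketch only; "segment" is a hypothetical Segment fixture.
    List<SegmentAnalysis> analyses = getSegmentAnalysises(
            segment,
            EnumSet.of(SegmentMetadataQuery.AnalysisType.CARDINALITY, SegmentMetadataQuery.AnalysisType.SIZE)
    );
    for (SegmentAnalysis analysis : analyses) {
        // column analyses are keyed by column name
        System.out.println(analysis.getId() + " -> " + analysis.getColumns().keySet());
    }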

Example 59 with QueryRunner

Use of io.druid.query.QueryRunner in project druid by druid-io.

In class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithTimestampSpecMerge:

@Test
public void testSegmentMetadataQueryWithTimestampSpecMerge() {
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
            differentIds ? "merged" : "testSegment",
            null,
            ImmutableMap.of("placement", new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 0, null, null, null)),
            0,
            expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
            null,
            new TimestampSpec("ds", "auto", null),
            null,
            null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(
                    FACTORY.mergeRunners(
                            MoreExecutors.sameThreadExecutor(),
                            Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                                    toolChest.preMergeQueryDecoration(runner1),
                                    toolChest.preMergeQueryDecoration(runner2)
                            )
                    )
            ),
            toolChest
    );
    TestHelper.assertExpectedObjects(
            ImmutableList.of(mergedSegmentAnalysis),
            myRunner.run(
                    Druids.newSegmentMetadataQueryBuilder()
                            .dataSource("testing")
                            .intervals("2013/2014")
                            .toInclude(new ListColumnIncluderator(Arrays.asList("placement")))
                            .analysisTypes(SegmentMetadataQuery.AnalysisType.TIMESTAMPSPEC)
                            .merge(true)
                            .build(),
                    Maps.newHashMap()
            ),
            "failed SegmentMetadata merging query"
    );
    exec.shutdownNow();
}
Also used : FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) ListColumnIncluderator(io.druid.query.metadata.metadata.ListColumnIncluderator) ColumnAnalysis(io.druid.query.metadata.metadata.ColumnAnalysis) TimestampSpec(io.druid.data.input.impl.TimestampSpec) ExecutorService(java.util.concurrent.ExecutorService) SegmentAnalysis(io.druid.query.metadata.metadata.SegmentAnalysis) QueryToolChest(io.druid.query.QueryToolChest) QueryRunner(io.druid.query.QueryRunner) Test(org.junit.Test)

Example 60 with QueryRunner

Use of io.druid.query.QueryRunner in project druid by druid-io.

In class SegmentMetadataQueryTest, method testSegmentMetadataQueryWithAggregatorsMerge:

@Test
public void testSegmentMetadataQueryWithAggregatorsMerge() {
    final Map<String, AggregatorFactory> expectedAggregators = Maps.newHashMap();
    for (AggregatorFactory agg : TestIndex.METRIC_AGGS) {
        expectedAggregators.put(agg.getName(), agg.getCombiningFactory());
    }
    SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
            differentIds ? "merged" : "testSegment",
            null,
            ImmutableMap.of("placement", new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 0, null, null, null)),
            0,
            expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
            expectedAggregators,
            null,
            null,
            null
    );
    QueryToolChest toolChest = FACTORY.getToolchest();
    ExecutorService exec = Executors.newCachedThreadPool();
    QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(
                    FACTORY.mergeRunners(
                            MoreExecutors.sameThreadExecutor(),
                            Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                                    toolChest.preMergeQueryDecoration(runner1),
                                    toolChest.preMergeQueryDecoration(runner2)
                            )
                    )
            ),
            toolChest
    );
    TestHelper.assertExpectedObjects(
            ImmutableList.of(mergedSegmentAnalysis),
            myRunner.run(
                    Druids.newSegmentMetadataQueryBuilder()
                            .dataSource("testing")
                            .intervals("2013/2014")
                            .toInclude(new ListColumnIncluderator(Arrays.asList("placement")))
                            .analysisTypes(SegmentMetadataQuery.AnalysisType.AGGREGATORS)
                            .merge(true)
                            .build(),
                    Maps.newHashMap()
            ),
            "failed SegmentMetadata merging query"
    );
    exec.shutdownNow();
}
Also used : FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) ListColumnIncluderator(io.druid.query.metadata.metadata.ListColumnIncluderator) ColumnAnalysis(io.druid.query.metadata.metadata.ColumnAnalysis) ExecutorService(java.util.concurrent.ExecutorService) SegmentAnalysis(io.druid.query.metadata.metadata.SegmentAnalysis) QueryToolChest(io.druid.query.QueryToolChest) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) QueryRunner(io.druid.query.QueryRunner) Test(org.junit.Test)
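
Examples 59 and 60 differ only in the expected SegmentAnalysis and the requested analysis type; the runner construction is identical. A sketch (not in the original test) of that shared scaffolding, assuming the same FACTORY, runner1 and runner2 fixtures:

    // Illustrative sketch only: the merged-runner scaffolding shared by Examples 59 and 60.
    private QueryRunner makeMergedMetadataRunner() {
        QueryToolChest toolChest = FACTORY.getToolchest();
        QueryRunner merged = new FinalizeResultsQueryRunner<>(
                toolChest.mergeResults(
                        FACTORY.mergeRunners(
                                MoreExecutors.sameThreadExecutor(),
                                Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                                        toolChest.preMergeQueryDecoration(runner1),
                                        toolChest.preMergeQueryDecoration(runner2)
                                )
                        )
                ),
                toolChest
        );
        return merged;
    }

With a helper like this, each test would only need to build its SegmentMetadataQuery and expected SegmentAnalysis before calling TestHelper.assertExpectedObjects.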

Aggregations

QueryRunner (io.druid.query.QueryRunner) 96
Test (org.junit.Test) 72
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner) 55
DateTime (org.joda.time.DateTime) 53
Result (io.druid.query.Result) 49
Interval (org.joda.time.Interval) 47
HashMap (java.util.HashMap) 43
TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery) 24
Query (io.druid.query.Query) 23
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory) 23
TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue) 21
QueryToolChest (io.druid.query.QueryToolChest) 20
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest) 20
TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest) 18
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec) 16
ArrayList (java.util.ArrayList) 16
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory) 14
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory) 14
IOException (java.io.IOException) 14
List (java.util.List) 14