Search in sources:

Example 1 with SearchQueryQueryToolChest

Use of io.druid.query.search.SearchQueryQueryToolChest in project druid by druid-io.

Class TestQueryRunners, method makeSearchQueryRunner:

public static <T> QueryRunner<T> makeSearchQueryRunner(Segment adapter) {
    final SearchQueryConfig config = new SearchQueryConfig();
    // Build a search QueryRunnerFactory with the default config, a no-op interval-chunking
    // decorator, and a no-op query watcher (test helpers).
    QueryRunnerFactory factory = new SearchQueryRunnerFactory(
        new SearchStrategySelector(Suppliers.ofInstance(config)),
        new SearchQueryQueryToolChest(config, QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER
    );
    // Wrap the per-segment runner so results are finalized before being returned.
    return new FinalizeResultsQueryRunner<T>(factory.createRunner(adapter), factory.getToolchest());
}
Also used: SearchQueryQueryToolChest (io.druid.query.search.SearchQueryQueryToolChest), SearchQueryConfig (io.druid.query.search.search.SearchQueryConfig), SearchQueryRunnerFactory (io.druid.query.search.SearchQueryRunnerFactory), TopNQueryRunnerFactory (io.druid.query.topn.TopNQueryRunnerFactory), TimeBoundaryQueryRunnerFactory (io.druid.query.timeboundary.TimeBoundaryQueryRunnerFactory), TimeseriesQueryRunnerFactory (io.druid.query.timeseries.TimeseriesQueryRunnerFactory), SearchStrategySelector (io.druid.query.search.SearchStrategySelector)
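For context, a minimal sketch of how such a runner might be exercised in a test. The Segment instance (segment) and the test constants QueryRunnerTestHelper.dataSource and QueryRunnerTestHelper.fullOnInterval are assumptions here, not part of the example above:

// Illustrative only; "segment" and the QueryRunnerTestHelper constants are assumed to be available.
QueryRunner<Result<SearchResultValue>> runner = TestQueryRunners.makeSearchQueryRunner(segment);
SearchQuery query = Druids.newSearchQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)     // standard "testing" data source from the helpers
    .intervals(QueryRunnerTestHelper.fullOnInterval)  // query the full data interval
    .query("a")                                       // search for dimension values containing "a"
    .build();
// Run the query; the FinalizeResultsQueryRunner wrapper ensures finalized results come back.
Sequence<Result<SearchResultValue>> results = runner.run(query, new HashMap<String, Object>());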

Example 2 with SearchQueryQueryToolChest

Use of io.druid.query.search.SearchQueryQueryToolChest in project druid by druid-io.

Class SearchBenchmark, method setup:

@Setup
public void setup() throws IOException {
    log.info("SETUP CALLED AT " + System.currentTimeMillis());
    // Register the "hyperUnique" complex metric serde once, if it is not already registered.
    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
        ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
    }
    executorService = Execs.multiThreaded(numSegments, "SearchThreadPool");
    setupQueries();
    String[] schemaQuery = schemaAndQuery.split("\\.");
    String schemaName = schemaQuery[0];
    String queryName = schemaQuery[1];
    schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schemaName);
    queryBuilder = SCHEMA_QUERY_MAP.get(schemaName).get(queryName);
    queryBuilder.limit(limit);
    query = queryBuilder.build();
    incIndexes = new ArrayList<>();
    for (int i = 0; i < numSegments; i++) {
        log.info("Generating rows for segment " + i);
        BenchmarkDataGenerator gen = new BenchmarkDataGenerator(schemaInfo.getColumnSchemas(), System.currentTimeMillis(), schemaInfo.getDataInterval(), rowsPerSegment);
        IncrementalIndex incIndex = makeIncIndex();
        for (int j = 0; j < rowsPerSegment; j++) {
            InputRow row = gen.nextRow();
            if (j % 10000 == 0) {
                log.info(j + " rows generated.");
            }
            incIndex.add(row);
        }
        incIndexes.add(incIndex);
    }
    tmpDir = Files.createTempDir();
    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
    qIndexes = new ArrayList<>();
    // Persist each in-memory index to the temp dir and reload it as a QueryableIndex for querying.
    for (int i = 0; i < numSegments; i++) {
        File indexFile = INDEX_MERGER_V9.persist(incIndexes.get(i), tmpDir, new IndexSpec());
        QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
        qIndexes.add(qIndex);
    }
    // Apply any query-level overrides to the default search config, then build the factory.
    final SearchQueryConfig config = new SearchQueryConfig().withOverrides(query);
    factory = new SearchQueryRunnerFactory(
        new SearchStrategySelector(Suppliers.ofInstance(config)),
        new SearchQueryQueryToolChest(config, QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
        QueryBenchmarkUtil.NOOP_QUERYWATCHER
    );
}
Also used: IndexSpec (io.druid.segment.IndexSpec), SearchQueryConfig (io.druid.query.search.search.SearchQueryConfig), IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator), HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde), SearchQueryRunnerFactory (io.druid.query.search.SearchQueryRunnerFactory), SearchStrategySelector (io.druid.query.search.SearchStrategySelector), SearchQueryQueryToolChest (io.druid.query.search.SearchQueryQueryToolChest), QueryableIndex (io.druid.segment.QueryableIndex), InputRow (io.druid.data.input.InputRow), File (java.io.File), Setup (org.openjdk.jmh.annotations.Setup)
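Downstream of this setup, a benchmark method would typically wrap one of the persisted segments in a runner from the factory and drain the result sequence into a JMH Blackhole. A hedged sketch follows; the helper QueryBenchmarkUtil.makeQueryRunner, the segment identifier, and the exact annotations are assumptions, not quoted source:

// Sketch of a benchmark method consuming the state initialized in setup(); names are illustrative.
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole) {
    // Build a per-segment runner from the factory created in setup() (helper assumed to exist).
    QueryRunner<Result<SearchResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
        factory,
        "qIndex0",
        new QueryableIndexSegment("qIndex0", qIndexes.get(0))
    );
    // Materialize the lazy result sequence and hand every result to the Blackhole
    // so the JIT cannot optimize the query away.
    List<Result<SearchResultValue>> results = Sequences.toList(
        runner.run(query, new HashMap<String, Object>()),
        Lists.<Result<SearchResultValue>>newArrayList()
    );
    for (Result<SearchResultValue> result : results) {
        blackhole.consume(result);
    }
}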

Example 3 with SearchQueryQueryToolChest

Use of io.druid.query.search.SearchQueryQueryToolChest in project druid by druid-io.

Class CachingClusteredClientTest, method testSearchCachingRenamedOutput:

@Test
public void testSearchCachingRenamedOutput() throws Exception {
    final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder()
        .dataSource(DATA_SOURCE)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .limit(1000)
        .intervals(SEG_SPEC)
        .dimensions(Arrays.asList(TOP_DIM))
        .query("how")
        .context(CONTEXT);
    testQueryCaching(client, builder.build(), new Interval("2011-01-01/2011-01-02"), makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), new Interval("2011-01-02/2011-01-03"), makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), new Interval("2011-01-05/2011-01-10"), makeSearchResults(TOP_DIM, new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4), new Interval("2011-01-05/2011-01-10"), makeSearchResults(TOP_DIM, new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4));
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new SearchQueryQueryToolChest(new SearchQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
    );
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4), runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context));
    TestHelper.assertExpectedResults(makeSearchResults("new_dim", new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4), runner.run(builder.intervals("2011-01-01/2011-01-10").dimensions(new DefaultDimensionSpec(TOP_DIM, "new_dim")).build(), context));
}
Also used: SearchQueryConfig (io.druid.query.search.search.SearchQueryConfig), HashMap (java.util.HashMap), DateTime (org.joda.time.DateTime), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), QueryRunner (io.druid.query.QueryRunner), DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec), SearchQueryQueryToolChest (io.druid.query.search.SearchQueryQueryToolChest), Druids (io.druid.query.Druids), Interval (org.joda.time.Interval), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
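The renaming half of this test hinges on the DefaultDimensionSpec passed to the builder: the cache entries were populated under the original TOP_DIM column, and the second assertion checks that they are served back under the renamed output dimension. The relevant builder call, isolated from the long assertion above (variable name "renamed" is illustrative):

// Same builder as above, but with the output dimension renamed; cached results must follow the rename.
SearchQuery renamed = builder
    .intervals("2011-01-01/2011-01-10")
    .dimensions(new DefaultDimensionSpec(TOP_DIM, "new_dim"))  // stored column TOP_DIM, output name "new_dim"
    .build();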

Example 4 with SearchQueryQueryToolChest

Use of io.druid.query.search.SearchQueryQueryToolChest in project druid by druid-io.

Class CachingClusteredClientTest, method testSearchCaching:

@Test
public void testSearchCaching() throws Exception {
    final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder()
        .dataSource(DATA_SOURCE)
        .filters(DIM_FILTER)
        .granularity(GRANULARITY)
        .limit(1000)
        .intervals(SEG_SPEC)
        .dimensions(Arrays.asList(TOP_DIM))
        .query("how")
        .context(CONTEXT);
    testQueryCaching(client, builder.build(), new Interval("2011-01-01/2011-01-02"), makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), new Interval("2011-01-02/2011-01-03"), makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), new Interval("2011-01-05/2011-01-10"), makeSearchResults(TOP_DIM, new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4), new Interval("2011-01-05/2011-01-10"), makeSearchResults(TOP_DIM, new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4));
    QueryRunner runner = new FinalizeResultsQueryRunner(
        client,
        new SearchQueryQueryToolChest(new SearchQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
    );
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4), runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context));
}
Also used: SearchQueryQueryToolChest (io.druid.query.search.SearchQueryQueryToolChest), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), SearchQueryConfig (io.druid.query.search.search.SearchQueryConfig), HashMap (java.util.HashMap), Druids (io.druid.query.Druids), DateTime (org.joda.time.DateTime), QueryRunner (io.druid.query.QueryRunner), Interval (org.joda.time.Interval), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
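Both caching tests ultimately exercise the toolchest's cache strategy inside CachingClusteredClient. A hedged sketch of the piece of the toolchest API involved; the local variables (toolChest, strategy, cacheKey) are illustrative and not taken from the test:

// Illustrative: the toolchest supplies the CacheStrategy that the caching client uses
// to compute per-query cache keys and (de)serialize cached search results.
SearchQueryQueryToolChest toolChest = new SearchQueryQueryToolChest(
    new SearchQueryConfig(),
    QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
);
CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> strategy = toolChest.getCacheStrategy(builder.build());
byte[] cacheKey = strategy.computeCacheKey(builder.build());  // drives cache hits across the intervals above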

Aggregations

SearchQueryQueryToolChest (io.druid.query.search.SearchQueryQueryToolChest): 4 uses
SearchQueryConfig (io.druid.query.search.search.SearchQueryConfig): 4 uses
Druids (io.druid.query.Druids): 2 uses
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 2 uses
QueryRunner (io.druid.query.QueryRunner): 2 uses
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest): 2 uses
SearchQueryRunnerFactory (io.druid.query.search.SearchQueryRunnerFactory): 2 uses
SearchStrategySelector (io.druid.query.search.SearchStrategySelector): 2 uses
HashMap (java.util.HashMap): 2 uses
DateTime (org.joda.time.DateTime): 2 uses
Interval (org.joda.time.Interval): 2 uses
Test (org.junit.Test): 2 uses
BenchmarkDataGenerator (io.druid.benchmark.datagen.BenchmarkDataGenerator): 1 use
InputRow (io.druid.data.input.InputRow): 1 use
HyperUniquesSerde (io.druid.query.aggregation.hyperloglog.HyperUniquesSerde): 1 use
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 1 use
TimeBoundaryQueryRunnerFactory (io.druid.query.timeboundary.TimeBoundaryQueryRunnerFactory): 1 use
TimeseriesQueryRunnerFactory (io.druid.query.timeseries.TimeseriesQueryRunnerFactory): 1 use
TopNQueryRunnerFactory (io.druid.query.topn.TopNQueryRunnerFactory): 1 use
IndexSpec (io.druid.segment.IndexSpec): 1 use