Use of io.druid.query.search.search.SearchQueryConfig in project druid by druid-io.
In the class TestQueryRunners, method makeSearchQueryRunner:
public static <T> QueryRunner<T> makeSearchQueryRunner(Segment adapter) {
  final SearchQueryConfig config = new SearchQueryConfig();
  QueryRunnerFactory factory = new SearchQueryRunnerFactory(
      new SearchStrategySelector(Suppliers.ofInstance(config)),
      new SearchQueryQueryToolChest(config, QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  return new FinalizeResultsQueryRunner<T>(factory.createRunner(adapter), factory.getToolchest());
}
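A minimal usage sketch of the helper above, assuming a Segment instance is already available and reusing the builder and run-with-context pattern seen in the other examples on this page; the segment variable, data source name, and interval are illustrative, and imports follow the surrounding test classes.
// Hedged sketch; "segment", the data source and the interval are illustrative only.
QueryRunner<Result<SearchResultValue>> runner = TestQueryRunners.makeSearchQueryRunner(segment);
SearchQuery query = Druids.newSearchQueryBuilder()
    .dataSource("testDatasource")          // illustrative data source name
    .intervals("2011-01-12/2011-01-14")    // illustrative interval
    .query("preferred")
    .build();
// QueryRunner.run returns a Druid Sequence of results.
Sequence<Result<SearchResultValue>> results = runner.run(query, new HashMap<String, Object>());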
Use of io.druid.query.search.search.SearchQueryConfig in project druid by druid-io.
In the class SearchQueryRunnerWithCaseTest, method constructorFeeder:
@Parameterized.Parameters
public static Iterable<Object[]> constructorFeeder() throws IOException {
  final SearchQueryConfig[] configs = new SearchQueryConfig[3];
  configs[0] = new SearchQueryConfig();
  configs[0].setSearchStrategy(UseIndexesStrategy.NAME);
  configs[1] = new SearchQueryConfig();
  configs[1].setSearchStrategy(CursorOnlyStrategy.NAME);
  configs[2] = new SearchQueryConfig();
  configs[2].setSearchStrategy(AutoStrategy.NAME);
  CharSource input = CharSource.wrap(
      "2011-01-12T00:00:00.000Z\tspot\tAutoMotive\t1000\t10000.0\t100000\tPREFERRED\tapreferred\t100.000000\n"
      + "2011-01-12T00:00:00.000Z\tSPot\tbusiness\t1100\t11000.0\t110000\tpreferred\tbPreferred\t100.000000\n"
      + "2011-01-12T00:00:00.000Z\tspot\tentertainment\t1200\t12000.0\t120000\tPREFERRed\tepreferred\t100.000000\n"
      + "2011-01-13T00:00:00.000Z\tspot\tautomotive\t1000\t10000.0\t100000\tpreferred\tapreferred\t94.874713"
  );
  IncrementalIndex index1 = TestIndex.makeRealtimeIndex(input);
  IncrementalIndex index2 = TestIndex.makeRealtimeIndex(input);
  QueryableIndex index3 = TestIndex.persistRealtimeAndLoadMMapped(index1);
  QueryableIndex index4 = TestIndex.persistRealtimeAndLoadMMapped(index2);
  final List<QueryRunner<Result<SearchResultValue>>> runners = Lists.newArrayList();
  for (int i = 0; i < configs.length; i++) {
    runners.addAll(Arrays.asList(
        makeQueryRunner(makeRunnerFactory(configs[i]), "index1", new IncrementalIndexSegment(index1, "index1"), "index1"),
        makeQueryRunner(makeRunnerFactory(configs[i]), "index2", new IncrementalIndexSegment(index2, "index2"), "index2"),
        makeQueryRunner(makeRunnerFactory(configs[i]), "index3", new QueryableIndexSegment("index3", index3), "index3"),
        makeQueryRunner(makeRunnerFactory(configs[i]), "index4", new QueryableIndexSegment("index4", index4), "index4")
    ));
  }
  return transformToConstructionFeeder(runners);
}
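The helper makeRunnerFactory is not shown on this page; a plausible sketch, modeled directly on the factory construction in the first example above, is given below. The actual helper in SearchQueryRunnerWithCaseTest may be wired differently.
// Hedged sketch of makeRunnerFactory(config), mirroring the first example on this page;
// the real helper in the test class may differ in detail.
private static SearchQueryRunnerFactory makeRunnerFactory(final SearchQueryConfig config) {
  return new SearchQueryRunnerFactory(
      new SearchStrategySelector(Suppliers.ofInstance(config)),
      new SearchQueryQueryToolChest(config, QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
}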
Use of io.druid.query.search.search.SearchQueryConfig in project druid by druid-io.
In the class SearchBenchmark, method setup:
@Setup
public void setup() throws IOException {
  log.info("SETUP CALLED AT " + System.currentTimeMillis());
  if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
    ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
  }
  executorService = Execs.multiThreaded(numSegments, "SearchThreadPool");
  setupQueries();
  String[] schemaQuery = schemaAndQuery.split("\\.");
  String schemaName = schemaQuery[0];
  String queryName = schemaQuery[1];
  schemaInfo = BenchmarkSchemas.SCHEMA_MAP.get(schemaName);
  queryBuilder = SCHEMA_QUERY_MAP.get(schemaName).get(queryName);
  queryBuilder.limit(limit);
  query = queryBuilder.build();
  incIndexes = new ArrayList<>();
  for (int i = 0; i < numSegments; i++) {
    log.info("Generating rows for segment " + i);
    BenchmarkDataGenerator gen = new BenchmarkDataGenerator(
        schemaInfo.getColumnSchemas(),
        System.currentTimeMillis(),
        schemaInfo.getDataInterval(),
        rowsPerSegment
    );
    IncrementalIndex incIndex = makeIncIndex();
    for (int j = 0; j < rowsPerSegment; j++) {
      InputRow row = gen.nextRow();
      if (j % 10000 == 0) {
        log.info(j + " rows generated.");
      }
      incIndex.add(row);
    }
    incIndexes.add(incIndex);
  }
  tmpDir = Files.createTempDir();
  log.info("Using temp dir: " + tmpDir.getAbsolutePath());
  qIndexes = new ArrayList<>();
  for (int i = 0; i < numSegments; i++) {
    File indexFile = INDEX_MERGER_V9.persist(incIndexes.get(i), tmpDir, new IndexSpec());
    QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
    qIndexes.add(qIndex);
  }
  final SearchQueryConfig config = new SearchQueryConfig().withOverrides(query);
  factory = new SearchQueryRunnerFactory(
      new SearchStrategySelector(Suppliers.ofInstance(config)),
      new SearchQueryQueryToolChest(config, QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
      QueryBenchmarkUtil.NOOP_QUERYWATCHER
  );
}
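The benchmark methods themselves are not shown here. Below is a hedged sketch of how the factory, query, and mapped indexes built in setup() could be exercised, following the createRunner/FinalizeResultsQueryRunner pattern from the first example on this page; the segment identifier is illustrative and this is not the benchmark's actual @Benchmark method.
// Hedged sketch, not the benchmark's measurement code; "qIndex0" is an illustrative segment id.
QueryRunner<Result<SearchResultValue>> runner = new FinalizeResultsQueryRunner<>(
    factory.createRunner(new QueryableIndexSegment("qIndex0", qIndexes.get(0))),
    factory.getToolchest()
);
Sequence<Result<SearchResultValue>> results = runner.run(query, new HashMap<String, Object>());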
Use of io.druid.query.search.search.SearchQueryConfig in project druid by druid-io.
In the class CachingClusteredClientTest, method testSearchCachingRenamedOutput:
@Test
public void testSearchCachingRenamedOutput() throws Exception {
  final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder()
      .dataSource(DATA_SOURCE)
      .filters(DIM_FILTER)
      .granularity(GRANULARITY)
      .limit(1000)
      .intervals(SEG_SPEC)
      .dimensions(Arrays.asList(TOP_DIM))
      .query("how")
      .context(CONTEXT);
  testQueryCaching(
      client,
      builder.build(),
      new Interval("2011-01-01/2011-01-02"),
      makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4),
      new Interval("2011-01-02/2011-01-03"),
      makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4),
      new Interval("2011-01-05/2011-01-10"),
      makeSearchResults(
          TOP_DIM,
          new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
      ),
      new Interval("2011-01-05/2011-01-10"),
      makeSearchResults(
          TOP_DIM,
          new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
      )
  );
  QueryRunner runner = new FinalizeResultsQueryRunner(
      client,
      new SearchQueryQueryToolChest(new SearchQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(
      makeSearchResults(
          TOP_DIM,
          new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
          new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
          new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4,
          new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
      ),
      runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context)
  );
  TestHelper.assertExpectedResults(
      makeSearchResults(
          "new_dim",
          new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
          new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
          new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4,
          new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
      ),
      runner.run(builder.intervals("2011-01-01/2011-01-10").dimensions(new DefaultDimensionSpec(TOP_DIM, "new_dim")).build(), context)
  );
}
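The rename in the second assertion comes from DefaultDimensionSpec, which pairs the stored dimension name with the output name used in the results, so cached results for TOP_DIM come back under the new name. A hedged sketch of the same idea in isolation, with an illustrative output name:
// Hedged sketch; "renamed_dim" is an illustrative output name, the other constants are from the test.
SearchQuery renamed = Druids.newSearchQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals("2011-01-01/2011-01-10")
    .dimensions(new DefaultDimensionSpec(TOP_DIM, "renamed_dim"))
    .query("how")
    .build();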
Use of io.druid.query.search.search.SearchQueryConfig in project druid by druid-io.
In the class CachingClusteredClientTest, method testSearchCaching:
@Test
public void testSearchCaching() throws Exception {
  final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder()
      .dataSource(DATA_SOURCE)
      .filters(DIM_FILTER)
      .granularity(GRANULARITY)
      .limit(1000)
      .intervals(SEG_SPEC)
      .dimensions(Arrays.asList(TOP_DIM))
      .query("how")
      .context(CONTEXT);
  testQueryCaching(
      client,
      builder.build(),
      new Interval("2011-01-01/2011-01-02"),
      makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4),
      new Interval("2011-01-02/2011-01-03"),
      makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4),
      new Interval("2011-01-05/2011-01-10"),
      makeSearchResults(
          TOP_DIM,
          new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
      ),
      new Interval("2011-01-05/2011-01-10"),
      makeSearchResults(
          TOP_DIM,
          new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
      )
  );
  QueryRunner runner = new FinalizeResultsQueryRunner(
      client,
      new SearchQueryQueryToolChest(new SearchQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(
      makeSearchResults(
          TOP_DIM,
          new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
          new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
          new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
          new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
          new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
          new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
          new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4,
          new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
      ),
      runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context)
  );
}
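Both caching tests above wrap the caching client in the same finalizing runner backed by a toolchest built from a default SearchQueryConfig. A hedged sketch that factors this shared construction into a helper; "client" is the test's CachingClusteredClient field, and the helper name is illustrative.
// Hedged helper sketch, factored from the two tests above; the helper name is hypothetical.
private static QueryRunner makeFinalizingSearchRunner(QueryRunner client) {
  return new FinalizeResultsQueryRunner(
      client,
      new SearchQueryQueryToolChest(
          new SearchQueryConfig(),
          QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
      )
  );
}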