Use of io.druid.query.QueryRunnerFactory in project druid by druid-io.
The class TopNQueryQueryToolChestTest, method testMinTopNThreshold.
@Test
public void testMinTopNThreshold() throws Exception {
  TopNQueryConfig config = new TopNQueryConfig();
  final TopNQueryQueryToolChest chest = new TopNQueryQueryToolChest(
      config, QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator());
  QueryRunnerFactory factory = new TopNQueryRunnerFactory(
      TestQueryRunners.getPool(), chest, QueryRunnerTestHelper.NOOP_QUERYWATCHER);
  QueryRunner<Result<TopNResultValue>> runner = QueryRunnerTestHelper.makeQueryRunner(
      factory, new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), segmentId), null);

  Map<String, Object> context = Maps.newHashMap();
  context.put("minTopNThreshold", 500);

  TopNQueryBuilder builder = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(QueryRunnerTestHelper.placementishDimension)
      .metric(QueryRunnerTestHelper.indexMetric)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(QueryRunnerTestHelper.commonAggregators);

  // No query context: the threshold is raised to the config default minTopNThreshold (1000).
  TopNQuery query1 = builder.threshold(10).context(null).build();
  MockQueryRunner mockRunner = new MockQueryRunner(runner);
  new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config)
      .run(query1, ImmutableMap.<String, Object>of());
  Assert.assertEquals(1000, mockRunner.query.getThreshold());

  // A context override lowers the floor: the threshold of 10 is raised only to 500.
  TopNQuery query2 = builder.threshold(10).context(context).build();
  new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config)
      .run(query2, ImmutableMap.<String, Object>of());
  Assert.assertEquals(500, mockRunner.query.getThreshold());

  // A threshold already above the floor is left unchanged.
  TopNQuery query3 = builder.threshold(2000).context(context).build();
  new TopNQueryQueryToolChest.ThresholdAdjustingQueryRunner(mockRunner, config)
      .run(query3, ImmutableMap.<String, Object>of());
  Assert.assertEquals(2000, mockRunner.query.getThreshold());
}
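The same floor can be supplied by clients per query through the query context, which is exactly what query2 above exercises. A minimal sketch reusing the builder from this test (the 500 is an arbitrary illustration value, not a recommended setting):

TopNQuery adjusted = builder
    .threshold(10)
    .context(ImmutableMap.<String, Object>of("minTopNThreshold", 500))
    .build();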
Use of io.druid.query.QueryRunnerFactory in project druid by druid-io.
The class TopNQueryRunnerBenchmark, method setUp.
@BeforeClass
public static void setUp() throws Exception {
  QueryRunnerFactory factory = new TopNQueryRunnerFactory(
      new StupidPool<ByteBuffer>(
          "TopNQueryRunnerFactory-directBufferPool",
          new Supplier<ByteBuffer>()
          {
            @Override
            public ByteBuffer get()
            {
              // Instead of causing a circular dependency, we simply mimic its behavior
              return ByteBuffer.allocateDirect(2000);
            }
          }),
      new TopNQueryQueryToolChest(
          new TopNQueryConfig(),
          QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER);
  // One runner per storage flavor: an in-memory incremental index, a memory-mapped
  // persisted index, and a merged realtime index.
  testCaseMap.put(TestCases.rtIndex, QueryRunnerTestHelper.makeQueryRunner(
      factory, new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), segmentId), null));
  testCaseMap.put(TestCases.mMappedTestIndex, QueryRunnerTestHelper.makeQueryRunner(
      factory, new QueryableIndexSegment(segmentId, TestIndex.getMMappedTestIndex()), null));
  testCaseMap.put(TestCases.mergedRealtimeIndex, QueryRunnerTestHelper.makeQueryRunner(
      factory, new QueryableIndexSegment(segmentId, TestIndex.mergedRealtimeIndex()), null));
}
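Each timed benchmark round then only needs to pull the runner for one storage flavor out of testCaseMap and drain its result sequence. A minimal sketch, assuming a TopNQuery named query built as in the first snippet (it is not constructed in this setUp method):

QueryRunner runner = testCaseMap.get(TestCases.rtIndex);
// Materializing the lazy sequence forces the query to actually execute inside the timed round.
Sequences.toList(runner.run(query, Maps.<String, Object>newHashMap()), Lists.newLinkedList());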
Use of io.druid.query.QueryRunnerFactory in project druid by druid-io.
The class OnheapIncrementalIndexBenchmark, method testConcurrentAddRead.
@Ignore
@Test
@BenchmarkOptions(callgc = true, clock = Clock.REAL_TIME, warmupRounds = 10, benchmarkRounds = 20)
public void testConcurrentAddRead()
    throws InterruptedException, ExecutionException, NoSuchMethodException,
    IllegalAccessException, InvocationTargetException, InstantiationException
{
  final int taskCount = 30;
  final int concurrentThreads = 3;
  final int elementsPerThread = 1 << 15;
  final OnheapIncrementalIndex incrementalIndex = this.incrementalIndex
      .getConstructor(Long.TYPE, Granularity.class, AggregatorFactory[].class, Integer.TYPE)
      .newInstance(0, Granularities.NONE, factories, elementsPerThread * taskCount);

  // One count aggregator plus a long sum and a double sum per dimension.
  final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(dimensionCount + 1);
  queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
  for (int i = 0; i < dimensionCount; ++i) {
    queryAggregatorFactories.add(
        new LongSumAggregatorFactory(String.format("sumResult%s", i), String.format("sumResult%s", i)));
    queryAggregatorFactories.add(
        new DoubleSumAggregatorFactory(String.format("doubleSumResult%s", i), String.format("doubleSumResult%s", i)));
  }

  final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(
      Executors.newFixedThreadPool(
          concurrentThreads,
          new ThreadFactoryBuilder()
              .setDaemon(false)
              .setNameFormat("index-executor-%d")
              .setPriority(Thread.MIN_PRIORITY)
              .build()));
  final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(
      Executors.newFixedThreadPool(
          concurrentThreads,
          new ThreadFactoryBuilder().setDaemon(false).setNameFormat("query-executor-%d").build()));

  final long timestamp = System.currentTimeMillis();
  final Interval queryInterval = new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
  final List<ListenableFuture<?>> indexFutures = new LinkedList<>();
  final List<ListenableFuture<?>> queryFutures = new LinkedList<>();
  final Segment incrementalIndexSegment = new IncrementalIndexSegment(incrementalIndex, null);
  final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
      new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new TimeseriesQueryEngine(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER);
  final AtomicInteger currentlyRunning = new AtomicInteger(0);
  final AtomicBoolean concurrentlyRan = new AtomicBoolean(false);
  final AtomicBoolean someoneRan = new AtomicBoolean(false);

  for (int j = 0; j < taskCount; j++) {
    // Writers: each task appends elementsPerThread rows to the shared index.
    indexFutures.add(indexExecutor.submit(new Runnable()
    {
      @Override
      public void run()
      {
        currentlyRunning.incrementAndGet();
        try {
          for (int i = 0; i < elementsPerThread; i++) {
            incrementalIndex.add(getLongRow(timestamp + i, 1, dimensionCount));
          }
        } catch (IndexSizeExceededException e) {
          throw Throwables.propagate(e);
        }
        currentlyRunning.decrementAndGet();
        someoneRan.set(true);
      }
    }));
    // Readers: each task runs a timeseries query against the index while writes continue.
    queryFutures.add(queryExecutor.submit(new Runnable()
    {
      @Override
      public void run()
      {
        QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
            factory.createRunner(incrementalIndexSegment), factory.getToolchest());
        TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
            .dataSource("xxx")
            .granularity(Granularities.ALL)
            .intervals(ImmutableList.of(queryInterval))
            .aggregators(queryAggregatorFactories)
            .build();
        Map<String, Object> context = new HashMap<String, Object>();
        for (Result<TimeseriesResultValue> result :
            Sequences.toList(runner.run(query, context), new LinkedList<Result<TimeseriesResultValue>>())) {
          if (someoneRan.get()) {
            Assert.assertTrue(result.getValue().getDoubleMetric("doubleSumResult0") > 0);
          }
        }
        if (currentlyRunning.get() > 0) {
          concurrentlyRan.set(true);
        }
      }
    }));
  }

  List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
  allFutures.addAll(queryFutures);
  allFutures.addAll(indexFutures);
  Futures.allAsList(allFutures).get();
  //Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get());
  queryExecutor.shutdown();
  indexExecutor.shutdown();

  // Final verification: all writes are visible and fully rolled up.
  QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
      factory.createRunner(incrementalIndexSegment), factory.getToolchest());
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("xxx")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(queryInterval))
      .aggregators(queryAggregatorFactories)
      .build();
  Map<String, Object> context = new HashMap<String, Object>();
  List<Result<TimeseriesResultValue>> results =
      Sequences.toList(runner.run(query, context), new LinkedList<Result<TimeseriesResultValue>>());
  final int expectedVal = elementsPerThread * taskCount;
  for (Result<TimeseriesResultValue> result : results) {
    Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue());
    for (int i = 0; i < dimensionCount; ++i) {
      Assert.assertEquals(
          String.format("Failed long sum on dimension %d", i),
          expectedVal,
          result.getValue().getLongMetric(String.format("sumResult%s", i)).intValue());
      Assert.assertEquals(
          String.format("Failed double sum on dimension %d", i),
          expectedVal,
          result.getValue().getDoubleMetric(String.format("doubleSumResult%s", i)).intValue());
    }
  }
}
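Note the roll-up arithmetic behind the final assertions: every writer inserts the same values at the same elementsPerThread distinct timestamps, so the index rolls them up into elementsPerThread aggregated rows, while each sum accumulates across all writers to expectedVal = elementsPerThread * taskCount = 32,768 * 30 = 983,040.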
Use of io.druid.query.QueryRunnerFactory in project druid by druid-io.
The class SearchQueryRunnerTest, method testSearchWithNullValueInDimension.
@Test
public void testSearchWithNullValueInDimension() throws Exception {
  IncrementalIndex<Aggregator> index = new OnheapIncrementalIndex(
      new IncrementalIndexSchema.Builder()
          .withQueryGranularity(Granularities.NONE)
          .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis())
          .build(),
      true, 10);
  // The first row never sets the "table" dimension, so its value for "table" is null.
  index.add(new MapBasedInputRow(1481871600000L, Arrays.asList("name", "host"),
      ImmutableMap.<String, Object>of("name", "name1", "host", "host")));
  index.add(new MapBasedInputRow(1481871670000L, Arrays.asList("name", "table"),
      ImmutableMap.<String, Object>of("name", "name2", "table", "table")));

  SearchQuery searchQuery = Druids.newSearchQueryBuilder()
      .dimensions(new DefaultDimensionSpec("table", "table"))
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .context(ImmutableMap.<String, Object>of("searchStrategy", "cursorOnly"))
      .build();
  QueryRunnerFactory factory = new SearchQueryRunnerFactory(selector, toolChest, QueryRunnerTestHelper.NOOP_QUERYWATCHER);
  QueryRunner runner = factory.createRunner(
      new QueryableIndexSegment("asdf", TestIndex.persistRealtimeAndLoadMMapped(index)));

  List<SearchHit> expectedHits = Lists.newLinkedList();
  expectedHits.add(new SearchHit("table", "table", 1));
  // The null dimension value surfaces as an empty string in the search results.
  expectedHits.add(new SearchHit("table", "", 1));
  checkSearchQuery(searchQuery, runner, expectedHits);
}
Use of io.druid.query.QueryRunnerFactory in project druid by druid-io.
The class DumpSegment, method executeQuery.
private static <T> Sequence<T> executeQuery(final Injector injector, final QueryableIndex index, final Query<T> query)
{
  // Look up the factory that matches the query type, run it against the single
  // segment, and merge through the toolchest so results are in their final form.
  final QueryRunnerFactoryConglomerate conglomerate = injector.getInstance(QueryRunnerFactoryConglomerate.class);
  final QueryRunnerFactory factory = conglomerate.findFactory(query);
  final QueryRunner<T> runner = factory.createRunner(new QueryableIndexSegment("segment", index));
  final Sequence results = factory.getToolchest()
      .mergeResults(factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.<QueryRunner>of(runner)))
      .run(query, Maps.<String, Object>newHashMap());
  return (Sequence<T>) results;
}
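A caller only needs an injector, a loaded QueryableIndex, and a query of the matching type. A minimal sketch of draining the returned sequence, assuming the injector, index, and a timeseries query are prepared elsewhere (the variable names here are illustrative, not part of DumpSegment):

final Sequence<Result<TimeseriesResultValue>> seq = executeQuery(injector, index, timeseriesQuery);
final List<Result<TimeseriesResultValue>> results =
    Sequences.toList(seq, Lists.<Result<TimeseriesResultValue>>newArrayList());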