Use of java.util.concurrent.ExecutorService in project druid by druid-io.
From the class ChainedExecutionQueryRunnerTest, the method testQueryTimeout:
@Test(timeout = 60000)
public void testQueryTimeout() throws Exception {
  ExecutorService exec = PrioritizedExecutorService.create(
      new Lifecycle(),
      new DruidProcessingConfig() {
        @Override
        public String getFormatString() {
          return "test";
        }

        @Override
        public int getNumThreads() {
          return 2;
        }
      }
  );
  final CountDownLatch queriesStarted = new CountDownLatch(2);
  final CountDownLatch queriesInterrupted = new CountDownLatch(2);
  final CountDownLatch queryIsRegistered = new CountDownLatch(1);
  Capture<ListenableFuture> capturedFuture = new Capture<>();
  QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
  watcher.registerQuery(
      EasyMock.<Query>anyObject(),
      EasyMock.and(EasyMock.<ListenableFuture>anyObject(), EasyMock.capture(capturedFuture))
  );
  EasyMock.expectLastCall().andAnswer(
      new IAnswer<Void>() {
        @Override
        public Void answer() throws Throwable {
          queryIsRegistered.countDown();
          return null;
        }
      }
  ).once();
  EasyMock.replay(watcher);

  ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
  Set<DyingQueryRunner> runners = Sets.newHashSet(
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted)
  );
  ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(
      exec,
      watcher,
      Lists.<QueryRunner<Integer>>newArrayList(runners)
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  final Sequence seq = chainedRunner.run(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .intervals("2014/2015")
            .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
            .context(ImmutableMap.<String, Object>of(QueryContextKeys.TIMEOUT, 100, "queryId", "test"))
            .build(),
      context
  );
  Future resultFuture = Executors.newFixedThreadPool(1).submit(
      new Runnable() {
        @Override
        public void run() {
          Sequences.toList(seq, Lists.newArrayList());
        }
      }
  );

  // wait for query to register and start
  queryIsRegistered.await();
  queriesStarted.await();
  Assert.assertTrue(capturedFuture.hasCaptured());
  ListenableFuture future = capturedFuture.getValue();

  // wait for query to time out
  QueryInterruptedException cause = null;
  try {
    resultFuture.get();
  } catch (ExecutionException e) {
    Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
    Assert.assertEquals("Query timeout", ((QueryInterruptedException) e.getCause()).getErrorCode());
    cause = (QueryInterruptedException) e.getCause();
  }
  queriesInterrupted.await();
  Assert.assertNotNull(cause);
  Assert.assertTrue(future.isCancelled());

  DyingQueryRunner interrupted1 = interrupted.poll();
  synchronized (interrupted1) {
    Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
    Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
  }
  DyingQueryRunner interrupted2 = interrupted.poll();
  synchronized (interrupted2) {
    Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
    Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
  }
  runners.remove(interrupted1);
  runners.remove(interrupted2);
  DyingQueryRunner remainingRunner = runners.iterator().next();
  synchronized (remainingRunner) {
    Assert.assertTrue(
        "runner 3 should be interrupted or not have started",
        !remainingRunner.hasStarted || remainingRunner.interrupted
    );
  }
  Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
  Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
  Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
  EasyMock.verify(watcher);
}
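For contrast, here is a minimal sketch of the same timeout-and-interrupt pattern in plain java.util.concurrent, with no Druid classes: submit work, cancel it with interruption once a deadline passes, and observe the interrupt inside the task, much as cancelling the registered future interrupts the DyingQueryRunners above. The pool size, sleep, and timeout values are illustrative assumptions, not taken from the test.

import java.util.concurrent.*;

public class TimeoutCancellationSketch {
  public static void main(String[] args) throws Exception {
    ExecutorService exec = Executors.newFixedThreadPool(2);
    final CountDownLatch interrupted = new CountDownLatch(1);
    Future<?> work = exec.submit(
        new Runnable() {
          @Override
          public void run() {
            try {
              Thread.sleep(60000); // stands in for a long-running query
            } catch (InterruptedException e) {
              interrupted.countDown(); // cancel(true) below delivers this interrupt
            }
          }
        }
    );
    try {
      work.get(100, TimeUnit.MILLISECONDS); // wait up to the "query timeout"
    } catch (TimeoutException e) {
      work.cancel(true); // interrupt the worker, as the watcher's future cancellation would
    }
    interrupted.await();
    System.out.println("cancelled=" + work.isCancelled()); // prints cancelled=true
    exec.shutdownNow();
  }
}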
Use of java.util.concurrent.ExecutorService in project druid by druid-io.
From the class GroupByQueryRunnerTest, the method testBySegmentResults:
@Test
public void testBySegmentResults() {
  int segmentCount = 32;
  Result<BySegmentResultValue> singleSegmentResult = new Result<BySegmentResultValue>(
      new DateTime("2011-01-12T00:00:00.000Z"),
      new BySegmentResultValueClass(
          Arrays.asList(
              GroupByQueryRunnerTestHelper.createExpectedRow(
                  "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L
              )
          ),
          "testSegment",
          new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
      )
  );
  List<Result> bySegmentResults = Lists.newArrayList();
  for (int i = 0; i < segmentCount; i++) {
    bySegmentResults.add(singleSegmentResult);
  }
  GroupByQuery.Builder builder = GroupByQuery
      .builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
      .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
      .setContext(ImmutableMap.<String, Object>of("bySegment", true));
  final GroupByQuery fullQuery = builder.build();
  QueryToolChest toolChest = factory.getToolchest();
  List<QueryRunner<Row>> singleSegmentRunners = Lists.newArrayList();
  for (int i = 0; i < segmentCount; i++) {
    singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
  }
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner theRunner = toolChest.postMergeQueryDecoration(
      new FinalizeResultsQueryRunner<>(
          // merge on the pool created above rather than on a second, never-shut-down pool
          toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
          toolChest
      )
  );
  TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(fullQuery, Maps.newHashMap()), "");
  exec.shutdownNow();
}
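A note on the teardown these tests share: shutdownNow() only delivers interrupts and returns immediately, so a test that must not leak threads will typically also await termination. A small helper along these lines (a sketch, not from the Druid codebase; the 10-second bound is an assumption):

static void shutdownAndAwait(ExecutorService exec) {
  exec.shutdownNow(); // request interruption of any running tasks
  try {
    if (!exec.awaitTermination(10, TimeUnit.SECONDS)) {
      System.err.println("executor did not terminate in time");
    }
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt(); // preserve the caller's interrupt status
  }
}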
Use of java.util.concurrent.ExecutorService in project druid by druid-io.
From the class SegmentMetadataQueryTest, the method testSegmentMetadataQueryWithDefaultAnalysisMerge:
private void testSegmentMetadataQueryWithDefaultAnalysisMerge(String column, ColumnAnalysis analysis) {
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : "testSegment",
      ImmutableList.of(expectedSegmentAnalysis1.getIntervals().get(0)),
      ImmutableMap.of(
          "__time", new ColumnAnalysis(ValueType.LONG.toString(), false, 12090 * 2, null, null, null, null),
          "index", new ColumnAnalysis(ValueType.FLOAT.toString(), false, 9672 * 2, null, null, null, null),
          column, analysis
      ),
      expectedSegmentAnalysis1.getSize() + expectedSegmentAnalysis2.getSize(),
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  // note: the merge below runs on a same-thread executor; this pool is created
  // and torn down but not used by the merge itself
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  Query query = testQuery.withColumns(new ListColumnIncluderator(Arrays.asList("__time", "index", column)));
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(query, Maps.newHashMap()),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
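MoreExecutors.sameThreadExecutor() is Guava's executor that runs each task synchronously on the calling thread, which keeps the two-runner merge deterministic. Conceptually it behaves like this minimal Executor (the Guava version additionally implements the full ExecutorService interface):

import java.util.concurrent.Executor;

Executor sameThread = new Executor() {
  @Override
  public void execute(Runnable command) {
    command.run(); // no thread handoff: the task completes before execute() returns
  }
};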
Use of java.util.concurrent.ExecutorService in project druid by druid-io.
From the class SegmentMetadataQueryTest, the method testBySegmentResults:
@Test
public void testBySegmentResults() {
  Result<BySegmentResultValue> bySegmentResult = new Result<BySegmentResultValue>(
      expectedSegmentAnalysis1.getIntervals().get(0).getStart(),
      new BySegmentResultValueClass(
          Arrays.asList(expectedSegmentAnalysis1),
          expectedSegmentAnalysis1.getId(),
          testQuery.getIntervals().get(0)
      )
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  QueryRunner singleSegmentQueryRunner = toolChest.preMergeQueryDecoration(runner1);
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              // the bug surfaces only when ordering is used, which happens only
              // when there are two things to compare
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(singleSegmentQueryRunner, singleSegmentQueryRunner)
          )
      ),
      toolChest
  );
  TestHelper.assertExpectedObjects(
      ImmutableList.of(bySegmentResult, bySegmentResult),
      myRunner.run(testQuery.withOverriddenContext(ImmutableMap.<String, Object>of("bySegment", true)), Maps.newHashMap()),
      "failed SegmentMetadata bySegment query"
  );
  exec.shutdownNow();
}
Use of java.util.concurrent.ExecutorService in project druid by druid-io.
From the class SegmentMetadataQueryTest, the method testSegmentMetadataQueryWithComplexColumnMerge:
@Test
public void testSegmentMetadataQueryWithComplexColumnMerge() {
  SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
      differentIds ? "merged" : "testSegment",
      null,
      ImmutableMap.of(
          "placement", new ColumnAnalysis(ValueType.STRING.toString(), false, 0, 1, null, null, null),
          "quality_uniques", new ColumnAnalysis("hyperUnique", false, 0, null, null, null, null)
      ),
      0,
      expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
      null,
      null,
      null,
      null
  );
  QueryToolChest toolChest = FACTORY.getToolchest();
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner myRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          FACTORY.mergeRunners(
              MoreExecutors.sameThreadExecutor(),
              Lists.<QueryRunner<SegmentAnalysis>>newArrayList(
                  toolChest.preMergeQueryDecoration(runner1),
                  toolChest.preMergeQueryDecoration(runner2)
              )
          )
      ),
      toolChest
  );
  TestHelper.assertExpectedObjects(
      ImmutableList.of(mergedSegmentAnalysis),
      myRunner.run(
          Druids.newSegmentMetadataQueryBuilder()
                .dataSource("testing")
                .intervals("2013/2014")
                .toInclude(new ListColumnIncluderator(Arrays.asList("placement", "quality_uniques")))
                .analysisTypes(SegmentMetadataQuery.AnalysisType.CARDINALITY)
                .merge(true)
                .build(),
          Maps.newHashMap()
      ),
      "failed SegmentMetadata merging query"
  );
  exec.shutdownNow();
}
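All of these tests follow the same shape: fan work out per segment on an ExecutorService, then merge the per-segment results. Stripped of the Druid-specific toolchest plumbing, the fan-out-and-collect half of that pattern looks roughly like the sketch below in plain java.util.concurrent; the segment count and task bodies are illustrative assumptions.

import java.util.concurrent.*;

public class PerSegmentFanOutSketch {
  public static void main(String[] args) throws Exception {
    ExecutorService exec = Executors.newCachedThreadPool();
    CompletionService<String> completion = new ExecutorCompletionService<>(exec);
    int segmentCount = 3;
    for (int i = 0; i < segmentCount; i++) {
      final int segment = i;
      completion.submit(
          new Callable<String>() {
            @Override
            public String call() {
              return "analysis of segment " + segment; // hypothetical per-segment result
            }
          }
      );
    }
    for (int i = 0; i < segmentCount; i++) {
      System.out.println(completion.take().get()); // results arrive in completion order
    }
    exec.shutdownNow();
  }
}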