
Example 11 with Sequence

use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

the class ChainedExecutionQueryRunnerTest method testQueryTimeout.

@Test(timeout = 60000)
public void testQueryTimeout() throws Exception {
    ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {

        @Override
        public String getFormatString() {
            return "test";
        }

        @Override
        public int getNumThreads() {
            return 2;
        }
    });
    final CountDownLatch queriesStarted = new CountDownLatch(2);
    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
    final CountDownLatch queryIsRegistered = new CountDownLatch(1);
    Capture<ListenableFuture> capturedFuture = new Capture<>();
    QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
    watcher.registerQuery(EasyMock.<Query>anyObject(), EasyMock.and(EasyMock.<ListenableFuture>anyObject(), EasyMock.capture(capturedFuture)));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {

        @Override
        public Void answer() throws Throwable {
            queryIsRegistered.countDown();
            return null;
        }
    }).once();
    EasyMock.replay(watcher);
    ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
    Set<DyingQueryRunner> runners = Sets.newHashSet(
        new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
        new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
        new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted)
    );
    ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(exec, watcher, Lists.<QueryRunner<Integer>>newArrayList(runners));
    HashMap<String, Object> context = new HashMap<String, Object>();
    final Sequence seq = chainedRunner.run(
        Druids.newTimeseriesQueryBuilder()
              .dataSource("test")
              .intervals("2014/2015")
              .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
              .context(ImmutableMap.<String, Object>of(QueryContextKeys.TIMEOUT, 100, "queryId", "test"))
              .build(),
        context
    );
    Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {

        @Override
        public void run() {
            Sequences.toList(seq, Lists.newArrayList());
        }
    });
    // wait for query to register and start
    queryIsRegistered.await();
    queriesStarted.await();
    Assert.assertTrue(capturedFuture.hasCaptured());
    ListenableFuture future = capturedFuture.getValue();
    // wait for query to time out
    QueryInterruptedException cause = null;
    try {
        resultFuture.get();
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
        Assert.assertEquals("Query timeout", ((QueryInterruptedException) e.getCause()).getErrorCode());
        cause = (QueryInterruptedException) e.getCause();
    }
    queriesInterrupted.await();
    Assert.assertNotNull(cause);
    Assert.assertTrue(future.isCancelled());
    DyingQueryRunner interrupted1 = interrupted.poll();
    synchronized (interrupted1) {
        Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
        Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
    }
    DyingQueryRunner interrupted2 = interrupted.poll();
    synchronized (interrupted2) {
        Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
        Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
    }
    runners.remove(interrupted1);
    runners.remove(interrupted2);
    DyingQueryRunner remainingRunner = runners.iterator().next();
    synchronized (remainingRunner) {
        Assert.assertTrue("runner 3 should be interrupted or not have started", !remainingRunner.hasStarted || remainingRunner.interrupted);
    }
    Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
    Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
    Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
    EasyMock.verify(watcher);
}
Also used : HashMap(java.util.HashMap) Capture(org.easymock.Capture) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) ExecutionException(java.util.concurrent.ExecutionException) Lifecycle(io.druid.java.util.common.lifecycle.Lifecycle) Sequence(io.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) IAnswer(org.easymock.IAnswer) ExecutorService(java.util.concurrent.ExecutorService) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) Future(java.util.concurrent.Future) Test(org.junit.Test)
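
The timeout here only trips because a Sequence is lazy: chainedRunner.run(...) returns immediately, and the DyingQueryRunners are not invoked until the sequence is consumed, which is why the test pushes Sequences.toList onto a separate thread before waiting on the latches. A minimal sketch of that lazy contract using the helpers from io.druid.java.util.common.guava (the integer values and variable names are illustrative, not part of the test):

// Building a Sequence does no work yet.
Sequence<Integer> lazy = Sequences.simple(Arrays.asList(1, 2, 3));

// Evaluation happens only when the sequence is accumulated into a list...
List<Integer> materialized = Sequences.toList(lazy, Lists.<Integer>newArrayList());

// ...or folded directly with Sequence.accumulate(initValue, accumulator).
Integer sum = lazy.accumulate(0, new Accumulator<Integer, Integer>() {

    @Override
    public Integer accumulate(Integer accumulated, Integer in) {
        return accumulated + in;
    }
});

In the test above, that accumulation blocks inside the runners until the 100 ms query timeout fires and the watcher's captured future is cancelled.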

Example 12 with Sequence

use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

the class RetryQueryRunnerTest method testNoDuplicateRetry.

@Test
public void testNoDuplicateRetry() throws Exception {
    Map<String, Object> context = new MapMaker().makeMap();
    context.put("count", 0);
    context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
    RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
            if ((int) context.get("count") == 0) {
                // assume 2 missing segments at first run
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
                context.put("count", 1);
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            } else if ((int) context.get("count") == 1) {
                // this is first retry
                Assert.assertTrue("Should retry with 2 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 2);
                // assume only left 1 missing at first retry
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
                context.put("count", 2);
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            } else {
                // this is second retry
                Assert.assertTrue("Should retry with 1 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 1);
                // assume no more missing at second retry
                context.put("count", 3);
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            }
        }
    }, (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()), new RetryQueryRunnerConfig() {

        private int numTries = 2;

        private boolean returnPartialResults = false;

        public int getNumTries() {
            return numTries;
        }

        public boolean returnPartialResults() {
            return returnPartialResults;
        }
    }, jsonMapper);
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
    Assert.assertTrue("Should return a list with 3 elements", ((List) actualResults).size() == 3);
    Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) MapMaker(com.google.common.collect.MapMaker) Sequence(io.druid.java.util.common.guava.Sequence) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) DateTime(org.joda.time.DateTime) List(java.util.List) Interval(org.joda.time.Interval) Test(org.junit.Test)
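
What drives the retry is the shared response context: the delegate records missing segments under Result.MISSING_SEGMENTS_KEY, and RetryQueryRunner re-issues the query against just those descriptors until the list stays empty or numTries runs out, as the MultipleSpecificSegmentSpec assertions above show. A hedged caller-side sketch of that contract, with retryRunner and query as hypothetical stand-ins for any RetryQueryRunner and its query:

Map<String, Object> responseContext = new MapMaker().makeMap();
responseContext.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());

// run() returns a Sequence; consuming it yields the initial results plus any retried ones.
Sequence<Result<TimeseriesResultValue>> results = retryRunner.run(query, responseContext);
List<Result<TimeseriesResultValue>> rows = Sequences.toList(results, Lists.<Result<TimeseriesResultValue>>newArrayList());

// A fully successful run leaves nothing behind in the missing-segments list.
Assert.assertTrue(((List) responseContext.get(Result.MISSING_SEGMENTS_KEY)).isEmpty());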

Example 13 with Sequence

use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

the class RetryQueryRunnerTest method testRetryMultiple.

@Test
public void testRetryMultiple() throws Exception {
    Map<String, Object> context = new MapMaker().makeMap();
    context.put("count", 0);
    context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
    RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
            if ((int) context.get("count") < 3) {
                ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
                context.put("count", (int) context.get("count") + 1);
                return Sequences.empty();
            } else {
                return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
            }
        }
    }, (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()), new RetryQueryRunnerConfig() {

        private int numTries = 4;

        private boolean returnPartialResults = true;

        public int getNumTries() {
            return numTries;
        }

        public boolean returnPartialResults() {
            return returnPartialResults;
        }
    }, jsonMapper);
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TimeseriesResultValue>>newArrayList());
    Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1);
    Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) MapMaker(com.google.common.collect.MapMaker) Sequence(io.druid.java.util.common.guava.Sequence) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) DateTime(org.joda.time.DateTime) List(java.util.List) Interval(org.joda.time.Interval) Test(org.junit.Test)
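
The delegate above reports "nothing yet" with Sequences.empty(); stitching per-attempt results together is what Sequences.concat is for, which is the general shape of combining an initial run with its retries. A small standalone sketch of those two helpers (not lifted from RetryQueryRunner itself):

Sequence<Integer> firstAttempt = Sequences.empty();
Sequence<Integer> retryOne = Sequences.simple(Arrays.asList(1, 2));
Sequence<Integer> retryTwo = Sequences.simple(Arrays.asList(3));

// concat chains the sequences lazily; empty ones contribute nothing.
Sequence<Integer> combined = Sequences.concat(Arrays.asList(firstAttempt, retryOne, retryTwo));

// Accumulates to [1, 2, 3].
List<Integer> merged = Sequences.toList(combined, Lists.<Integer>newArrayList());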

Example 14 with Sequence

use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

the class TimewarpOperatorTest method testEmptyFutureInterval.

@Test
public void testEmptyFutureInterval() throws Exception {
    QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> responseContext) {
            return Sequences.simple(ImmutableList.of(
                new Result<>(query.getIntervals().get(0).getStart(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
                new Result<>(query.getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3)))
            ));
        }
    }, new DateTime("2014-08-02").getMillis());
    final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder()
        .dataSource("dummy")
        .intervals("2014-08-06/2014-08-08")
        .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count")))
        .build();
    Assert.assertEquals(
        Lists.newArrayList(
            new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
            new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3)))
        ),
        Sequences.toList(queryRunner.run(query, Maps.<String, Object>newHashMap()), Lists.<Result<TimeseriesResultValue>>newArrayList())
    );
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) Sequence(io.druid.java.util.common.guava.Sequence) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) DateTime(org.joda.time.DateTime) Test(org.junit.Test)
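
Conceptually the timewarp re-stamps each result as the delegate's sequence streams through, which is what Sequences.map (taking a Guava Function) does. A simplified sketch of that idea with a fixed one-week shift (baseSequence and the offset are illustrative; TimewarpOperator derives the real offset from its data interval, period, and origin):

final Period offset = new Period("P1W");

Sequence<Result<TimeseriesResultValue>> shifted = Sequences.map(baseSequence, new Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>>() {

    @Override
    public Result<TimeseriesResultValue> apply(Result<TimeseriesResultValue> input) {
        // Move the timestamp forward by the offset; the aggregated value is untouched.
        return new Result<>(input.getTimestamp().plus(offset), input.getValue());
    }
});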

Example 15 with Sequence

use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

the class TimewarpOperatorTest method testPostProcess.

@Test
public void testPostProcess() throws Exception {
    QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> responseContext) {
            return Sequences.simple(ImmutableList.of(
                new Result<>(new DateTime(new DateTime("2014-01-09")), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
                new Result<>(new DateTime(new DateTime("2014-01-11")), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))),
                new Result<>(query.getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 5)))
            ));
        }
    }, new DateTime("2014-08-02").getMillis());
    final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder()
        .dataSource("dummy")
        .intervals("2014-07-31/2014-08-05")
        .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count")))
        .build();
    Assert.assertEquals(
        Lists.newArrayList(
            new Result<>(new DateTime("2014-07-31"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
            new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))),
            new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 5)))
        ),
        Sequences.toList(queryRunner.run(query, CONTEXT), Lists.<Result<TimeseriesResultValue>>newArrayList())
    );
    TimewarpOperator<Result<TimeBoundaryResultValue>> timeBoundaryOperator = new TimewarpOperator<>(
        new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")),
        new Period("P1W"),
        // align on Monday
        new DateTime("2014-01-06")
    );
    QueryRunner<Result<TimeBoundaryResultValue>> timeBoundaryRunner = timeBoundaryOperator.postProcess(new QueryRunner<Result<TimeBoundaryResultValue>>() {

        @Override
        public Sequence<Result<TimeBoundaryResultValue>> run(Query<Result<TimeBoundaryResultValue>> query, Map<String, Object> responseContext) {
            return Sequences.simple(ImmutableList.of(
                new Result<>(new DateTime("2014-01-12"), new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", new DateTime("2014-01-12"))))
            ));
        }
    }, new DateTime("2014-08-02").getMillis());
    final Query<Result<TimeBoundaryResultValue>> timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder().dataSource("dummy").build();
    Assert.assertEquals(
        Lists.newArrayList(new Result<>(new DateTime("2014-08-02"), new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", new DateTime("2014-08-02"))))),
        Sequences.toList(timeBoundaryRunner.run(timeBoundaryQuery, CONTEXT), Lists.<Result<TimeBoundaryResultValue>>newArrayList())
    );
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) Period(org.joda.time.Period) Sequence(io.druid.java.util.common.guava.Sequence) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) DateTime(org.joda.time.DateTime) TimeBoundaryResultValue(io.druid.query.timeboundary.TimeBoundaryResultValue) Interval(org.joda.time.Interval) Test(org.junit.Test)

Aggregations

Sequence (io.druid.java.util.common.guava.Sequence)56 Test (org.junit.Test)35 Interval (org.joda.time.Interval)26 DateTime (org.joda.time.DateTime)16 List (java.util.List)15 Query (io.druid.query.Query)14 Map (java.util.Map)14 QueryRunner (io.druid.query.QueryRunner)13 Result (io.druid.query.Result)12 GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)10 MergeSequence (io.druid.java.util.common.guava.MergeSequence)9 TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue)9 Row (io.druid.data.input.Row)8 ImmutableMap (com.google.common.collect.ImmutableMap)7 DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper)7 AggregatorFactory (io.druid.query.aggregation.AggregatorFactory)7 DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec)7 MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec)7 ArrayList (java.util.ArrayList)7 MapMaker (com.google.common.collect.MapMaker)6