
Example 46 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class SearchQueryRunnerTest, method testSearchWithCardinality.

@Test
public void testSearchWithCardinality() {
    final SearchQuery searchQuery = Druids.newSearchQueryBuilder()
            .dataSource(QueryRunnerTestHelper.dataSource)
            .granularity(QueryRunnerTestHelper.allGran)
            .intervals(QueryRunnerTestHelper.fullOnInterval)
            .query("a")
            .build();
    // Run the query as two sub-interval queries and let the toolchest merge the partial results.
    QueryRunner mergedRunner = toolChest.mergeResults(new QueryRunner<Result<SearchResultValue>>() {

        @Override
        public Sequence<Result<SearchResultValue>> run(Query<Result<SearchResultValue>> query, Map<String, Object> responseContext) {
            final Query<Result<SearchResultValue>> query1 = searchQuery.withQuerySegmentSpec(
                    new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-01-12/2011-02-28"))));
            final Query<Result<SearchResultValue>> query2 = searchQuery.withQuerySegmentSpec(
                    new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-03-01/2011-04-15"))));
            return Sequences.concat(runner.run(query1, responseContext), runner.run(query2, responseContext));
        }
    });
    List<SearchHit> expectedHits = Lists.newLinkedList();
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 91));
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 273));
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 91));
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 91));
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 91));
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 182));
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "a", 91));
    expectedHits.add(new SearchHit(QueryRunnerTestHelper.partialNullDimension, "value", 182));
    checkSearchQuery(searchQuery, mergedRunner, expectedHits);
}
Also used : SearchQuery(io.druid.query.search.search.SearchQuery) Query(io.druid.query.Query) SearchHit(io.druid.query.search.search.SearchHit) MultipleIntervalSegmentSpec(io.druid.query.spec.MultipleIntervalSegmentSpec) Sequence(io.druid.java.util.common.guava.Sequence) QueryRunner(io.druid.query.QueryRunner) Result(io.druid.query.Result) Interval(org.joda.time.Interval) Test(org.junit.Test)
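
Example 46 leans on Sequences.concat to stitch the two partial runs together. Here is a minimal standalone sketch (not from the Druid sources; the class name and values are illustrative) of the behavior it relies on: concat chains sequences lazily and preserves input order.

import com.google.common.collect.Lists;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;

import java.util.Arrays;

public class SequenceConcatSketch {

    public static void main(String[] args) {
        // Two already-materialized inputs standing in for the per-interval query runs.
        Sequence<Integer> first = Sequences.simple(Arrays.asList(1, 2));
        Sequence<Integer> second = Sequences.simple(Arrays.asList(3, 4));
        // concat does no work yet; the combined sequence is consumed lazily.
        Sequence<Integer> both = Sequences.concat(first, second);
        // Materializing preserves input order: prints [1, 2, 3, 4].
        System.out.println(Sequences.toList(both, Lists.<Integer>newArrayList()));
    }
}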

Example 47 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class DumpSegment, method executeQuery.

private static <T> Sequence<T> executeQuery(final Injector injector, final QueryableIndex index, final Query<T> query) {
    final QueryRunnerFactoryConglomerate conglomerate = injector.getInstance(QueryRunnerFactoryConglomerate.class);
    final QueryRunnerFactory factory = conglomerate.findFactory(query);
    final QueryRunner<T> runner = factory.createRunner(new QueryableIndexSegment("segment", index));
    final Sequence results = factory.getToolchest()
            .mergeResults(factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.<QueryRunner>of(runner)))
            .run(query, Maps.<String, Object>newHashMap());
    // findFactory returns an untyped factory, so the merged result needs an unchecked cast back to Sequence<T>.
    return (Sequence<T>) results;
}
Also used : QueryableIndexSegment(io.druid.segment.QueryableIndexSegment) QueryRunnerFactoryConglomerate(io.druid.query.QueryRunnerFactoryConglomerate) QueryRunnerFactory(io.druid.query.QueryRunnerFactory) Sequence(io.druid.java.util.common.guava.Sequence)
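
A note on the design: DumpSegment inspects exactly one segment, so mergeRunners is handed MoreExecutors.sameThreadExecutor() and a single-element runner list. There is nothing to parallelize, and running on the calling thread keeps the tool simple. The raw Sequence and the unchecked cast back to Sequence<T> are the price of looking the factory up through the untyped conglomerate.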

Example 48 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class AsyncQueryRunnerTest, method testAsyncNature.

@Test(timeout = TEST_TIMEOUT)
public void testAsyncNature() throws Exception {
    final CountDownLatch latch = new CountDownLatch(1);
    // The base runner blocks on the latch, standing in for a slow downstream query.
    QueryRunner baseRunner = new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            try {
                latch.await();
                return Sequences.simple(Lists.newArrayList(1));
            } catch (InterruptedException ex) {
                throw Throwables.propagate(ex);
            }
        }
    };
    AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>(baseRunner, executor, QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    // run() returns a lazy Sequence immediately; baseRunner is still blocked on the latch.
    Sequence lazy = asyncRunner.run(query, Collections.EMPTY_MAP);
    latch.countDown();
    // Only materializing the sequence waits for the asynchronous result.
    Assert.assertEquals(Lists.newArrayList(1), Sequences.toList(lazy, Lists.newArrayList()));
}
Also used : Sequence(io.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) ImmutableMap(com.google.common.collect.ImmutableMap) Map(java.util.Map) Test(org.junit.Test)
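
A note on the harness: the @Test(timeout = TEST_TIMEOUT) annotation is what turns a regression into a clean failure. If AsyncQueryRunner ever invoked its base runner synchronously, run() would block on the latch before the test thread reached countDown(), and JUnit would kill the test at the timeout instead of hanging the build.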

Example 49 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class ChainedExecutionQueryRunnerTest, method testQueryCancellation.

@Test(timeout = 60000)
public void testQueryCancellation() throws Exception {
    ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {

        @Override
        public String getFormatString() {
            return "test";
        }

        @Override
        public int getNumThreads() {
            return 2;
        }
    });
    final CountDownLatch queriesStarted = new CountDownLatch(2);
    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
    final CountDownLatch queryIsRegistered = new CountDownLatch(1);
    Capture<ListenableFuture> capturedFuture = EasyMock.newCapture();
    QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
    watcher.registerQuery(
            EasyMock.<Query>anyObject(),
            EasyMock.and(EasyMock.<ListenableFuture>anyObject(), EasyMock.capture(capturedFuture)));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {

        @Override
        public Void answer() throws Throwable {
            queryIsRegistered.countDown();
            return null;
        }
    }).once();
    EasyMock.replay(watcher);
    ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
    Set<DyingQueryRunner> runners = Sets.newHashSet(
            new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
            new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
            new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted));
    ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(exec, watcher, Lists.<QueryRunner<Integer>>newArrayList(runners));
    Map<String, Object> context = ImmutableMap.<String, Object>of();
    final Sequence seq = chainedRunner.run(Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .intervals("2014/2015")
            .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
            .build(), context);
    Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {

        @Override
        public void run() {
            Sequences.toList(seq, Lists.newArrayList());
        }
    });
    // wait for query to register and start
    queryIsRegistered.await();
    queriesStarted.await();
    // cancel the query
    Assert.assertTrue(capturedFuture.hasCaptured());
    ListenableFuture future = capturedFuture.getValue();
    future.cancel(true);
    QueryInterruptedException cause = null;
    try {
        resultFuture.get();
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
        cause = (QueryInterruptedException) e.getCause();
    }
    queriesInterrupted.await();
    Assert.assertNotNull(cause);
    Assert.assertTrue(future.isCancelled());
    DyingQueryRunner interrupted1 = interrupted.poll();
    synchronized (interrupted1) {
        Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
        Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
    }
    DyingQueryRunner interrupted2 = interrupted.poll();
    synchronized (interrupted2) {
        Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
        Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
    }
    runners.remove(interrupted1);
    runners.remove(interrupted2);
    DyingQueryRunner remainingRunner = runners.iterator().next();
    synchronized (remainingRunner) {
        Assert.assertTrue("runner 3 should be interrupted or not have started", !remainingRunner.hasStarted || remainingRunner.interrupted);
    }
    Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
    Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
    Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
    EasyMock.verify(watcher);
}
Also used : Lifecycle(io.druid.java.util.common.lifecycle.Lifecycle) Sequence(io.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) IAnswer(org.easymock.IAnswer) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) ExecutorService(java.util.concurrent.ExecutorService) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)
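
The arithmetic behind the assertions: the pool has two threads and there are three runners, so at most two runners can be in flight when future.cancel(true) fires. That is why the test drains exactly two runners from the interrupted queue and asserts they both started and were interrupted, while the third runner is only required to be either interrupted or never started. None of the three may have completed.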

Example 50 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class QueryResource, method doPost.

@POST
@Produces({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE })
@Consumes({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE })
public Response doPost(
        InputStream in,
        @QueryParam("pretty") String pretty,
        // used to get request content-type, remote address and AuthorizationInfo
        @Context final HttpServletRequest req) throws IOException {
    final long start = System.currentTimeMillis();
    Query query = null;
    QueryToolChest toolChest = null;
    String queryId = null;
    final ResponseContext context = createContext(req.getContentType(), pretty != null);
    final String currThreadName = Thread.currentThread().getName();
    try {
        query = context.getObjectMapper().readValue(in, Query.class);
        queryId = query.getId();
        if (queryId == null) {
            queryId = UUID.randomUUID().toString();
            query = query.withId(queryId);
        }
        if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
            query = query.withOverriddenContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, config.getMaxIdleTime().toStandardDuration().getMillis()));
        }
        toolChest = warehouse.getToolChest(query);
        Thread.currentThread().setName(String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId));
        if (log.isDebugEnabled()) {
            log.debug("Got query [%s]", query);
        }
        if (authConfig.isEnabled()) {
            // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424
            AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN);
            if (authorizationInfo != null) {
                for (String dataSource : query.getDataSource().getNames()) {
                    Access authResult = authorizationInfo.isAuthorized(new Resource(dataSource, ResourceType.DATASOURCE), Action.READ);
                    if (!authResult.isAllowed()) {
                        return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build();
                    }
                }
            } else {
                throw new ISE("WTF?! Security is enabled but no authorization info found in the request");
            }
        }
        String prevEtag = req.getHeader(HDR_IF_NONE_MATCH);
        if (prevEtag != null) {
            query = query.withOverriddenContext(ImmutableMap.of(HDR_IF_NONE_MATCH, prevEtag));
        }
        final Map<String, Object> responseContext = new MapMaker().makeMap();
        final Sequence res = query.run(texasRanger, responseContext);
        if (prevEtag != null && prevEtag.equals(responseContext.get(HDR_ETAG))) {
            return Response.notModified().build();
        }
        final Sequence results;
        if (res == null) {
            results = Sequences.empty();
        } else {
            results = res;
        }
        final Yielder yielder = Yielders.each(results);
        try {
            final Query theQuery = query;
            final QueryToolChest theToolChest = toolChest;
            final ObjectWriter jsonWriter = context.newOutputWriter();
            Response.ResponseBuilder builder = Response.ok(new StreamingOutput() {

                @Override
                public void write(OutputStream outputStream) throws IOException, WebApplicationException {
                    try {
                        // json serializer will always close the yielder
                        CountingOutputStream os = new CountingOutputStream(outputStream);
                        jsonWriter.writeValue(os, yielder);
                        // Some types of OutputStream suppress flush errors in the .close() method.
                        os.flush();
                        os.close();
                        successfulQueryCount.incrementAndGet();
                        final long queryTime = System.currentTimeMillis() - start;
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr())
                                .setDimension("success", "true")
                                .build("query/time", queryTime));
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr())
                                .build("query/bytes", os.getCount()));
                        requestLogger.log(new RequestLogLine(
                                new DateTime(start),
                                req.getRemoteAddr(),
                                theQuery,
                                new QueryStats(ImmutableMap.<String, Object>of(
                                        "query/time", queryTime, "query/bytes", os.getCount(), "success", true))));
                    } finally {
                        Thread.currentThread().setName(currThreadName);
                    }
                }
            }, context.getContentType()).header("X-Druid-Query-Id", queryId);
            if (responseContext.get(HDR_ETAG) != null) {
                builder.header(HDR_ETAG, responseContext.get(HDR_ETAG));
                responseContext.remove(HDR_ETAG);
            }
            // Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
            // Note that Response.ResponseBuilder.header(String key, Object value).build() calls value.toString()
            // and encodes the string using ASCII, so 1 char = 1 byte.
            String responseCtxString = jsonMapper.writeValueAsString(responseContext);
            if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
                log.warn("Response Context truncated for id [%s] . Full context is [%s].", queryId, responseCtxString);
                responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
            }
            return builder.header("X-Druid-Response-Context", responseCtxString).build();
        } catch (Exception e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw Throwables.propagate(e);
        } finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
        }
    } catch (QueryInterruptedException e) {
        try {
            log.warn(e, "Exception while processing queryId [%s]", queryId);
            interruptedQueryCount.incrementAndGet();
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr())
                    .setDimension("success", "false")
                    .build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(
                    new DateTime(start),
                    req.getRemoteAddr(),
                    query,
                    new QueryStats(ImmutableMap.<String, Object>of(
                            "query/time", queryTime, "success", false, "interrupted", true, "reason", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", query);
        }
        return context.gotError(e);
    } catch (Exception e) {
        // Input stream has already been consumed by the json object mapper if query == null
        final String queryString = query == null ? "unparsable query" : query.toString();
        log.warn(e, "Exception occurred on request [%s]", queryString);
        failedQueryCount.incrementAndGet();
        try {
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr())
                    .setDimension("success", "false")
                    .build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(
                    new DateTime(start),
                    req.getRemoteAddr(),
                    query,
                    new QueryStats(ImmutableMap.<String, Object>of(
                            "query/time", queryTime, "success", false, "exception", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", queryString);
        }
        log.makeAlert(e, "Exception handling request").addData("exception", e.toString()).addData("query", queryString).addData("peer", req.getRemoteAddr()).emit();
        return context.gotError(e);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used : Query(io.druid.query.Query) CountingOutputStream(com.google.common.io.CountingOutputStream) OutputStream(java.io.OutputStream) Access(io.druid.server.security.Access) StreamingOutput(javax.ws.rs.core.StreamingOutput) QueryToolChest(io.druid.query.QueryToolChest) DateTime(org.joda.time.DateTime) ISE(io.druid.java.util.common.ISE) QueryInterruptedException(io.druid.query.QueryInterruptedException) Yielder(io.druid.java.util.common.guava.Yielder) Resource(io.druid.server.security.Resource) MapMaker(com.google.common.collect.MapMaker) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) Sequence(io.druid.java.util.common.guava.Sequence) AuthorizationInfo(io.druid.server.security.AuthorizationInfo) WebApplicationException(javax.ws.rs.WebApplicationException) IOException(java.io.IOException) Response(javax.ws.rs.core.Response) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces) Consumes(javax.ws.rs.Consumes)
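
Example 50's StreamingOutput hands a Yielder to the JSON writer, which walks it element by element and then closes it. Here is a minimal standalone sketch (not from QueryResource; the class name and values are illustrative) of that hand-iteration and the close-after-iterating rule the resource's comments describe.

import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.guava.Yielder;
import io.druid.java.util.common.guava.Yielders;

import java.util.Arrays;

public class YielderSketch {

    public static void main(String[] args) throws Exception {
        Sequence<Integer> results = Sequences.simple(Arrays.asList(1, 2, 3));
        // Yielders.each produces one element per yield, like the serializer above consumes.
        Yielder<Integer> yielder = Yielders.each(results);
        try {
            while (!yielder.isDone()) {
                System.out.println(yielder.get());
                yielder = yielder.next(null);
            }
        } finally {
            // Whoever iterates the yielder must close it; QueryResource closes it
            // explicitly only when serialization fails before streaming starts.
            yielder.close();
        }
    }
}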

Aggregations

Sequence (io.druid.java.util.common.guava.Sequence): 56
Test (org.junit.Test): 35
Interval (org.joda.time.Interval): 26
DateTime (org.joda.time.DateTime): 16
List (java.util.List): 15
Query (io.druid.query.Query): 14
Map (java.util.Map): 14
QueryRunner (io.druid.query.QueryRunner): 13
Result (io.druid.query.Result): 12
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest): 10
MergeSequence (io.druid.java.util.common.guava.MergeSequence): 9
TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue): 9
Row (io.druid.data.input.Row): 8
ImmutableMap (com.google.common.collect.ImmutableMap): 7
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 7
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 7
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 7
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec): 7
ArrayList (java.util.ArrayList): 7
MapMaker (com.google.common.collect.MapMaker): 6