Example 26 with ResponseContext

Use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.

From the class ServerManagerTest, the method testGetQueryRunnerForSegmentsWhenTimelineEntryIsMissingReportingMissingSegments:

@Test
public void testGetQueryRunnerForSegmentsWhenTimelineEntryIsMissingReportingMissingSegments() {
    final Interval interval = Intervals.of("P1d/2011-04-01");
    final SearchQuery query = searchQuery("test", interval, Granularities.ALL);
    final List<SegmentDescriptor> unknownSegments = Collections.singletonList(new SegmentDescriptor(interval, "unknown_version", 0));
    final QueryRunner<Result<SearchResultValue>> queryRunner = serverManager.getQueryRunnerForSegments(query, unknownSegments);
    final ResponseContext responseContext = DefaultResponseContext.createEmpty();
    final List<Result<SearchResultValue>> results = queryRunner.run(QueryPlus.wrap(query), responseContext).toList();
    Assert.assertTrue(results.isEmpty());
    Assert.assertNotNull(responseContext.getMissingSegments());
    Assert.assertEquals(unknownSegments, responseContext.getMissingSegments());
}
Also used : SearchQuery(org.apache.druid.query.search.SearchQuery) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) DefaultResponseContext(org.apache.druid.query.context.DefaultResponseContext) ResponseContext(org.apache.druid.query.context.ResponseContext) Interval(org.joda.time.Interval) Result(org.apache.druid.query.Result) Test(org.junit.Test)
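
This test pins down the missing-segment contract: only after the result sequence is fully drained does the shared ResponseContext carry the SegmentDescriptors the server could not resolve. A minimal sketch of that consumption pattern, built only from the calls shown above (the helper name is illustrative, not part of Druid):

// Sketch: drain a runner, then read back any segments reported as missing.
// `runner` and `query` are assumed to be built as in the test above.
static <T> List<SegmentDescriptor> runAndCollectMissingSegments(QueryRunner<T> runner, Query<T> query) {
    final ResponseContext context = DefaultResponseContext.createEmpty();
    // toList() consumes the sequence completely, so the context is populated afterwards.
    runner.run(QueryPlus.wrap(query), context).toList();
    final List<SegmentDescriptor> missing = context.getMissingSegments();
    return missing == null ? Collections.emptyList() : missing;
}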

Example 27 with ResponseContext

Use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.

From the class QueryResource, the method doPost:

@POST
@Produces({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE })
@Consumes({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE })
public Response doPost(final InputStream in, @QueryParam("pretty") final String pretty, // used to get request content-type, Accept header, remote address and auth-related headers
@Context final HttpServletRequest req) throws IOException {
    final QueryLifecycle queryLifecycle = queryLifecycleFactory.factorize();
    Query<?> query = null;
    final ResourceIOReaderWriter ioReaderWriter = createResourceIOReaderWriter(req, pretty != null);
    final String currThreadName = Thread.currentThread().getName();
    try {
        queryLifecycle.initialize(readQuery(req, in, ioReaderWriter));
        query = queryLifecycle.getQuery();
        final String queryId = query.getId();
        final String queryThreadName = StringUtils.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getTableNames(), queryId);
        Thread.currentThread().setName(queryThreadName);
        if (log.isDebugEnabled()) {
            log.debug("Got query [%s]", query);
        }
        final Access authResult = queryLifecycle.authorize(req);
        if (!authResult.isAllowed()) {
            throw new ForbiddenException(authResult.toString());
        }
        final QueryLifecycle.QueryResponse queryResponse = queryLifecycle.execute();
        final Sequence<?> results = queryResponse.getResults();
        final ResponseContext responseContext = queryResponse.getResponseContext();
        final String prevEtag = getPreviousEtag(req);
        if (prevEtag != null && prevEtag.equals(responseContext.getEntityTag())) {
            queryLifecycle.emitLogsAndMetrics(null, req.getRemoteAddr(), -1);
            successfulQueryCount.incrementAndGet();
            return Response.notModified().build();
        }
        final Yielder<?> yielder = Yielders.each(results);
        try {
            boolean shouldFinalize = QueryContexts.isFinalize(query, true);
            boolean serializeDateTimeAsLong = QueryContexts.isSerializeDateTimeAsLong(query, false) || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(query, false));
            final ObjectWriter jsonWriter = ioReaderWriter.getResponseWriter().newOutputWriter(queryLifecycle.getToolChest(), queryLifecycle.getQuery(), serializeDateTimeAsLong);
            Response.ResponseBuilder responseBuilder = Response.ok(new StreamingOutput() {

                @Override
                public void write(OutputStream outputStream) throws WebApplicationException {
                    Exception e = null;
                    CountingOutputStream os = new CountingOutputStream(outputStream);
                    try {
                        // json serializer will always close the yielder
                        jsonWriter.writeValue(os, yielder);
                        // Some types of OutputStream suppress flush errors in the .close() method.
                        os.flush();
                        os.close();
                    } catch (Exception ex) {
                        e = ex;
                        log.noStackTrace().error(ex, "Unable to send query response.");
                        throw new RuntimeException(ex);
                    } finally {
                        Thread.currentThread().setName(currThreadName);
                        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), os.getCount());
                        if (e == null) {
                            successfulQueryCount.incrementAndGet();
                        } else {
                            failedQueryCount.incrementAndGet();
                        }
                    }
                }
            }, ioReaderWriter.getResponseWriter().getResponseType()).header("X-Druid-Query-Id", queryId);
            transferEntityTag(responseContext, responseBuilder);
            DirectDruidClient.removeMagicResponseContextFields(responseContext);
            // Limit the response-context header, see https://github.com/apache/druid/issues/2331
            // Note that Response.ResponseBuilder.header(String key, Object value).build() calls value.toString()
            // and encodes the string using ASCII, so 1 char = 1 byte
            final ResponseContext.SerializationResult serializationResult = responseContext.serializeWith(jsonMapper, responseContextConfig.getMaxResponseContextHeaderSize());
            if (serializationResult.isTruncated()) {
                final String logToPrint = StringUtils.format("Response Context truncated for id [%s]. Full context is [%s].", queryId, serializationResult.getFullResult());
                if (responseContextConfig.shouldFailOnTruncatedResponseContext()) {
                    log.error(logToPrint);
                    throw new QueryInterruptedException(new TruncatedResponseContextException("Serialized response context exceeds the max size[%s]", responseContextConfig.getMaxResponseContextHeaderSize()), selfNode.getHostAndPortToUse());
                } else {
                    log.warn(logToPrint);
                }
            }
            return responseBuilder.header(HEADER_RESPONSE_CONTEXT, serializationResult.getResult()).build();
        } catch (QueryException e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw e;
        } catch (Exception e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw new RuntimeException(e);
        } finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
        }
    } catch (QueryInterruptedException e) {
        interruptedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotError(e);
    } catch (QueryTimeoutException timeout) {
        timedOutQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(timeout, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotTimeout(timeout);
    } catch (QueryCapacityExceededException cap) {
        failedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(cap, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotLimited(cap);
    } catch (QueryUnsupportedException unsupported) {
        failedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(unsupported, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotUnsupported(unsupported);
    } catch (BadJsonQueryException | ResourceLimitExceededException e) {
        interruptedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotBadQuery(e);
    } catch (ForbiddenException e) {
        // send an error response if this is thrown.
        throw e;
    } catch (Exception e) {
        failedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
        log.noStackTrace().makeAlert(e, "Exception handling request").addData("query", query != null ? jsonMapper.writeValueAsString(query) : "unparseable query").addData("peer", req.getRemoteAddr()).emit();
        return ioReaderWriter.getResponseWriter().gotError(e);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used : CountingOutputStream(com.google.common.io.CountingOutputStream) OutputStream(java.io.OutputStream) Access(org.apache.druid.server.security.Access) StreamingOutput(javax.ws.rs.core.StreamingOutput) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) ResponseContext(org.apache.druid.query.context.ResponseContext) QueryInterruptedException(org.apache.druid.query.QueryInterruptedException) ForbiddenException(org.apache.druid.server.security.ForbiddenException) QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException) TruncatedResponseContextException(org.apache.druid.query.TruncatedResponseContextException) QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) BadJsonQueryException(org.apache.druid.query.BadJsonQueryException) JsonParseException(com.fasterxml.jackson.core.JsonParseException) WebApplicationException(javax.ws.rs.WebApplicationException) QueryException(org.apache.druid.query.QueryException) BadQueryException(org.apache.druid.query.BadQueryException) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException) Response(javax.ws.rs.core.Response) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces) Consumes(javax.ws.rs.Consumes)
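
Two guard rails in doPost are worth isolating: internal entries are stripped via removeMagicResponseContextFields before the context goes into a response header, and serializeWith enforces the configured size budget because the header value is ASCII-encoded at one byte per char. A minimal sketch of the size guard, assuming a Jackson ObjectMapper is available (the helper name is illustrative; the truncation handling mirrors the code above):

// Sketch of the header-size guard: serialize the context against a byte
// budget, then decide whether truncation is tolerable.
static String serializeContextForHeader(ResponseContext context, ObjectMapper jsonMapper, int maxHeaderBytes) throws IOException {
    final ResponseContext.SerializationResult result = context.serializeWith(jsonMapper, maxHeaderBytes);
    if (result.isTruncated()) {
        // doPost logs a warning here, or fails the query with a
        // QueryInterruptedException when shouldFailOnTruncatedResponseContext()
        // is set; getFullResult() still holds the complete serialization.
    }
    return result.getResult();
}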

Example 28 with ResponseContext

Use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.

From the class TimewarpOperatorTest, the method testEmptyFutureInterval:

@Test
public void testEmptyFutureInterval() {
    QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(QueryPlus<Result<TimeseriesResultValue>> queryPlus, ResponseContext responseContext) {
            final Query<Result<TimeseriesResultValue>> query = queryPlus.getQuery();
            return Sequences.simple(ImmutableList.of(new Result<>(query.getIntervals().get(0).getStart(), new TimeseriesResultValue(ImmutableMap.of("metric", 2))), new Result<>(query.getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.of("metric", 3)))));
        }
    }, DateTimes.of("2014-08-02").getMillis());
    final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2014-08-06/2014-08-08").aggregators(Collections.singletonList(new CountAggregatorFactory("count"))).build();
    Assert.assertEquals(Lists.newArrayList(new Result<>(DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))), new Result<>(DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 3)))), queryRunner.run(QueryPlus.wrap(query)).toList());
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) ResponseContext(org.apache.druid.query.context.ResponseContext) Sequence(org.apache.druid.java.util.common.guava.Sequence) Test(org.junit.Test)
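
Since the two-argument run(QueryPlus, ResponseContext) is the only method the anonymous class needs to override (the one-argument run used at the end of the test is a convenience overload), the stub runner collapses to a lambda. The same stub in that form, a sketch assuming the test's imports:

// The stub runner from the test, written as a lambda over
// run(QueryPlus<T> queryPlus, ResponseContext responseContext).
QueryRunner<Result<TimeseriesResultValue>> stubRunner =
    (queryPlus, responseContext) -> Sequences.simple(ImmutableList.of(
        new Result<>(queryPlus.getQuery().getIntervals().get(0).getStart(), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
        new Result<>(queryPlus.getQuery().getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.of("metric", 3)))));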

Example 29 with ResponseContext

Use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.

From the class TimewarpOperatorTest, the method testPostProcess:

@Test
public void testPostProcess() {
    QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(new QueryRunner<Result<TimeseriesResultValue>>() {

        @Override
        public Sequence<Result<TimeseriesResultValue>> run(QueryPlus<Result<TimeseriesResultValue>> queryPlus, ResponseContext responseContext) {
            return Sequences.simple(ImmutableList.of(new Result<>(DateTimes.of("2014-01-09"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))), new Result<>(DateTimes.of("2014-01-11"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))), new Result<>(queryPlus.getQuery().getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.of("metric", 5)))));
        }
    }, DateTimes.of("2014-08-02").getMillis());
    final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2014-07-31/2014-08-05").aggregators(Collections.singletonList(new CountAggregatorFactory("count"))).build();
    Assert.assertEquals(Lists.newArrayList(new Result<>(DateTimes.of("2014-07-31"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))), new Result<>(DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))), new Result<>(DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 5)))), queryRunner.run(QueryPlus.wrap(query)).toList());
    TimewarpOperator<Result<TimeBoundaryResultValue>> timeBoundaryOperator = new TimewarpOperator<>(new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")), new Period("P1W"), // align on Monday
    DateTimes.of("2014-01-06"));
    QueryRunner<Result<TimeBoundaryResultValue>> timeBoundaryRunner = timeBoundaryOperator.postProcess(new QueryRunner<Result<TimeBoundaryResultValue>>() {

        @Override
        public Sequence<Result<TimeBoundaryResultValue>> run(QueryPlus<Result<TimeBoundaryResultValue>> queryPlus, ResponseContext responseContext) {
            return Sequences.simple(ImmutableList.of(new Result<>(DateTimes.of("2014-01-12"), new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", DateTimes.of("2014-01-12"))))));
        }
    }, DateTimes.of("2014-08-02").getMillis());
    final Query<Result<TimeBoundaryResultValue>> timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder().dataSource("dummy").build();
    Assert.assertEquals(Collections.singletonList(new Result<>(DateTimes.of("2014-08-02"), new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", DateTimes.of("2014-08-02"))))), timeBoundaryRunner.run(QueryPlus.wrap(timeBoundaryQuery)).toList());
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) Period(org.joda.time.Period) Sequence(org.apache.druid.java.util.common.guava.Sequence) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) ResponseContext(org.apache.druid.query.context.ResponseContext) TimeBoundaryResultValue(org.apache.druid.query.timeboundary.TimeBoundaryResultValue) Interval(org.joda.time.Interval) Test(org.junit.Test)
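
This test shows the full wiring: construct the operator with a data interval, a warp period, and an origin for alignment, then decorate any QueryRunner via postProcess with a reference time in millis. A condensed sketch of that wiring, using the same constructor and calls as the test (`baseRunner` is assumed from context):

// Warp queries into the 2014-01-01/2014-01-15 window, on weekly periods
// aligned to Monday 2014-01-06; "now" for the warp is 2014-08-02.
TimewarpOperator<Result<TimeseriesResultValue>> operator = new TimewarpOperator<>(
    new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")),
    new Period("P1W"),
    DateTimes.of("2014-01-06"));
QueryRunner<Result<TimeseriesResultValue>> warped =
    operator.postProcess(baseRunner, DateTimes.of("2014-08-02").getMillis());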

Example 30 with ResponseContext

Use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.

From the class SegmentMetadataQueryQueryToolChest, the method mergeResults:

@Override
public QueryRunner<SegmentAnalysis> mergeResults(final QueryRunner<SegmentAnalysis> runner) {
    return new BySegmentSkippingQueryRunner<SegmentAnalysis>(runner) {

        @Override
        public Sequence<SegmentAnalysis> doRun(QueryRunner<SegmentAnalysis> baseRunner, QueryPlus<SegmentAnalysis> queryPlus, ResponseContext context) {
            SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) queryPlus.getQuery()).withFinalizedAnalysisTypes(config);
            QueryPlus<SegmentAnalysis> updatedQueryPlus = queryPlus.withQuery(updatedQuery);
            return new MappedSequence<>(CombiningSequence.create(baseRunner.run(updatedQueryPlus, context), makeOrdering(updatedQuery), createMergeFn(updatedQuery)), MERGE_TRANSFORM_FN::apply);
        }

        private Ordering<SegmentAnalysis> makeOrdering(SegmentMetadataQuery query) {
            return (Ordering<SegmentAnalysis>) SegmentMetadataQueryQueryToolChest.this.createResultComparator(query);
        }

        private BinaryOperator<SegmentAnalysis> createMergeFn(final SegmentMetadataQuery inQ) {
            return SegmentMetadataQueryQueryToolChest.this.createMergeFn(inQ);
        }
    };
}
Also used : MappedSequence(org.apache.druid.java.util.common.guava.MappedSequence) SegmentMetadataQuery(org.apache.druid.query.metadata.metadata.SegmentMetadataQuery) ResponseContext(org.apache.druid.query.context.ResponseContext) Ordering(com.google.common.collect.Ordering) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) BySegmentSkippingQueryRunner(org.apache.druid.query.BySegmentSkippingQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) QueryPlus(org.apache.druid.query.QueryPlus)
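
The point of this override is easy to miss amid the generics: doRun rewrites the query (withFinalizedAnalysisTypes) but hands the same ResponseContext through to baseRunner.run, so context entries accumulated by downstream runners remain visible to the caller after merging. A minimal pass-through decorator showing just that shape, a sketch with illustrative names (UnaryOperator is java.util.function.UnaryOperator), not a Druid API:

// Sketch: rewrite the query, but always forward the shared ResponseContext.
static <T> QueryRunner<T> rewritingRunner(QueryRunner<T> baseRunner, UnaryOperator<Query<T>> rewrite) {
    return (queryPlus, responseContext) ->
        // The same context object is forwarded, exactly as doRun does above,
        // so callers still observe downstream entries after the merge.
        baseRunner.run(queryPlus.withQuery(rewrite.apply(queryPlus.getQuery())), responseContext);
}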

Aggregations

ResponseContext (org.apache.druid.query.context.ResponseContext): 65
Test (org.junit.Test): 44
QueryRunner (org.apache.druid.query.QueryRunner): 39
QueryPlus (org.apache.druid.query.QueryPlus): 35
Sequence (org.apache.druid.java.util.common.guava.Sequence): 30
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 26
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 23
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 22
Interval (org.joda.time.Interval): 19
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 16
ArrayList (java.util.ArrayList): 14
MergeSequence (org.apache.druid.java.util.common.guava.MergeSequence): 14
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 14
Result (org.apache.druid.query.Result): 12
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 12
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 11
List (java.util.List): 10
ChainedExecutionQueryRunner (org.apache.druid.query.ChainedExecutionQueryRunner): 10
SegmentDescriptor (org.apache.druid.query.SegmentDescriptor): 10
OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec): 10