
Example 1 with QueryUnsupportedException

Use of org.apache.druid.query.QueryUnsupportedException in project druid by druid-io.

From class SqlResource, method doPost:

@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response doPost(final SqlQuery sqlQuery, @Context final HttpServletRequest req) throws IOException {
    final SqlLifecycle lifecycle = sqlLifecycleFactory.factorize();
    final String sqlQueryId = lifecycle.initialize(sqlQuery.getQuery(), sqlQuery.getContext());
    final String remoteAddr = req.getRemoteAddr();
    final String currThreadName = Thread.currentThread().getName();
    try {
        Thread.currentThread().setName(StringUtils.format("sql[%s]", sqlQueryId));
        lifecycle.setParameters(sqlQuery.getParameterList());
        lifecycle.validateAndAuthorize(req);
        // must add after lifecycle is authorized
        sqlLifecycleManager.add(sqlQueryId, lifecycle);
        lifecycle.plan();
        final SqlRowTransformer rowTransformer = lifecycle.createRowTransformer();
        final Sequence<Object[]> sequence = lifecycle.execute();
        final Yielder<Object[]> yielder0 = Yielders.each(sequence);
        try {
            final Response.ResponseBuilder responseBuilder = Response.ok((StreamingOutput) outputStream -> {
                Exception e = null;
                CountingOutputStream os = new CountingOutputStream(outputStream);
                Yielder<Object[]> yielder = yielder0;
                try (final ResultFormat.Writer writer = sqlQuery.getResultFormat().createFormatter(os, jsonMapper)) {
                    writer.writeResponseStart();
                    if (sqlQuery.includeHeader()) {
                        writer.writeHeader(rowTransformer.getRowType(), sqlQuery.includeTypesHeader(), sqlQuery.includeSqlTypesHeader());
                    }
                    while (!yielder.isDone()) {
                        final Object[] row = yielder.get();
                        writer.writeRowStart();
                        for (int i = 0; i < rowTransformer.getFieldList().size(); i++) {
                            final Object value = rowTransformer.transform(row, i);
                            writer.writeRowField(rowTransformer.getFieldList().get(i), value);
                        }
                        writer.writeRowEnd();
                        yielder = yielder.next(null);
                    }
                    writer.writeResponseEnd();
                } catch (Exception ex) {
                    e = ex;
                    log.error(ex, "Unable to send SQL response [%s]", sqlQueryId);
                    throw new RuntimeException(ex);
                } finally {
                    yielder.close();
                    endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, os.getCount());
                }
            }).header(SQL_QUERY_ID_RESPONSE_HEADER, sqlQueryId);
            if (sqlQuery.includeHeader()) {
                responseBuilder.header(SQL_HEADER_RESPONSE_HEADER, SQL_HEADER_VALUE);
            }
            return responseBuilder.build();
        } catch (Throwable e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder0.close();
            throw new RuntimeException(e);
        }
    } catch (QueryCapacityExceededException cap) {
        endLifecycle(sqlQueryId, lifecycle, cap, remoteAddr, -1);
        return buildNonOkResponse(QueryCapacityExceededException.STATUS_CODE, cap, sqlQueryId);
    } catch (QueryUnsupportedException unsupported) {
        endLifecycle(sqlQueryId, lifecycle, unsupported, remoteAddr, -1);
        return buildNonOkResponse(QueryUnsupportedException.STATUS_CODE, unsupported, sqlQueryId);
    } catch (QueryTimeoutException timeout) {
        endLifecycle(sqlQueryId, lifecycle, timeout, remoteAddr, -1);
        return buildNonOkResponse(QueryTimeoutException.STATUS_CODE, timeout, sqlQueryId);
    } catch (SqlPlanningException | ResourceLimitExceededException e) {
        endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
        return buildNonOkResponse(BadQueryException.STATUS_CODE, e, sqlQueryId);
    } catch (ForbiddenException e) {
        endLifecycleWithoutEmittingMetrics(sqlQueryId, lifecycle);
        // let ForbiddenExceptionMapper handle this
        throw (ForbiddenException) serverConfig.getErrorResponseTransformStrategy().transformIfNeeded(e);
    } catch (RelOptPlanner.CannotPlanException e) {
        endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
        SqlPlanningException spe = new SqlPlanningException(SqlPlanningException.PlanningError.UNSUPPORTED_SQL_ERROR, e.getMessage());
        return buildNonOkResponse(BadQueryException.STATUS_CODE, spe, sqlQueryId);
    } catch (Throwable e) {
        // Calcite throws a java.lang.AssertionError, which is an Error rather than an Exception;
        // catching Throwable covers both.
        log.warn(e, "Failed to handle query: %s", sqlQuery);
        endLifecycle(sqlQueryId, lifecycle, e, remoteAddr, -1);
        return buildNonOkResponse(Status.INTERNAL_SERVER_ERROR.getStatusCode(), QueryInterruptedException.wrapIfNeeded(e), sqlQueryId);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used: SqlRowTransformer(org.apache.druid.sql.SqlRowTransformer), StreamingOutput(javax.ws.rs.core.StreamingOutput), RelOptPlanner(org.apache.calcite.plan.RelOptPlanner), QueryTimeoutException(org.apache.druid.query.QueryTimeoutException), CountingOutputStream(com.google.common.io.CountingOutputStream), SqlPlanningException(org.apache.druid.sql.SqlPlanningException), ForbiddenException(org.apache.druid.server.security.ForbiddenException), QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException), QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException), SqlLifecycle(org.apache.druid.sql.SqlLifecycle), BadQueryException(org.apache.druid.query.BadQueryException), SanitizableException(org.apache.druid.common.exception.SanitizableException), QueryInterruptedException(org.apache.druid.query.QueryInterruptedException), JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException), IOException(java.io.IOException), ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException), Response(javax.ws.rs.core.Response), POST(javax.ws.rs.POST), Produces(javax.ws.rs.Produces), Consumes(javax.ws.rs.Consumes)
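
Every catch branch above funnels the exception into buildNonOkResponse, which is not part of this excerpt. As a rough illustration, a helper along the following lines could produce the JSON error response. This is a hedged sketch, not the actual Druid implementation: the method shape, the SanitizableException parameter, and the sanitize-then-serialize order are assumptions extrapolated from the ForbiddenException branch above.

// Hedged sketch only; assumes the jsonMapper, serverConfig, and
// SQL_QUERY_ID_RESPONSE_HEADER fields already used in doPost above.
private Response buildNonOkResponse(int status, SanitizableException e, String sqlQueryId)
        throws JsonProcessingException {
    // Sanitize first, mirroring the ForbiddenException branch, so server-side details
    // are stripped according to the configured transform strategy.
    final Exception sanitized = serverConfig.getErrorResponseTransformStrategy().transformIfNeeded(e);
    return Response.status(status)
                   .type(MediaType.APPLICATION_JSON_TYPE)
                   .entity(jsonMapper.writeValueAsBytes(sanitized))
                   .header(SQL_QUERY_ID_RESPONSE_HEADER, sqlQueryId)
                   .build();
}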

Example 2 with QueryUnsupportedException

Use of org.apache.druid.query.QueryUnsupportedException in project druid by druid-io.

From class SqlResourceTest, method testUnsupportedQueryThrowsExceptionWithFilterResponse:

@Test
public void testUnsupportedQueryThrowsExceptionWithFilterResponse() throws Exception {
    resource = new SqlResource(JSON_MAPPER, CalciteTests.TEST_AUTHORIZER_MAPPER, sqlLifecycleFactory, lifecycleManager, new ServerConfig() {

        @Override
        public boolean isShowDetailedJettyErrors() {
            return true;
        }

        @Override
        public ErrorResponseTransformStrategy getErrorResponseTransformStrategy() {
            return new AllowedRegexErrorResponseTransformStrategy(ImmutableList.of());
        }
    });
    String errorMessage = "This will be support in Druid 9999";
    SqlQuery badQuery = EasyMock.createMock(SqlQuery.class);
    EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE");
    EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", "id"));
    EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage));
    EasyMock.replay(badQuery);
    final QueryException exception = doPost(badQuery).lhs;
    Assert.assertNotNull(exception);
    Assert.assertNull(exception.getMessage());
    Assert.assertNull(exception.getHost());
    Assert.assertEquals(exception.getErrorCode(), QueryUnsupportedException.ERROR_CODE);
    Assert.assertNull(exception.getErrorClass());
    Assert.assertTrue(lifecycleManager.getAll("id").isEmpty());
}
Also used: ServerConfig(org.apache.druid.server.initialization.ServerConfig), UnsupportedSQLQueryException(org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException), QueryException(org.apache.druid.query.QueryException), QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException), AllowedRegexErrorResponseTransformStrategy(org.apache.druid.common.exception.AllowedRegexErrorResponseTransformStrategy), Test(org.junit.Test)
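
The assertions make sense given how the configured strategy sanitizes errors: with an empty allow-list, no part of the message matches any regex, so the message, host, and error class are stripped while the error code survives. The following sketch isolates that behavior; it is illustrative, with the sanitization outcome inferred from the assertions above rather than from the strategy's source.

// Illustrative sketch; assumes QueryUnsupportedException is sanitizable, as the
// test's null-message assertion implies.
ErrorResponseTransformStrategy strategy =
        new AllowedRegexErrorResponseTransformStrategy(ImmutableList.of());
// No regex matches the message, so it is stripped during transformation.
Exception sanitized = strategy.transformIfNeeded(new QueryUnsupportedException("some detail"));
// sanitized.getMessage() == null, while QueryUnsupportedException.ERROR_CODE still
// identifies the failure, matching the assertions in the test above.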

Example 3 with QueryUnsupportedException

Use of org.apache.druid.query.QueryUnsupportedException in project druid by druid-io.

From class QueryResource, method doPost:

@POST
@Produces({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE })
@Consumes({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE })
public Response doPost(
        final InputStream in,
        @QueryParam("pretty") final String pretty,
        // used to get the request content type, Accept header, remote address, and auth-related headers
        @Context final HttpServletRequest req
) throws IOException {
    final QueryLifecycle queryLifecycle = queryLifecycleFactory.factorize();
    Query<?> query = null;
    final ResourceIOReaderWriter ioReaderWriter = createResourceIOReaderWriter(req, pretty != null);
    final String currThreadName = Thread.currentThread().getName();
    try {
        queryLifecycle.initialize(readQuery(req, in, ioReaderWriter));
        query = queryLifecycle.getQuery();
        final String queryId = query.getId();
        final String queryThreadName = StringUtils.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getTableNames(), queryId);
        Thread.currentThread().setName(queryThreadName);
        if (log.isDebugEnabled()) {
            log.debug("Got query [%s]", query);
        }
        final Access authResult = queryLifecycle.authorize(req);
        if (!authResult.isAllowed()) {
            throw new ForbiddenException(authResult.toString());
        }
        final QueryLifecycle.QueryResponse queryResponse = queryLifecycle.execute();
        final Sequence<?> results = queryResponse.getResults();
        final ResponseContext responseContext = queryResponse.getResponseContext();
        final String prevEtag = getPreviousEtag(req);
        if (prevEtag != null && prevEtag.equals(responseContext.getEntityTag())) {
            queryLifecycle.emitLogsAndMetrics(null, req.getRemoteAddr(), -1);
            successfulQueryCount.incrementAndGet();
            return Response.notModified().build();
        }
        final Yielder<?> yielder = Yielders.each(results);
        try {
            boolean shouldFinalize = QueryContexts.isFinalize(query, true);
            boolean serializeDateTimeAsLong = QueryContexts.isSerializeDateTimeAsLong(query, false) || (!shouldFinalize && QueryContexts.isSerializeDateTimeAsLongInner(query, false));
            final ObjectWriter jsonWriter = ioReaderWriter.getResponseWriter().newOutputWriter(queryLifecycle.getToolChest(), queryLifecycle.getQuery(), serializeDateTimeAsLong);
            Response.ResponseBuilder responseBuilder = Response.ok(new StreamingOutput() {

                @Override
                public void write(OutputStream outputStream) throws WebApplicationException {
                    Exception e = null;
                    CountingOutputStream os = new CountingOutputStream(outputStream);
                    try {
                        // json serializer will always close the yielder
                        jsonWriter.writeValue(os, yielder);
                        // Some types of OutputStream suppress flush errors in the .close() method.
                        os.flush();
                        os.close();
                    } catch (Exception ex) {
                        e = ex;
                        log.noStackTrace().error(ex, "Unable to send query response.");
                        throw new RuntimeException(ex);
                    } finally {
                        Thread.currentThread().setName(currThreadName);
                        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), os.getCount());
                        if (e == null) {
                            successfulQueryCount.incrementAndGet();
                        } else {
                            failedQueryCount.incrementAndGet();
                        }
                    }
                }
            }, ioReaderWriter.getResponseWriter().getResponseType()).header("X-Druid-Query-Id", queryId);
            transferEntityTag(responseContext, responseBuilder);
            DirectDruidClient.removeMagicResponseContextFields(responseContext);
            // Limit the response-context header, see https://github.com/apache/druid/issues/2331
            // Note that Response.ResponseBuilder.header(String key, Object value).build() calls value.toString()
            // and encodes the string using ASCII, so 1 char = 1 byte
            final ResponseContext.SerializationResult serializationResult = responseContext.serializeWith(jsonMapper, responseContextConfig.getMaxResponseContextHeaderSize());
            if (serializationResult.isTruncated()) {
                final String logToPrint = StringUtils.format("Response Context truncated for id [%s]. Full context is [%s].", queryId, serializationResult.getFullResult());
                if (responseContextConfig.shouldFailOnTruncatedResponseContext()) {
                    log.error(logToPrint);
                    throw new QueryInterruptedException(new TruncatedResponseContextException("Serialized response context exceeds the max size[%s]", responseContextConfig.getMaxResponseContextHeaderSize()), selfNode.getHostAndPortToUse());
                } else {
                    log.warn(logToPrint);
                }
            }
            return responseBuilder.header(HEADER_RESPONSE_CONTEXT, serializationResult.getResult()).build();
        } catch (QueryException e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw e;
        } catch (Exception e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw new RuntimeException(e);
        } finally {
            // do not close yielder here, since we do not want to close the yielder prior to
            // StreamingOutput having iterated over all the results
        }
    } catch (QueryInterruptedException e) {
        interruptedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotError(e);
    } catch (QueryTimeoutException timeout) {
        timedOutQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(timeout, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotTimeout(timeout);
    } catch (QueryCapacityExceededException cap) {
        failedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(cap, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotLimited(cap);
    } catch (QueryUnsupportedException unsupported) {
        failedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(unsupported, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotUnsupported(unsupported);
    } catch (BadJsonQueryException | ResourceLimitExceededException e) {
        interruptedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
        return ioReaderWriter.getResponseWriter().gotBadQuery(e);
    } catch (ForbiddenException e) {
        // rethrow so ForbiddenExceptionMapper can send the error response.
        throw e;
    } catch (Exception e) {
        failedQueryCount.incrementAndGet();
        queryLifecycle.emitLogsAndMetrics(e, req.getRemoteAddr(), -1);
        log.noStackTrace().makeAlert(e, "Exception handling request").addData("query", query != null ? jsonMapper.writeValueAsString(query) : "unparseable query").addData("peer", req.getRemoteAddr()).emit();
        return ioReaderWriter.getResponseWriter().gotError(e);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used: CountingOutputStream(com.google.common.io.CountingOutputStream), OutputStream(java.io.OutputStream), Access(org.apache.druid.server.security.Access), StreamingOutput(javax.ws.rs.core.StreamingOutput), QueryTimeoutException(org.apache.druid.query.QueryTimeoutException), ResponseContext(org.apache.druid.query.context.ResponseContext), QueryInterruptedException(org.apache.druid.query.QueryInterruptedException), ForbiddenException(org.apache.druid.server.security.ForbiddenException), QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException), TruncatedResponseContextException(org.apache.druid.query.TruncatedResponseContextException), QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException), ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter), BadJsonQueryException(org.apache.druid.query.BadJsonQueryException), JsonParseException(com.fasterxml.jackson.core.JsonParseException), WebApplicationException(javax.ws.rs.WebApplicationException), QueryException(org.apache.druid.query.QueryException), BadQueryException(org.apache.druid.query.BadQueryException), JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException), IOException(java.io.IOException), ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException), Response(javax.ws.rs.core.Response), POST(javax.ws.rs.POST), Produces(javax.ws.rs.Produces), Consumes(javax.ws.rs.Consumes)
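
Each catch branch here delegates to the ResourceIOReaderWriter's response writer (gotError, gotTimeout, gotLimited, gotUnsupported, gotBadQuery), none of which appear in this excerpt. A hedged sketch of what a gotUnsupported-style method could look like follows; only QueryUnsupportedException.STATUS_CODE is taken from the code above, while the method shape and serialization details are assumptions.

// Hedged sketch; the real ResponseWriter is not shown in this excerpt.
// Assumes an ObjectMapper field named jsonMapper and a JSON response type.
Response gotUnsupported(QueryUnsupportedException e) throws IOException {
    return Response.status(QueryUnsupportedException.STATUS_CODE)
                   .type(MediaType.APPLICATION_JSON)
                   .entity(jsonMapper.writeValueAsBytes(e))
                   .build();
}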

Example 4 with QueryUnsupportedException

Use of org.apache.druid.query.QueryUnsupportedException in project druid by druid-io.

From class ServerManager, method getQueryRunnerForSegments:

@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(Query<T> query, Iterable<SegmentDescriptor> specs) {
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    if (factory == null) {
        final QueryUnsupportedException e = new QueryUnsupportedException(StringUtils.format("Unknown query type, [%s]", query.getClass()));
        log.makeAlert(e, "Error while executing a query[%s]", query.getId()).addData("dataSource", query.getDataSource()).emit();
        throw e;
    }
    final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    final DataSourceAnalysis analysis = DataSourceAnalysis.forDataSource(query.getDataSource());
    final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
    final VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline;
    final Optional<VersionedIntervalTimeline<String, ReferenceCountingSegment>> maybeTimeline = segmentManager.getTimeline(analysis);
    // Make sure this query type can handle the subquery, if present.
    if (analysis.isQuery() && !toolChest.canPerformSubquery(((QueryDataSource) analysis.getDataSource()).getQuery())) {
        throw new ISE("Cannot handle subquery: %s", analysis.getDataSource());
    }
    if (maybeTimeline.isPresent()) {
        timeline = maybeTimeline.get();
    } else {
        return new ReportTimelineMissingSegmentQueryRunner<>(Lists.newArrayList(specs));
    }
    // segmentMapFn maps each base Segment into a joined Segment if necessary.
    final Function<SegmentReference, SegmentReference> segmentMapFn = joinableFactoryWrapper.createSegmentMapFn(analysis.getJoinBaseTableFilter().map(Filters::toFilter).orElse(null), analysis.getPreJoinableClauses(), cpuTimeAccumulator, analysis.getBaseQuery().orElse(query));
    // We compute the join cache key here itself so it doesn't need to be re-computed for every segment
    final Optional<byte[]> cacheKeyPrefix = analysis.isJoin() ? joinableFactoryWrapper.computeJoinDataSourceCacheKey(analysis) : Optional.of(StringUtils.EMPTY_BYTES);
    final FunctionalIterable<QueryRunner<T>> queryRunners = FunctionalIterable.create(specs).transformCat(descriptor -> Collections.singletonList(buildQueryRunnerForSegment(query, descriptor, factory, toolChest, timeline, segmentMapFn, cpuTimeAccumulator, cacheKeyPrefix)));
    return CPUTimeMetricQueryRunner.safeBuild(new FinalizeResultsQueryRunner<>(toolChest.mergeResults(factory.mergeRunners(queryProcessingPool, queryRunners)), toolChest), toolChest, emitter, cpuTimeAccumulator, true);
}
Also used: ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment), Query(org.apache.druid.query.Query), QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException), SegmentReference(org.apache.druid.segment.SegmentReference), DataSourceAnalysis(org.apache.druid.query.planning.DataSourceAnalysis), NoopQueryRunner(org.apache.druid.query.NoopQueryRunner), SpecificSegmentQueryRunner(org.apache.druid.query.spec.SpecificSegmentQueryRunner), QueryRunner(org.apache.druid.query.QueryRunner), FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner), ReportTimelineMissingSegmentQueryRunner(org.apache.druid.query.ReportTimelineMissingSegmentQueryRunner), BySegmentQueryRunner(org.apache.druid.query.BySegmentQueryRunner), SetAndVerifyContextQueryRunner(org.apache.druid.server.SetAndVerifyContextQueryRunner), PerSegmentOptimizingQueryRunner(org.apache.druid.query.PerSegmentOptimizingQueryRunner), CachingQueryRunner(org.apache.druid.client.CachingQueryRunner), MetricsEmittingQueryRunner(org.apache.druid.query.MetricsEmittingQueryRunner), ReferenceCountingSegmentQueryRunner(org.apache.druid.query.ReferenceCountingSegmentQueryRunner), CPUTimeMetricQueryRunner(org.apache.druid.query.CPUTimeMetricQueryRunner), AtomicLong(java.util.concurrent.atomic.AtomicLong), Filters(org.apache.druid.segment.filter.Filters), VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline), ISE(org.apache.druid.java.util.common.ISE)
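
The unsupported path in this method depends entirely on conglomerate.findFactory(query) returning null. To make that concrete, here is a hypothetical map-backed conglomerate; findFactory's generic signature is assumed from how ServerManager calls it, and the class is illustrative rather than Druid's actual implementation.

// Hypothetical map-backed sketch of a QueryRunnerFactoryConglomerate (imports assumed:
// java.util.Map plus the Druid query classes listed above). Any query class absent from
// the map yields null, which ServerManager (above) turns into a
// QueryUnsupportedException("Unknown query type, ...").
class MapBackedConglomerate implements QueryRunnerFactoryConglomerate {
    private final Map<Class<? extends Query>, QueryRunnerFactory> factories;

    MapBackedConglomerate(Map<Class<? extends Query>, QueryRunnerFactory> factories) {
        this.factories = factories;
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query) {
        return (QueryRunnerFactory<T, QueryType>) factories.get(query.getClass());
    }
}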

Example 5 with QueryUnsupportedException

Use of org.apache.druid.query.QueryUnsupportedException in project druid by druid-io.

From class ServerManagerForQueryErrorTest, method buildQueryRunnerForSegment:

@Override
protected <T> QueryRunner<T> buildQueryRunnerForSegment(
        Query<T> query,
        SegmentDescriptor descriptor,
        QueryRunnerFactory<T, Query<T>> factory,
        QueryToolChest<T, Query<T>> toolChest,
        VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline,
        Function<SegmentReference, SegmentReference> segmentMapFn,
        AtomicLong cpuTimeAccumulator,
        Optional<byte[]> cacheKeyPrefix
) {
    if (query.getContextBoolean(QUERY_RETRY_TEST_CONTEXT_KEY, false)) {
        final MutableBoolean isIgnoreSegment = new MutableBoolean(false);
        queryToIgnoredSegments.compute(query.getMostSpecificId(), (queryId, ignoredSegments) -> {
            if (ignoredSegments == null) {
                ignoredSegments = new HashSet<>();
            }
            if (ignoredSegments.size() < MAX_NUM_FALSE_MISSING_SEGMENTS_REPORTS) {
                ignoredSegments.add(descriptor);
                isIgnoreSegment.setTrue();
            }
            return ignoredSegments;
        });
        if (isIgnoreSegment.isTrue()) {
            LOG.info("Pretending I don't have segment[%s]", descriptor);
            return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
        }
    } else if (query.getContextBoolean(QUERY_TIMEOUT_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new QueryTimeoutException("query timeout test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new QueryTimeoutException("query timeout test");
            }
        };
    } else if (query.getContextBoolean(QUERY_CAPACITY_EXCEEDED_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity exceeded test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity exceeded test");
            }
        };
    } else if (query.getContextBoolean(QUERY_UNSUPPORTED_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new QueryUnsupportedException("query unsupported test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new QueryUnsupportedException("query unsupported test");
            }
        };
    } else if (query.getContextBoolean(RESOURCE_LIMIT_EXCEEDED_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new ResourceLimitExceededException("resource limit exceeded test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new ResourceLimitExceededException("resource limit exceeded test");
            }
        };
    } else if (query.getContextBoolean(QUERY_FAILURE_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new RuntimeException("query failure test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new RuntimeException("query failure test");
            }
        };
    }
    return super.buildQueryRunnerForSegment(query, descriptor, factory, toolChest, timeline, segmentMapFn, cpuTimeAccumulator, cacheKeyPrefix);
}
Also used: Logger(org.apache.druid.java.util.common.logger.Logger), SegmentManager(org.apache.druid.server.SegmentManager), Inject(com.google.inject.Inject), Smile(org.apache.druid.guice.annotations.Smile), QueryProcessingPool(org.apache.druid.query.QueryProcessingPool), JoinableFactory(org.apache.druid.segment.join.JoinableFactory), Function(java.util.function.Function), QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException), HashSet(java.util.HashSet), SegmentReference(org.apache.druid.segment.SegmentReference), Query(org.apache.druid.query.Query), QueryRunner(org.apache.druid.query.QueryRunner), CachePopulator(org.apache.druid.client.cache.CachePopulator), Yielder(org.apache.druid.java.util.common.guava.Yielder), Sequence(org.apache.druid.java.util.common.guava.Sequence), YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator), VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline), ServerConfig(org.apache.druid.server.initialization.ServerConfig), ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper), ReportTimelineMissingSegmentQueryRunner(org.apache.druid.query.ReportTimelineMissingSegmentQueryRunner), ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap), CacheConfig(org.apache.druid.client.cache.CacheConfig), QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate), QueryToolChest(org.apache.druid.query.QueryToolChest), Set(java.util.Set), ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment), AtomicLong(java.util.concurrent.atomic.AtomicLong), QueryTimeoutException(org.apache.druid.query.QueryTimeoutException), ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter), QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory), ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException), Optional(java.util.Optional), MutableBoolean(org.apache.commons.lang3.mutable.MutableBoolean), SegmentDescriptor(org.apache.druid.query.SegmentDescriptor), Cache(org.apache.druid.client.cache.Cache), Accumulator(org.apache.druid.java.util.common.guava.Accumulator), QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException)
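
A hedged usage sketch of how a caller might trip the unsupported branch above: the context key comes from the example itself, while the query construction (a trivial timeseries query via Druids) is purely illustrative.

// Illustrative only; assumes org.apache.druid.query.Druids,
// org.apache.druid.java.util.common.granularity.Granularities, and
// com.google.common.collect.ImmutableMap are on the classpath.
Query<?> query = Druids.newTimeseriesQueryBuilder()
        .dataSource("someDataSource")
        .intervals("2000-01-01/2001-01-01")
        .granularity(Granularities.ALL)
        .context(ImmutableMap.of(QUERY_UNSUPPORTED_TEST_CONTEXT_KEY, true))
        .build();
// Consuming the Sequence returned for this query (via accumulate or toYielder)
// throws QueryUnsupportedException("query unsupported test").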

Aggregations

QueryUnsupportedException (org.apache.druid.query.QueryUnsupportedException): 9
Response (javax.ws.rs.core.Response): 5
QueryException (org.apache.druid.query.QueryException): 4
Test (org.junit.Test): 4
QueryCapacityExceededException (org.apache.druid.query.QueryCapacityExceededException): 3
QueryTimeoutException (org.apache.druid.query.QueryTimeoutException): 3
ResourceLimitExceededException (org.apache.druid.query.ResourceLimitExceededException): 3
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException): 2
CountingOutputStream (com.google.common.io.CountingOutputStream): 2
IOException (java.io.IOException): 2
AtomicLong (java.util.concurrent.atomic.AtomicLong): 2
Consumes (javax.ws.rs.Consumes): 2
POST (javax.ws.rs.POST): 2
Produces (javax.ws.rs.Produces): 2
StreamingOutput (javax.ws.rs.core.StreamingOutput): 2
BadJsonQueryException (org.apache.druid.query.BadJsonQueryException): 2
BadQueryException (org.apache.druid.query.BadQueryException): 2
Query (org.apache.druid.query.Query): 2
QueryInterruptedException (org.apache.druid.query.QueryInterruptedException): 2
QueryRunner (org.apache.druid.query.QueryRunner): 2