
Example 1 with ServerConfig

Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.

In the class SqlResourceTest, method setUp.

@Before
public void setUp() throws Exception {
    final QueryScheduler scheduler = new QueryScheduler(5, ManualQueryPrioritizationStrategy.INSTANCE, new HiLoQueryLaningStrategy(40), new ServerConfig()) {

        @Override
        public <T> Sequence<T> run(Query<?> query, Sequence<T> resultSequence) {
            return super.run(query, new LazySequence<T>(() -> {
                if (sleep) {
                    try {
                        // pretend to be a query that is waiting on results
                        Thread.sleep(500);
                    } catch (InterruptedException ignored) {
                    }
                }
                return resultSequence;
            }));
        }
    };
    executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(8, "test_sql_resource_%s"));
    walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder(), scheduler);
    final PlannerConfig plannerConfig = new PlannerConfig() {

        @Override
        public boolean shouldSerializeComplexValues() {
            return false;
        }
    };
    final DruidSchemaCatalog rootSchema = CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, CalciteTests.TEST_AUTHORIZER_MAPPER);
    final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
    final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
    req = EasyMock.createStrictMock(HttpServletRequest.class);
    EasyMock.expect(req.getRemoteAddr()).andReturn(null).once();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).anyTimes();
    req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(CalciteTests.REGULAR_USER_AUTH_RESULT).anyTimes();
    EasyMock.replay(req);
    testRequestLogger = new TestRequestLogger();
    final PlannerFactory plannerFactory = new PlannerFactory(rootSchema, CalciteTests.createMockQueryMakerFactory(walker, conglomerate), operatorTable, macroTable, plannerConfig, CalciteTests.TEST_AUTHORIZER_MAPPER, CalciteTests.getJsonMapper(), CalciteTests.DRUID_SCHEMA_NAME);
    lifecycleManager = new SqlLifecycleManager() {

        @Override
        public void add(String sqlQueryId, SqlLifecycle lifecycle) {
            super.add(sqlQueryId, lifecycle);
            if (lifecycleAddLatch != null) {
                lifecycleAddLatch.countDown();
            }
        }
    };
    final ServiceEmitter emitter = new NoopServiceEmitter();
    sqlLifecycleFactory = new SqlLifecycleFactory(plannerFactory, emitter, testRequestLogger, scheduler) {

        @Override
        public SqlLifecycle factorize() {
            return new TestSqlLifecycle(plannerFactory, emitter, testRequestLogger, scheduler, System.currentTimeMillis(), System.nanoTime(), validateAndAuthorizeLatchSupplier, planLatchSupplier, executeLatchSupplier, sequenceMapFnSupplier);
        }
    };
    resource = new SqlResource(JSON_MAPPER, CalciteTests.TEST_AUTHORIZER_MAPPER, sqlLifecycleFactory, lifecycleManager, new ServerConfig());
}
Also used : SqlLifecycleManager(org.apache.druid.sql.SqlLifecycleManager) ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) QueryScheduler(org.apache.druid.server.QueryScheduler) BaseQuery(org.apache.druid.query.BaseQuery) Query(org.apache.druid.query.Query) HiLoQueryLaningStrategy(org.apache.druid.server.scheduling.HiLoQueryLaningStrategy) SqlLifecycle(org.apache.druid.sql.SqlLifecycle) NoopServiceEmitter(org.apache.druid.server.metrics.NoopServiceEmitter) Sequence(org.apache.druid.java.util.common.guava.Sequence) LazySequence(org.apache.druid.java.util.common.guava.LazySequence) QueryInterruptedException(org.apache.druid.query.QueryInterruptedException) DruidOperatorTable(org.apache.druid.sql.calcite.planner.DruidOperatorTable) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) TestRequestLogger(org.apache.druid.server.log.TestRequestLogger) HttpServletRequest(javax.servlet.http.HttpServletRequest) ServerConfig(org.apache.druid.server.initialization.ServerConfig) PlannerConfig(org.apache.druid.sql.calcite.planner.PlannerConfig) DruidSchemaCatalog(org.apache.druid.sql.calcite.schema.DruidSchemaCatalog) PlannerFactory(org.apache.druid.sql.calcite.planner.PlannerFactory) SqlLifecycleFactory(org.apache.druid.sql.SqlLifecycleFactory) Before(org.junit.Before)
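
The scheduler override above wraps the result in a LazySequence, so the artificial 500 ms delay only runs when the results are actually consumed, not when the query is submitted. Below is a minimal, Druid-free sketch of that deferred-work idea using a plain java.util.function.Supplier; the class and method names are illustrative only, not part of Druid.

// Minimal sketch of the lazy "slow query" trick used above: the delay only
// happens when the result is actually pulled, mirroring how the overridden
// run() hands back a LazySequence around resultSequence.
import java.util.function.Supplier;

public class LazySlowResultSketch {
    // Wraps a value so the expensive part (here, a sleep) is deferred until get().
    static <T> Supplier<T> lazySlow(T result, long sleepMillis) {
        return () -> {
            try {
                // pretend to be a query that is waiting on results
                Thread.sleep(sleepMillis);
            } catch (InterruptedException ignored) {
                Thread.currentThread().interrupt();
            }
            return result;
        };
    }

    public static void main(String[] args) {
        Supplier<String> pending = lazySlow("rows", 500);
        System.out.println("supplier created, no sleep yet");
        System.out.println("result: " + pending.get()); // the sleep happens here
    }
}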

Example 2 with ServerConfig

Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.

In the class SqlResourceTest, method testUnsupportedQueryThrowsExceptionWithFilterResponse.

@Test
public void testUnsupportedQueryThrowsExceptionWithFilterResponse() throws Exception {
    resource = new SqlResource(JSON_MAPPER, CalciteTests.TEST_AUTHORIZER_MAPPER, sqlLifecycleFactory, lifecycleManager, new ServerConfig() {

        @Override
        public boolean isShowDetailedJettyErrors() {
            return true;
        }

        @Override
        public ErrorResponseTransformStrategy getErrorResponseTransformStrategy() {
            return new AllowedRegexErrorResponseTransformStrategy(ImmutableList.of());
        }
    });
    String errorMessage = "This will be supported in Druid 9999";
    SqlQuery badQuery = EasyMock.createMock(SqlQuery.class);
    EasyMock.expect(badQuery.getQuery()).andReturn("SELECT ANSWER TO LIFE");
    EasyMock.expect(badQuery.getContext()).andReturn(ImmutableMap.of("sqlQueryId", "id"));
    EasyMock.expect(badQuery.getParameterList()).andThrow(new QueryUnsupportedException(errorMessage));
    EasyMock.replay(badQuery);
    final QueryException exception = doPost(badQuery).lhs;
    Assert.assertNotNull(exception);
    Assert.assertNull(exception.getMessage());
    Assert.assertNull(exception.getHost());
    Assert.assertEquals(exception.getErrorCode(), QueryUnsupportedException.ERROR_CODE);
    Assert.assertNull(exception.getErrorClass());
    Assert.assertTrue(lifecycleManager.getAll("id").isEmpty());
}
Also used : ServerConfig(org.apache.druid.server.initialization.ServerConfig) UnsupportedSQLQueryException(org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException) QueryException(org.apache.druid.query.QueryException) QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException) AllowedRegexErrorResponseTransformStrategy(org.apache.druid.common.exception.AllowedRegexErrorResponseTransformStrategy) Test(org.junit.Test)
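
The assertions pass because the ServerConfig override installs an error response transform strategy with an empty allowlist, so the exception's message, host, and error class are stripped while the error code survives. As a hedged illustration of that allowed-regex idea (a stand-in, not Druid's actual AllowedRegexErrorResponseTransformStrategy implementation), a message could be kept only when it matches at least one allowed pattern:

// Self-contained sketch: an empty allowlist masks every message; a permissive
// pattern lets it through. Class and method names here are illustrative.
import java.util.List;
import java.util.regex.Pattern;

public class AllowedRegexScrubberSketch {
    static String scrub(String message, List<Pattern> allowed) {
        if (message == null) {
            return null;
        }
        // keep the message only when some allowed pattern matches it in full
        boolean keep = allowed.stream().anyMatch(p -> p.matcher(message).matches());
        return keep ? message : null;
    }

    public static void main(String[] args) {
        String msg = "This will be supported in Druid 9999";
        // empty allowlist: everything is masked, as in the test's null assertions
        System.out.println(scrub(msg, List.of()));
        // a permissive allowlist passes the message through unchanged
        System.out.println(scrub(msg, List.of(Pattern.compile(".*"))));
    }
}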

Example 3 with ServerConfig

Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.

In the class QueryResourceTest, method testTooManyQuery.

@Test(timeout = 10_000L)
public void testTooManyQuery() throws InterruptedException {
    expectPermissiveHappyPathAuth();
    final CountDownLatch waitTwoScheduled = new CountDownLatch(2);
    final CountDownLatch waitAllFinished = new CountDownLatch(3);
    final QueryScheduler laningScheduler = new QueryScheduler(2, ManualQueryPrioritizationStrategy.INSTANCE, NoQueryLaningStrategy.INSTANCE, new ServerConfig());
    createScheduledQueryResource(laningScheduler, Collections.emptyList(), ImmutableList.of(waitTwoScheduled));
    assertResponseAndCountdownOrBlockForever(SIMPLE_TIMESERIES_QUERY, waitAllFinished, response -> Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()));
    assertResponseAndCountdownOrBlockForever(SIMPLE_TIMESERIES_QUERY, waitAllFinished, response -> Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()));
    waitTwoScheduled.await();
    assertResponseAndCountdownOrBlockForever(SIMPLE_TIMESERIES_QUERY, waitAllFinished, response -> {
        Assert.assertEquals(QueryCapacityExceededException.STATUS_CODE, response.getStatus());
        QueryCapacityExceededException ex;
        try {
            ex = jsonMapper.readValue((byte[]) response.getEntity(), QueryCapacityExceededException.class);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        Assert.assertEquals(QueryCapacityExceededException.makeTotalErrorMessage(2), ex.getMessage());
        Assert.assertEquals(QueryCapacityExceededException.ERROR_CODE, ex.getErrorCode());
    });
    waitAllFinished.await();
}
Also used : ServerConfig(org.apache.druid.server.initialization.ServerConfig) QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException) IOException(java.io.IOException) CountDownLatch(java.util.concurrent.CountDownLatch) Test(org.junit.Test)
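
The third request above is rejected with QueryCapacityExceededException because the scheduler's total capacity is 2 and two queries are already in flight. A self-contained sketch of that admit-or-reject behavior, using a plain Semaphore rather than Druid's QueryScheduler (names are illustrative), might look like this:

// Sketch (not Druid's implementation) of the capacity limit the test exercises:
// a fixed number of permits, and a request that cannot acquire one immediately
// is rejected rather than queued.
import java.util.concurrent.Semaphore;

public class TotalCapacitySketch {
    static final int TOTAL = 2;
    static final Semaphore permits = new Semaphore(TOTAL);

    // Returns true if the "query" was admitted; false models the
    // QueryCapacityExceededException path the test asserts.
    static boolean tryRun(Runnable query) {
        if (!permits.tryAcquire()) {
            return false;
        }
        try {
            query.run();
            return true;
        } finally {
            permits.release();
        }
    }

    public static void main(String[] args) {
        // hold both permits to simulate two in-flight queries
        permits.acquireUninterruptibly(2);
        System.out.println(tryRun(() -> {})); // false: total capacity of 2 exceeded
        permits.release(2);
        System.out.println(tryRun(() -> {})); // true: capacity available again
    }
}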

Example 4 with ServerConfig

Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.

In the class QueryResourceTest, method testTooManyQueryInLane.

@Test(timeout = 10_000L)
public void testTooManyQueryInLane() throws InterruptedException {
    expectPermissiveHappyPathAuth();
    final CountDownLatch waitTwoStarted = new CountDownLatch(2);
    final CountDownLatch waitOneScheduled = new CountDownLatch(1);
    final CountDownLatch waitAllFinished = new CountDownLatch(3);
    final QueryScheduler scheduler = new QueryScheduler(40, ManualQueryPrioritizationStrategy.INSTANCE, new HiLoQueryLaningStrategy(2), new ServerConfig());
    createScheduledQueryResource(scheduler, ImmutableList.of(waitTwoStarted), ImmutableList.of(waitOneScheduled));
    assertResponseAndCountdownOrBlockForever(SIMPLE_TIMESERIES_QUERY_LOW_PRIORITY, waitAllFinished, response -> Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()));
    waitOneScheduled.await();
    assertResponseAndCountdownOrBlockForever(SIMPLE_TIMESERIES_QUERY_LOW_PRIORITY, waitAllFinished, response -> {
        Assert.assertEquals(QueryCapacityExceededException.STATUS_CODE, response.getStatus());
        QueryCapacityExceededException ex;
        try {
            ex = jsonMapper.readValue((byte[]) response.getEntity(), QueryCapacityExceededException.class);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        Assert.assertEquals(QueryCapacityExceededException.makeLaneErrorMessage(HiLoQueryLaningStrategy.LOW, 1), ex.getMessage());
        Assert.assertEquals(QueryCapacityExceededException.ERROR_CODE, ex.getErrorCode());
    });
    waitTwoStarted.await();
    assertResponseAndCountdownOrBlockForever(SIMPLE_TIMESERIES_QUERY, waitAllFinished, response -> Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus()));
    waitAllFinished.await();
}
Also used : ServerConfig(org.apache.druid.server.initialization.ServerConfig) QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException) HiLoQueryLaningStrategy(org.apache.druid.server.scheduling.HiLoQueryLaningStrategy) IOException(java.io.IOException) CountDownLatch(java.util.concurrent.CountDownLatch) Test(org.junit.Test)
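
Here only the low-priority lane is saturated: the scheduler has 40 total slots, but HiLoQueryLaningStrategy(2) leaves the LOW lane with a single slot (the assertion uses makeLaneErrorMessage(HiLoQueryLaningStrategy.LOW, 1)), so the second low-priority query is rejected while a normal-priority query still succeeds. The following is a hedged sketch of the lane-capacity arithmetic; the exact rounding rule is an assumption, but it reproduces the numbers asserted in these tests:

// Hedged sketch: the low lane gets a percentage of total capacity with a floor
// of one slot. Not Druid's code, just the arithmetic the assertions imply.
public class HiLoLaneCapacitySketch {
    static int lowLaneCapacity(int totalCapacity, int maxLowPercent) {
        return Math.max(1, (int) Math.round(totalCapacity * maxLowPercent / 100.0));
    }

    public static void main(String[] args) {
        System.out.println(lowLaneCapacity(40, 2));  // 1, as asserted in testTooManyQueryInLane
        System.out.println(lowLaneCapacity(5, 40));  // 2, matching Example 1's scheduler setup
    }
}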

Example 5 with ServerConfig

Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.

In the class ClientQuerySegmentWalkerTest, method initWalker.

/**
 * Initialize (or reinitialize) our {@link #walker} and {@link #closer}.
 */
private void initWalker(final Map<String, String> serverProperties, QueryScheduler schedulerForTest) {
    final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
    final ServerConfig serverConfig = jsonMapper.convertValue(serverProperties, ServerConfig.class);
    final SegmentWrangler segmentWrangler = new MapSegmentWrangler(ImmutableMap.<Class<? extends DataSource>, SegmentWrangler>builder().put(InlineDataSource.class, new InlineSegmentWrangler()).build());
    final JoinableFactory globalFactory = new JoinableFactory() {

        @Override
        public boolean isDirectlyJoinable(DataSource dataSource) {
            return ((GlobalTableDataSource) dataSource).getName().equals(GLOBAL);
        }

        @Override
        public Optional<Joinable> build(DataSource dataSource, JoinConditionAnalysis condition) {
            return Optional.empty();
        }
    };
    final JoinableFactory joinableFactory = new MapJoinableFactory(ImmutableSet.of(globalFactory, new InlineJoinableFactory()), ImmutableMap.<Class<? extends JoinableFactory>, Class<? extends DataSource>>builder().put(InlineJoinableFactory.class, InlineDataSource.class).put(globalFactory.getClass(), GlobalTableDataSource.class).build());
    class CapturingWalker implements QuerySegmentWalker {

        private QuerySegmentWalker baseWalker;

        private ClusterOrLocal how;

        CapturingWalker(QuerySegmentWalker baseWalker, ClusterOrLocal how) {
            this.baseWalker = baseWalker;
            this.how = how;
        }

        @Override
        public <T> QueryRunner<T> getQueryRunnerForIntervals(Query<T> query, Iterable<Interval> intervals) {
            final QueryRunner<T> baseRunner = baseWalker.getQueryRunnerForIntervals(query, intervals);
            return (queryPlus, responseContext) -> {
                log.info("Query (%s): %s", how, queryPlus.getQuery());
                issuedQueries.add(new ExpectedQuery(queryPlus.getQuery(), how));
                return baseRunner.run(queryPlus, responseContext);
            };
        }

        @Override
        public <T> QueryRunner<T> getQueryRunnerForSegments(Query<T> query, Iterable<SegmentDescriptor> specs) {
            final QueryRunner<T> baseRunner = baseWalker.getQueryRunnerForSegments(query, specs);
            return (queryPlus, responseContext) -> {
                log.info("Query (%s): %s", how, queryPlus.getQuery());
                issuedQueries.add(new ExpectedQuery(queryPlus.getQuery(), how));
                return baseRunner.run(queryPlus, responseContext);
            };
        }
    }
    walker = QueryStackTests.createClientQuerySegmentWalker(new CapturingWalker(QueryStackTests.createClusterQuerySegmentWalker(ImmutableMap.<String, VersionedIntervalTimeline<String, ReferenceCountingSegment>>builder().put(FOO, makeTimeline(FOO, FOO_INLINE)).put(BAR, makeTimeline(BAR, BAR_INLINE)).put(MULTI, makeTimeline(MULTI, MULTI_VALUE_INLINE)).put(GLOBAL, makeTimeline(GLOBAL, FOO_INLINE)).put(ARRAY, makeTimeline(ARRAY, ARRAY_INLINE)).put(ARRAY_UNKNOWN, makeTimeline(ARRAY_UNKNOWN, ARRAY_INLINE_UNKNOWN)).build(), joinableFactory, conglomerate, schedulerForTest), ClusterOrLocal.CLUSTER), new CapturingWalker(QueryStackTests.createLocalQuerySegmentWalker(conglomerate, segmentWrangler, joinableFactory, schedulerForTest), ClusterOrLocal.LOCAL), conglomerate, joinableFactory, serverConfig);
}
Also used : QueryToolChestTestHelper(org.apache.druid.query.QueryToolChestTestHelper) QueryPlus(org.apache.druid.query.QueryPlus) Arrays(java.util.Arrays) RowBasedSegment(org.apache.druid.segment.RowBasedSegment) SegmentWrangler(org.apache.druid.segment.SegmentWrangler) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Druids(org.apache.druid.query.Druids) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) After(org.junit.After) Map(java.util.Map) QueryRunner(org.apache.druid.query.QueryRunner) ManualQueryPrioritizationStrategy(org.apache.druid.server.scheduling.ManualQueryPrioritizationStrategy) Sequence(org.apache.druid.java.util.common.guava.Sequence) ShardSpec(org.apache.druid.timeline.partition.ShardSpec) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Closer(org.apache.druid.java.util.common.io.Closer) DataSource(org.apache.druid.query.DataSource) NumberedShardSpec(org.apache.druid.timeline.partition.NumberedShardSpec) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) JoinConditionAnalysis(org.apache.druid.segment.join.JoinConditionAnalysis) MapJoinableFactory(org.apache.druid.segment.join.MapJoinableFactory) QueryContexts(org.apache.druid.query.QueryContexts) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) Objects(java.util.Objects) InlineJoinableFactory(org.apache.druid.segment.join.InlineJoinableFactory) QueryDataSource(org.apache.druid.query.QueryDataSource) List(java.util.List) MapSegmentWrangler(org.apache.druid.segment.MapSegmentWrangler) Optional(java.util.Optional) SegmentId(org.apache.druid.timeline.SegmentId) Logger(org.apache.druid.java.util.common.logger.Logger) ComparableList(org.apache.druid.segment.data.ComparableList) Joinable(org.apache.druid.segment.join.Joinable) Intervals(org.apache.druid.java.util.common.Intervals) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) JoinType(org.apache.druid.segment.join.JoinType) InlineSegmentWrangler(org.apache.druid.segment.InlineSegmentWrangler) JoinableFactory(org.apache.druid.segment.join.JoinableFactory) DirectDruidClient(org.apache.druid.client.DirectDruidClient) ScanQuery(org.apache.druid.query.scan.ScanQuery) TopNQuery(org.apache.druid.query.topn.TopNQuery) ArrayList(java.util.ArrayList) Interval(org.joda.time.Interval) ImmutableList(com.google.common.collect.ImmutableList) Query(org.apache.druid.query.Query) JoinDataSource(org.apache.druid.query.JoinDataSource) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) ComparableStringArray(org.apache.druid.segment.data.ComparableStringArray) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) QuerySegmentWalker(org.apache.druid.query.QuerySegmentWalker) ExpectedException(org.junit.rules.ExpectedException) Sequences(org.apache.druid.java.util.common.guava.Sequences) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) Before(org.junit.Before) VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline) InlineDataSource(org.apache.druid.query.InlineDataSource) GroupByStrategyV2(org.apache.druid.query.groupby.strategy.GroupByStrategyV2) ResponseContext(org.apache.druid.query.context.ResponseContext) ServerConfig(org.apache.druid.server.initialization.ServerConfig) TopNQueryBuilder(org.apache.druid.query.topn.TopNQueryBuilder) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig) QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate) Test(org.junit.Test) IOException(java.io.IOException) ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment) TableDataSource(org.apache.druid.query.TableDataSource) Granularities(org.apache.druid.java.util.common.granularity.Granularities) TestHelper(org.apache.druid.segment.TestHelper) Rule(org.junit.Rule) UnionDataSource(org.apache.druid.query.UnionDataSource) NullHandling(org.apache.druid.common.config.NullHandling) RowSignature(org.apache.druid.segment.column.RowSignature) GroupByQueryHelper(org.apache.druid.query.groupby.GroupByQueryHelper) ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException) ColumnType(org.apache.druid.segment.column.ColumnType) NoQueryLaningStrategy(org.apache.druid.server.scheduling.NoQueryLaningStrategy) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Assert(org.junit.Assert) Comparator(java.util.Comparator) Collections(java.util.Collections)
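
initWalker builds its ServerConfig from a plain Map via Jackson's ObjectMapper.convertValue, so individual tests can tweak server properties without constructing the config by hand. Below is a minimal sketch of that pattern using a made-up TestServerProps bean (not Druid's ServerConfig; only the Jackson mechanics are the point, and jackson-databind is the only dependency):

// Sketch of the convertValue(...) pattern: Jackson maps matching keys onto the
// bean's fields, coerces scalar types, and leaves everything else at defaults.
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class ConvertValueSketch {
    // Hypothetical config bean standing in for a real server config class.
    public static class TestServerProps {
        public int maxConcurrentQueries = 8;
        public String host = "localhost";
    }

    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        Map<String, String> props = Map.of("maxConcurrentQueries", "2");
        // The String "2" is coerced to an int; 'host' keeps its default value.
        TestServerProps config = mapper.convertValue(props, TestServerProps.class);
        System.out.println(config.maxConcurrentQueries + " / " + config.host); // 2 / localhost
    }
}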

Aggregations

ServerConfig (org.apache.druid.server.initialization.ServerConfig): 33
Test (org.junit.Test): 26
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 11
IOException (java.io.IOException): 11
NoopServiceEmitter (org.apache.druid.server.metrics.NoopServiceEmitter): 11
AllowedRegexErrorResponseTransformStrategy (org.apache.druid.common.exception.AllowedRegexErrorResponseTransformStrategy): 10
QueryException (org.apache.druid.query.QueryException): 10
QueryInterruptedException (org.apache.druid.query.QueryInterruptedException): 9
Properties (java.util.Properties): 7
HttpServletResponse (javax.servlet.http.HttpServletResponse): 7
DefaultGenericQueryMetricsFactory (org.apache.druid.query.DefaultGenericQueryMetricsFactory): 7
MapQueryToolChestWarehouse (org.apache.druid.query.MapQueryToolChestWarehouse): 7
NoopRequestLogger (org.apache.druid.server.log.NoopRequestLogger): 7
AuthenticatorMapper (org.apache.druid.server.security.AuthenticatorMapper): 7
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException): 6
ServletOutputStream (javax.servlet.ServletOutputStream): 6
HttpServletRequest (javax.servlet.http.HttpServletRequest): 5
Before (org.junit.Before): 5
ExprMacroTable (org.apache.druid.math.expr.ExprMacroTable): 4
Query (org.apache.druid.query.Query): 4