Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.
From the class SetAndVerifyContextQueryRunnerTest, method testTimeoutIsUsedIfTimeoutIsNonZero.
@Test
public void testTimeoutIsUsedIfTimeoutIsNonZero() throws InterruptedException {
  Query<ScanResultValue> query = new Druids.ScanQueryBuilder()
      .dataSource("foo")
      .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.ETERNITY)))
      .context(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1))
      .build();
  ServerConfig defaultConfig = new ServerConfig();
  QueryRunner<ScanResultValue> mockRunner = EasyMock.createMock(QueryRunner.class);
  SetAndVerifyContextQueryRunner<ScanResultValue> queryRunner =
      new SetAndVerifyContextQueryRunner<>(defaultConfig, mockRunner);
  Query<ScanResultValue> transformed = queryRunner.withTimeoutAndMaxScatterGatherBytes(query, defaultConfig);
  Thread.sleep(100);
  // The timeout is set to 1, so withTimeoutAndMaxScatterGatherBytes should set QUERY_FAIL_TIME to the current
  // time + 1 at the time the method was called. This means that after sleeping for 100 millis, the fail time
  // should be less than the current time when checking.
  Assert.assertTrue(System.currentTimeMillis() > (Long) transformed.getContextValue(DirectDruidClient.QUERY_FAIL_TIME));
}
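A useful companion case to keep in mind: when the query carries no TIMEOUT_KEY at all, withTimeoutAndMaxScatterGatherBytes is expected to fall back to the default query timeout from the ServerConfig it is given, so the computed QUERY_FAIL_TIME should land in the future rather than the past. The sketch below is illustrative rather than a test from the source, and it reuses queryRunner and defaultConfig from the method above.

Query<ScanResultValue> noTimeoutQuery = new Druids.ScanQueryBuilder()
    .dataSource("foo")
    .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.ETERNITY)))
    .build();
// No explicit timeout in the context: the fail time is assumed to come from the ServerConfig
// default, so it is expected to sit ahead of "now" rather than behind it.
Query<ScanResultValue> withDefaults = queryRunner.withTimeoutAndMaxScatterGatherBytes(noTimeoutQuery, defaultConfig);
Assert.assertTrue((Long) withDefaults.getContextValue(DirectDruidClient.QUERY_FAIL_TIME) > System.currentTimeMillis());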
Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.
From the class QueryResourceTest, method testTooManyQuery.
@Test(timeout = 10_000L)
public void testTooManyQuery() throws InterruptedException {
  expectPermissiveHappyPathAuth();
  final CountDownLatch waitTwoScheduled = new CountDownLatch(2);
  final CountDownLatch waitAllFinished = new CountDownLatch(3);
  final QueryScheduler laningScheduler = new QueryScheduler(
      2, ManualQueryPrioritizationStrategy.INSTANCE, NoQueryLaningStrategy.INSTANCE, new ServerConfig()
  );
  createScheduledQueryResource(laningScheduler, Collections.emptyList(), ImmutableList.of(waitTwoScheduled));
  assertResponseAndCountdownOrBlockForever(
      SIMPLE_TIMESERIES_QUERY, waitAllFinished,
      response -> Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus())
  );
  assertResponseAndCountdownOrBlockForever(
      SIMPLE_TIMESERIES_QUERY, waitAllFinished,
      response -> Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus())
  );
  waitTwoScheduled.await();
  assertResponseAndCountdownOrBlockForever(SIMPLE_TIMESERIES_QUERY, waitAllFinished, response -> {
    Assert.assertEquals(QueryCapacityExceededException.STATUS_CODE, response.getStatus());
    QueryCapacityExceededException ex;
    try {
      ex = jsonMapper.readValue((byte[]) response.getEntity(), QueryCapacityExceededException.class);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    Assert.assertEquals(QueryCapacityExceededException.makeTotalErrorMessage(2), ex.getMessage());
    Assert.assertEquals(QueryCapacityExceededException.ERROR_CODE, ex.getErrorCode());
  });
  waitAllFinished.await();
}
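For orientation on the QueryScheduler constructor used above: the first argument is the total number of queries the scheduler admits concurrently, which is why the third request is rejected with makeTotalErrorMessage(2) once two are already running. A minimal sketch using the same classes, with an illustrative capacity that is not from the source:

// Illustrative only: with a capacity of 1 and the same strategies, the second concurrent
// query would be expected to fail with QueryCapacityExceededException instead of the third.
QueryScheduler singleSlotScheduler = new QueryScheduler(
    1, ManualQueryPrioritizationStrategy.INSTANCE, NoQueryLaningStrategy.INSTANCE, new ServerConfig()
);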
Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.
From the class ServerManager, method buildAndDecorateQueryRunner.
private <T> QueryRunner<T> buildAndDecorateQueryRunner(
    final QueryRunnerFactory<T, Query<T>> factory, final QueryToolChest<T, Query<T>> toolChest,
    final SegmentReference segment, final Optional<byte[]> cacheKeyPrefix,
    final SegmentDescriptor segmentDescriptor, final AtomicLong cpuTimeAccumulator
) {
  final SpecificSegmentSpec segmentSpec = new SpecificSegmentSpec(segmentDescriptor);
  final SegmentId segmentId = segment.getId();
  final Interval segmentInterval = segment.getDataInterval();
  // If the segment is closed after this line, ReferenceCountingSegmentQueryRunner will handle and do the right thing.
  if (segmentId == null || segmentInterval == null) {
    return new ReportTimelineMissingSegmentQueryRunner<>(segmentDescriptor);
  }
  String segmentIdString = segmentId.toString();
  MetricsEmittingQueryRunner<T> metricsEmittingQueryRunnerInner = new MetricsEmittingQueryRunner<>(
      emitter, toolChest, new ReferenceCountingSegmentQueryRunner<>(factory, segment, segmentDescriptor),
      QueryMetrics::reportSegmentTime, queryMetrics -> queryMetrics.segment(segmentIdString)
  );
  StorageAdapter storageAdapter = segment.asStorageAdapter();
  long segmentMaxTime = storageAdapter.getMaxTime().getMillis();
  long segmentMinTime = storageAdapter.getMinTime().getMillis();
  Interval actualDataInterval = Intervals.utc(segmentMinTime, segmentMaxTime + 1);
  CachingQueryRunner<T> cachingQueryRunner = new CachingQueryRunner<>(
      segmentIdString, cacheKeyPrefix, segmentDescriptor, actualDataInterval,
      objectMapper, cache, toolChest, metricsEmittingQueryRunnerInner, cachePopulator, cacheConfig
  );
  BySegmentQueryRunner<T> bySegmentQueryRunner =
      new BySegmentQueryRunner<>(segmentId, segmentInterval.getStart(), cachingQueryRunner);
  MetricsEmittingQueryRunner<T> metricsEmittingQueryRunnerOuter = new MetricsEmittingQueryRunner<>(
      emitter, toolChest, bySegmentQueryRunner,
      QueryMetrics::reportSegmentAndCacheTime, queryMetrics -> queryMetrics.segment(segmentIdString)
  ).withWaitMeasuredFromNow();
  SpecificSegmentQueryRunner<T> specificSegmentQueryRunner =
      new SpecificSegmentQueryRunner<>(metricsEmittingQueryRunnerOuter, segmentSpec);
  PerSegmentOptimizingQueryRunner<T> perSegmentOptimizingQueryRunner = new PerSegmentOptimizingQueryRunner<>(
      specificSegmentQueryRunner, new PerSegmentQueryOptimizationContext(segmentDescriptor)
  );
  return new SetAndVerifyContextQueryRunner<>(
      serverConfig,
      CPUTimeMetricQueryRunner.safeBuild(perSegmentOptimizingQueryRunner, toolChest, emitter, cpuTimeAccumulator, false)
  );
}
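The method above is a chain of QueryRunner decorators built from the inside out: the factory-created runner is wrapped by reference counting, per-segment metrics, caching, by-segment grouping, segment-and-cache metrics, specific-segment handling, per-segment optimization, CPU-time accounting, and finally SetAndVerifyContextQueryRunner, which applies the ServerConfig-driven timeout and scatter-gather limits to the query context. The sketch below shows the decorator shape these wrappers share; the class name and pass-through behavior are hypothetical, and only the QueryRunner run signature is assumed from this version of the codebase.

// Hypothetical decorator (not part of Druid): each wrapper holds the runner it decorates
// and adds its own behavior around the delegated run call.
class PassThroughQueryRunner<T> implements QueryRunner<T> {
  private final QueryRunner<T> delegate;

  PassThroughQueryRunner(QueryRunner<T> delegate) {
    this.delegate = delegate;
  }

  @Override
  public Sequence<T> run(QueryPlus<T> queryPlus, ResponseContext responseContext) {
    // a real decorator adds metrics, caching, or context checks around this call
    return delegate.run(queryPlus, responseContext);
  }
}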
Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.
From the class AsyncQueryForwardingServletTest, method testHandleExceptionWithFilterDisabled.
@Test
public void testHandleExceptionWithFilterDisabled() throws Exception {
  String errorMessage = "test exception message";
  ObjectMapper mockMapper = Mockito.mock(ObjectMapper.class);
  HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
  ServletOutputStream outputStream = Mockito.mock(ServletOutputStream.class);
  Mockito.when(response.getOutputStream()).thenReturn(outputStream);
  final AsyncQueryForwardingServlet servlet = new AsyncQueryForwardingServlet(
      new MapQueryToolChestWarehouse(ImmutableMap.of()), mockMapper, TestHelper.makeSmileMapper(),
      null, null, null,
      new NoopServiceEmitter(), new NoopRequestLogger(), new DefaultGenericQueryMetricsFactory(),
      new AuthenticatorMapper(ImmutableMap.of()), new Properties(), new ServerConfig()
  );
  Exception testException = new IllegalStateException(errorMessage);
  servlet.handleException(response, mockMapper, testException);
  ArgumentCaptor<Exception> captor = ArgumentCaptor.forClass(Exception.class);
  Mockito.verify(mockMapper).writeValue(ArgumentMatchers.eq(outputStream), captor.capture());
  Assert.assertTrue(captor.getValue() instanceof QueryException);
  Assert.assertEquals(QueryInterruptedException.UNKNOWN_EXCEPTION, ((QueryException) captor.getValue()).getErrorCode());
  Assert.assertEquals(errorMessage, captor.getValue().getMessage());
  Assert.assertEquals(IllegalStateException.class.getName(), ((QueryException) captor.getValue()).getErrorClass());
}
Use of org.apache.druid.server.initialization.ServerConfig in project druid by druid-io.
From the class AsyncQueryForwardingServletTest, method testHandleExceptionWithFilterEnabledButMessageMatchAllowedRegex.
@Test
public void testHandleExceptionWithFilterEnabledButMessageMatchAllowedRegex() throws Exception {
  String errorMessage = "test exception message";
  ObjectMapper mockMapper = Mockito.mock(ObjectMapper.class);
  HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
  ServletOutputStream outputStream = Mockito.mock(ServletOutputStream.class);
  Mockito.when(response.getOutputStream()).thenReturn(outputStream);
  final AsyncQueryForwardingServlet servlet = new AsyncQueryForwardingServlet(
      new MapQueryToolChestWarehouse(ImmutableMap.of()), mockMapper, TestHelper.makeSmileMapper(),
      null, null, null,
      new NoopServiceEmitter(), new NoopRequestLogger(), new DefaultGenericQueryMetricsFactory(),
      new AuthenticatorMapper(ImmutableMap.of()), new Properties(),
      new ServerConfig() {
        @Override
        public boolean isShowDetailedJettyErrors() {
          return true;
        }

        @Override
        public ErrorResponseTransformStrategy getErrorResponseTransformStrategy() {
          return new AllowedRegexErrorResponseTransformStrategy(ImmutableList.of("test .*"));
        }
      }
  );
  Exception testException = new IllegalStateException(errorMessage);
  servlet.handleException(response, mockMapper, testException);
  ArgumentCaptor<Exception> captor = ArgumentCaptor.forClass(Exception.class);
  Mockito.verify(mockMapper).writeValue(ArgumentMatchers.eq(outputStream), captor.capture());
  Assert.assertTrue(captor.getValue() instanceof QueryException);
  Assert.assertEquals(QueryInterruptedException.UNKNOWN_EXCEPTION, ((QueryException) captor.getValue()).getErrorCode());
  Assert.assertEquals(errorMessage, captor.getValue().getMessage());
  Assert.assertNull(((QueryException) captor.getValue()).getErrorClass());
  Assert.assertNull(((QueryException) captor.getValue()).getHost());
}
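The two servlet examples differ only in the ServerConfig they supply: with the default config the errorClass survives in the response, while the overridden config above keeps the message because it matches the allowed regex yet still nulls out errorClass and host. Below is a minimal sketch of that override on its own, with a hypothetical pattern that is not from the source; the pass-through behavior for matching messages is taken from the test above, and the stripping of non-matching ones is inferred from the strategy's name and these assertions.

// Illustrative only: sanitize everything except messages matching "known .*".
// Matching messages are expected to reach callers unmodified; others should be stripped.
ServerConfig sanitizingConfig = new ServerConfig() {
  @Override
  public ErrorResponseTransformStrategy getErrorResponseTransformStrategy() {
    return new AllowedRegexErrorResponseTransformStrategy(ImmutableList.of("known .*"));
  }
};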