Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class SearchQueryRunnerTest, the method testSearchWithCardinality:
@Test
public void testSearchWithCardinality() {
  final SearchQuery searchQuery = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .query("a")
      .build();
  // double the value
  QueryRunner mergedRunner = toolChest.mergeResults(new QueryRunner<Result<SearchResultValue>>() {
    @Override
    public Sequence<Result<SearchResultValue>> run(Query<Result<SearchResultValue>> query, Map<String, Object> responseContext) {
      final Query<Result<SearchResultValue>> query1 = searchQuery.withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-01-12/2011-02-28")))
      );
      final Query<Result<SearchResultValue>> query2 = searchQuery.withQuerySegmentSpec(
          new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-03-01/2011-04-15")))
      );
      return Sequences.concat(runner.run(query1, responseContext), runner.run(query2, responseContext));
    }
  });
  List<SearchHit> expectedHits = Lists.newLinkedList();
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 91));
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 273));
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 91));
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 91));
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 91));
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 182));
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "a", 91));
  expectedHits.add(new SearchHit(QueryRunnerTestHelper.partialNullDimension, "value", 182));
  checkSearchQuery(searchQuery, mergedRunner, expectedHits);
}
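The merged runner above works because Sequence composition is lazy: Sequences.concat wires the two per-interval sequences together without running either query until the merged sequence is consumed. A minimal sketch of that behavior, using only the Sequences helpers already shown in these examples (the integer values are illustrative):

// Build two simple sequences; nothing is evaluated at construction time.
Sequence<Integer> first = Sequences.simple(Lists.newArrayList(1, 2));
Sequence<Integer> second = Sequences.simple(Lists.newArrayList(3, 4));
// concat is also lazy; the sub-sequences are only pulled when consumed.
Sequence<Integer> combined = Sequences.concat(first, second);
// Consuming the sequence materializes it as [1, 2, 3, 4].
List<Integer> materialized = Sequences.toList(combined, Lists.<Integer>newArrayList());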
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class DumpSegment, the method executeQuery:
private static <T> Sequence<T> executeQuery(final Injector injector, final QueryableIndex index, final Query<T> query) {
  final QueryRunnerFactoryConglomerate conglomerate = injector.getInstance(QueryRunnerFactoryConglomerate.class);
  final QueryRunnerFactory factory = conglomerate.findFactory(query);
  final QueryRunner<T> runner = factory.createRunner(new QueryableIndexSegment("segment", index));
  final Sequence results = factory.getToolchest()
      .mergeResults(factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.<QueryRunner>of(runner)))
      .run(query, Maps.<String, Object>newHashMap());
  return (Sequence<T>) results;
}
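Note that executeQuery only wires up the runner stack; the query against the segment runs when the returned Sequence is consumed. A hedged sketch of a call site, assuming an injector, index, and query are already in hand and using the Accumulator contract from the same guava package:

// Hypothetical call site; injector, index, and query come from elsewhere.
final Sequence<Object> results = executeQuery(injector, index, query);
// Count rows without materializing them; this is where the query actually executes.
final Integer rowCount = results.accumulate(0, new Accumulator<Integer, Object>() {
  @Override
  public Integer accumulate(Integer accumulated, Object row) {
    return accumulated + 1;
  }
});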
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class AsyncQueryRunnerTest, the method testAsyncNature:
@Test(timeout = TEST_TIMEOUT)
public void testAsyncNature() throws Exception {
  final CountDownLatch latch = new CountDownLatch(1);
  QueryRunner baseRunner = new QueryRunner() {
    @Override
    public Sequence run(Query query, Map responseContext) {
      try {
        latch.await();
        return Sequences.simple(Lists.newArrayList(1));
      } catch (InterruptedException ex) {
        throw Throwables.propagate(ex);
      }
    }
  };
  AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>(baseRunner, executor, QueryRunnerTestHelper.NOOP_QUERYWATCHER);
  Sequence lazy = asyncRunner.run(query, Collections.EMPTY_MAP);
  latch.countDown();
  Assert.assertEquals(Lists.newArrayList(1), Sequences.toList(lazy, Lists.newArrayList()));
}
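The assertion only passes because asyncRunner.run returns immediately with a lazy Sequence while baseRunner blocks on the latch; if run were eager, the test would deadlock before ever reaching latch.countDown(). The same deferred-evaluation contract can be sketched with LazySequence, assuming it is available from the same guava package and using guava's com.google.common.base.Supplier:

// The supplier is not invoked at construction time, only when the sequence is consumed.
Sequence<Integer> deferred = new LazySequence<>(new Supplier<Sequence<Integer>>() {
  @Override
  public Sequence<Integer> get() {
    return Sequences.simple(Lists.newArrayList(1));
  }
});
// Evaluation happens here, inside toList.
Assert.assertEquals(Lists.newArrayList(1), Sequences.toList(deferred, Lists.newArrayList()));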
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class ChainedExecutionQueryRunnerTest, the method testQueryCancellation:
@Test(timeout = 60000)
public void testQueryCancellation() throws Exception {
  ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {
    @Override
    public String getFormatString() {
      return "test";
    }

    @Override
    public int getNumThreads() {
      return 2;
    }
  });
  final CountDownLatch queriesStarted = new CountDownLatch(2);
  final CountDownLatch queriesInterrupted = new CountDownLatch(2);
  final CountDownLatch queryIsRegistered = new CountDownLatch(1);
  Capture<ListenableFuture> capturedFuture = EasyMock.newCapture();
  QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
  watcher.registerQuery(
      EasyMock.<Query>anyObject(),
      EasyMock.and(EasyMock.<ListenableFuture>anyObject(), EasyMock.capture(capturedFuture))
  );
  EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
    @Override
    public Void answer() throws Throwable {
      queryIsRegistered.countDown();
      return null;
    }
  }).once();
  EasyMock.replay(watcher);
  ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
  Set<DyingQueryRunner> runners = Sets.newHashSet(
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted)
  );
  ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(exec, watcher, Lists.<QueryRunner<Integer>>newArrayList(runners));
  Map<String, Object> context = ImmutableMap.<String, Object>of();
  final Sequence seq = chainedRunner.run(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .intervals("2014/2015")
            .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
            .build(),
      context
  );
  Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {
    @Override
    public void run() {
      Sequences.toList(seq, Lists.newArrayList());
    }
  });
  // wait for query to register and start
  queryIsRegistered.await();
  queriesStarted.await();
  // cancel the query
  Assert.assertTrue(capturedFuture.hasCaptured());
  ListenableFuture future = capturedFuture.getValue();
  future.cancel(true);
  QueryInterruptedException cause = null;
  try {
    resultFuture.get();
  } catch (ExecutionException e) {
    Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
    cause = (QueryInterruptedException) e.getCause();
  }
  queriesInterrupted.await();
  Assert.assertNotNull(cause);
  Assert.assertTrue(future.isCancelled());
  DyingQueryRunner interrupted1 = interrupted.poll();
  synchronized (interrupted1) {
    Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
    Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
  }
  DyingQueryRunner interrupted2 = interrupted.poll();
  synchronized (interrupted2) {
    Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
    Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
  }
  runners.remove(interrupted1);
  runners.remove(interrupted2);
  DyingQueryRunner remainingRunner = runners.iterator().next();
  synchronized (remainingRunner) {
    Assert.assertTrue("runner 3 should be interrupted or not have started", !remainingRunner.hasStarted || remainingRunner.interrupted);
  }
  Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
  Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
  Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
  EasyMock.verify(watcher);
}
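DyingQueryRunner is a test fixture defined elsewhere in ChainedExecutionQueryRunnerTest; the test above only relies on it exposing hasStarted, interrupted, and hasCompleted flags and counting down the two latches when cancellation propagates. A rough, hypothetical sketch of the shape such a runner can take (not the actual fixture; it assumes QueryInterruptedException accepts a cause):

// Hypothetical sketch, not the real DyingQueryRunner: record lifecycle flags and
// surface interruption as a QueryInterruptedException, as the assertions above expect.
class SketchQueryRunner implements QueryRunner<Integer> {
  volatile boolean hasStarted = false;
  volatile boolean interrupted = false;
  volatile boolean hasCompleted = false;

  @Override
  public synchronized Sequence<Integer> run(Query<Integer> query, Map<String, Object> responseContext) {
    hasStarted = true;
    if (Thread.interrupted()) {
      // The executor delivers cancellation as a thread interrupt.
      interrupted = true;
      throw new QueryInterruptedException(new InterruptedException());
    }
    hasCompleted = true;
    return Sequences.simple(Lists.newArrayList(1));
  }
}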
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From the class QueryResource, the method doPost:
@POST
@Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
@Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE})
public Response doPost(
    InputStream in,
    @QueryParam("pretty") String pretty,
    // used to get request content-type, remote address and AuthorizationInfo
    @Context final HttpServletRequest req
) throws IOException {
  final long start = System.currentTimeMillis();
  Query query = null;
  QueryToolChest toolChest = null;
  String queryId = null;
  final ResponseContext context = createContext(req.getContentType(), pretty != null);
  final String currThreadName = Thread.currentThread().getName();
  try {
    query = context.getObjectMapper().readValue(in, Query.class);
    queryId = query.getId();
    if (queryId == null) {
      queryId = UUID.randomUUID().toString();
      query = query.withId(queryId);
    }
    if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
      query = query.withOverriddenContext(
          ImmutableMap.of(QueryContextKeys.TIMEOUT, config.getMaxIdleTime().toStandardDuration().getMillis())
      );
    }
    toolChest = warehouse.getToolChest(query);
    Thread.currentThread().setName(
        String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId)
    );
    if (log.isDebugEnabled()) {
      log.debug("Got query [%s]", query);
    }
    if (authConfig.isEnabled()) {
      // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424
      AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN);
      if (authorizationInfo != null) {
        for (String dataSource : query.getDataSource().getNames()) {
          Access authResult = authorizationInfo.isAuthorized(new Resource(dataSource, ResourceType.DATASOURCE), Action.READ);
          if (!authResult.isAllowed()) {
            return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build();
          }
        }
      } else {
        throw new ISE("WTF?! Security is enabled but no authorization info found in the request");
      }
    }
    String prevEtag = req.getHeader(HDR_IF_NONE_MATCH);
    if (prevEtag != null) {
      query = query.withOverriddenContext(ImmutableMap.of(HDR_IF_NONE_MATCH, prevEtag));
    }
    final Map<String, Object> responseContext = new MapMaker().makeMap();
    final Sequence res = query.run(texasRanger, responseContext);
    if (prevEtag != null && prevEtag.equals(responseContext.get(HDR_ETAG))) {
      return Response.notModified().build();
    }
    final Sequence results;
    if (res == null) {
      results = Sequences.empty();
    } else {
      results = res;
    }
    final Yielder yielder = Yielders.each(results);
    try {
      final Query theQuery = query;
      final QueryToolChest theToolChest = toolChest;
      final ObjectWriter jsonWriter = context.newOutputWriter();
      Response.ResponseBuilder builder = Response.ok(
          new StreamingOutput() {
            @Override
            public void write(OutputStream outputStream) throws IOException, WebApplicationException {
              try {
                // json serializer will always close the yielder
                CountingOutputStream os = new CountingOutputStream(outputStream);
                jsonWriter.writeValue(os, yielder);
                // Some types of OutputStream suppress flush errors in the .close() method.
                os.flush();
                os.close();
                successfulQueryCount.incrementAndGet();
                final long queryTime = System.currentTimeMillis() - start;
                emitter.emit(
                    DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr())
                                .setDimension("success", "true")
                                .build("query/time", queryTime)
                );
                emitter.emit(
                    DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr())
                                .build("query/bytes", os.getCount())
                );
                requestLogger.log(
                    new RequestLogLine(
                        new DateTime(start),
                        req.getRemoteAddr(),
                        theQuery,
                        new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "query/bytes", os.getCount(), "success", true))
                    )
                );
              } finally {
                Thread.currentThread().setName(currThreadName);
              }
            }
          },
          context.getContentType()
      ).header("X-Druid-Query-Id", queryId);
      if (responseContext.get(HDR_ETAG) != null) {
        builder.header(HDR_ETAG, responseContext.get(HDR_ETAG));
        responseContext.remove(HDR_ETAG);
      }
      // Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
      // Note that Response.ResponseBuilder.header(String key, Object value).build() calls value.toString(),
      // which is encoded as ASCII, so 1 char = 1 byte.
      String responseCtxString = jsonMapper.writeValueAsString(responseContext);
      if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
        log.warn("Response Context truncated for id [%s]. Full context is [%s].", queryId, responseCtxString);
        responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
      }
      return builder.header("X-Druid-Response-Context", responseCtxString).build();
    } catch (Exception e) {
      // make sure to close the yielder if anything happened before starting to serialize the response.
      yielder.close();
      throw Throwables.propagate(e);
    } finally {
      // do not close the yielder here, since we do not want to close it before
      // StreamingOutput has iterated over all the results
    }
  } catch (QueryInterruptedException e) {
    try {
      log.warn(e, "Exception while processing queryId [%s]", queryId);
      interruptedQueryCount.incrementAndGet();
      final long queryTime = System.currentTimeMillis() - start;
      emitter.emit(
          DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr())
                      .setDimension("success", "false")
                      .build("query/time", queryTime)
      );
      requestLogger.log(
          new RequestLogLine(
              new DateTime(start),
              req.getRemoteAddr(),
              query,
              new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "interrupted", true, "reason", e.toString()))
          )
      );
    } catch (Exception e2) {
      log.error(e2, "Unable to log query [%s]!", query);
    }
    return context.gotError(e);
  } catch (Exception e) {
    // Input stream has already been consumed by the json object mapper if query == null
    final String queryString = query == null ? "unparsable query" : query.toString();
    log.warn(e, "Exception occurred on request [%s]", queryString);
    failedQueryCount.incrementAndGet();
    try {
      final long queryTime = System.currentTimeMillis() - start;
      emitter.emit(
          DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr())
                      .setDimension("success", "false")
                      .build("query/time", queryTime)
      );
      requestLogger.log(
          new RequestLogLine(
              new DateTime(start),
              req.getRemoteAddr(),
              query,
              new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "exception", e.toString()))
          )
      );
    } catch (Exception e2) {
      log.error(e2, "Unable to log query [%s]!", queryString);
    }
    log.makeAlert(e, "Exception handling request")
       .addData("exception", e.toString())
       .addData("query", queryString)
       .addData("peer", req.getRemoteAddr())
       .emit();
    return context.gotError(e);
  } finally {
    Thread.currentThread().setName(currThreadName);
  }
}
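The streaming path above hinges on the Yielder contract: Yielders.each positions a yielder at the first element of the results sequence, the JSON serializer advances it element by element (closing it when done), and close() releases the resources held by the underlying query. A hedged sketch of the equivalent manual consumption loop, with results as in the method above and process standing in for any hypothetical per-row consumer:

Yielder<Object> yielder = Yielders.each(results);
try {
  while (!yielder.isDone()) {
    Object row = yielder.get();    // current element
    process(row);                  // hypothetical consumer
    yielder = yielder.next(null);  // advance; may block while the query produces more rows
  }
} finally {
  yielder.close();                 // always release the resources held by the sequence
}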