Use of io.druid.query.Query in project druid by druid-io.
From class CachingQueryRunnerTest, method testUseCache:
private void testUseCache(List<Result> expectedResults, Query query, QueryToolChest toolchest) throws Exception {
  DefaultObjectMapper objectMapper = new DefaultObjectMapper();
  String segmentIdentifier = "segment";
  SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
  CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
  Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(
      segmentIdentifier,
      segmentDescriptor,
      cacheStrategy.computeCacheKey(query)
  );
  Cache cache = MapCache.create(1024 * 1024);
  CacheUtil.populate(cache, objectMapper, cacheKey, Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
  CachingQueryRunner runner = new CachingQueryRunner(
      segmentIdentifier,
      segmentDescriptor,
      objectMapper,
      cache,
      toolchest,
      // return an empty sequence since results should get pulled from cache
      new QueryRunner() {
        @Override
        public Sequence run(Query query, Map responseContext) {
          return Sequences.empty();
        }
      },
      backgroundExecutorService,
      new CacheConfig() {
        @Override
        public boolean isPopulateCache() {
          return true;
        }

        @Override
        public boolean isUseCache() {
          return true;
        }
      }
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  List<Result> results = Sequences.toList(runner.run(query, context), new ArrayList());
  Assert.assertEquals(expectedResults.toString(), results.toString());
}
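The test relies on CacheUtil having written the serialized results under the segment's cache key. A minimal spot check of that round-trip, reusing the objects built above (cache.get is on the io.druid.client.cache.Cache interface; the assertion message is ours):

  // Hypothetical spot check: after CacheUtil.populate, the serialized results
  // should be retrievable under the same segment cache key.
  byte[] cachedBytes = cache.get(cacheKey);
  Assert.assertNotNull("expected CacheUtil.populate to have written the serialized results", cachedBytes);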
Use of io.druid.query.Query in project druid by druid-io.
From class AggregationTestHelper, method makeStringSerdeQueryRunner:
public QueryRunner<Row> makeStringSerdeQueryRunner(final ObjectMapper mapper, final QueryToolChest toolChest, final Query<Row> query, final QueryRunner<Row> baseRunner) {
  return new QueryRunner<Row>() {
    @Override
    public Sequence<Row> run(Query<Row> query, Map<String, Object> map) {
      try {
        Sequence<Row> resultSeq = baseRunner.run(query, Maps.<String, Object>newHashMap());
        final Yielder yielder = resultSeq.toYielder(
            null,
            new YieldingAccumulator() {
              @Override
              public Object accumulate(Object accumulated, Object in) {
                yield();
                return in;
              }
            }
        );
        // Serialize the results to a JSON string, then read them back and
        // re-materialize them through the toolchest, exercising the same
        // serde round-trip that query results go through.
        String resultStr = mapper.writer().writeValueAsString(yielder);
        TypeFactory typeFactory = mapper.getTypeFactory();
        JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference());
        List resultRows = Lists.transform(
            readQueryResultArrayFromString(resultStr),
            toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing())
        );
        return Sequences.simple(resultRows);
      } catch (Exception ex) {
        throw Throwables.propagate(ex);
      }
    }
  };
}
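A usage sketch for the returned runner (hypothetical setup; helper, mapper, toolChest, query, and baseRunner stand in for whatever the calling test has built):

  QueryRunner<Row> serdeRunner = helper.makeStringSerdeQueryRunner(mapper, toolChest, query, baseRunner);
  Sequence<Row> rows = serdeRunner.run(query, Maps.<String, Object>newHashMap());
  // materializing the sequence forces the serialize-then-deserialize round-trip above
  List<Row> materialized = Sequences.toList(rows, Lists.<Row>newArrayList());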
Use of io.druid.query.Query in project druid by druid-io.
From class QueryResourceTest, method testDenySecuredGetServer:
@Test(timeout = 60_000L)
public void testDenySecuredGetServer() throws Exception {
  final CountDownLatch waitForCancellationLatch = new CountDownLatch(1);
  final CountDownLatch waitFinishLatch = new CountDownLatch(2);
  final CountDownLatch startAwaitLatch = new CountDownLatch(1);
  EasyMock.expect(testServletRequest.getAttribute(EasyMock.anyString())).andReturn(
      new AuthorizationInfo() {
        @Override
        public Access isAuthorized(Resource resource, Action action) {
          // READ corresponds to running the query; WRITE corresponds to cancelling it
          if (action.equals(Action.READ)) {
            try {
              waitForCancellationLatch.await();
            } catch (InterruptedException e) {
              throw Throwables.propagate(e);
            }
            return new Access(true);
          } else {
            // Deny WRITE access so that the cancellation request is rejected
            return new Access(false);
          }
        }
      }
  ).times(2);
  EasyMock.replay(testServletRequest);
  queryResource = new QueryResource(warehouse, serverConfig, jsonMapper, jsonMapper, testSegmentWalker, new NoopServiceEmitter(), new NoopRequestLogger(), queryManager, new AuthConfig(true));
  final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"," + "\"context\":{\"queryId\":\"id_1\"}}";
  ObjectMapper mapper = new DefaultObjectMapper();
  Query query = mapper.readValue(queryString, Query.class);
  ListenableFuture future = MoreExecutors.listeningDecorator(Execs.singleThreaded("test_query_resource_%s")).submit(
      new Runnable() {
        @Override
        public void run() {
          try {
            startAwaitLatch.countDown();
            Response response = queryResource.doPost(new ByteArrayInputStream(queryString.getBytes("UTF-8")), null, testServletRequest);
            Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
          } catch (IOException e) {
            throw Throwables.propagate(e);
          }
          waitFinishLatch.countDown();
        }
      }
  );
  queryManager.registerQuery(query, future);
  startAwaitLatch.await();
  Executors.newSingleThreadExecutor().submit(
      new Runnable() {
        @Override
        public void run() {
          // The cancellation request is denied, so getServer returns 403 Forbidden
          Response response = queryResource.getServer("id_1", testServletRequest);
          Assert.assertEquals(Response.Status.FORBIDDEN.getStatusCode(), response.getStatus());
          waitForCancellationLatch.countDown();
          waitFinishLatch.countDown();
        }
      }
  );
  waitFinishLatch.await();
}
Use of io.druid.query.Query in project druid by druid-io.
From class QueryResourceTest, method testSecuredGetServer:
@Test(timeout = 60_000L)
public void testSecuredGetServer() throws Exception {
  final CountDownLatch waitForCancellationLatch = new CountDownLatch(1);
  final CountDownLatch waitFinishLatch = new CountDownLatch(2);
  final CountDownLatch startAwaitLatch = new CountDownLatch(1);
  final CountDownLatch cancelledCountDownLatch = new CountDownLatch(1);
  EasyMock.expect(testServletRequest.getAttribute(EasyMock.anyString())).andReturn(
      new AuthorizationInfo() {
        @Override
        public Access isAuthorized(Resource resource, Action action) {
          // READ corresponds to running the query; WRITE corresponds to cancelling it
          if (action.equals(Action.READ)) {
            try {
              // Count down startAwaitLatch here so that query cancellation happens
              // only after we have entered isAuthorized, letting us handle the
              // InterruptedException caused by the cancellation
              startAwaitLatch.countDown();
              waitForCancellationLatch.await();
            } catch (InterruptedException e) {
              // When the query is cancelled, control reaches here: count down the
              // latch and rethrow so that an error response is returned for the query
              cancelledCountDownLatch.countDown();
              throw Throwables.propagate(e);
            }
            return new Access(true);
          } else {
            // Allow WRITE access so that the cancellation request succeeds
            return new Access(true);
          }
        }
      }
  ).times(2);
  EasyMock.replay(testServletRequest);
  queryResource = new QueryResource(warehouse, serverConfig, jsonMapper, jsonMapper, testSegmentWalker, new NoopServiceEmitter(), new NoopRequestLogger(), queryManager, new AuthConfig(true));
  final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"," + "\"context\":{\"queryId\":\"id_1\"}}";
  ObjectMapper mapper = new DefaultObjectMapper();
  Query query = mapper.readValue(queryString, Query.class);
  ListenableFuture future = MoreExecutors.listeningDecorator(Execs.singleThreaded("test_query_resource_%s")).submit(
      new Runnable() {
        @Override
        public void run() {
          try {
            Response response = queryResource.doPost(new ByteArrayInputStream(queryString.getBytes("UTF-8")), null, testServletRequest);
            // The query is interrupted by the cancellation, so it errors out
            Assert.assertEquals(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus());
          } catch (IOException e) {
            throw Throwables.propagate(e);
          }
          waitFinishLatch.countDown();
        }
      }
  );
  queryManager.registerQuery(query, future);
  startAwaitLatch.await();
  Executors.newSingleThreadExecutor().submit(
      new Runnable() {
        @Override
        public void run() {
          // Cancellation is authorized here, so getServer returns 202 Accepted
          Response response = queryResource.getServer("id_1", testServletRequest);
          Assert.assertEquals(Response.Status.ACCEPTED.getStatusCode(), response.getStatus());
          waitForCancellationLatch.countDown();
          waitFinishLatch.countDown();
        }
      }
  );
  waitFinishLatch.await();
  cancelledCountDownLatch.await();
}
Use of io.druid.query.Query in project druid by druid-io.
From class GroupByQueryRunnerTest, method testPostAggMergedHavingSpec:
@Test
public void testPostAggMergedHavingSpec() {
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "index", 4420L, QueryRunnerTestHelper.addRowsIndexConstantMetric, (double) (6L + 4420L + 1L)),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "index", 4416L, QueryRunnerTestHelper.addRowsIndexConstantMetric, (double) (6L + 4416L + 1L))
  );
  GroupByQuery.Builder builder = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
      .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")))
      .setPostAggregatorSpecs(ImmutableList.<PostAggregator>of(QueryRunnerTestHelper.addRowsIndexConstant))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(new OrHavingSpec(ImmutableList.<HavingSpec>of(new GreaterThanHavingSpec(QueryRunnerTestHelper.addRowsIndexConstantMetric, 1000L))));
  final GroupByQuery fullQuery = builder.build();
  QueryRunner mergedRunner = factory.getToolchest().mergeResults(
      new QueryRunner<Row>() {
        @Override
        public Sequence<Row> run(Query<Row> query, Map<String, Object> responseContext) {
          // simulate two daily segments
          final Query query1 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))));
          final Query query2 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))));
          return new MergeSequence(
              query.getResultOrdering(),
              Sequences.simple(Arrays.asList(runner.run(query1, responseContext), runner.run(query2, responseContext)))
          );
        }
      }
  );
  Map<String, Object> context = Maps.newHashMap();
  TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged");
}
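The having spec filters the merged rows on the post-aggregated metric; a minimal sketch of how a single row is evaluated (assuming the HavingSpec#eval(Row) method of this Druid era):

  HavingSpec having = new GreaterThanHavingSpec(QueryRunnerTestHelper.addRowsIndexConstantMetric, 1000L);
  Row mezzanine = GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "index", 4420L, QueryRunnerTestHelper.addRowsIndexConstantMetric, (double) (6L + 4420L + 1L));
  // 6 + 4420 + 1 = 4427.0 > 1000, so the row passes the having filter
  Assert.assertTrue(having.eval(mezzanine));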