Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io: class QueriesTest, method testVerifyAggregations.
@Test
public void testVerifyAggregations() throws Exception {
  List<AggregatorFactory> aggFactories = Arrays.<AggregatorFactory>asList(
      new CountAggregatorFactory("count"),
      new DoubleSumAggregatorFactory("idx", "index"),
      new DoubleSumAggregatorFactory("rev", "revenue")
  );
  // Every field the post-aggregator accesses ("idx", "count") is produced by an aggregator above,
  // so no exception is expected.
  List<PostAggregator> postAggs = Arrays.<PostAggregator>asList(
      new ArithmeticPostAggregator(
          "addStuff", "+",
          Arrays.<PostAggregator>asList(
              new FieldAccessPostAggregator("idx", "idx"),
              new FieldAccessPostAggregator("count", "count")
          )
      )
  );
  boolean exceptionOccured = false;
  try {
    Queries.prepareAggregations(aggFactories, postAggs);
  } catch (IllegalArgumentException e) {
    exceptionOccured = true;
  }
  Assert.assertFalse(exceptionOccured);
}
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io: class QueriesTest, method testVerifyAggregationsMultiLevel.
@Test
public void testVerifyAggregationsMultiLevel() throws Exception {
  List<AggregatorFactory> aggFactories = Arrays.<AggregatorFactory>asList(
      new CountAggregatorFactory("count"),
      new DoubleSumAggregatorFactory("idx", "index"),
      new DoubleSumAggregatorFactory("rev", "revenue")
  );
  // Post-aggregators may also reference earlier post-aggregators:
  // the top-level "addStuff" reads the output of "divideStuff".
  List<PostAggregator> postAggs = Arrays.<PostAggregator>asList(
      new ArithmeticPostAggregator(
          "divideStuff", "/",
          Arrays.<PostAggregator>asList(
              new ArithmeticPostAggregator(
                  "addStuff", "+",
                  Arrays.asList(new FieldAccessPostAggregator("idx", "idx"), new ConstantPostAggregator("const", 1))
              ),
              new ArithmeticPostAggregator(
                  "subtractStuff", "-",
                  Arrays.asList(new FieldAccessPostAggregator("rev", "rev"), new ConstantPostAggregator("const", 1))
              )
          )
      ),
      new ArithmeticPostAggregator(
          "addStuff", "+",
          Arrays.<PostAggregator>asList(
              new FieldAccessPostAggregator("divideStuff", "divideStuff"),
              new FieldAccessPostAggregator("count", "count")
          )
      )
  );
  boolean exceptionOccured = false;
  try {
    Queries.prepareAggregations(aggFactories, postAggs);
  } catch (IllegalArgumentException e) {
    exceptionOccured = true;
  }
  Assert.assertFalse(exceptionOccured);
}
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io: class QueriesTest, method testVerifyAggregationsMissingVal.
@Test
public void testVerifyAggregationsMissingVal() throws Exception {
  List<AggregatorFactory> aggFactories = Arrays.<AggregatorFactory>asList(
      new CountAggregatorFactory("count"),
      new DoubleSumAggregatorFactory("idx", "index"),
      new DoubleSumAggregatorFactory("rev", "revenue")
  );
  // "idx2" is not produced by any aggregator or earlier post-aggregator,
  // so an IllegalArgumentException is expected.
  List<PostAggregator> postAggs = Arrays.<PostAggregator>asList(
      new ArithmeticPostAggregator(
          "addStuff", "+",
          Arrays.<PostAggregator>asList(
              new FieldAccessPostAggregator("idx", "idx2"),
              new FieldAccessPostAggregator("count", "count")
          )
      )
  );
  boolean exceptionOccured = false;
  try {
    Queries.prepareAggregations(aggFactories, postAggs);
  } catch (IllegalArgumentException e) {
    exceptionOccured = true;
  }
  Assert.assertTrue(exceptionOccured);
}
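Taken together, the three tests above exercise the dependency check in Queries.prepareAggregations: every field a post-aggregator accesses must be produced either by one of the aggregators or by an earlier post-aggregator. A minimal sketch of that contract outside the JUnit harness, reusing only the factories shown above (the variable names are illustrative):

List<AggregatorFactory> aggs = Arrays.<AggregatorFactory>asList(
    new CountAggregatorFactory("count"),
    new DoubleSumAggregatorFactory("idx", "index")
);

// References "idx" and "count", both produced by the aggregators above: accepted.
Queries.prepareAggregations(aggs, Arrays.<PostAggregator>asList(
    new ArithmeticPostAggregator("addStuff", "+", Arrays.<PostAggregator>asList(
        new FieldAccessPostAggregator("idx", "idx"),
        new FieldAccessPostAggregator("count", "count")
    ))
));

// References "idx2", which nothing produces: throws IllegalArgumentException.
Queries.prepareAggregations(aggs, Arrays.<PostAggregator>asList(
    new ArithmeticPostAggregator("addStuff", "+", Arrays.<PostAggregator>asList(
        new FieldAccessPostAggregator("idx", "idx2"),
        new FieldAccessPostAggregator("count", "count")
    ))
));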
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io: class ChainedExecutionQueryRunnerTest, method testQueryTimeout.
@Test(timeout = 60000)
public void testQueryTimeout() throws Exception {
  // Two processing threads for three runners, so one runner may never get to start.
  ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {

    @Override
    public String getFormatString() {
      return "test";
    }

    @Override
    public int getNumThreads() {
      return 2;
    }
  });
  final CountDownLatch queriesStarted = new CountDownLatch(2);
  final CountDownLatch queriesInterrupted = new CountDownLatch(2);
  final CountDownLatch queryIsRegistered = new CountDownLatch(1);
  Capture<ListenableFuture> capturedFuture = new Capture<>();
  QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
  watcher.registerQuery(
      EasyMock.<Query>anyObject(),
      EasyMock.and(EasyMock.<ListenableFuture>anyObject(), EasyMock.capture(capturedFuture))
  );
  EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {

    @Override
    public Void answer() throws Throwable {
      queryIsRegistered.countDown();
      return null;
    }
  }).once();
  EasyMock.replay(watcher);
  ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
  Set<DyingQueryRunner> runners = Sets.newHashSet(
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted),
      new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted)
  );
  ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(
      exec,
      watcher,
      Lists.<QueryRunner<Integer>>newArrayList(runners)
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  // The 100 ms timeout in the query context is what triggers the interruption checked below.
  final Sequence seq = chainedRunner.run(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .intervals("2014/2015")
            .aggregators(Lists.<AggregatorFactory>newArrayList(new CountAggregatorFactory("count")))
            .context(ImmutableMap.<String, Object>of(QueryContextKeys.TIMEOUT, 100, "queryId", "test"))
            .build(),
      context
  );
  Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {

    @Override
    public void run() {
      Sequences.toList(seq, Lists.newArrayList());
    }
  });
  // wait for query to register and start
  queryIsRegistered.await();
  queriesStarted.await();
  Assert.assertTrue(capturedFuture.hasCaptured());
  ListenableFuture future = capturedFuture.getValue();
  // wait for query to time out
  QueryInterruptedException cause = null;
  try {
    resultFuture.get();
  } catch (ExecutionException e) {
    Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
    Assert.assertEquals("Query timeout", ((QueryInterruptedException) e.getCause()).getErrorCode());
    cause = (QueryInterruptedException) e.getCause();
  }
  queriesInterrupted.await();
  Assert.assertNotNull(cause);
  Assert.assertTrue(future.isCancelled());
  DyingQueryRunner interrupted1 = interrupted.poll();
  synchronized (interrupted1) {
    Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
    Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
  }
  DyingQueryRunner interrupted2 = interrupted.poll();
  synchronized (interrupted2) {
    Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
    Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
  }
  runners.remove(interrupted1);
  runners.remove(interrupted2);
  DyingQueryRunner remainingRunner = runners.iterator().next();
  synchronized (remainingRunner) {
    Assert.assertTrue(
        "runner 3 should be interrupted or not have started",
        !remainingRunner.hasStarted || remainingRunner.interrupted
    );
  }
  Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
  Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
  Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
  EasyMock.verify(watcher);
}
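The assertions above rely on the DyingQueryRunner helper defined elsewhere in ChainedExecutionQueryRunnerTest: each runner counts down queriesStarted when it begins, blocks until the timeout cancels it, then records itself in the interrupted queue and counts down queriesInterrupted, never reaching completion. The following is a simplified, hypothetical sketch of that behavior, not the project's actual helper; everything except the three constructor arguments and the hasStarted/interrupted/hasCompleted flags used in the test is assumed:

class SketchOfDyingQueryRunner implements QueryRunner<Integer> {
  private final CountDownLatch start;
  private final CountDownLatch stop;
  private final Queue<SketchOfDyingQueryRunner> interruptedRunners;
  volatile boolean hasStarted = false;
  volatile boolean interrupted = false;
  volatile boolean hasCompleted = false;

  SketchOfDyingQueryRunner(CountDownLatch start, CountDownLatch stop, Queue<SketchOfDyingQueryRunner> interruptedRunners) {
    this.start = start;
    this.stop = stop;
    this.interruptedRunners = interruptedRunners;
  }

  @Override
  public Sequence<Integer> run(Query<Integer> query, Map<String, Object> responseContext) {
    hasStarted = true;
    start.countDown();
    try {
      Thread.sleep(Long.MAX_VALUE);    // simulate a query that never finishes on its own
    } catch (InterruptedException e) {
      interrupted = true;              // the chained runner cancels the worker on timeout
      interruptedRunners.offer(this);
      stop.countDown();
      throw new RuntimeException(e);   // the real helper rethrows in Druid's own exception type
    }
    hasCompleted = true;
    return Sequences.simple(Arrays.asList(1));
  }
}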
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io: class MultiValuedDimensionTest, method testGroupByNoFilter.
@Test
public void testGroupByNoFilter() throws Exception {
  GroupByQuery query = GroupByQuery
      .builder()
      .setDataSource("xx")
      .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000"))
      .setGranularity(Granularities.ALL)
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("tags", "tags")))
      .setAggregatorSpecs(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
      .build();
  Sequence<Row> result = helper.runQueryOnSegmentsObjs(
      ImmutableList.<Segment>of(
          new QueryableIndexSegment("sid1", queryableIndex),
          new IncrementalIndexSegment(incrementalIndex, "sid2")
      ),
      query
  );
  // Grouping on the multi-valued "tags" column yields one row per distinct tag value;
  // a row contributes to the count of every tag it contains.
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", null, "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t2", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t3", "count", 4L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t4", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t5", "count", 4L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t6", "count", 2L),
      GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t7", "count", 2L)
  );
  TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Row>()), "");
}
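For intuition about the expected counts above (for example, "t3" and "t5" appearing with count 4 while the others show 2): when grouping on a multi-valued dimension, each row is counted once for every value it holds in that column, and a row with no tags falls into the null group. The following plain-Java sketch illustrates that expansion only; it is not Druid code, and the input rows are hypothetical, not the test's actual data:

// Hypothetical illustration of multi-value expansion during grouping.
List<List<String>> rows = Arrays.asList(
    Arrays.asList("t1", "t2", "t3"),
    Arrays.asList("t3", "t4", "t5"),
    Arrays.asList("t5", "t6", "t7")
);
Map<String, Long> counts = new TreeMap<String, Long>();
for (List<String> tagsInRow : rows) {
  for (String tag : tagsInRow) {
    // A row increments the count of every tag value it contains.
    Long prev = counts.get(tag);
    counts.put(tag, prev == null ? 1L : prev + 1L);
  }
}
// With these rows, "t3" and "t5" end up with count 2 and the rest with 1.
// The test runs over two segments (sid1 and sid2 above), which would double each
// count if both segments hold the same rows, matching the 2/4 pattern asserted.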