Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
In the class TieredBrokerHostSelectorTest, the method testSelectMultiInterval2:
@Test
public void testSelectMultiInterval2() throws Exception {
  String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count")))
            .intervals(
                new MultipleIntervalSegmentSpec(
                    Arrays.<Interval>asList(
                        new Interval("2011-08-31/2011-09-01"),
                        new Interval("2012-08-31/2012-09-01"),
                        new Interval("2013-08-31/2013-09-01")
                    )
                )
            )
            .build()
  ).lhs;
  Assert.assertEquals("coldBroker", brokerName);
}
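Note that select(...) returns a pair; only its lhs, the chosen broker name, is asserted. Also worth noting: the later tests pass a plain List<Interval> to intervals(...), which appears to be shorthand for the explicit MultipleIntervalSegmentSpec built here. A hedged sketch of that equivalence, where builder stands for any Druids.newTimeseriesQueryBuilder() and interval for any org.joda.time.Interval:

  // Hedged sketch: the two interval specifications below should be equivalent,
  // assuming the builder wraps a plain List<Interval> in a
  // MultipleIntervalSegmentSpec internally.
  builder.intervals(new MultipleIntervalSegmentSpec(Arrays.asList(interval)));
  builder.intervals(Arrays.asList(interval)); // shorthand form used by testBasicSelect below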
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
In the class TieredBrokerHostSelectorTest, the method testBasicSelect:
@Test
public void testBasicSelect() throws Exception {
  String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity("all")
            .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("rows")))
            .intervals(Arrays.<Interval>asList(new Interval("2011-08-31/2011-09-01")))
            .build()
  ).lhs;
  Assert.assertEquals("coldBroker", brokerName);
}
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
In the class TieredBrokerHostSelectorTest, the method testBasicSelect2:
@Test
public void testBasicSelect2() throws Exception {
  String brokerName = (String) brokerSelector.select(
      Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity("all")
            .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("rows")))
            .intervals(Arrays.<Interval>asList(new Interval("2013-08-31/2013-09-01")))
            .build()
  ).lhs;
  Assert.assertEquals("hotBroker", brokerName);
}
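Taken together with testBasicSelect, the only difference here is the queried interval: the 2011 interval routes to coldBroker while the 2013 interval routes to hotBroker, so the selector evidently picks the broker tier from the query's time range.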
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
In the class StringDimensionHandlerTest, the method getAdapters:
private static Pair<IncrementalIndexAdapter, IncrementalIndexAdapter> getAdapters(
    List<String> dims,
    Map<String, Object> event1,
    Map<String, Object> event2
) throws Exception {
  IncrementalIndex incrementalIndex1 = new OnheapIncrementalIndex(
      TEST_INTERVAL.getStartMillis(),
      Granularities.NONE,
      true,
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
      new AggregatorFactory[]{new CountAggregatorFactory("count")},
      1000
  );
  IncrementalIndex incrementalIndex2 = new OnheapIncrementalIndex(
      TEST_INTERVAL.getStartMillis(),
      Granularities.NONE,
      true,
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
      new AggregatorFactory[]{new CountAggregatorFactory("count")},
      1000
  );
  incrementalIndex1.add(new MapBasedInputRow(TEST_INTERVAL.getStartMillis(), dims, event1));
  incrementalIndex2.add(new MapBasedInputRow(TEST_INTERVAL.getStartMillis() + 3, dims, event2));
  IncrementalIndexAdapter adapter1 = new IncrementalIndexAdapter(
      TEST_INTERVAL,
      incrementalIndex1,
      INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory()
  );
  IncrementalIndexAdapter adapter2 = new IncrementalIndexAdapter(
      TEST_INTERVAL,
      incrementalIndex2,
      INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory()
  );
  return new Pair<>(adapter1, adapter2);
}
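A hypothetical invocation of this helper, with illustrative dimension and event values rather than fixtures taken from the class, could look like:

  // Hypothetical call: two single-row indexes over one dimension "d" holding
  // different values, which is the shape the merge tests in this class compare.
  Pair<IncrementalIndexAdapter, IncrementalIndexAdapter> adapters = getAdapters(
      Arrays.asList("d"),
      ImmutableMap.<String, Object>of("d", "a"),
      ImmutableMap.<String, Object>of("d", "b")
  );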
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
In the class IndexMergerV9WithSpatialIndexTest, the method testSpatialQueryMorePoints:
@Test
public void testSpatialQueryMorePoints() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                .dataSource("test")
                                .granularity(Granularities.DAY)
                                .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                .filters(
                                    new SpatialDimFilter(
                                        "dim.geo",
                                        new RectangularBound(new float[]{0.0f, 0.0f}, new float[]{9.0f, 9.0f})
                                    )
                                )
                                .aggregators(
                                    Arrays.<AggregatorFactory>asList(
                                        new CountAggregatorFactory("rows"),
                                        new LongSumAggregatorFactory("val", "val")
                                    )
                                )
                                .build();
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2013-01-01T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 17L).build()
          )
      ),
      new Result<>(
          new DateTime("2013-01-02T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 29L).build()
          )
      ),
      new Result<>(
          new DateTime("2013-01-03T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).build()
          )
      ),
      new Result<>(
          new DateTime("2013-01-04T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 91L).build()
          )
      ),
      new Result<>(
          new DateTime("2013-01-05T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 47L).build()
          )
      )
  );
  try {
    TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        new TimeseriesQueryEngine(),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER
    );
    QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, Maps.newHashMap()));
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
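As a side note on CountAggregatorFactory itself: partial counts fold by addition when results are merged. A minimal sketch, assuming the combine(Object, Object) merge hook inherited from AggregatorFactory and illustrative partial values:

  // Sketch: combining two partial counts; for a count factory the merge
  // is a sum of the partials.
  AggregatorFactory rows = new CountAggregatorFactory("rows");
  Object merged = rows.combine(3L, 4L); // expected: 7L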