Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From the class CalciteQueryTest, method testFilterAndGroupByLookup:
@Test
public void testFilterAndGroupByLookup() throws Exception {
  final RegisteredLookupExtractionFn extractionFn = new RegisteredLookupExtractionFn(
      null, "lookyloo", false, null, false, true
  );
  testQuery(
      "SELECT LOOKUP(dim1, 'lookyloo'), COUNT(*) FROM foo\n"
      + "WHERE LOOKUP(dim1, 'lookyloo') <> 'xxx'\n"
      + "GROUP BY LOOKUP(dim1, 'lookyloo')",
      ImmutableList.<Query>of(
          GroupByQuery.builder()
                      .setDataSource(CalciteTests.DATASOURCE1)
                      .setInterval(QSS(Filtration.eternity()))
                      .setGranularity(Granularities.ALL)
                      .setDimFilter(NOT(SELECTOR("dim1", "xxx", extractionFn)))
                      .setDimensions(DIMS(new ExtractionDimensionSpec("dim1", "d0", ValueType.STRING, extractionFn)))
                      .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0")))
                      .setContext(QUERY_CONTEXT_DEFAULT)
                      .build()
      ),
      ImmutableList.of(
          new Object[]{"", 5L},
          new Object[]{"xabc", 1L}
      )
  );
}
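For reference, AGGS(new CountAggregatorFactory("a0")) above is the Java form of Druid's "count" aggregator JSON spec. A minimal round-trip sketch (not part of the test), assuming an ObjectMapper with Druid's aggregator subtypes registered, as io.druid.jackson.DefaultObjectMapper provides, and ignoring the checked IOException:

// Hypothetical snippet, not from CalciteQueryTest: deserialize the JSON spec
// and compare it to the factory constructed in the test above.
ObjectMapper mapper = new DefaultObjectMapper();
AggregatorFactory fromJson = mapper.readValue(
    "{\"type\": \"count\", \"name\": \"a0\"}",
    AggregatorFactory.class
);
// fromJson should equal new CountAggregatorFactory("a0")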
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From the class KafkaSupervisorTest, method getDataSchema:
private DataSchema getDataSchema(String dataSource) {
  List<DimensionSchema> dimensions = new ArrayList<>();
  dimensions.add(StringDimensionSchema.create("dim1"));
  dimensions.add(StringDimensionSchema.create("dim2"));
  return new DataSchema(
      dataSource,
      objectMapper.convertValue(
          new StringInputRowParser(
              new JSONParseSpec(
                  new TimestampSpec("timestamp", "iso", null),
                  new DimensionsSpec(dimensions, null, null),
                  new JSONPathSpec(true, ImmutableList.<JSONPathFieldSpec>of()),
                  ImmutableMap.<String, Boolean>of()
              ),
              Charsets.UTF_8.name()
          ),
          Map.class
      ),
      new AggregatorFactory[]{new CountAggregatorFactory("rows")},
      new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, ImmutableList.<Interval>of()),
      objectMapper
  );
}
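The StringInputRowParser is converted to a Map before being handed to DataSchema because DataSchema stores the parser spec in Map form and materializes the parser on demand. A short consumption sketch under that assumption; the data source name is an arbitrary example:

// Hypothetical usage, not part of KafkaSupervisorTest:
DataSchema schema = getDataSchema("metrics-kafka");   // example data source name
InputRowParser parser = schema.getParser();           // rebuilt from the Map-form parser spec
AggregatorFactory[] aggs = schema.getAggregators();   // holds the CountAggregatorFactory("rows")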
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From the class SpatialFilterTest, method testSpatialQueryWithOtherSpatialDim:
@Test
public void testSpatialQueryWithOtherSpatialDim() {
  TimeseriesQuery query = Druids
      .newTimeseriesQueryBuilder()
      .dataSource("test")
      .granularity(Granularities.ALL)
      .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
      .filters(new SpatialDimFilter("spatialIsRad", new RadiusBound(new float[]{0.0f, 0.0f}, 5)))
      .aggregators(
          Arrays.<AggregatorFactory>asList(
              new CountAggregatorFactory("rows"),
              new LongSumAggregatorFactory("val", "val")
          )
      )
      .build();
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<TimeseriesResultValue>(
          new DateTime("2013-01-01T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                          .put("rows", 1L)
                          .put("val", 13L)
                          .build()
          )
      )
  );
  try {
    TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        new TimeseriesQueryEngine(),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER
    );
    QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, Maps.newHashMap()));
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
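The SpatialDimFilter keeps only rows whose spatialIsRad coordinates fall inside the radius bound, so the count aggregator sees just the matching rows. A standalone sketch of the bound's matching behavior, assuming the spatial Bound interface's contains(float[]) check:

// Hypothetical standalone check, not part of SpatialFilterTest:
RadiusBound bound = new RadiusBound(new float[]{0.0f, 0.0f}, 5);
boolean near = bound.contains(new float[]{1.0f, 2.0f});    // true: within distance 5 of the center
boolean far = bound.contains(new float[]{10.0f, 10.0f});   // false: outside the radius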
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From the class BaseFilterTest, method selectCountUsingFilteredAggregator:
private long selectCountUsingFilteredAggregator(final DimFilter filter) {
  final Sequence<Cursor> cursors = makeCursorSequence(makeFilter(filter));
  Sequence<Aggregator> aggSeq = Sequences.map(
      cursors,
      new Function<Cursor, Aggregator>() {
        @Override
        public Aggregator apply(Cursor input) {
          Aggregator agg = new FilteredAggregatorFactory(
              new CountAggregatorFactory("count"),
              maybeOptimize(filter)
          ).factorize(input);
          for (; !input.isDone(); input.advance()) {
            agg.aggregate();
          }
          return agg;
        }
      }
  );
  return Sequences.toList(aggSeq, new ArrayList<Aggregator>()).get(0).getLong();
}
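The wrapped CountAggregatorFactory simply increments once per aggregate() call; FilteredAggregatorFactory arranges for aggregate() to take effect only on cursor rows matching the filter. An unfiltered sketch of the same primitive, assuming some ColumnSelectorFactory named selectorFactory (for example, a Cursor) is at hand:

// Hypothetical snippet, not part of BaseFilterTest:
Aggregator counter = new CountAggregatorFactory("count").factorize(selectorFactory);
counter.aggregate();             // +1, regardless of column values
counter.aggregate();             // +1
long total = counter.getLong();  // 2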
Use of io.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From the class CachingClusteredClientFunctionalityTest, method testUncoveredInterval:
@Test
public void testUncoveredInterval() throws Exception {
addToTimeline(new Interval("2015-01-02/2015-01-03"), "1");
addToTimeline(new Interval("2015-01-04/2015-01-05"), "1");
addToTimeline(new Interval("2015-02-04/2015-02-05"), "1");
final Druids.TimeseriesQueryBuilder builder = Druids
    .newTimeseriesQueryBuilder()
    .dataSource("test")
    .intervals("2015-01-02/2015-01-03")
    .granularity("day")
    .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("rows")))
    .context(ImmutableMap.<String, Object>of("uncoveredIntervalsLimit", 3));
Map<String, Object> responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
Assert.assertNull(responseContext.get("uncoveredIntervals"));
builder.intervals("2015-01-01/2015-01-03");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, false, "2015-01-01/2015-01-02");
builder.intervals("2015-01-01/2015-01-04");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, false, "2015-01-01/2015-01-02", "2015-01-03/2015-01-04");
builder.intervals("2015-01-02/2015-01-04");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, false, "2015-01-03/2015-01-04");
builder.intervals("2015-01-01/2015-01-30");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, false, "2015-01-01/2015-01-02", "2015-01-03/2015-01-04", "2015-01-05/2015-01-30");
builder.intervals("2015-01-02/2015-01-30");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, false, "2015-01-03/2015-01-04", "2015-01-05/2015-01-30");
builder.intervals("2015-01-04/2015-01-30");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, false, "2015-01-05/2015-01-30");
builder.intervals("2015-01-10/2015-01-30");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, false, "2015-01-10/2015-01-30");
builder.intervals("2015-01-01/2015-02-25");
responseContext = new HashMap<>();
client.run(builder.build(), responseContext);
assertUncovered(responseContext, true, "2015-01-01/2015-01-02", "2015-01-03/2015-01-04", "2015-01-05/2015-02-04");
}
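assertUncovered is a helper defined elsewhere in the test class (not shown on this page). A hedged sketch of the response-context keys it presumably inspects, which CachingClusteredClient populates when uncoveredIntervalsLimit is set in the query context; the boolean argument maps to the overflow flag:

// Hypothetical read of the response context, not part of the test above:
@SuppressWarnings("unchecked")
List<Interval> uncovered = (List<Interval>) responseContext.get("uncoveredIntervals");
Boolean overflowed = (Boolean) responseContext.get("uncoveredIntervalsOverflowed");
// `overflowed` indicates more uncovered intervals existed than the configured limit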