Use of io.druid.query.aggregation.FilteredAggregatorFactory in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggDimensionNotPresentNotNullValue:
@Test
public void testTimeSeriesWithFilteredAggDimensionNotPresentNotNullValue() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.firstToThird)
      .aggregators(Lists.newArrayList(Iterables.concat(
          QueryRunnerTestHelper.commonAggregators,
          Lists.newArrayList(new FilteredAggregatorFactory(
              new CountAggregatorFactory("filteredAgg"),
              Druids.newSelectorDimFilterBuilder().dimension("abraKaDabra").value("Lol").build())))))
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .descending(descending)
      .build();
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
      runner.run(query, CONTEXT),
      Lists.<Result<TimeseriesResultValue>>newArrayList());
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(new Result<>(
      new DateTime("2011-04-01"),
      new TimeseriesResultValue(ImmutableMap.<String, Object>of(
          "filteredAgg", 0L,
          "addRowsIndexConstant", 12486.361190795898d,
          "index", 12459.361190795898d,
          "uniques", 9.019833517963864d,
          "rows", 26L))));
  assertExpectedResults(expectedResults, actualResults);
}
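The filteredAgg value is 0 because the selector filter targets a dimension (abraKaDabra) that does not exist in the data, so no row can match the non-null value "Lol". Stripped of the query-builder boilerplate, the pattern this test exercises is roughly the following sketch (constructed directly rather than via Druids.newSelectorDimFilterBuilder):

// Sketch only: wrap an aggregator so it counts just the rows matching the filter.
AggregatorFactory filteredCount = new FilteredAggregatorFactory(
    new CountAggregatorFactory("filteredAgg"),
    new SelectorDimFilter("abraKaDabra", "Lol", null));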
Use of io.druid.query.aggregation.FilteredAggregatorFactory in project druid by druid-io.
From the class Aggregation, method filter:
public Aggregation filter(final DimFilter filter) {
  if (filter == null) {
    return this;
  }
  if (postAggregator != null) {
    // Verify that this Aggregation contains all inputs. If not, this "filter" call won't work right.
    final Set<String> dependentFields = postAggregator.getDependentFields();
    final Set<String> aggregatorNames = Sets.newHashSet();
    for (AggregatorFactory aggregatorFactory : aggregatorFactories) {
      aggregatorNames.add(aggregatorFactory.getName());
    }
    for (String field : dependentFields) {
      if (!aggregatorNames.contains(field)) {
        throw new ISE("Cannot filter an Aggregation that does not contain its inputs: %s", this);
      }
    }
  }
  final List<AggregatorFactory> newAggregators = Lists.newArrayList();
  for (AggregatorFactory agg : aggregatorFactories) {
    newAggregators.add(new FilteredAggregatorFactory(agg, filter));
  }
  return new Aggregation(newAggregators, postAggregator, finalizingPostAggregatorFactory);
}
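Put differently, filter() leaves the Aggregation's shape intact and simply wraps each of its aggregator factories in a FilteredAggregatorFactory carrying the supplied DimFilter, after checking that any post-aggregator depends only on fields those factories produce. A minimal usage sketch, assuming a pre-existing Aggregation instance named aggregation (hypothetical):

DimFilter dimFilter = new SelectorDimFilter("dim", "a", null); // hypothetical filter
Aggregation filtered = aggregation.filter(dimFilter);
// Every factory in "filtered" is now new FilteredAggregatorFactory(originalFactory, dimFilter);
// the post-aggregator and finalizing post-aggregator factory are carried over unchanged.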
Use of io.druid.query.aggregation.FilteredAggregatorFactory in project druid by druid-io.
From the class SpatialFilterBonusTest, method testSpatialQueryFilteredAggregator:
@Test
public void testSpatialQueryFilteredAggregator() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test")
      .granularity(Granularities.DAY)
      .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
      .aggregators(Arrays.asList(
          new CountAggregatorFactory("rows"),
          new FilteredAggregatorFactory(
              new LongSumAggregatorFactory("valFiltered", "val"),
              new SpatialDimFilter(
                  "dim.geo",
                  new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f }))),
          new LongSumAggregatorFactory("val", "val")))
      .build();
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2013-01-01T00:00:00.000Z"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>builder()
              .put("rows", 4995L).put("val", 12497502L).put("valFiltered", 17L).build())),
      new Result<>(
          new DateTime("2013-01-02T00:00:00.000Z"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>builder()
              .put("rows", 1L).put("val", 29L).put("valFiltered", 29L).build())),
      new Result<>(
          new DateTime("2013-01-03T00:00:00.000Z"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>builder()
              .put("rows", 1L).put("val", 13L).put("valFiltered", 13L).build())),
      new Result<>(
          new DateTime("2013-01-04T00:00:00.000Z"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>builder()
              .put("rows", 1L).put("val", 91L).put("valFiltered", 91L).build())),
      new Result<>(
          new DateTime("2013-01-05T00:00:00.000Z"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>builder()
              .put("rows", 2L).put("val", 548L).put("valFiltered", 47L).build())));
  try {
    TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        new TimeseriesQueryEngine(),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
    HashMap<String, Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
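The interesting aggregator here is the filtered one: valFiltered sums val only for rows whose dim.geo point falls inside the rectangle from (0, 0) to (9, 9), while the unfiltered val sums every row, which is why the two diverge on 2013-01-01 and 2013-01-05. Pulled out of the query above, the filtered spatial aggregator on its own is:

// Sum "val" only where dim.geo lies within the rectangular bound.
AggregatorFactory spatialFilteredSum = new FilteredAggregatorFactory(
    new LongSumAggregatorFactory("valFiltered", "val"),
    new SpatialDimFilter(
        "dim.geo",
        new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f })));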
Use of io.druid.query.aggregation.FilteredAggregatorFactory in project druid by druid-io.
From the class IncrementalIndexTest, method constructorFeeder:
@Parameterized.Parameters
public static Collection<?> constructorFeeder() throws IOException {
  DimensionsSpec dimensions = new DimensionsSpec(
      Arrays.<DimensionSchema>asList(
          new StringDimensionSchema("string"),
          new StringDimensionSchema("float"),
          new StringDimensionSchema("long")),
      null,
      null);
  AggregatorFactory[] metrics = {
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("cnt"),
          new SelectorDimFilter("billy", "A", null))
  };
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(0)
      .withQueryGranularity(Granularities.MINUTE)
      .withDimensionsSpec(dimensions)
      .withMetrics(metrics)
      .withRollup(true)
      .build();
  final List<Object[]> constructors = Lists.newArrayList();
  for (final Boolean sortFacts : ImmutableList.of(false, true)) {
    constructors.add(new Object[] { new IndexCreator() {
      @Override
      public IncrementalIndex createIndex() {
        return new OnheapIncrementalIndex(schema, false, true, sortFacts, 1000);
      }
    } });
    constructors.add(new Object[] { new IndexCreator() {
      @Override
      public IncrementalIndex createIndex() {
        return new OffheapIncrementalIndex(
            schema, true, true, sortFacts, 1000000,
            new StupidPool<ByteBuffer>("OffheapIncrementalIndex-bufferPool", new Supplier<ByteBuffer>() {
              @Override
              public ByteBuffer get() {
                return ByteBuffer.allocate(256 * 1024);
              }
            }));
      }
    } });
  }
  return constructors;
}
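Each parameterized index therefore carries a single filtered metric: cnt counts only rows whose billy dimension equals "A". A rough sketch of how that plays out at ingestion time, using made-up rows that are not part of the test:

IncrementalIndex index = indexCreator.createIndex(); // any of the variants built above
// Matches SelectorDimFilter("billy", "A", null): the "cnt" metric increments for this row.
index.add(new MapBasedInputRow(0, Arrays.asList("billy"),
    ImmutableMap.<String, Object>of("billy", "A")));
// Does not match the filter: "cnt" records 0 for this row.
index.add(new MapBasedInputRow(0, Arrays.asList("billy"),
    ImmutableMap.<String, Object>of("billy", "B")));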
Use of io.druid.query.aggregation.FilteredAggregatorFactory in project druid by druid-io.
From the class IncrementalIndexTest, method testFilteredAggregators:
@Test
public void testFilteredAggregators() throws Exception {
  long timestamp = System.currentTimeMillis();
  IncrementalIndex index = closer.closeLater(indexCreator.createIndex(new AggregatorFactory[] {
      new CountAggregatorFactory("count"),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_selector_filtered"),
          new SelectorDimFilter("dim2", "2", null)),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_bound_filtered"),
          new BoundDimFilter("dim2", "2", "3", false, true, null, null, StringComparators.NUMERIC)),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_multivaldim_filtered"),
          new SelectorDimFilter("dim3", "b", null)),
      new FilteredAggregatorFactory(
          new CountAggregatorFactory("count_numeric_filtered"),
          new SelectorDimFilter("met1", "11", null))
  }));
  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2", "dim3"),
      ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2", "dim3", Lists.newArrayList("b", "a"), "met1", 10)));
  index.add(new MapBasedInputRow(
      timestamp,
      Arrays.asList("dim1", "dim2", "dim3"),
      ImmutableMap.<String, Object>of("dim1", "3", "dim2", "4", "dim3", Lists.newArrayList("c", "d"), "met1", 11)));
  Assert.assertEquals(Arrays.asList("dim1", "dim2", "dim3"), index.getDimensionNames());
  Assert.assertEquals(
      Arrays.asList("count", "count_selector_filtered", "count_bound_filtered", "count_multivaldim_filtered", "count_numeric_filtered"),
      index.getMetricNames());
  Assert.assertEquals(2, index.size());
  final Iterator<Row> rows = index.iterator();
  Row row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Arrays.asList("1"), row.getDimension("dim1"));
  Assert.assertEquals(Arrays.asList("2"), row.getDimension("dim2"));
  Assert.assertEquals(Arrays.asList("a", "b"), row.getDimension("dim3"));
  Assert.assertEquals(1L, row.getLongMetric("count"));
  Assert.assertEquals(1L, row.getLongMetric("count_selector_filtered"));
  Assert.assertEquals(1L, row.getLongMetric("count_bound_filtered"));
  Assert.assertEquals(1L, row.getLongMetric("count_multivaldim_filtered"));
  Assert.assertEquals(0L, row.getLongMetric("count_numeric_filtered"));
  row = rows.next();
  Assert.assertEquals(timestamp, row.getTimestampFromEpoch());
  Assert.assertEquals(Arrays.asList("3"), row.getDimension("dim1"));
  Assert.assertEquals(Arrays.asList("4"), row.getDimension("dim2"));
  Assert.assertEquals(Arrays.asList("c", "d"), row.getDimension("dim3"));
  Assert.assertEquals(1L, row.getLongMetric("count"));
  Assert.assertEquals(0L, row.getLongMetric("count_selector_filtered"));
  Assert.assertEquals(0L, row.getLongMetric("count_bound_filtered"));
  Assert.assertEquals(0L, row.getLongMetric("count_multivaldim_filtered"));
  Assert.assertEquals(1L, row.getLongMetric("count_numeric_filtered"));
}
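Two details are worth noting in the assertions: count_multivaldim_filtered is 1 for the first row because a selector filter on a multi-value dimension matches when any of its values equals the target ("b" is one of ["a", "b"]), and count_numeric_filtered shows that the filter may also reference a metric column, matching met1 against the string "11" only on the second row.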