Use of io.druid.query.Result in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeseriesWithOtherMultiDimFilter:
@Test
public void testTimeseriesWithOtherMultiDimFilter() {
AndDimFilter andDimFilter = Druids.newAndDimFilterBuilder()
    .fields(Arrays.<DimFilter>asList(
        Druids.newSelectorDimFilterBuilder().dimension(QueryRunnerTestHelper.marketDimension).value("spot").build(),
        Druids.newSelectorDimFilterBuilder().dimension(QueryRunnerTestHelper.qualityDimension).value("business").build()))
    .build();
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(QueryRunnerTestHelper.dayGran)
    .filters(andDimFilter)
    .intervals(QueryRunnerTestHelper.firstToThird)
    .aggregators(QueryRunnerTestHelper.commonAggregators)
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "rows", 1L,
            "index", new Float(118.570340).doubleValue(),
            "addRowsIndexConstant", new Float(120.570340).doubleValue(),
            "uniques", QueryRunnerTestHelper.UNIQUES_1))),
    new Result<>(
        new DateTime("2011-04-02"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "rows", 1L,
            "index", new Float(112.987027).doubleValue(),
            "addRowsIndexConstant", new Float(114.987027).doubleValue(),
            "uniques", QueryRunnerTestHelper.UNIQUES_1))));
Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList());
assertExpectedResults(expectedResults, results);
}
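The expected figures above also hint at how the addRowsIndexConstant post-aggregator behaves: the numbers are consistent with it summing the rows count, the index metric, and a constant of 1. A minimal sketch of that arithmetic, using the 2011-04-01 values from the expected map (the local variable names are illustrative, not from the test):

// Sketch only: addRowsIndexConstant appears to equal rows + index + 1,
// e.g. for the 2011-04-01 bucket above: 1 + 118.570340 + 1 = 120.570340.
double index = new Float(118.570340).doubleValue();
long rows = 1L;
double addRowsIndexConstant = rows + index + 1.0;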
Use of io.druid.query.Result in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggValueNotPresent:
@Test
public void testTimeSeriesWithFilteredAggValueNotPresent() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(QueryRunnerTestHelper.allGran)
    .intervals(QueryRunnerTestHelper.firstToThird)
    .aggregators(Lists.newArrayList(Iterables.concat(
        QueryRunnerTestHelper.commonAggregators,
        Lists.newArrayList(new FilteredAggregatorFactory(
            new CountAggregatorFactory("filteredAgg"),
            new NotDimFilter(Druids.newSelectorDimFilterBuilder().dimension(QueryRunnerTestHelper.marketDimension).value("LolLol").build()))))))
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList());
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "filteredAgg", 26L,
            "addRowsIndexConstant", 12486.361190795898d,
            "index", 12459.361190795898d,
            "uniques", 9.019833517963864d,
            "rows", 26L))));
assertExpectedResults(expectedResults, actualResults);
}
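The detail this test exercises is the value of the filtered count: the NotDimFilter wraps a selector for a market value ("LolLol") that is not present in the test data, so the filter matches every row and filteredAgg equals the plain rows count (26). The aggregator pulled out of the builder chain above for readability (same construction, new local name just for illustration):

// Counts only rows whose market dimension is NOT "LolLol"; since no such
// value exists in the data, it counts all 26 rows in the queried interval.
FilteredAggregatorFactory notLolLolCount = new FilteredAggregatorFactory(
    new CountAggregatorFactory("filteredAgg"),
    new NotDimFilter(
        Druids.newSelectorDimFilterBuilder()
            .dimension(QueryRunnerTestHelper.marketDimension)
            .value("LolLol")
            .build()));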
Use of io.druid.query.Result in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeseriesWithVaryingGranWithFilter:
@Test
public void testTimeseriesWithVaryingGranWithFilter() {
TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market")
    .granularity(new PeriodGranularity(new Period("P1M"), null, null))
    .intervals(Arrays.asList(new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
    .aggregators(Arrays.<AggregatorFactory>asList(
        QueryRunnerTestHelper.rowsCount,
        new LongSumAggregatorFactory("idx", "index"),
        QueryRunnerTestHelper.qualityUniques))
    .descending(descending)
    .build();
List<Result<TimeseriesResultValue>> expectedResults1 = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "rows", 13L,
            "idx", 5827L,
            "uniques", QueryRunnerTestHelper.UNIQUES_9))));
Iterable<Result<TimeseriesResultValue>> results1 = Sequences.toList(
    runner.run(query1, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList());
assertExpectedResults(expectedResults1, results1);
TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market")
    .granularity("DAY")
    .intervals(Arrays.asList(new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
    .aggregators(Arrays.<AggregatorFactory>asList(
        QueryRunnerTestHelper.rowsCount,
        new LongSumAggregatorFactory("idx", "index"),
        QueryRunnerTestHelper.qualityUniques))
    .build();
List<Result<TimeseriesResultValue>> expectedResults2 = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-02"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "rows", 13L,
            "idx", 5827L,
            "uniques", QueryRunnerTestHelper.UNIQUES_9))));
Iterable<Result<TimeseriesResultValue>> results2 = Sequences.toList(
    runner.run(query2, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList());
assertExpectedResults(expectedResults2, results2);
}
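The point of this test is the bucketing difference between the two granularities: both queries cover only 2011-04-02, but the P1M (monthly) granularity truncates that day to the start of its month, so expectedResults1 is keyed by 2011-04-01 while the "DAY" query keeps 2011-04-02. A trivial sketch of the two bucket timestamps being asserted (illustrative only):

// Same queried day, two different bucket starts in the expected Result objects.
DateTime monthlyBucket = new DateTime("2011-04-01"); // P1M truncates 2011-04-02 to the month start
DateTime dailyBucket = new DateTime("2011-04-02");   // "DAY" granularity keeps the day bucket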
Use of io.druid.query.Result in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeseriesWithMultiDimFilter:
@Test
public void testTimeseriesWithMultiDimFilter() {
AndDimFilter andDimFilter = Druids.newAndDimFilterBuilder()
    .fields(Arrays.<DimFilter>asList(
        Druids.newSelectorDimFilterBuilder().dimension(QueryRunnerTestHelper.marketDimension).value("spot").build(),
        Druids.newSelectorDimFilterBuilder().dimension(QueryRunnerTestHelper.qualityDimension).value("automotive").build()))
    .build();
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(QueryRunnerTestHelper.dayGran)
    .filters(andDimFilter)
    .intervals(QueryRunnerTestHelper.firstToThird)
    .aggregators(QueryRunnerTestHelper.commonAggregators)
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "rows", 1L,
            "index", new Float(135.885094).doubleValue(),
            "addRowsIndexConstant", new Float(137.885094).doubleValue(),
            "uniques", QueryRunnerTestHelper.UNIQUES_1))),
    new Result<>(
        new DateTime("2011-04-02"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "rows", 1L,
            "index", new Float(147.425935).doubleValue(),
            "addRowsIndexConstant", new Float(149.425935).doubleValue(),
            "uniques", QueryRunnerTestHelper.UNIQUES_1))));
Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList());
assertExpectedResults(expectedResults, results);
}
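Since every test on this page builds and compares io.druid.query.Result objects, it may help to note how one is read back after construction: a Result pairs a bucket timestamp with a value object. A minimal sketch using only the constructor seen above plus Result's getTimestamp() and getValue() accessors (the trimmed-down metric map and the local names are illustrative):

// Sketch: a Result pairs a bucket timestamp with a TimeseriesResultValue.
Result<TimeseriesResultValue> single = new Result<>(
    new DateTime("2011-04-01"),
    new TimeseriesResultValue(ImmutableMap.<String, Object>of("rows", 1L)));
DateTime bucketTime = single.getTimestamp();           // the bucket start passed in above
TimeseriesResultValue bucketValue = single.getValue(); // wraps the metric map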
Use of io.druid.query.Result in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAgg:
@Test
public void testTimeSeriesWithFilteredAgg() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(QueryRunnerTestHelper.allGran)
    .intervals(QueryRunnerTestHelper.firstToThird)
    .aggregators(Lists.newArrayList(Iterables.concat(
        QueryRunnerTestHelper.commonAggregators,
        Lists.newArrayList(new FilteredAggregatorFactory(
            new CountAggregatorFactory("filteredAgg"),
            Druids.newSelectorDimFilterBuilder().dimension(QueryRunnerTestHelper.marketDimension).value("spot").build())))))
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList());
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of(
            "filteredAgg", 18L,
            "addRowsIndexConstant", 12486.361190795898d,
            "index", 12459.361190795898d,
            "uniques", 9.019833517963864d,
            "rows", 26L))));
assertExpectedResults(expectedResults, actualResults);
}
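In contrast to testTimeSeriesWithFilteredAggValueNotPresent above, the selector here matches only a subset of the data, so the filtered count (18) is smaller than the total rows in the bucket (26). The aggregator pulled out of the builder chain for readability (same construction as above, local name for illustration only):

// Counts only rows whose market dimension equals "spot": 18 of the 26 rows
// in the queried interval, matching the expected "filteredAgg" value.
FilteredAggregatorFactory spotOnlyCount = new FilteredAggregatorFactory(
    new CountAggregatorFactory("filteredAgg"),
    Druids.newSelectorDimFilterBuilder()
        .dimension(QueryRunnerTestHelper.marketDimension)
        .value("spot")
        .build());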