Use of io.druid.query.Result in project druid by druid-io.
From class TimeseriesQueryRunnerTest, method testTimeseriesWithMultiDimFilterAndOr: a day-granularity query whose filter ANDs a market selector with an OR over two quality values.
@Test
public void testTimeseriesWithMultiDimFilterAndOr() {
AndDimFilter andDimFilter = Druids.newAndDimFilterBuilder()
    .fields(
        Arrays.<DimFilter>asList(
            Druids.newSelectorDimFilterBuilder()
                  .dimension(QueryRunnerTestHelper.marketDimension)
                  .value("spot")
                  .build(),
            Druids.newOrDimFilterBuilder()
                  .fields(QueryRunnerTestHelper.qualityDimension, "automotive", "business")
                  .build()
        )
    )
    .build();
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(QueryRunnerTestHelper.dayGran)
    .filters(andDimFilter)
    .intervals(QueryRunnerTestHelper.firstToThird)
    .aggregators(QueryRunnerTestHelper.commonAggregators)
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(
            ImmutableMap.<String, Object>of(
                "rows", 2L,
                "index", 254.4554443359375D,
                "addRowsIndexConstant", 257.4554443359375D,
                "uniques", QueryRunnerTestHelper.UNIQUES_2
            )
        )
    ),
    new Result<>(
        new DateTime("2011-04-02"),
        new TimeseriesResultValue(
            ImmutableMap.<String, Object>of(
                "rows", 2L,
                "index", 260.4129638671875D,
                "addRowsIndexConstant", 263.4129638671875D,
                "uniques", QueryRunnerTestHelper.UNIQUES_2
            )
        )
    )
);
Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
assertExpectedResults(expectedResults, results);
}
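The filter above reads more easily when the builder calls are unrolled into intermediates. A sketch using the same Druids builders and test-helper names (the local variable names are illustrative only): it keeps rows where market is "spot" and quality is either "automotive" or "business".
// Unrolled form of the filter built in the test above (sketch, not project code).
DimFilter spotMarket = Druids.newSelectorDimFilterBuilder()
    .dimension(QueryRunnerTestHelper.marketDimension)
    .value("spot")
    .build();
DimFilter automotiveOrBusiness = Druids.newOrDimFilterBuilder()
    .fields(QueryRunnerTestHelper.qualityDimension, "automotive", "business")
    .build();
DimFilter spotAndQuality = Druids.newAndDimFilterBuilder()
    .fields(Arrays.<DimFilter>asList(spotMarket, automotiveOrBusiness))
    .build();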
From class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggInvertedNullValue: adds a filtered count aggregator that negates a null-value selector on the market dimension.
@Test
public void testTimeSeriesWithFilteredAggInvertedNullValue() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(QueryRunnerTestHelper.allGran)
    .intervals(QueryRunnerTestHelper.firstToThird)
    .aggregators(
        Lists.newArrayList(
            Iterables.concat(
                QueryRunnerTestHelper.commonAggregators,
                Lists.newArrayList(
                    new FilteredAggregatorFactory(
                        new CountAggregatorFactory("filteredAgg"),
                        new NotDimFilter(
                            Druids.newSelectorDimFilterBuilder()
                                  .dimension(QueryRunnerTestHelper.marketDimension)
                                  .value(null)
                                  .build()
                        )
                    )
                )
            )
        )
    )
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(
            ImmutableMap.<String, Object>of(
                "filteredAgg", 26L,
                "addRowsIndexConstant", 12486.361190795898d,
                "index", 12459.361190795898d,
                "uniques", 9.019833517963864d,
                "rows", 26L
            )
        )
    )
);
assertExpectedResults(expectedResults, actualResults);
}
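The "inverted null value" is the interesting part of this test: a selector whose value is null matches rows where the market dimension is missing, and wrapping it in NotDimFilter keeps only rows with a non-null market. Every row in the test data has a market value, so filteredAgg (26) equals rows (26). A minimal sketch of just that aggregator, reusing the classes from the snippet above:
// Sketch: count only rows whose market dimension is non-null (here, all of them).
AggregatorFactory nonNullMarketCount = new FilteredAggregatorFactory(
    new CountAggregatorFactory("filteredAgg"),
    new NotDimFilter(
        Druids.newSelectorDimFilterBuilder()
              .dimension(QueryRunnerTestHelper.marketDimension)
              .value(null)
              .build()
    )
);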
From class TimeseriesQueryRunnerTest, method testTimeseriesWithNonExistentFilterAndMultiDim: ANDs a selector on a non-existent market value ("billy") with a quality selector, so no rows match.
@Test
public void testTimeseriesWithNonExistentFilterAndMultiDim() {
AndDimFilter andDimFilter = Druids.newAndDimFilterBuilder()
    .fields(
        Arrays.<DimFilter>asList(
            Druids.newSelectorDimFilterBuilder()
                  .dimension(QueryRunnerTestHelper.marketDimension)
                  .value("billy")
                  .build(),
            Druids.newSelectorDimFilterBuilder()
                  .dimension(QueryRunnerTestHelper.qualityDimension)
                  .value("business")
                  .build()
        )
    )
    .build();
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(QueryRunnerTestHelper.dayGran)
    .filters(andDimFilter)
    .intervals(QueryRunnerTestHelper.firstToThird)
    .aggregators(QueryRunnerTestHelper.commonAggregators)
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-04-01"),
        new TimeseriesResultValue(
            ImmutableMap.<String, Object>of("rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, "uniques", 0.0)
        )
    ),
    new Result<>(
        new DateTime("2011-04-02"),
        new TimeseriesResultValue(
            ImmutableMap.<String, Object>of("rows", 0L, "index", 0.0, "addRowsIndexConstant", 1.0, "uniques", 0.0)
        )
    )
);
Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
assertExpectedResults(expectedResults, results);
}
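Note that a filter matching nothing does not suppress output: each day in the interval still yields a Result whose aggregates take their identity values, and the addRowsIndexConstant post-aggregator used throughout these snippets evaluates to index + rows + 1 (the same identity testFullOnTimeseries asserts below), hence 1.0 for both empty days. A standalone arithmetic check, not part of the test:
// addRowsIndexConstant = index + rows + 1; for an empty bucket that is 0.0 + 0 + 1.
double index = 0.0;
long rows = 0L;
double addRowsIndexConstant = index + rows + 1;  // 1.0, matching both expected days above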
From class TimeseriesQueryRunnerTest, method testFullOnTimeseries: scans the full interval at day granularity and checks each bucket's row count, index sum, post-aggregator, and uniques, plus the timestamp of the final result.
@Test
public void testFullOnTimeseries() {
Granularity gran = Granularities.DAY;
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(gran)
    .intervals(QueryRunnerTestHelper.fullOnInterval)
    .aggregators(
        Arrays.asList(
            QueryRunnerTestHelper.rowsCount,
            QueryRunnerTestHelper.indexDoubleSum,
            QueryRunnerTestHelper.qualityUniques
        )
    )
    .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
    .descending(descending)
    .build();
Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
final String[] expectedIndex = descending ? QueryRunnerTestHelper.expectedFullOnIndexValuesDesc : QueryRunnerTestHelper.expectedFullOnIndexValues;
final DateTime expectedLast = descending ? QueryRunnerTestHelper.earliest : QueryRunnerTestHelper.last;
int count = 0;
Result lastResult = null;
for (Result<TimeseriesResultValue> result : results) {
DateTime current = result.getTimestamp();
Assert.assertFalse(
    String.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast),
    descending ? current.isBefore(expectedLast) : current.isAfter(expectedLast)
);
final TimeseriesResultValue value = result.getValue();
Assert.assertEquals(
    result.toString(),
    QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L,
    value.getLongMetric("rows").longValue()
);
Assert.assertEquals(
    result.toString(),
    expectedIndex[count],
    String.valueOf(value.getDoubleMetric("index"))
);
Assert.assertEquals(
    result.toString(),
    new Double(expectedIndex[count]) + (QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L) + 1L,
    value.getDoubleMetric("addRowsIndexConstant"),
    0.0
);
Assert.assertEquals(
    value.getDoubleMetric("uniques"),
    QueryRunnerTestHelper.skippedDay.equals(current) ? 0.0d : 9.0d,
    0.02
);
lastResult = result;
++count;
}
Assert.assertEquals(lastResult.toString(), expectedLast, lastResult.getTimestamp());
}
From class TimeseriesQueryRunnerTest, method testTimeseriesWithTimeZone: buckets by a one-day PeriodGranularity pinned to America/Los_Angeles, so result timestamps carry that zone.
@Test
public void testTimeseriesWithTimeZone() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .intervals("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00")
    .aggregators(
        Arrays.<AggregatorFactory>asList(
            QueryRunnerTestHelper.rowsCount,
            new LongSumAggregatorFactory("idx", "index")
        )
    )
    .granularity(new PeriodGranularity(new Period("P1D"), null, DateTimeZone.forID("America/Los_Angeles")))
    .descending(descending)
    .build();
List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
    new Result<>(
        new DateTime("2011-03-31", DateTimeZone.forID("America/Los_Angeles")),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of("rows", 13L, "idx", 6619L))
    ),
    new Result<>(
        new DateTime("2011-04-01T", DateTimeZone.forID("America/Los_Angeles")),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of("rows", 13L, "idx", 5827L))
    )
);
Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
assertExpectedResults(expectedResults, results);
}
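Taken together, these snippets show one consumption pattern for io.druid.query.Result in timeseries tests: each Result pairs a bucket timestamp with a TimeseriesResultValue whose metrics are read back by name. A minimal sketch of that pattern, reusing the query, runner, and CONTEXT from the snippet above (the loop variable names are illustrative):
// Sketch: materialize the sequence and read each bucket's timestamp and metrics.
Iterable<Result<TimeseriesResultValue>> timeZoneResults = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
for (Result<TimeseriesResultValue> result : timeZoneResults) {
  DateTime bucketStart = result.getTimestamp();        // zoned per the query's PeriodGranularity
  long rows = result.getValue().getLongMetric("rows"); // aggregator outputs looked up by name
  long idx = result.getValue().getLongMetric("idx");
  System.out.println(bucketStart + " rows=" + rows + " idx=" + idx);
}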