Use of io.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
The class VarianceTimeseriesQueryTest, method testTimeseriesWithNullFilterOnNonExistentDimension.
@Test
public void testTimeseriesWithNullFilterOnNonExistentDimension() {
// A null-valued filter on a dimension that is absent from the data should
// match every row, so the variance/stddev results must equal those of an
// unfiltered query over the same interval.
final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(VarianceTestHelper.dataSource)
    .granularity(VarianceTestHelper.dayGran)
    .filters("bobby", null)
    .intervals(VarianceTestHelper.firstToThird)
    .aggregators(VarianceTestHelper.commonPlusVarAggregators)
    .postAggregators(
        Arrays.<PostAggregator>asList(
            VarianceTestHelper.addRowsIndexConstant,
            VarianceTestHelper.stddevOfIndexPostAggr
        )
    )
    .descending(descending)
    .build();

// Variance/stddev expectations differ slightly by scan direction because
// floating-point accumulation is order-dependent.
final Result<TimeseriesResultValue> day1 = new Result<>(
    new DateTime("2011-04-01"),
    new TimeseriesResultValue(
        VarianceTestHelper.of(
            "rows", 13L,
            "index", 6626.151596069336,
            "addRowsIndexConstant", 6640.151596069336,
            "uniques", VarianceTestHelper.UNIQUES_9,
            "index_var", descending ? 368885.6897238851 : 368885.689155086,
            "index_stddev", descending ? 607.3596049490657 : 607.35960448081
        )
    )
);
final Result<TimeseriesResultValue> day2 = new Result<>(
    new DateTime("2011-04-02"),
    new TimeseriesResultValue(
        VarianceTestHelper.of(
            "rows", 13L,
            "index", 5833.2095947265625,
            "addRowsIndexConstant", 5847.2095947265625,
            "uniques", VarianceTestHelper.UNIQUES_9,
            "index_var", descending ? 259061.6037088883 : 259061.60216419376,
            "index_stddev", descending ? 508.9809463122252 : 508.98094479478675
        )
    )
);
final List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(day1, day2);

final Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, new HashMap<String, Object>()),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
assertExpectedResults(expectedResults, results);
}
Use of io.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
The class RealtimeIndexTaskTest, method sumMetric.
/**
 * Sums a long metric over every row the given task has ingested, by issuing
 * an ALL-granularity longSum timeseries query through the task's own runner.
 *
 * @param task   task whose query runner is used to execute the query
 * @param metric column name to sum (also used as the output metric name)
 * @return the summed value, or 0 when the query produces no results
 */
public long sumMetric(final Task task, final String metric) throws Exception {
// Do a query.
final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource("test_ds")
    .aggregators(ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory(metric, metric)))
    .granularity(Granularities.ALL)
    .intervals("2000/3000")
    .build();

final ArrayList<Result<TimeseriesResultValue>> queryResults = Sequences.toList(
    task.getQueryRunner(query).run(query, ImmutableMap.<String, Object>of()),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
if (queryResults.isEmpty()) {
  return 0;
}
return queryResults.get(0).getValue().getLongMetric(metric);
}
Use of io.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
The class SchemaEvolutionTest, method testNumericEvolutionTimeseriesAggregation.
@Test
public void testNumericEvolutionTimeseriesAggregation() {
final TimeseriesQueryRunnerFactory factory = QueryRunnerTestHelper.newTimeseriesQueryRunnerFactory();

// Column "c1" evolves across segments: string(1) -> long(2) -> float(3) -> nonexistent(4).
// Exercise longSum/doubleSum both directly on the column and via the expression "c1 * 1".
final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals("1000/3000")
    .aggregators(
        ImmutableList.of(
            new LongSumAggregatorFactory("a", "c1"),
            new DoubleSumAggregatorFactory("b", "c1"),
            new LongSumAggregatorFactory("c", null, "c1 * 1"),
            new DoubleSumAggregatorFactory("d", null, "c1 * 1")
        )
    )
    .build();

// Only string(1): numeric sums over the string segment come back as zero.
Assert.assertEquals(
    timeseriesResult(ImmutableMap.of("a", 0L, "b", 0.0, "c", 0L, "d", 0.0)),
    runQuery(query, factory, ImmutableList.of(index1))
);
// Only long(2)
Assert.assertEquals(
    timeseriesResult(ImmutableMap.of("a", 31L, "b", 31.0, "c", 31L, "d", 31.0)),
    runQuery(query, factory, ImmutableList.of(index2))
);
// Only float(3)
Assert.assertEquals(
    timeseriesResult(ImmutableMap.of("a", 31L, "b", THIRTY_ONE_POINT_ONE, "c", 31L, "d", THIRTY_ONE_POINT_ONE)),
    runQuery(query, factory, ImmutableList.of(index3))
);
// Only nonexistent(4)
Assert.assertEquals(
    timeseriesResult(ImmutableMap.of("a", 0L, "b", 0.0, "c", 0L, "d", 0.0)),
    runQuery(query, factory, ImmutableList.of(index4))
);
// string(1) + long(2) + float(3) + nonexistent(4)
Assert.assertEquals(
    timeseriesResult(ImmutableMap.of("a", 31L * 2, "b", THIRTY_ONE_POINT_ONE + 31, "c", 31L * 2, "d", THIRTY_ONE_POINT_ONE + 31)),
    runQuery(query, factory, ImmutableList.of(index1, index2, index3, index4))
);
// long(2) + float(3) + nonexistent(4)
Assert.assertEquals(
    timeseriesResult(ImmutableMap.of("a", 31L * 2, "b", THIRTY_ONE_POINT_ONE + 31, "c", 31L * 2, "d", THIRTY_ONE_POINT_ONE + 31)),
    runQuery(query, factory, ImmutableList.of(index2, index3, index4))
);
}
Use of io.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
The class GroupByTimeseriesQueryRunnerTest, method testFullOnTimeseriesMaxMin.
// GroupBy handles timestamps differently when granularity is ALL
@Test
public void testFullOnTimeseriesMaxMin() {
final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.dataSource)
    .granularity(Granularities.ALL)
    .intervals(QueryRunnerTestHelper.fullOnInterval)
    .aggregators(
        Arrays.asList(
            new DoubleMaxAggregatorFactory("maxIndex", "index"),
            new DoubleMinAggregatorFactory("minIndex", "index")
        )
    )
    .descending(descending)
    .build();

final DateTime expectedEarliest = new DateTime("1970-01-01");
final DateTime expectedLast = new DateTime("2011-04-15");

final Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
    runner.run(query, CONTEXT),
    Lists.<Result<TimeseriesResultValue>>newArrayList()
);
// ALL granularity yields a single result row; inspect only the first entry.
final Result<TimeseriesResultValue> result = results.iterator().next();

Assert.assertEquals(expectedEarliest, result.getTimestamp());
Assert.assertFalse(
    String.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast),
    result.getTimestamp().isAfter(expectedLast)
);

final TimeseriesResultValue value = result.getValue();
Assert.assertEquals(result.toString(), 1870.06103515625, value.getDoubleMetric("maxIndex"), 0.0);
Assert.assertEquals(result.toString(), 59.02102279663086, value.getDoubleMetric("minIndex"), 0.0);
}
Use of io.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
The class GroupByTimeseriesQueryRunnerTest, method constructorFeeder.
@SuppressWarnings("unchecked")
@Parameterized.Parameters(name = "{0}")
public static Iterable<Object[]> constructorFeeder() throws IOException {
// Parameterized-test feeder: wraps every GroupBy query runner in an adapter
// that accepts a TimeseriesQuery, so the shared timeseries test suite can be
// executed against the GroupBy engine.
GroupByQueryConfig config = new GroupByQueryConfig();
// Bound intermediate result buffering for the test configuration.
config.setMaxIntermediateRows(10000);
final GroupByQueryRunnerFactory factory = GroupByQueryRunnerTest.makeQueryRunnerFactory(config);
return QueryRunnerTestHelper.transformToConstructionFeeder(Lists.transform(QueryRunnerTestHelper.makeQueryRunners(factory), new Function<QueryRunner<Row>, Object>() {
@Nullable
@Override
public Object apply(final QueryRunner<Row> input) {
// Adapter runner: translates the incoming TimeseriesQuery into an
// equivalent dimensionless GroupByQuery, runs it, and maps each Row of
// the GroupBy output back into a timeseries Result.
return new QueryRunner() {
@Override
public Sequence run(Query query, Map responseContext) {
// Callers of this adapter only ever pass timeseries queries; the cast
// fails loudly otherwise.
TimeseriesQuery tsQuery = (TimeseriesQuery) query;
// Wrap the underlying runner with the toolchest's pre-merge decoration,
// merge, and result finalization, mirroring the normal query stack;
// mergeRunners runs on a same-thread executor to keep the test serial.
QueryRunner<Row> newRunner = factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.<QueryRunner<Row>>of(input));
QueryToolChest toolChest = factory.getToolchest();
newRunner = new FinalizeResultsQueryRunner<>(toolChest.mergeResults(toolChest.preMergeQueryDecoration(newRunner)), toolChest);
// Copy every timeseries-relevant clause (datasource, intervals,
// granularity, filter, aggregators, post-aggregators, virtual columns)
// onto the GroupBy builder, then convert each output Row to a
// Result<TimeseriesResultValue> keyed by the row's timestamp.
return Sequences.map(newRunner.run(GroupByQuery.builder().setDataSource(tsQuery.getDataSource()).setQuerySegmentSpec(tsQuery.getQuerySegmentSpec()).setGranularity(tsQuery.getGranularity()).setDimFilter(tsQuery.getDimensionsFilter()).setAggregatorSpecs(tsQuery.getAggregatorSpecs()).setPostAggregatorSpecs(tsQuery.getPostAggregatorSpecs()).setVirtualColumns(tsQuery.getVirtualColumns()).build(), responseContext), new Function<Row, Result<TimeseriesResultValue>>() {
@Override
public Result<TimeseriesResultValue> apply(final Row input) {
MapBasedRow row = (MapBasedRow) input;
return new Result<TimeseriesResultValue>(row.getTimestamp(), new TimeseriesResultValue(row.getEvent()));
}
});
}
@Override
public String toString() {
// Name each parameterized test case after the wrapped runner.
return input.toString();
}
};
}
}));
}
Aggregations