Usage example of org.apache.druid.query.timeseries.TimeseriesQuery from the druid-io/druid project: the method testWithSpecificSegmentsOnUnionIsAnError of class QueriesTest.
@Test
public void testWithSpecificSegmentsOnUnionIsAnError()
{
  // Two descriptors over the same interval and version, differing only in partition number.
  final ImmutableList<SegmentDescriptor> descriptors = ImmutableList.of(
      new SegmentDescriptor(Intervals.of("2000/3000"), "0", 0),
      new SegmentDescriptor(Intervals.of("2000/3000"), "0", 1)
  );

  // A timeseries query against a lookup dataSource, i.e. not a table-based one.
  final TimeseriesQuery query = Druids
      .newTimeseriesQueryBuilder()
      .dataSource(new LookupDataSource("lookyloo"))
      .intervals("2000/3000")
      .granularity(Granularities.ALL)
      .build();

  // Pinning specific segments is only meaningful for table dataSources, so this must fail.
  expectedException.expect(IllegalStateException.class);
  expectedException.expectMessage("Unable to apply specific segments to non-table-based dataSource");

  final Query<Result<TimeseriesResultValue>> ignored =
      Queries.withSpecificSegments(query, descriptors);
}
Usage example of org.apache.druid.query.timeseries.TimeseriesQuery from the druid-io/druid project: the method testEmptyTimeseries of class VarianceTimeseriesQueryTest.
@Test
public void testEmptyTimeseries()
{
  // Run the variance aggregator over an interval that contains no rows.
  TimeseriesQuery query = Druids
      .newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .intervals(QueryRunnerTestHelper.EMPTY_INTERVAL)
      .aggregators(
          Arrays.asList(
              QueryRunnerTestHelper.ROWS_COUNT,
              QueryRunnerTestHelper.INDEX_DOUBLE_SUM,
              new VarianceAggregatorFactory("variance", "index", null, null)
          )
      )
      .descending(true)
      .context(BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT)
      .build();

  // With zero input rows the count is 0 and both double-valued aggregations
  // take whatever NullHandling.defaultDoubleValue() yields for the current mode.
  Map<String, Object> resultMap = new HashMap<>();
  resultMap.put("rows", 0L);
  resultMap.put("index", NullHandling.defaultDoubleValue());
  resultMap.put("variance", NullHandling.defaultDoubleValue());

  List<Result<TimeseriesResultValue>> expectedResults = ImmutableList.of(
      new Result<>(DateTimes.of("2020-04-02"), new TimeseriesResultValue(resultMap))
  );

  Iterable<Result<TimeseriesResultValue>> actualResults =
      runner.run(QueryPlus.wrap(query)).toList();

  TestHelper.assertExpectedResults(expectedResults, actualResults);
}
Usage example of org.apache.druid.query.timeseries.TimeseriesQuery from the druid-io/druid project: the method testTimeseriesWithNullFilterOnNonExistentDimension of class VarianceTimeseriesQueryTest.
@Test
public void testTimeseriesWithNullFilterOnNonExistentDimension()
{
  // A null-valued filter on a dimension ("bobby") that does not exist in the data;
  // the expected results below are the full, unfiltered per-day aggregates.
  TimeseriesQuery query = queryBuilder
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.DAY_GRAN)
      .filters("bobby", null)
      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .aggregators(VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS)
      .postAggregators(
          QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
          VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR
      )
      .descending(descending)
      .build();

  // NOTE(review): variance/stddev expectations differ slightly by scan direction —
  // presumably from floating-point accumulation order; confirm against the runner.
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          DateTimes.of("2011-04-01"),
          new TimeseriesResultValue(
              QueryRunnerTestHelper.of(
                  "rows", 13L,
                  "index", 6626.151596069336,
                  "addRowsIndexConstant", 6640.151596069336,
                  "uniques", QueryRunnerTestHelper.UNIQUES_9,
                  "index_var", descending ? 368885.6897238851 : 368885.689155086,
                  "index_stddev", descending ? 607.3596049490657 : 607.35960448081
              )
          )
      ),
      new Result<>(
          DateTimes.of("2011-04-02"),
          new TimeseriesResultValue(
              QueryRunnerTestHelper.of(
                  "rows", 13L,
                  "index", 5833.2095947265625,
                  "addRowsIndexConstant", 5847.2095947265625,
                  "uniques", QueryRunnerTestHelper.UNIQUES_9,
                  "index_var", descending ? 259061.6037088883 : 259061.60216419376,
                  "index_stddev", descending ? 508.9809463122252 : 508.98094479478675
              )
          )
      )
  );

  Iterable<Result<TimeseriesResultValue>> results =
      runner.run(QueryPlus.wrap(query)).toList();

  assertExpectedResults(expectedResults, results);
}
Usage example of org.apache.druid.query.timeseries.TimeseriesQuery from the druid-io/druid project: the method testResultArraySignature of class FieldAccessPostAggregatorTest.
@Test
public void testResultArraySignature()
{
  // Three aggregators of distinct types, each echoed by a field-access post-aggregator.
  final TimeseriesQuery query = Druids
      .newTimeseriesQueryBuilder()
      .dataSource("dummy")
      .intervals("2000/3000")
      .granularity(Granularities.HOUR)
      .aggregators(
          new CountAggregatorFactory("count"),
          new DoubleSumAggregatorFactory("double", "col1"),
          new FloatSumAggregatorFactory("float", "col2")
      )
      .postAggregators(
          new FieldAccessPostAggregator("a", "count"),
          new FieldAccessPostAggregator("b", "double"),
          new FieldAccessPostAggregator("c", "float")
      )
      .build();

  // Each field-access output ("a", "b", "c") carries the type of the column it reads.
  final RowSignature expectedSignature = RowSignature
      .builder()
      .addTimeColumn()
      .add("count", ColumnType.LONG)
      .add("double", ColumnType.DOUBLE)
      .add("float", ColumnType.FLOAT)
      .add("a", ColumnType.LONG)
      .add("b", ColumnType.DOUBLE)
      .add("c", ColumnType.FLOAT)
      .build();

  Assert.assertEquals(
      expectedSignature,
      new TimeseriesQueryQueryToolChest().resultArraySignature(query)
  );
}
Usage example of org.apache.druid.query.timeseries.TimeseriesQuery from the druid-io/druid project: the method testResultArraySignature of class JavaScriptPostAggregatorTest.
@Test
public void testResultArraySignature()
{
  // Two long sums feeding a JavaScript post-aggregator over ("delta", "total").
  final TimeseriesQuery query = Druids
      .newTimeseriesQueryBuilder()
      .dataSource("dummy")
      .intervals("2000/3000")
      .granularity(Granularities.HOUR)
      .aggregators(
          new LongSumAggregatorFactory("total", "total"),
          new LongSumAggregatorFactory("delta", "delta")
      )
      .postAggregators(
          new JavaScriptPostAggregator(
              "a",
              Lists.newArrayList("delta", "total"),
              ABS_PERCENT_FUNCTION,
              JavaScriptConfig.getEnabledInstance()
          )
      )
      .build();

  // The JavaScript post-aggregator's output column "a" is reported as DOUBLE.
  final RowSignature expectedSignature = RowSignature
      .builder()
      .addTimeColumn()
      .add("total", ColumnType.LONG)
      .add("delta", ColumnType.LONG)
      .add("a", ColumnType.DOUBLE)
      .build();

  Assert.assertEquals(
      expectedSignature,
      new TimeseriesQueryQueryToolChest().resultArraySignature(query)
  );
}
Aggregations