Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
The class TestHelper, method assertTimeseriesResultValue:
private static void assertTimeseriesResultValue(String msg, Result expected, Result actual) {
  // Custom equals check to get fuzzy comparison of numerics, useful because different groupBy strategies don't
  // always generate exactly the same results (different merge ordering / float vs double)
  Assert.assertEquals(StringUtils.format("%s: timestamp", msg), expected.getTimestamp(), actual.getTimestamp());
  TimeseriesResultValue expectedVal = (TimeseriesResultValue) expected.getValue();
  TimeseriesResultValue actualVal = (TimeseriesResultValue) actual.getValue();
  final Map<String, Object> expectedMap = expectedVal.getBaseObject();
  final Map<String, Object> actualMap = actualVal.getBaseObject();
  assertRow(
      msg,
      new MapBasedRow(expected.getTimestamp(), expectedMap),
      new MapBasedRow(actual.getTimestamp(), actualMap)
  );
}
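For reference, here is a minimal sketch of the kind of Result pair this helper compares. The timestamp and metric values are hypothetical; only the Result and TimeseriesResultValue constructors already shown on this page are assumed, and the call itself would have to live inside TestHelper since the method is private (external callers go through TestHelper.assertExpectedObjects, as in the next example).

Result<TimeseriesResultValue> expected = new Result<>(
    DateTimes.of("2011-04-01"),
    new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "index", 6626.151596))
);
Result<TimeseriesResultValue> actual = new Result<>(
    DateTimes.of("2011-04-01"),
    new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "index", 6626.151611))  // small float-precision drift
);
// Timestamps must match exactly; the numeric metrics are compared with a tolerance.
assertTimeseriesResultValue("timeseries", expected, actual);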
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
The class VectorizedVirtualColumnTest, method testTimeseriesNoVirtual:
private void testTimeseriesNoVirtual(ColumnCapabilities capabilities, Map<String, Object> context) {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .intervals("2000/2030").dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(Granularities.ALL).virtualColumns()
      .aggregators(new CountAggregatorFactory(COUNT)).context(context)
      .build();
  Sequence seq = timeseriesTestHelper.runQueryOnSegmentsObjs(segments, query);
  List<Result<TimeseriesResultValue>> expectedResults = ImmutableList.of(
      new Result<>(DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.of(COUNT, 2418L)))
  );
  TestHelper.assertExpectedObjects(expectedResults, seq.toList(), "failed");
}
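If an individual metric is needed rather than a whole-list comparison, the aggregated count can also be read back from the TimeseriesResultValue directly. A small sketch using only the accessors that appear elsewhere on this page (the unchecked assignment mirrors the raw Sequence returned by the helper above):

List<Result<TimeseriesResultValue>> results = seq.toList();           // unchecked: seq is declared as a raw Sequence
Map<String, Object> row = results.get(0).getValue().getBaseObject();  // e.g. {count=2418}
Assert.assertEquals(2418L, ((Number) row.get(COUNT)).longValue());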
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
The class TimewarpOperatorTest, method testPostProcessWithTimezonesAndDstShift:
@Test
public void testPostProcessWithTimezonesAndDstShift() {
  QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(QueryPlus<Result<TimeseriesResultValue>> queryPlus, ResponseContext responseContext) {
          return Sequences.simple(ImmutableList.of(
              new Result<>(DateTimes.of("2014-01-09T-08"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
              new Result<>(DateTimes.of("2014-01-11T-08"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
              new Result<>(queryPlus.getQuery().getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.of("metric", 5)))));
        }
      }, DateTimes.of("2014-08-02T-07").getMillis());
  final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder()
      .dataSource("dummy").intervals("2014-07-31T-07/2014-08-05T-07")
      .granularity(new PeriodGranularity(new Period("P1D"), null, DateTimes.inferTzFromString("America/Los_Angeles")))
      .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))).build();
  Assert.assertEquals(Lists.newArrayList(
      new Result<>(DateTimes.of("2014-07-31T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
      new Result<>(DateTimes.of("2014-08-02T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
      new Result<>(DateTimes.of("2014-08-02T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 5)))
  ), queryRunner.run(QueryPlus.wrap(query)).toList());
}
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
The class TimewarpOperatorTest, method testPostProcessWithTimezonesAndNoDstShift:
@Test
public void testPostProcessWithTimezonesAndNoDstShift() {
  QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(QueryPlus<Result<TimeseriesResultValue>> queryPlus, ResponseContext responseContext) {
          return Sequences.simple(ImmutableList.of(
              new Result<>(DateTimes.of("2014-01-09T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
              new Result<>(DateTimes.of("2014-01-11T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
              new Result<>(queryPlus.getQuery().getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.of("metric", 5)))));
        }
      }, DateTimes.of("2014-08-02T-07").getMillis());
  final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder()
      .dataSource("dummy").intervals("2014-07-31T-07/2014-08-05T-07")
      .granularity(new PeriodGranularity(new Period("P1D"), null, DateTimes.inferTzFromString("America/Phoenix")))
      .aggregators(Collections.singletonList(new CountAggregatorFactory("count"))).build();
  Assert.assertEquals(Lists.newArrayList(
      new Result<>(DateTimes.of("2014-07-31T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
      new Result<>(DateTimes.of("2014-08-02T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
      new Result<>(DateTimes.of("2014-08-02T-07"), new TimeseriesResultValue(ImmutableMap.of("metric", 5)))
  ), queryRunner.run(QueryPlus.wrap(query)).toList());
}
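In both tests above, testOperator is a TimewarpOperator configured elsewhere in TimewarpOperatorTest; postProcess shifts each warped January timestamp forward by a whole number of the operator's periods into the query's July/August interval. As a hedged illustration of why the two tests differ only in timezone, the 29-week offset below is inferred from the expected results, not read from the operator:

// 2014-01-09 (a Thursday) plus exactly 29 weeks is 2014-07-31 (also a Thursday), but in
// America/Los_Angeles that instant is one hour past local midnight because of the DST change.
// The DstShift test checks that the operator still reports local midnight (2014-07-31T-07),
// while the NoDstShift test uses America/Phoenix, which observes no such shift.
DateTime warped = DateTimes.of("2014-01-09T-08");
DateTime restored = warped.plus(Weeks.weeks(29));  // org.joda.time.Weeks; same instant as 2014-07-31T01:00-07:00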
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
The class GroupByTimeseriesQueryRunnerTest, method testFullOnTimeseriesMaxMin:
// GroupBy handles timestamps differently when granularity is ALL
@Override
@Test
public void testFullOnTimeseriesMaxMin() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(Granularities.ALL)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(
          new DoubleMaxAggregatorFactory("maxIndex", "index"),
          new DoubleMinAggregatorFactory("minIndex", "index")
      )
      .descending(descending)
      .build();
  DateTime expectedEarliest = DateTimes.of("1970-01-01");
  DateTime expectedLast = DateTimes.of("2011-04-15");
  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
  Result<TimeseriesResultValue> result = results.iterator().next();
  Assert.assertEquals(expectedEarliest, result.getTimestamp());
  Assert.assertFalse(
      StringUtils.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast),
      result.getTimestamp().isAfter(expectedLast)
  );
  final TimeseriesResultValue value = result.getValue();
  Assert.assertEquals(result.toString(), 1870.061029, value.getDoubleMetric("maxIndex"), 1870.061029 * 1e-6);
  Assert.assertEquals(result.toString(), 59.021022, value.getDoubleMetric("minIndex"), 59.021022 * 1e-6);
}
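The relative tolerance in the last two assertions (about one part per million) is what permits the float/double and merge-order drift mentioned in the TestHelper comment at the top of this page. Since getBaseObject exposes the same per-metric map that getDoubleMetric reads (both accessors appear in the snippets above), an equivalent check could be sketched as:

Map<String, Object> row = value.getBaseObject();
double maxIndex = ((Number) row.get("maxIndex")).doubleValue();  // same value as value.getDoubleMetric("maxIndex")
Assert.assertEquals(1870.061029, maxIndex, 1870.061029 * 1e-6);  // relative tolerance of ~1e-6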