Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
From the class VarianceTimeseriesQueryTest, method testTimeseriesWithNullFilterOnNonExistentDimension.
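This test verifies that a null-valued filter on a dimension that does not exist ("bobby") matches every row, so the variance and standard-deviation aggregations come out equal to the unfiltered day-granularity totals.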
@Test
public void testTimeseriesWithNullFilterOnNonExistentDimension() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(VarianceTestHelper.dataSource)
      .granularity(VarianceTestHelper.dayGran)
      .filters("bobby", null)
      .intervals(VarianceTestHelper.firstToThird)
      .aggregators(VarianceTestHelper.commonPlusVarAggregators)
      .postAggregators(
          Arrays.<PostAggregator>asList(
              VarianceTestHelper.addRowsIndexConstant,
              VarianceTestHelper.stddevOfIndexPostAggr
          )
      )
      .descending(descending)
      .build();

  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-04-01"),
          new TimeseriesResultValue(
              VarianceTestHelper.of(
                  "rows", 13L,
                  "index", 6626.151596069336,
                  "addRowsIndexConstant", 6640.151596069336,
                  "uniques", VarianceTestHelper.UNIQUES_9,
                  "index_var", descending ? 368885.6897238851 : 368885.689155086,
                  "index_stddev", descending ? 607.3596049490657 : 607.35960448081
              )
          )
      ),
      new Result<>(
          new DateTime("2011-04-02"),
          new TimeseriesResultValue(
              VarianceTestHelper.of(
                  "rows", 13L,
                  "index", 5833.2095947265625,
                  "addRowsIndexConstant", 5847.2095947265625,
                  "uniques", VarianceTestHelper.UNIQUES_9,
                  "index_var", descending ? 259061.6037088883 : 259061.60216419376,
                  "index_stddev", descending ? 508.9809463122252 : 508.98094479478675
              )
          )
      )
  );

  Iterable<Result<TimeseriesResultValue>> results = Sequences.toList(
      runner.run(query, new HashMap<String, Object>()),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  assertExpectedResults(expectedResults, results);
}
Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
From the class RealtimeIndexTaskTest, method sumMetric.
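This helper runs an ALL-granularity timeseries query against the task's own query runner and returns the long sum of the named metric, or 0 if the query produces no rows.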
public long sumMetric(final Task task, final String metric) throws Exception {
  // Do a query.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test_ds")
      .aggregators(ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory(metric, metric)))
      .granularity(Granularities.ALL)
      .intervals("2000/3000")
      .build();

  ArrayList<Result<TimeseriesResultValue>> results = Sequences.toList(
      task.getQueryRunner(query).run(query, ImmutableMap.<String, Object>of()),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric(metric);
}
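A plausible call site for this helper, assuming a running task that has already ingested two rows counted under a "rows" metric (both the ingested data and the expected value here are hypothetical):

  Assert.assertEquals(2, sumMetric(task, "rows"));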
Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
From the class RetryQueryRunnerTest, method testNoDuplicateRetry.
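This test drives a mock QueryRunner through two retries using a shared response context: a "count" key tracks which attempt is running, and Result.MISSING_SEGMENTS_KEY simulates segments reported missing. It asserts that each retry re-queries only the segments still missing, so no segment is retried twice.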
@Test
public void testNoDuplicateRetry() throws Exception {
  Map<String, Object> context = new MapMaker().makeMap();
  context.put("count", 0);
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
          if ((int) context.get("count") == 0) {
            // assume 2 missing segments at the first run
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
            context.put("count", 1);
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          } else if ((int) context.get("count") == 1) {
            // this is the first retry
            Assert.assertTrue(
                "Should retry with 2 missing segments",
                ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 2
            );
            // assume only 1 segment is still missing at the first retry
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
            context.put("count", 2);
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          } else {
            // this is the second retry
            Assert.assertTrue(
                "Should retry with 1 missing segment",
                ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 1
            );
            // assume nothing is missing after the second retry
            context.put("count", 3);
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          }
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig() {
        private int numTries = 2;
        private boolean returnPartialResults = false;

        public int getNumTries() {
          return numTries;
        }

        public boolean returnPartialResults() {
          return returnPartialResults;
        }
      },
      jsonMapper
  );

  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
      runner.run(query, context),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  Assert.assertTrue("Should return a list with 3 elements", ((List) actualResults).size() == 3);
  Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
From the class RetryQueryRunnerTest, method testRetryMultiple.
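This test exercises the partial-results path: the mock runner reports the same segment missing on each of the first three runs and returns empty sequences, and with numTries = 4 and returnPartialResults = true the RetryQueryRunner keeps retrying until the fourth run finally succeeds and yields one result.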
@Test
public void testRetryMultiple() throws Exception {
  Map<String, Object> context = new MapMaker().makeMap();
  context.put("count", 0);
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
          if ((int) context.get("count") < 3) {
            // report the same segment missing on each of the first three runs
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
            context.put("count", (int) context.get("count") + 1);
            return Sequences.empty();
          } else {
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          }
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig() {
        private int numTries = 4;
        private boolean returnPartialResults = true;

        public int getNumTries() {
          return numTries;
        }

        public boolean returnPartialResults() {
          return returnPartialResults;
        }
      },
      jsonMapper
  );

  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
      runner.run(query, context),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1);
  Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
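Both retry tests build the same kind of anonymous RetryQueryRunnerConfig inline. A minimal sketch of a shared helper they could use instead, overriding only the two methods the tests already override (the helper name retryConfig is hypothetical):

  private static RetryQueryRunnerConfig retryConfig(final int numTries, final boolean returnPartial) {
    return new RetryQueryRunnerConfig() {
      // Same two overrides as the anonymous classes in the tests above.
      public int getNumTries() {
        return numTries;
      }

      public boolean returnPartialResults() {
        return returnPartial;
      }
    };
  }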
Use of io.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io.
From the class TimewarpOperatorTest, method testEmptyFutureInterval.
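This test checks how TimewarpOperator handles a query whose interval lies entirely in the future of its 2014-08-02 reference time: the inner runner echoes back the endpoints of the interval it receives, and the assertion shows that both result timestamps come out mapped to the reference time.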
@Test
public void testEmptyFutureInterval() throws Exception {
  QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> responseContext) {
          return Sequences.simple(
              ImmutableList.of(
                  new Result<>(query.getIntervals().get(0).getStart(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
                  new Result<>(query.getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3)))
              )
          );
        }
      },
      new DateTime("2014-08-02").getMillis()
  );

  final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder()
      .dataSource("dummy")
      .intervals("2014-08-06/2014-08-08")
      .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count")))
      .build();

  Assert.assertEquals(
      Lists.newArrayList(
          new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
          new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3)))
      ),
      Sequences.toList(queryRunner.run(query, Maps.<String, Object>newHashMap()), Lists.<Result<TimeseriesResultValue>>newArrayList())
  );
}
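All of these examples share the same query-and-collect pattern around TimeseriesResultValue. A minimal sketch of that pattern, assuming a QueryRunner<Result<TimeseriesResultValue>> named runner and a datasource named example_ds (both hypothetical):

  // Build a simple count query over everything, using the same builder calls as the tests above.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("example_ds")
      .granularity(Granularities.ALL)
      .intervals("2000/3000")
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .build();

  // Run it and materialize the lazy Sequence into a List.
  List<Result<TimeseriesResultValue>> results = Sequences.toList(
      runner.run(query, Maps.<String, Object>newHashMap()),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );

  // Each Result wraps a timestamp plus a TimeseriesResultValue holding the aggregated metrics.
  long rows = results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric("rows");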