Use of org.joda.time.DateTime in project druid by druid-io.
The class RetryQueryRunnerTest, method testRetryMultiple.
@Test
public void testRetryMultiple() throws Exception {
  Map<String, Object> context = new MapMaker().makeMap();
  context.put("count", 0);
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
          if ((int) context.get("count") < 3) {
            // report a missing segment on each of the first three attempts
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
            context.put("count", (int) context.get("count") + 1);
            return Sequences.empty();
          } else {
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          }
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig() {
        private int numTries = 4;
        private boolean returnPartialResults = true;

        public int getNumTries() {
          return numTries;
        }

        public boolean returnPartialResults() {
          return returnPartialResults;
        }
      },
      jsonMapper);
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
      runner.run(query, context),
      Lists.<Result<TimeseriesResultValue>>newArrayList());
  Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1);
  Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
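The pattern being tested: the retrying runner re-executes the query while the response context still lists missing segments, giving up after the configured number of tries. Below is a minimal, self-contained sketch of that loop; QueryFn, MISSING_KEY, and RetrySketch are hypothetical stand-ins for illustration, not Druid's actual API.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RetrySketch {
  static final String MISSING_KEY = "missingSegments";

  interface QueryFn<T> {
    List<T> run(Map<String, Object> context);
  }

  static <T> List<T> runWithRetries(QueryFn<T> query, Map<String, Object> context, int numTries) {
    List<T> results = query.run(context);
    for (int i = 0; i < numTries; i++) {
      List<?> missing = (List<?>) context.get(MISSING_KEY);
      if (missing == null || missing.isEmpty()) {
        break; // every segment answered; no retry needed
      }
      context.put(MISSING_KEY, new ArrayList<>()); // reset before retrying
      results = query.run(context);
    }
    return results;
  }

  public static void main(String[] args) {
    Map<String, Object> ctx = new HashMap<>();
    ctx.put(MISSING_KEY, new ArrayList<>());
    int[] attempts = {0};
    List<String> out = runWithRetries(c -> {
      if (attempts[0]++ < 3) { // fail the first three attempts
        ((List<Object>) c.get(MISSING_KEY)).add("segment-1");
        return new ArrayList<>();
      }
      return Arrays.asList("result");
    }, ctx, 4);
    System.out.println(out); // [result]
  }
}

With numTries = 4, the mocked runner succeeds on the fourth attempt and the missing-segment list ends up empty, which is exactly what the two assertions in the test check.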
Use of org.joda.time.DateTime in project druid by druid-io.
The class TimewarpOperatorTest, method testEmptyFutureInterval.
@Test
public void testEmptyFutureInterval() throws Exception {
  QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> responseContext) {
          return Sequences.simple(ImmutableList.of(
              new Result<>(query.getIntervals().get(0).getStart(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
              new Result<>(query.getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3)))));
        }
      },
      new DateTime("2014-08-02").getMillis());
  final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2014-08-06/2014-08-08").aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count"))).build();
  Assert.assertEquals(
      Lists.newArrayList(
          new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
          new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3)))),
      Sequences.toList(queryRunner.run(query, Maps.<String, Object>newHashMap()), Lists.<Result<TimeseriesResultValue>>newArrayList()));
}
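The behavior under test: the query interval (2014-08-06/2014-08-08) lies entirely after the reference "now" (2014-08-02), and the operator caps un-warped result timestamps at now, so both rows come back stamped 2014-08-02. A minimal sketch of that capping rule follows; the names are hypothetical and this is not TimewarpOperator's actual code.

import org.joda.time.DateTime;

public class TimewarpCapSketch {
  static DateTime shiftAndCap(DateTime warpedResultTime, long offsetMillis, DateTime now) {
    DateTime shifted = warpedResultTime.plus(offsetMillis); // undo the warp offset
    return shifted.isAfter(now) ? now : shifted;            // never report future times
  }

  public static void main(String[] args) {
    DateTime now = new DateTime("2014-08-02");
    // a timestamp that would un-warp to 2014-08-06 gets capped to now:
    System.out.println(shiftAndCap(new DateTime("2014-08-06"), 0L, now));
  }
}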
Use of org.joda.time.DateTime in project druid by druid-io.
The class TimewarpOperatorTest, method testPostProcess.
@Test
public void testPostProcess() throws Exception {
  QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> responseContext) {
          return Sequences.simple(ImmutableList.of(
              new Result<>(new DateTime("2014-01-09"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
              new Result<>(new DateTime("2014-01-11"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))),
              new Result<>(query.getIntervals().get(0).getEnd(), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 5)))));
        }
      },
      new DateTime("2014-08-02").getMillis());
  final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2014-07-31/2014-08-05").aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count"))).build();
  Assert.assertEquals(
      Lists.newArrayList(
          new Result<>(new DateTime("2014-07-31"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))),
          new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))),
          new Result<>(new DateTime("2014-08-02"), new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 5)))),
      Sequences.toList(queryRunner.run(query, CONTEXT), Lists.<Result<TimeseriesResultValue>>newArrayList()));
  TimewarpOperator<Result<TimeBoundaryResultValue>> timeBoundaryOperator = new TimewarpOperator<>(
      new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")),
      new Period("P1W"),
      new DateTime("2014-01-06")); // align on Monday
  QueryRunner<Result<TimeBoundaryResultValue>> timeBoundaryRunner = timeBoundaryOperator.postProcess(
      new QueryRunner<Result<TimeBoundaryResultValue>>() {
        @Override
        public Sequence<Result<TimeBoundaryResultValue>> run(Query<Result<TimeBoundaryResultValue>> query, Map<String, Object> responseContext) {
          return Sequences.simple(ImmutableList.of(
              new Result<>(new DateTime("2014-01-12"), new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", new DateTime("2014-01-12"))))));
        }
      },
      new DateTime("2014-08-02").getMillis());
  final Query<Result<TimeBoundaryResultValue>> timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder().dataSource("dummy").build();
  Assert.assertEquals(
      Lists.newArrayList(
          new Result<>(new DateTime("2014-08-02"), new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", new DateTime("2014-08-02"))))),
      Sequences.toList(timeBoundaryRunner.run(timeBoundaryQuery, CONTEXT), Lists.<Result<TimeBoundaryResultValue>>newArrayList()));
}
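Behind both assertions is one offset computation: the operator maps "now" into the warp interval by whole periods and shifts query intervals and result timestamps by that amount. The following is a hedged, self-contained sketch of that idea; the class and method names are hypothetical, it assumes the warp interval is at least one period long, and it ignores the Monday origin alignment the real TimewarpOperator honors.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.Period;

public class TimewarpOffsetSketch {
  static long computeOffset(long now, Interval warpInterval, Period period) {
    long periodMillis = period.toStandardDuration().getMillis();
    long t = now;
    while (t >= warpInterval.getEndMillis()) {
      t -= periodMillis; // rewind one period at a time
    }
    while (t < warpInterval.getStartMillis()) {
      t += periodMillis;
    }
    return t - now; // negative when "now" is after the warp interval
  }

  public static void main(String[] args) {
    DateTimeZone utc = DateTimeZone.UTC;
    Interval warp = new Interval(new DateTime("2014-01-01", utc), new DateTime("2014-01-15", utc));
    long now = new DateTime("2014-08-02", utc).getMillis();
    long offset = computeOffset(now, warp, new Period("P1W"));
    System.out.println(new DateTime(now + offset, utc)); // 2014-01-11T00:00:00.000Z, inside the warp interval
  }
}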
Use of org.joda.time.DateTime in project druid by druid-io.
The class DataSourceMetadataQueryTest, method testResultSerialization.
@Test
public void testResultSerialization() {
  final DataSourceMetadataResultValue resultValue = new DataSourceMetadataResultValue(new DateTime("2000-01-01T00Z"));
  final Map<String, Object> resultValueMap = new DefaultObjectMapper().convertValue(resultValue, new TypeReference<Map<String, Object>>() {});
  Assert.assertEquals(ImmutableMap.<String, Object>of("maxIngestedEventTime", "2000-01-01T00:00:00.000Z"), resultValueMap);
}
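The assertion pins the serialized form to the ISO-8601 millisecond format Joda uses by default: the mapper writes the DateTime as that string rather than as a nested object. A small sketch of the formatter producing that exact shape, shown for illustration rather than as DefaultObjectMapper's internals:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;

public class IsoSerializationSketch {
  public static void main(String[] args) {
    DateTime maxIngestedEventTime = new DateTime("2000-01-01T00Z", DateTimeZone.UTC);
    // yyyy-MM-dd'T'HH:mm:ss.SSS with a "Z" marker for UTC:
    System.out.println(ISODateTimeFormat.dateTime().print(maxIngestedEventTime)); // 2000-01-01T00:00:00.000Z
    System.out.println(maxIngestedEventTime.toString()); // toString() uses the same format
  }
}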
Use of org.joda.time.DateTime in project druid by druid-io.
The class DataSourceMetadataQueryTest, method testMaxIngestedEventTime.
@Test
public void testMaxIngestedEventTime() throws Exception {
  final IncrementalIndex rtIndex = new OnheapIncrementalIndex(
      0L, Granularities.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000);
  final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
      (QueryRunnerFactory) new DataSourceMetadataQueryRunnerFactory(QueryRunnerTestHelper.NOOP_QUERYWATCHER),
      new IncrementalIndexSegment(rtIndex, "test"),
      null);
  DateTime timestamp = new DateTime(System.currentTimeMillis());
  rtIndex.add(new MapBasedInputRow(timestamp.getMillis(), ImmutableList.of("dim1"), ImmutableMap.<String, Object>of("dim1", "x")));
  DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder().dataSource("testing").build();
  Map<String, Object> context = new MapMaker().makeMap();
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  Iterable<Result<DataSourceMetadataResultValue>> results = Sequences.toList(
      runner.run(dataSourceMetadataQuery, context),
      Lists.<Result<DataSourceMetadataResultValue>>newArrayList());
  DataSourceMetadataResultValue val = results.iterator().next().getValue();
  DateTime maxIngestedEventTime = val.getMaxIngestedEventTime();
  Assert.assertEquals(timestamp, maxIngestedEventTime);
}
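What the test verifies end to end: the index remembers the largest event timestamp it has ingested, and the metadata query surfaces it unchanged. A hedged, illustrative sketch of that bookkeeping, using a stand-in class rather than Druid's IncrementalIndex:

import java.util.concurrent.atomic.AtomicLong;
import org.joda.time.DateTime;

public class MaxIngestedTimeSketch {
  private final AtomicLong maxIngestedEventTime = new AtomicLong(Long.MIN_VALUE);

  void onRowAdded(long eventTimestampMillis) {
    // keep the largest timestamp observed so far; safe under concurrent adds
    maxIngestedEventTime.accumulateAndGet(eventTimestampMillis, Math::max);
  }

  DateTime getMaxIngestedEventTime() {
    long max = maxIngestedEventTime.get();
    return max == Long.MIN_VALUE ? null : new DateTime(max); // null until a row arrives
  }
}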