Use of org.joda.time.DateTime in project druid by druid-io.
From the class QueryGranularityTest, method testIterableQuarterSimple.
@Test
public void testIterableQuarterSimple() {
  final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z");
  assertSameInterval(
      Lists.newArrayList(
          new DateTime("2011-01-01T00:00:00.000Z"),
          new DateTime("2011-04-01T00:00:00.000Z"),
          new DateTime("2011-07-01T00:00:00.000Z")
      ),
      Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis()))
  );
}
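For context, here is a minimal Joda-Time sketch of the quarter bucketing the test expects: floor the start instant to the first month of its calendar quarter, then step forward three months at a time until the exclusive end is reached. This is only an illustration of the arithmetic, not Druid's Granularities.QUARTER implementation; quarterStart is a hypothetical helper.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Months;

public class QuarterBucketSketch {
  // Floor a DateTime to the start of its calendar quarter (Jan/Apr/Jul/Oct 1, 00:00).
  static DateTime quarterStart(DateTime t) {
    int quarterFirstMonth = ((t.getMonthOfYear() - 1) / 3) * 3 + 1;
    return t.withMonthOfYear(quarterFirstMonth).withDayOfMonth(1).withTimeAtStartOfDay();
  }

  public static void main(String[] args) {
    DateTime base = new DateTime("2011-01-01T00:00:00.000Z", DateTimeZone.UTC);
    DateTime end = base.plus(Months.NINE);
    // Quarter starts covering [base, end): 2011-01-01, 2011-04-01, 2011-07-01.
    for (DateTime q = quarterStart(base); q.isBefore(end); q = q.plus(Months.THREE)) {
      System.out.println(q);
    }
  }
}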
Use of org.joda.time.DateTime in project druid by druid-io.
From the class QueryGranularityTest, method testIterableYearComplex.
@Test
public void testIterableYearComplex() {
  final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z");
  assertSameInterval(
      Lists.newArrayList(
          new DateTime("2011-01-01T00:00:00.000Z"),
          new DateTime("2012-01-01T00:00:00.000Z"),
          new DateTime("2013-01-01T00:00:00.000Z"),
          new DateTime("2014-01-01T00:00:00.000Z")
      ),
      Granularities.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis()))
  );
}
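The start instant here is not aligned to a year boundary, so the three-year interval touches four year buckets (2011 through 2014). A rough Joda-Time sketch of that enumeration, again only an illustration and not Druid's Granularities.YEAR code:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Years;

public class YearBucketSketch {
  public static void main(String[] args) {
    DateTime base = new DateTime("2011-01-01T09:38:00.000Z", DateTimeZone.UTC);
    DateTime end = base.plus(Years.THREE); // 2014-01-01T09:38:00.000Z
    // Floor the start to the beginning of its year, then walk year starts
    // until the bucket start reaches the (exclusive) end instant.
    DateTime yearStart = base.withDayOfYear(1).withTimeAtStartOfDay();
    for (DateTime y = yearStart; y.isBefore(end); y = y.plus(Years.ONE)) {
      System.out.println(y); // 2011, 2012, 2013 and 2014-01-01T00:00:00.000Z
    }
  }
}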
Use of org.joda.time.DateTime in project druid by druid-io.
From the class QueryGranularityTest, method testDurationToDateTime.
@Test
public void testDurationToDateTime() throws Exception {
  final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00");
  Granularity gran = new DurationGranularity(new Period("PT12H5M").toStandardDuration().getMillis(), origin);
  Assert.assertEquals(
      new DateTime("2012-01-01T05:00:04.123-08:00"),
      gran.toDateTime(new DateTime("2012-01-01T05:00:04.123-08:00").getMillis())
  );
}
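DurationGranularity defines fixed-length buckets (here 12 hours 5 minutes) anchored at an origin, and toDateTime turns a millisecond offset back into a DateTime. The following is a hedged sketch of what origin-anchored flooring can look like in plain Java, with Joda-Time only for parsing; bucketStart is a hypothetical helper, not Druid's implementation.

import org.joda.time.DateTime;
import org.joda.time.Period;

public class DurationBucketSketch {
  // Floor a timestamp onto a fixed-length grid anchored at originMillis.
  static long bucketStart(long timestamp, long durationMillis, long originMillis) {
    long offset = (timestamp - originMillis) % durationMillis;
    if (offset < 0) {
      offset += durationMillis; // keep buckets aligned for instants before the origin
    }
    return timestamp - offset;
  }

  public static void main(String[] args) {
    long duration = new Period("PT12H5M").toStandardDuration().getMillis(); // 43,500,000 ms
    long origin = new DateTime("2012-01-02T05:00:00.000-08:00").getMillis();
    long t = new DateTime("2012-01-01T05:00:04.123-08:00").getMillis();
    System.out.println(new DateTime(bucketStart(t, duration, origin))); // start of the bucket containing t
  }
}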
Use of org.joda.time.DateTime in project druid by druid-io.
From the class QueryGranularityTest, method testIterableQuarterComplex.
@Test
public void testIterableQuarterComplex() {
  final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z");
  assertSameInterval(
      Lists.newArrayList(
          new DateTime("2011-01-01T00:00:00.000Z"),
          new DateTime("2011-04-01T00:00:00.000Z"),
          new DateTime("2011-07-01T00:00:00.000Z"),
          new DateTime("2011-10-01T00:00:00.000Z")
      ),
      Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis()))
  );
}
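Compared with testIterableQuarterSimple above, the only change is that the interval starts at 09:38 rather than on a quarter boundary, so the same nine-month span now overlaps four quarter buckets (2011-01-01 through 2011-10-01) instead of three.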
Use of org.joda.time.DateTime in project druid by druid-io.
From the class RetryQueryRunnerTest, method testNoDuplicateRetry.
@Test
public void testNoDuplicateRetry() throws Exception {
  Map<String, Object> context = new MapMaker().makeMap();
  context.put("count", 0);
  context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
  RetryQueryRunner<Result<TimeseriesResultValue>> runner = new RetryQueryRunner<>(
      new QueryRunner<Result<TimeseriesResultValue>>() {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(Query<Result<TimeseriesResultValue>> query, Map<String, Object> context) {
          if ((int) context.get("count") == 0) {
            // assume 2 missing segments on the first run
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
            context.put("count", 1);
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          } else if ((int) context.get("count") == 1) {
            // this is the first retry
            Assert.assertTrue(
                "Should retry with 2 missing segments",
                ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 2
            );
            // assume only 1 segment is still missing on the first retry
            ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 2));
            context.put("count", 2);
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          } else {
            // this is the second retry
            Assert.assertTrue(
                "Should retry with 1 missing segment",
                ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 1
            );
            // assume nothing is missing after the second retry
            context.put("count", 3);
            return Sequences.simple(Arrays.asList(new Result<>(new DateTime(), new TimeseriesResultValue(Maps.<String, Object>newHashMap()))));
          }
        }
      },
      (QueryToolChest) new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new RetryQueryRunnerConfig() {
        private int numTries = 2;
        private boolean returnPartialResults = false;

        public int getNumTries() {
          return numTries;
        }

        public boolean returnPartialResults() {
          return returnPartialResults;
        }
      },
      jsonMapper
  );
  Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
      runner.run(query, context),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  Assert.assertTrue("Should return a list with 3 elements", ((List) actualResults).size() == 3);
  Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(Result.MISSING_SEGMENTS_KEY)).size() == 0);
}
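The contract being tested: each run may record SegmentDescriptors under Result.MISSING_SEGMENTS_KEY in the shared response context, and RetryQueryRunner re-issues the query for only those segments until nothing is reported missing or the retry budget runs out. As a rough, plain-Java illustration of that loop (not Druid's actual RetryQueryRunner; Runner, MISSING_KEY, and runWithRetries are hypothetical stand-ins):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class RetryPatternSketch {
  // Hypothetical stand-in for the missing-segments context key.
  static final String MISSING_KEY = "missingSegments";

  // Hypothetical runner: runs a query for the given segments and may record
  // segments it could not serve under MISSING_KEY in the shared context.
  interface Runner<T> {
    List<T> run(List<String> segments, Map<String, Object> context);
  }

  // Re-issue the query only for the segments reported missing, up to numTries times.
  static <T> List<T> runWithRetries(Runner<T> runner, List<String> segments,
                                    Map<String, Object> context, int numTries) {
    context.put(MISSING_KEY, new ArrayList<String>());
    List<T> results = new ArrayList<>(runner.run(segments, context));
    for (int i = 0; i < numTries; i++) {
      @SuppressWarnings("unchecked")
      List<String> missing = (List<String>) context.get(MISSING_KEY);
      if (missing.isEmpty()) {
        break; // nothing left to retry
      }
      List<String> retrySegments = new ArrayList<>(missing);
      missing.clear(); // the retry run reports its own misses
      results.addAll(runner.run(retrySegments, context));
    }
    return results;
  }
}

In the test, the anonymous QueryRunner plays the part of such a runner, reporting two missing segments on the first pass and one on the first retry, which is why the combined result ends up with three elements and an empty missing-segments list.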