Use of org.joda.time.Interval in project druid by druid-io.
The class TimeseriesQueryRunnerBonusTest, method runTimeseriesCount.
private List<Result<TimeseriesResultValue>> runTimeseriesCount(IncrementalIndex index) {
  final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
      new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      new TimeseriesQueryEngine(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  final QueryRunner<Result<TimeseriesResultValue>> runner = makeQueryRunner(
      factory,
      new IncrementalIndexSegment(index, null)
  );
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("xxx")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("rows")))
      .descending(descending)
      .build();
  HashMap<String, Object> context = new HashMap<String, Object>();
  return Sequences.toList(
      runner.run(query, context),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
}
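For reference, the "start/period" notation passed to the Interval constructor above resolves to a concrete start and an exclusive end instant. A minimal, self-contained sketch using plain Joda-Time (the class name is illustrative, not part of the Druid tests):

import org.joda.time.Interval;

public class IntervalNotationSketch {
  public static void main(String[] args) {
    // ISO-8601 "start/period" form: one day starting at midnight UTC.
    Interval day = new Interval("2012-01-01T00:00:00Z/P1D");
    System.out.println(day.getStart()); // midnight UTC, 2012-01-01
    System.out.println(day.getEnd());   // exclusive end, one day later
  }
}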
Use of org.joda.time.Interval in project druid by druid-io.
The class TopNQueryRunnerTest, method testTopNBySegmentResults.
@Test
public void testTopNBySegmentResults() {
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(QueryRunnerTestHelper.marketDimension)
      .metric(QueryRunnerTestHelper.dependentPostAggMetric)
      .threshold(4)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(
          Lists.<AggregatorFactory>newArrayList(
              Iterables.concat(
                  QueryRunnerTestHelper.commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(
          Arrays.<PostAggregator>asList(
              QueryRunnerTestHelper.addRowsIndexConstant,
              QueryRunnerTestHelper.dependentPostAgg
          )
      )
      .context(ImmutableMap.<String, Object>of("finalize", true, "bySegment", true))
      .build();
  TopNResultValue topNResult = new TopNResultValue(
      Arrays.<Map<String, Object>>asList(
          ImmutableMap.<String, Object>builder()
              .put(QueryRunnerTestHelper.marketDimension, "total_market")
              .put("rows", 186L)
              .put("index", 215679.82879638672D)
              .put("addRowsIndexConstant", 215866.82879638672D)
              .put(QueryRunnerTestHelper.dependentPostAggMetric, 216053.82879638672D)
              .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
              .put("maxIndex", 1743.9217529296875D)
              .put("minIndex", 792.3260498046875D)
              .build(),
          ImmutableMap.<String, Object>builder()
              .put(QueryRunnerTestHelper.marketDimension, "upfront")
              .put("rows", 186L)
              .put("index", 192046.1060180664D)
              .put("addRowsIndexConstant", 192233.1060180664D)
              .put(QueryRunnerTestHelper.dependentPostAggMetric, 192420.1060180664D)
              .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
              .put("maxIndex", 1870.06103515625D)
              .put("minIndex", 545.9906005859375D)
              .build(),
          ImmutableMap.<String, Object>builder()
              .put(QueryRunnerTestHelper.marketDimension, "spot")
              .put("rows", 837L)
              .put("index", 95606.57232284546D)
              .put("addRowsIndexConstant", 96444.57232284546D)
              .put(QueryRunnerTestHelper.dependentPostAggMetric, 97282.57232284546D)
              .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
              .put("maxIndex", 277.2735290527344D)
              .put("minIndex", 59.02102279663086D)
              .build()
      )
  );
  List<Result<BySegmentResultValueClass>> expectedResults = Collections.singletonList(
      new Result<BySegmentResultValueClass>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new BySegmentResultValueClass(
              Collections.singletonList(
                  new Result<TopNResultValue>(new DateTime("2011-01-12T00:00:00.000Z"), topNResult)
              ),
              QueryRunnerTestHelper.segmentId,
              new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")
          )
      )
  );
  Sequence<Result<TopNResultValue>> results = runWithMerge(query);
  List<Result<TopNResultValue>> actualResults = Sequences.toList(
      results,
      new ArrayList<Result<TopNResultValue>>()
  );
  // The original loop asserted each result's value against itself, which
  // always passes; compare against the expected by-segment results instead.
  Assert.assertEquals(expectedResults, actualResults);
}
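The covering interval in expectedResults is the helper's full-on interval spanning 1970 through 2020, so the queried day necessarily falls inside it. A small hedged sketch of that containment check with plain Joda-Time (the class name is illustrative):

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class FullOnIntervalSketch {
  public static void main(String[] args) {
    Interval fullOn = new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z");
    // The by-segment result's timestamp lies well inside the covering interval.
    System.out.println(fullOn.contains(new DateTime("2011-01-12T00:00:00.000Z"))); // true
  }
}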
Use of org.joda.time.Interval in project druid by druid-io.
The class TimeseriesQueryRunnerTest, method testTimeseriesWithVaryingGranWithFilter.
@Test
public void testTimeseriesWithVaryingGranWithFilter() {
  TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market")
      .granularity(new PeriodGranularity(new Period("P1M"), null, null))
      .intervals(Arrays.asList(new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
      .aggregators(
          Arrays.<AggregatorFactory>asList(
              QueryRunnerTestHelper.rowsCount,
              new LongSumAggregatorFactory("idx", "index"),
              QueryRunnerTestHelper.qualityUniques
          )
      )
      .descending(descending)
      .build();
  List<Result<TimeseriesResultValue>> expectedResults1 = Arrays.asList(
      new Result<>(
          new DateTime("2011-04-01"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9)
          )
      )
  );
  Iterable<Result<TimeseriesResultValue>> results1 = Sequences.toList(
      runner.run(query1, CONTEXT),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  assertExpectedResults(expectedResults1, results1);
  TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market")
      .granularity("DAY")
      .intervals(Arrays.asList(new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
      .aggregators(
          Arrays.<AggregatorFactory>asList(
              QueryRunnerTestHelper.rowsCount,
              new LongSumAggregatorFactory("idx", "index"),
              QueryRunnerTestHelper.qualityUniques
          )
      )
      .build();
  List<Result<TimeseriesResultValue>> expectedResults2 = Arrays.asList(
      new Result<>(
          new DateTime("2011-04-02"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9)
          )
      )
  );
  Iterable<Result<TimeseriesResultValue>> results2 = Sequences.toList(
      runner.run(query2, CONTEXT),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  assertExpectedResults(expectedResults2, results2);
}
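The two queries differ only in granularity, which is why the same 13 rows land on different timestamps: monthly buckets floor to the first of the month, daily buckets to midnight of the day. A hedged sketch of that flooring with plain Joda-Time (not Druid's PeriodGranularity itself; the class name is illustrative):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class GranularityFloorSketch {
  public static void main(String[] args) {
    DateTime t = new DateTime("2011-04-02T13:00:00Z", DateTimeZone.UTC);
    // P1M: the month bucket floors to 2011-04-01, matching expectedResults1.
    System.out.println(t.monthOfYear().roundFloorCopy());
    // DAY: the day bucket floors to 2011-04-02, matching expectedResults2.
    System.out.println(t.dayOfMonth().roundFloorCopy());
  }
}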
Use of org.joda.time.Interval in project druid by druid-io.
The class TimeseriesQueryRunnerTest, method testTimeseriesQueryGranularityNotAlignedWithRollupGranularity.
@Test
public void testTimeseriesQueryGranularityNotAlignedWithRollupGranularity() {
  TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market")
      .granularity(new PeriodGranularity(new Period("PT1H"), new DateTime(60000), DateTimeZone.UTC))
      .intervals(Arrays.asList(new Interval("2011-04-15T00:00:00.000Z/2012")))
      .aggregators(
          Arrays.<AggregatorFactory>asList(
              QueryRunnerTestHelper.rowsCount,
              new LongSumAggregatorFactory("idx", "index")
          )
      )
      .descending(descending)
      .build();
  List<Result<TimeseriesResultValue>> expectedResults1 = Arrays.asList(
      new Result<>(
          new DateTime("2011-04-14T23:01Z"),
          new TimeseriesResultValue(ImmutableMap.<String, Object>of("rows", 13L, "idx", 4717L))
      )
  );
  Iterable<Result<TimeseriesResultValue>> results1 = Sequences.toList(
      runner.run(query1, CONTEXT),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  assertExpectedResults(expectedResults1, results1);
}
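The PT1H granularity here carries an origin of new DateTime(60000), one minute past the epoch, so hour buckets begin at one minute past each hour; that is why the expected timestamp is 2011-04-14T23:01Z rather than on the hour. A hedged arithmetic sketch of origin-aligned bucketing (plain Java, not Druid's implementation):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class OriginBucketSketch {
  public static void main(String[] args) {
    long origin = 60000L;          // 1970-01-01T00:01:00Z
    long hourMillis = 3600000L;
    long t = new DateTime("2011-04-15T00:00:00Z", DateTimeZone.UTC).getMillis();
    // Floor t to the start of its origin-aligned hour bucket.
    long bucketStart = t - Math.floorMod(t - origin, hourMillis);
    System.out.println(new DateTime(bucketStart, DateTimeZone.UTC)); // 2011-04-14T23:01:00.000Z
  }
}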
Use of org.joda.time.Interval in project pinot by linkedin.
The class PinotSegmentUploadRestletResource, method isSegmentTimeValid.
/**
 * Returns true if:
 * - The segment does not have a start/end time, OR
 * - The start/end times are within the valid range (Jan 01, 1971 - Jan 01, 2071)
 * @param metadata segment metadata whose time interval is validated
 * @return true if the segment has no time interval or its interval is in the valid range
 */
private boolean isSegmentTimeValid(SegmentMetadata metadata) {
  Interval interval = metadata.getTimeInterval();
  if (interval == null) {
    return true;
  }
  long startMillis = interval.getStartMillis();
  long endMillis = interval.getEndMillis();
  if (!TimeUtils.timeValueInValidRange(startMillis) || !TimeUtils.timeValueInValidRange(endMillis)) {
    // Reuse the millis already extracted above rather than re-reading the interval.
    Date startDate = new Date(startMillis);
    Date endDate = new Date(endMillis);
    Date minDate = new Date(TimeUtils.getValidMinTimeMillis());
    Date maxDate = new Date(TimeUtils.getValidMaxTimeMillis());
    LOGGER.error("Invalid start time '{}' or end time '{}' for segment, must be between '{}' and '{}'", startDate, endDate, minDate, maxDate);
    return false;
  }
  return true;
}
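For illustration, the bounds named in the doc comment can be checked directly. A hedged sketch of the range test (the real constants come from Pinot's TimeUtils; the Joda-derived values below are stand-ins):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class SegmentTimeRangeSketch {
  // Stand-ins for TimeUtils.getValidMinTimeMillis() / getValidMaxTimeMillis().
  static final long MIN_MILLIS = new DateTime("1971-01-01T00:00:00Z", DateTimeZone.UTC).getMillis();
  static final long MAX_MILLIS = new DateTime("2071-01-01T00:00:00Z", DateTimeZone.UTC).getMillis();

  static boolean inValidRange(long startMillis, long endMillis) {
    return startMillis >= MIN_MILLIS && endMillis <= MAX_MILLIS;
  }

  public static void main(String[] args) {
    System.out.println(inValidRange(0L, 0L)); // false: the epoch predates Jan 01, 1971
  }
}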