Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io: class TimeseriesQueryRunnerTest, method testTimeseriesGranularityNotAlignedOnSegmentBoundariesWithFilter.
@Test
public void testTimeseriesGranularityNotAlignedOnSegmentBoundariesWithFilter()
{
  TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market")
      .granularity(new PeriodGranularity(new Period("P7D"), null, DateTimes.inferTzFromString("America/Los_Angeles")))
      .intervals(Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000-08:00/2011-01-20T00:00:00.000-08:00")))
      .aggregators(Arrays.asList(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")))
      .descending(descending)
      .context(makeContext())
      .build();

  List<Result<TimeseriesResultValue>> expectedResults1 = Arrays.asList(
      new Result<>(
          new DateTime("2011-01-06T00:00:00.000-08:00", DateTimes.inferTzFromString("America/Los_Angeles")),
          new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 6071L))
      ),
      new Result<>(
          new DateTime("2011-01-13T00:00:00.000-08:00", DateTimes.inferTzFromString("America/Los_Angeles")),
          new TimeseriesResultValue(ImmutableMap.of("rows", 91L, "idx", 33382L))
      )
  );

  Iterable<Result<TimeseriesResultValue>> results1 = runner.run(QueryPlus.wrap(query1)).toList();
  assertExpectedResults(expectedResults1, results1);
}
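The first expected bucket starts on 2011-01-06, before the queried interval, because a PeriodGranularity built with a null origin anchors its P7D buckets at 1970-01-01T00:00:00 in the supplied time zone. Below is a minimal sketch of that bucket math, assuming Granularity#bucketStart(DateTime) is available as in recent Druid versions; it is an illustration, not part of the test above.

// Weekly buckets anchored at the epoch in America/Los_Angeles
PeriodGranularity weekly = new PeriodGranularity(
    new Period("P7D"),
    null, // a null origin defaults to 1970-01-01T00:00:00 in the given zone
    DateTimes.inferTzFromString("America/Los_Angeles")
);
// 2011-01-12 is 14986 days after the epoch, and 14986 % 7 == 6,
// so its enclosing bucket starts six days earlier.
DateTime bucketStart = weekly.bucketStart(
    new DateTime("2011-01-12T00:00:00.000-08:00", DateTimes.inferTzFromString("America/Los_Angeles"))
);
// bucketStart is 2011-01-06T00:00:00.000-08:00, matching expectedResults1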
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io: class TimeseriesQueryRunnerTest, method testTimeseriesWithTimeZone.
@Test
public void testTimeseriesWithTimeZone()
{
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .intervals("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00")
      .aggregators(Arrays.asList(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")))
      .granularity(new PeriodGranularity(new Period("P1D"), null, DateTimes.inferTzFromString("America/Los_Angeles")))
      .descending(descending)
      .context(makeContext())
      .build();

  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-03-31", DateTimes.inferTzFromString("America/Los_Angeles")),
          new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 6619L))
      ),
      new Result<>(
          new DateTime("2011-04-01T", DateTimes.inferTzFromString("America/Los_Angeles")),
          new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L))
      )
  );

  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
  assertExpectedResults(expectedResults, results);
}
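Because the granularity carries the America/Los_Angeles zone, each bucket begins at local midnight rather than UTC midnight. The sketch below contrasts the two, assuming Granularities.DAY and bucketStart(DateTime) behave as in current Druid; it is illustrative only.

DateTimeZone la = DateTimes.inferTzFromString("America/Los_Angeles");
PeriodGranularity laDay = new PeriodGranularity(new Period("P1D"), null, la);
DateTime noonPdt = new DateTime("2011-03-31T12:00:00", la);

// Local-midnight bucket: 2011-03-31T00:00:00.000-07:00
DateTime laBucket = laDay.bucketStart(noonPdt);
// Plain UTC DAY bucket for the same instant: 2011-03-31T00:00:00.000Z,
// which is 2011-03-30T17:00:00-07:00 in local time
DateTime utcBucket = Granularities.DAY.bucketStart(noonPdt);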
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io: class TimeseriesQueryRunnerTest, method testTimeseriesWithVaryingGran.
@Test
public void testTimeseriesWithVaryingGran()
{
  TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(new PeriodGranularity(new Period("P1M"), null, null))
      .intervals(Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
      .aggregators(Arrays.asList(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), QueryRunnerTestHelper.QUALITY_UNIQUES))
      .descending(descending)
      .context(makeContext())
      .build();

  List<Result<TimeseriesResultValue>> expectedResults1 = Collections.singletonList(
      new Result<>(
          DateTimes.of("2011-04-01"),
          new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
      )
  );
  Iterable<Result<TimeseriesResultValue>> results1 = runner.run(QueryPlus.wrap(query1)).toList();
  assertExpectedResults(expectedResults1, results1);

  TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity("DAY")
      .intervals(Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
      .aggregators(Arrays.asList(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), QueryRunnerTestHelper.QUALITY_UNIQUES))
      .context(makeContext())
      .build();

  List<Result<TimeseriesResultValue>> expectedResults2 = Collections.singletonList(
      new Result<>(
          DateTimes.of("2011-04-02"),
          new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
      )
  );
  Iterable<Result<TimeseriesResultValue>> results2 = runner.run(QueryPlus.wrap(query2)).toList();
  assertExpectedResults(expectedResults2, results2);
}
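Both queries cover the same one-day interval; only the granularity differs. P1M snaps the bucket timestamp back to the first of the month, while DAY keeps the day itself, which is exactly the difference between expectedResults1 and expectedResults2. A hedged sketch of that snapping, again assuming bucketStart(DateTime) as in current Druid:

PeriodGranularity monthly = new PeriodGranularity(new Period("P1M"), null, null);
DateTime day = DateTimes.of("2011-04-02");

// P1M with a null zone buckets in UTC: 2011-04-01T00:00:00.000Z
DateTime monthBucket = monthly.bucketStart(day);
// DAY leaves the timestamp at 2011-04-02T00:00:00.000Z
DateTime dayBucket = Granularities.DAY.bucketStart(day);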
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io: class NewestSegmentFirstPolicyTest, method testIteratorReturnsSegmentsAsCompactionStateChangedWithCompactedStateHasSameSegmentGranularity.
@Test
public void testIteratorReturnsSegmentsAsCompactionStateChangedWithCompactedStateHasSameSegmentGranularity()
{
  // A different indexSpec from the one set in the auto compaction config
  IndexSpec newIndexSpec = new IndexSpec(new ConciseBitmapSerdeFactory(), null, null, null);
  Map<String, Object> newIndexSpecMap = mapper.convertValue(newIndexSpec, new TypeReference<Map<String, Object>>() {});
  PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(
      ClientCompactionTaskQueryTuningConfig.from(null, null)
  );

  // Create segments that were compacted (CompactionState != null) and have segmentGranularity=DAY
  final VersionedIntervalTimeline<String, DataSegment> timeline = createTimeline(
      new SegmentGenerateSpec(
          Intervals.of("2017-10-02T00:00:00/2017-10-03T00:00:00"),
          new Period("P1D"),
          null,
          new CompactionState(partitionsSpec, null, null, null, newIndexSpecMap, null)
      )
  );

  // The duration of the new segmentGranularity is the same as before (P1D)
  final CompactionSegmentIterator iterator = policy.reset(
      ImmutableMap.of(
          DATA_SOURCE,
          createCompactionConfig(
              130000,
              new Period("P0D"),
              new UserCompactionTaskGranularityConfig(
                  new PeriodGranularity(new Period("P1D"), null, DateTimeZone.UTC),
                  null,
                  null
              )
          )
      ),
      ImmutableMap.of(DATA_SOURCE, timeline),
      Collections.emptyMap()
  );

  // We should get all segments in the timeline back, since the indexSpec changed
  Assert.assertTrue(iterator.hasNext());
  List<DataSegment> expectedSegmentsToCompact = new ArrayList<>(
      timeline.findNonOvershadowedObjectsInInterval(
          Intervals.of("2017-10-01T00:00:00/2017-10-03T00:00:00"),
          Partitions.ONLY_COMPLETE
      )
  );
  Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));

  // No more
  Assert.assertFalse(iterator.hasNext());
}
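The iterator offers every segment because the serialized indexSpec stored in the segments' CompactionState (concise bitmaps) no longer matches the default spec implied by the auto compaction config. A minimal sketch of the shape of that comparison, reusing the test's mapper and assuming IndexSpec's no-arg constructor yields Druid's default bitmap serde:

// Serialize both specs to maps, as the test does with mapper.convertValue
Map<String, Object> conciseSpec = mapper.convertValue(
    new IndexSpec(new ConciseBitmapSerdeFactory(), null, null, null),
    new TypeReference<Map<String, Object>>() {}
);
Map<String, Object> defaultSpec = mapper.convertValue(
    new IndexSpec(),
    new TypeReference<Map<String, Object>>() {}
);
// The maps differ (concise vs. the default bitmap serde), so the policy
// concludes the recorded compaction state is stale and re-offers the segments.
boolean specChanged = !conciseSpec.equals(defaultSpec); // true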
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io: class NewestSegmentFirstPolicyTest, method testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentOrigin.
@Test
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentOrigin()
{
  // The same indexSpec as the one set in the auto compaction config
  Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
  // The same partitionsSpec as the one set in the auto compaction config
  PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(
      ClientCompactionTaskQueryTuningConfig.from(null, null)
  );

  // Create segments that were compacted (CompactionState != null) and have segmentGranularity=DAY
  final VersionedIntervalTimeline<String, DataSegment> timeline = createTimeline(
      new SegmentGenerateSpec(
          Intervals.of("2017-10-02T00:00:00/2017-10-03T00:00:00"),
          new Period("P1D"),
          null,
          new CompactionState(partitionsSpec, null, null, null, indexSpec, null)
      )
  );

  // The duration of the new segmentGranularity is the same as before (P1D),
  // but the origin in the autocompaction spec has changed
  final CompactionSegmentIterator iterator = policy.reset(
      ImmutableMap.of(
          DATA_SOURCE,
          createCompactionConfig(
              130000,
              new Period("P0D"),
              new UserCompactionTaskGranularityConfig(
                  new PeriodGranularity(new Period("P1D"), DateTimes.of("2012-01-02T00:05:00.000Z"), DateTimeZone.UTC),
                  null,
                  null
              )
          )
      ),
      ImmutableMap.of(DATA_SOURCE, timeline),
      Collections.emptyMap()
  );

  // We should get all segments in the timeline back, since the skip offset is P0D
  Assert.assertTrue(iterator.hasNext());
  List<DataSegment> expectedSegmentsToCompact = new ArrayList<>(
      timeline.findNonOvershadowedObjectsInInterval(
          Intervals.of("2017-10-01T00:00:00/2017-10-03T00:00:00"),
          Partitions.ONLY_COMPLETE
      )
  );
  Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));

  // No more
  Assert.assertFalse(iterator.hasNext());
}
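Here the period (P1D) and zone (UTC) are unchanged; only the origin differs, and that alone is enough to make the configured granularity unequal to the one recorded at compaction time. A minimal sketch, assuming PeriodGranularity#equals compares period, origin, and time zone:

PeriodGranularity recorded = new PeriodGranularity(new Period("P1D"), null, DateTimeZone.UTC);
PeriodGranularity configured = new PeriodGranularity(
    new Period("P1D"),
    DateTimes.of("2012-01-02T00:05:00.000Z"), // non-default origin
    DateTimeZone.UTC
);
// Same period and zone but different origin: not equal, so the policy
// sees a granularity change and offers the segments for recompaction.
boolean same = recorded.equals(configured); // false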