Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
From the class TimeseriesQueryRunnerTest, method testTimeseriesWithVaryingGranWithFilter:
@Test
public void testTimeseriesWithVaryingGranWithFilter()
{
  TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market")
      .granularity(new PeriodGranularity(new Period("P1M"), null, null))
      .intervals(Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
      .aggregators(
          Arrays.asList(
              QueryRunnerTestHelper.ROWS_COUNT,
              new LongSumAggregatorFactory("idx", "index"),
              QueryRunnerTestHelper.QUALITY_UNIQUES
          )
      )
      .descending(descending)
      .context(makeContext())
      .build();

  // The P1M bucket containing the queried day starts at 2011-04-01.
  List<Result<TimeseriesResultValue>> expectedResults1 = Collections.singletonList(
      new Result<>(
          DateTimes.of("2011-04-01"),
          new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
      )
  );
  Iterable<Result<TimeseriesResultValue>> results1 = runner.run(QueryPlus.wrap(query1)).toList();
  assertExpectedResults(expectedResults1, results1);

  TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market")
      .granularity("DAY")
      .intervals(Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
      .aggregators(
          Arrays.asList(
              QueryRunnerTestHelper.ROWS_COUNT,
              new LongSumAggregatorFactory("idx", "index"),
              QueryRunnerTestHelper.QUALITY_UNIQUES
          )
      )
      .context(makeContext())
      .build();

  // The DAY bucket starts at the queried day itself, 2011-04-02.
  List<Result<TimeseriesResultValue>> expectedResults2 = Collections.singletonList(
      new Result<>(
          DateTimes.of("2011-04-02"),
          new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
      )
  );
  Iterable<Result<TimeseriesResultValue>> results2 = runner.run(QueryPlus.wrap(query2)).toList();
  assertExpectedResults(expectedResults2, results2);
}
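Both queries scan the same one-day interval, but they timestamp their single result row differently: the P1M query reports it under 2011-04-01 (the start of the containing month bucket), while the DAY query reports it under 2011-04-02. A minimal standalone sketch of that bucketing behavior, using the Granularity.bucketStart API (the demo class name is ours, not Druid's):

import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.Period;

public class GranularityBucketDemo
{
  public static void main(String[] args)
  {
    // A one-month period granularity; null origin and null time zone mean
    // epoch-aligned buckets in UTC.
    Granularity monthly = new PeriodGranularity(new Period("P1M"), null, null);

    // The month bucket containing 2011-04-02 starts at the first of the month.
    System.out.println(monthly.bucketStart(DateTimes.of("2011-04-02")));
    // -> 2011-04-01T00:00:00.000Z

    // The DAY bucket starts at the day itself.
    System.out.println(Granularities.DAY.bucketStart(DateTimes.of("2011-04-02")));
    // -> 2011-04-02T00:00:00.000Z
  }
}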
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
From the class GranularityPathSpecTest, method testBackwardCompatiblePeriodSegmentGranularitySerialization:
@Test
public void testBackwardCompatiblePeriodSegmentGranularitySerialization() throws JsonProcessingException
{
  // A 2-second period has no standard-granularity equivalent, so it must NOT
  // serialize to the simple string form.
  final PeriodGranularity pt2S = new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC);
  Assert.assertNotEquals("\"SECOND\"", jsonMapper.writeValueAsString(pt2S));

  // The built-in SECOND granularity keeps the backward-compatible string form.
  final Granularity pt1S = Granularities.SECOND;
  Assert.assertEquals("\"SECOND\"", jsonMapper.writeValueAsString(pt1S));
}
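What the two serialized forms look like can be sketched with Druid's DefaultObjectMapper, assuming it is configured like the test's jsonMapper; the demo class name and the exact fields of the object form are our assumptions:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;

public class GranularitySerializationDemo
{
  public static void main(String[] args) throws Exception
  {
    final ObjectMapper mapper = new DefaultObjectMapper();

    // PT1S matches the built-in SECOND granularity, so it keeps the
    // backward-compatible simple-string form.
    System.out.println(mapper.writeValueAsString(Granularities.SECOND));  // "SECOND"

    // PT2S has no simple-string equivalent and falls back to the full object
    // form, roughly {"type":"period","period":"PT2S","timeZone":"UTC",...};
    // the exact fields may vary by Druid version.
    System.out.println(mapper.writeValueAsString(new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC)));
  }
}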
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
From the class CompactionTaskTest, method testSegmentGranularityAndNullQueryGranularity:
@Test
public void testSegmentGranularityAndNullQueryGranularity() throws IOException, SegmentLoadingException
{
  final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      LockGranularity.TIME_CHUNK,
      new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
      new PartitionConfigurationManager(TUNING_CONFIG),
      null,
      null,
      null,
      new ClientCompactionTaskGranularitySpec(new PeriodGranularity(Period.months(3), null, null), null, null),
      COORDINATOR_CLIENT,
      segmentCacheManagerFactory,
      RETRY_POLICY_FACTORY,
      IOConfig.DEFAULT_DROP_EXISTING
  );
  final List<DimensionsSpec> expectedDimensionsSpec = ImmutableList.of(
      new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double")))
  );
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );

  // A 3-month segment granularity covers the whole compaction interval in one spec.
  Assert.assertEquals(1, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
      Collections.singletonList(COMPACTION_INTERVAL),
      new PeriodGranularity(Period.months(3), null, null),
      Granularities.NONE,
      IOConfig.DEFAULT_DROP_EXISTING
  );
}
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
From the class CompactionTaskTest, method testQueryGranularityAndNullSegmentGranularity:
@Test
public void testQueryGranularityAndNullSegmentGranularity() throws IOException, SegmentLoadingException
{
  final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      LockGranularity.TIME_CHUNK,
      new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
      new PartitionConfigurationManager(TUNING_CONFIG),
      null,
      null,
      null,
      new ClientCompactionTaskGranularitySpec(null, new PeriodGranularity(Period.months(3), null, null), null),
      COORDINATOR_CLIENT,
      segmentCacheManagerFactory,
      RETRY_POLICY_FACTORY,
      IOConfig.DEFAULT_DROP_EXISTING
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration();
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );

  // With a null segment granularity, the existing MONTH-grained segments keep
  // their boundaries, producing one ingestion spec per month.
  Assert.assertEquals(6, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
      SEGMENT_INTERVALS,
      Granularities.MONTH,
      new PeriodGranularity(Period.months(3), null, null),
      IOConfig.DEFAULT_DROP_EXISTING
  );
}
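The two CompactionTask tests are mirror images, and the difference comes down to which slot of ClientCompactionTaskGranularitySpec carries the 3-month period: as a segment granularity it collapses the compaction interval into a single ingestion spec, while as only a query granularity it leaves the existing MONTH segment boundaries alone and six specs result. A short sketch of the constructor's argument order (segmentGranularity, queryGranularity, rollup), with the import path assumed from the druid-server module:

import org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.Period;

public class CompactionGranularitySpecDemo
{
  public static void main(String[] args)
  {
    // First test above: repartition into 3-month segments; null query
    // granularity and rollup mean "inherit from the existing segments".
    ClientCompactionTaskGranularitySpec segmentOnly = new ClientCompactionTaskGranularitySpec(
        new PeriodGranularity(Period.months(3), null, null), null, null);

    // Second test above: keep the existing segment boundaries, but coarsen the
    // query granularity of the compacted data to 3 months.
    ClientCompactionTaskGranularitySpec queryOnly = new ClientCompactionTaskGranularitySpec(
        null, new PeriodGranularity(Period.months(3), null, null), null);
  }
}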
Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.
From the class ExprUtils, method toPeriodGranularity:
static PeriodGranularity toPeriodGranularity(
    final Expr periodArg,
    @Nullable final Expr originArg,
    @Nullable final Expr timeZoneArg,
    final Expr.ObjectBinding bindings
)
{
  final Period period = new Period(periodArg.eval(bindings).asString());
  final DateTime origin;
  final DateTimeZone timeZone;

  if (timeZoneArg == null) {
    timeZone = null;
  } else {
    final String value = timeZoneArg.eval(bindings).asString();
    timeZone = value != null ? DateTimes.inferTzFromString(value) : null;
  }

  if (originArg == null) {
    origin = null;
  } else {
    Chronology chronology = timeZone == null ? ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone);
    final Object value = originArg.eval(bindings).value();
    if (value instanceof String && NullHandling.isNullOrEquivalent((String) value)) {
      // We get a blank string here when SQL-compatible null handling is enabled
      // and the expression contains an empty string for the origin,
      // e.g. timestamp_floor("__time", 'PT1M', '', 'UTC')
      origin = null;
    } else {
      origin = value != null ? new DateTime(value, chronology) : null;
    }
  }

  return new PeriodGranularity(period, origin, timeZone);
}
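Since toPeriodGranularity is package-private, a hedged way to see its effect is to build by hand the PeriodGranularity it would produce for the timestamp_floor example in the comment above (the demo class name is ours):

import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.Period;

public class ToPeriodGranularityDemo
{
  public static void main(String[] args)
  {
    // timestamp_floor("__time", 'PT1M', '', 'UTC'): a one-minute period, the
    // blank origin becomes null, and the time zone string is parsed to UTC.
    PeriodGranularity minutely = new PeriodGranularity(
        new Period("PT1M"),
        null,
        DateTimes.inferTzFromString("UTC")
    );
    System.out.println(minutely.bucketStart(DateTimes.of("2011-04-02T13:47:21.123Z")));
    // -> 2011-04-02T13:47:00.000Z
  }
}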