
Example 51 with PeriodGranularity

Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

From class TimeseriesQueryRunnerTest, method testTimeseriesWithVaryingGranWithFilter.

@Test
public void testTimeseriesWithVaryingGranWithFilter() {
    TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
            .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
            .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market")
            .granularity(new PeriodGranularity(new Period("P1M"), null, null))
            .intervals(Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
            .aggregators(Arrays.asList(
                    QueryRunnerTestHelper.ROWS_COUNT,
                    new LongSumAggregatorFactory("idx", "index"),
                    QueryRunnerTestHelper.QUALITY_UNIQUES))
            .descending(descending)
            .context(makeContext())
            .build();
    List<Result<TimeseriesResultValue>> expectedResults1 = Collections.singletonList(
            new Result<>(
                    DateTimes.of("2011-04-01"),
                    new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))));
    Iterable<Result<TimeseriesResultValue>> results1 = runner.run(QueryPlus.wrap(query1)).toList();
    assertExpectedResults(expectedResults1, results1);
    TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
            .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
            .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market")
            .granularity("DAY")
            .intervals(Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z")))
            .aggregators(Arrays.asList(
                    QueryRunnerTestHelper.ROWS_COUNT,
                    new LongSumAggregatorFactory("idx", "index"),
                    QueryRunnerTestHelper.QUALITY_UNIQUES))
            .context(makeContext())
            .build();
    List<Result<TimeseriesResultValue>> expectedResults2 = Collections.singletonList(
            new Result<>(
                    DateTimes.of("2011-04-02"),
                    new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))));
    Iterable<Result<TimeseriesResultValue>> results2 = runner.run(QueryPlus.wrap(query2)).toList();
    assertExpectedResults(expectedResults2, results2);
}
Also used : PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) Period(org.joda.time.Period) Result(org.apache.druid.query.Result) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
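
Query 1 buckets by P1M even though the scanned interval covers a single day, which is why its expected result timestamp is 2011-04-01 (the start of the month bucket) rather than the interval start 2011-04-02; query 2 uses DAY granularity and gets 2011-04-02. A minimal sketch of that bucketing behavior, assuming Granularity exposes a bucketStart(DateTime) method (an assumption, not shown in the example above):

import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.DateTime;
import org.joda.time.Period;

public class PeriodGranularityBucketSketch {
    public static void main(String[] args) {
        // P1M granularity with default origin and time zone, as in query1 above.
        PeriodGranularity monthly = new PeriodGranularity(new Period("P1M"), null, null);

        // Assumption: Granularity exposes bucketStart(DateTime) returning the bucket's start.
        DateTime bucketStart = monthly.bucketStart(DateTimes.of("2011-04-02"));

        // Expected to print 2011-04-01T00:00:00.000Z, matching expectedResults1.
        System.out.println(bucketStart);
    }
}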

Example 52 with PeriodGranularity

Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

From class GranularityPathSpecTest, method testBackwardCompatiblePeriodSegmentGranularitySerialization.

@Test
public void testBackwardCompatiblePeriodSegmentGranularitySerialization() throws JsonProcessingException {
    final PeriodGranularity pt2S = new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC);
    Assert.assertNotEquals("\"SECOND\"", jsonMapper.writeValueAsString(pt2S));
    final Granularity pt1S = Granularities.SECOND;
    Assert.assertEquals("\"SECOND\"", jsonMapper.writeValueAsString(pt1S));
}
Also used : PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Period(org.joda.time.Period) Granularity(org.apache.druid.java.util.common.granularity.Granularity) Test(org.junit.Test)
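
The contrast in this test is that a PeriodGranularity built from PT2S does not collapse to the shorthand string form, while the predefined Granularities.SECOND constant still serializes as plain "SECOND". A hedged sketch of the same check outside the test harness, assuming Druid's DefaultObjectMapper registers the granularity serializers that the test's jsonMapper relies on:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;

public class GranularitySerializationSketch {
    public static void main(String[] args) throws Exception {
        // Assumption: DefaultObjectMapper carries the Jackson configuration the test's jsonMapper uses.
        ObjectMapper mapper = new DefaultObjectMapper();

        PeriodGranularity pt2s = new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC);
        // Serializes as a full period-granularity object, not as the shorthand string "SECOND".
        System.out.println(mapper.writeValueAsString(pt2s));

        // The predefined constant keeps the backward-compatible string form "SECOND".
        System.out.println(mapper.writeValueAsString(Granularities.SECOND));
    }
}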

Example 53 with PeriodGranularity

Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

From class CompactionTaskTest, method testSegmentGranularityAndNullQueryGranularity.

@Test
public void testSegmentGranularityAndNullQueryGranularity() throws IOException, SegmentLoadingException {
    final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
            toolbox,
            LockGranularity.TIME_CHUNK,
            new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
            new PartitionConfigurationManager(TUNING_CONFIG),
            null,
            null,
            null,
            new ClientCompactionTaskGranularitySpec(new PeriodGranularity(Period.months(3), null, null), null, null),
            COORDINATOR_CLIENT,
            segmentCacheManagerFactory,
            RETRY_POLICY_FACTORY,
            IOConfig.DEFAULT_DROP_EXISTING);
    final List<DimensionsSpec> expectedDimensionsSpec = ImmutableList.of(
            new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double"))));
    ingestionSpecs.sort(
            (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
                    s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
                    s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)));
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
            ingestionSpecs,
            expectedDimensionsSpec,
            AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
            Collections.singletonList(COMPACTION_INTERVAL),
            new PeriodGranularity(Period.months(3), null, null),
            Granularities.NONE,
            IOConfig.DEFAULT_DROP_EXISTING);
}
Also used : DoubleDimensionSchema(org.apache.druid.data.input.impl.DoubleDimensionSchema) PartitionConfigurationManager(org.apache.druid.indexing.common.task.CompactionTask.PartitionConfigurationManager) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) ParallelIndexIngestionSpec(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexIngestionSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) SegmentProvider(org.apache.druid.indexing.common.task.CompactionTask.SegmentProvider) ClientCompactionTaskGranularitySpec(org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec) DoubleLastAggregatorFactory(org.apache.druid.query.aggregation.last.DoubleLastAggregatorFactory) FloatMinAggregatorFactory(org.apache.druid.query.aggregation.FloatMinAggregatorFactory) FloatFirstAggregatorFactory(org.apache.druid.query.aggregation.first.FloatFirstAggregatorFactory) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) DoubleMaxAggregatorFactory(org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) Test(org.junit.Test)

Example 54 with PeriodGranularity

Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

From class CompactionTaskTest, method testQueryGranularityAndNullSegmentGranularity.

@Test
public void testQueryGranularityAndNullSegmentGranularity() throws IOException, SegmentLoadingException {
    final List<ParallelIndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
            toolbox,
            LockGranularity.TIME_CHUNK,
            new SegmentProvider(DATA_SOURCE, new CompactionIntervalSpec(COMPACTION_INTERVAL, null)),
            new PartitionConfigurationManager(TUNING_CONFIG),
            null,
            null,
            null,
            new ClientCompactionTaskGranularitySpec(null, new PeriodGranularity(Period.months(3), null, null), null),
            COORDINATOR_CLIENT,
            segmentCacheManagerFactory,
            RETRY_POLICY_FACTORY,
            IOConfig.DEFAULT_DROP_EXISTING);
    final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration();
    ingestionSpecs.sort(
            (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
                    s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
                    s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)));
    Assert.assertEquals(6, ingestionSpecs.size());
    assertIngestionSchema(
            ingestionSpecs,
            expectedDimensionsSpec,
            AGGREGATORS.stream().map(AggregatorFactory::getCombiningFactory).collect(Collectors.toList()),
            SEGMENT_INTERVALS,
            Granularities.MONTH,
            new PeriodGranularity(Period.months(3), null, null),
            IOConfig.DEFAULT_DROP_EXISTING);
}
Also used : PartitionConfigurationManager(org.apache.druid.indexing.common.task.CompactionTask.PartitionConfigurationManager) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) ParallelIndexIngestionSpec(org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexIngestionSpec) DimensionsSpec(org.apache.druid.data.input.impl.DimensionsSpec) SegmentProvider(org.apache.druid.indexing.common.task.CompactionTask.SegmentProvider) ClientCompactionTaskGranularitySpec(org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec) DoubleLastAggregatorFactory(org.apache.druid.query.aggregation.last.DoubleLastAggregatorFactory) FloatMinAggregatorFactory(org.apache.druid.query.aggregation.FloatMinAggregatorFactory) FloatFirstAggregatorFactory(org.apache.druid.query.aggregation.first.FloatFirstAggregatorFactory) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) DoubleMaxAggregatorFactory(org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) Test(org.junit.Test)
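
Examples 53 and 54 differ only in which slot of ClientCompactionTaskGranularitySpec carries the quarterly PeriodGranularity: the first argument (segment granularity) in Example 53, the second (query granularity) in Example 54, with the unset one resolved in the assertions to Granularities.NONE or Granularities.MONTH respectively. A hedged sketch of the two spec constructions side by side; the getter names are assumptions, not taken from the examples above:

import org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.Period;

public class CompactionGranularitySpecSketch {
    public static void main(String[] args) {
        PeriodGranularity quarterly = new PeriodGranularity(Period.months(3), null, null);

        // Example 53: quarterly segment granularity, query granularity left null.
        ClientCompactionTaskGranularitySpec segmentOnly =
                new ClientCompactionTaskGranularitySpec(quarterly, null, null);

        // Example 54: quarterly query granularity, segment granularity left null.
        ClientCompactionTaskGranularitySpec queryOnly =
                new ClientCompactionTaskGranularitySpec(null, quarterly, null);

        // Assumed getters, mirroring the constructor arguments.
        System.out.println(segmentOnly.getSegmentGranularity() + " / " + segmentOnly.getQueryGranularity());
        System.out.println(queryOnly.getSegmentGranularity() + " / " + queryOnly.getQueryGranularity());
    }
}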

Example 55 with PeriodGranularity

Use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

From class ExprUtils, method toPeriodGranularity.

static PeriodGranularity toPeriodGranularity(final Expr periodArg, @Nullable final Expr originArg, @Nullable final Expr timeZoneArg, final Expr.ObjectBinding bindings) {
    final Period period = new Period(periodArg.eval(bindings).asString());
    final DateTime origin;
    final DateTimeZone timeZone;
    if (timeZoneArg == null) {
        timeZone = null;
    } else {
        final String value = timeZoneArg.eval(bindings).asString();
        timeZone = value != null ? DateTimes.inferTzFromString(value) : null;
    }
    if (originArg == null) {
        origin = null;
    } else {
        Chronology chronology = timeZone == null ? ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone);
        final Object value = originArg.eval(bindings).value();
        if (value instanceof String && NullHandling.isNullOrEquivalent((String) value)) {
            // We get a blank string here when SQL-compatible null handling is enabled
            // and the expression contains an empty string for the origin,
            // e.g. timestamp_floor("__time", 'PT1M', '', 'UTC')
            origin = null;
        } else {
            origin = value != null ? new DateTime(value, chronology) : null;
        }
    }
    return new PeriodGranularity(period, origin, timeZone);
}
Also used : PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Period(org.joda.time.Period) Chronology(org.joda.time.Chronology) ISOChronology(org.joda.time.chrono.ISOChronology) DateTime(org.joda.time.DateTime) DateTimeZone(org.joda.time.DateTimeZone)
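
toPeriodGranularity resolves three optional expression arguments into one PeriodGranularity: an ISO-8601 period string, an optional origin (a blank string is treated as null under SQL-compatible null handling), and an optional time zone string resolved via DateTimes.inferTzFromString. A minimal sketch of the same resolution with literal stand-ins for the evaluated arguments, e.g. for timestamp_floor("__time", 'PT1M', '', 'America/Los_Angeles'):

import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;
import org.joda.time.chrono.ISOChronology;

public class ToPeriodGranularitySketch {
    public static void main(String[] args) {
        // Literal stand-ins for the already-evaluated expression arguments.
        String periodArg = "PT1M";
        String originArg = "";                      // blank origin is treated as null, as in the helper above
        String timeZoneArg = "America/Los_Angeles";

        Period period = new Period(periodArg);
        DateTimeZone timeZone = timeZoneArg == null ? null : DateTimes.inferTzFromString(timeZoneArg);

        // The helper parses a non-blank origin with an ISOChronology matching the time zone.
        DateTime origin = (originArg == null || originArg.isEmpty())
                ? null
                : new DateTime(originArg, timeZone == null ? ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone));

        PeriodGranularity granularity = new PeriodGranularity(period, origin, timeZone);
        System.out.println(granularity);
    }
}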

Aggregations

PeriodGranularity (org.apache.druid.java.util.common.granularity.PeriodGranularity): 65
Test (org.junit.Test): 57
Period (org.joda.time.Period): 52
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 33
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 27
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 24
DateTime (org.joda.time.DateTime): 13
QueryRunner (org.apache.druid.query.QueryRunner): 11
Result (org.apache.druid.query.Result): 11
ArrayList (java.util.ArrayList): 10
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 10
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 10
ResponseContext (org.apache.druid.query.context.ResponseContext): 9
DateTimeZone (org.joda.time.DateTimeZone): 9
ChainedExecutionQueryRunner (org.apache.druid.query.ChainedExecutionQueryRunner): 8
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 8
Sequence (org.apache.druid.java.util.common.guava.Sequence): 7
QueryPlus (org.apache.druid.query.QueryPlus): 7
Interval (org.joda.time.Interval): 7
QueryDataSource (org.apache.druid.query.QueryDataSource): 6