
Example 71 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From class SpatialFilterTest, method testSpatialQueryWithOtherSpatialDim.

@Test
public void testSpatialQueryWithOtherSpatialDim() {
    // Timeseries query over one week, filtered to rows whose "spatialIsRad"
    // coordinates fall within a radius of 5 around the origin.
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity(Granularities.ALL)
            .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
            .filters(new SpatialDimFilter("spatialIsRad", new RadiusBound(new float[] { 0.0f, 0.0f }, 5)))
            .aggregators(Arrays.asList(
                    new CountAggregatorFactory("rows"),
                    new LongSumAggregatorFactory("val", "val")))
            .build();
    // Exactly one row is expected to match, contributing 13 to "val".
    List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
            new Result<TimeseriesResultValue>(
                    DateTimes.of("2013-01-01T00:00:00.000Z"),
                    new TimeseriesResultValue(
                            ImmutableMap.<String, Object>builder()
                                    .put("rows", 1L)
                                    .put("val", 13L)
                                    .build())));
    try {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
        QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
        TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used: TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue), TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest), QueryRunner (org.apache.druid.query.QueryRunner), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), IOException (java.io.IOException), Result (org.apache.druid.query.Result), TimeseriesQueryEngine (org.apache.druid.query.timeseries.TimeseriesQueryEngine), SpatialDimFilter (org.apache.druid.query.filter.SpatialDimFilter), TimeseriesQueryRunnerFactory (org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory), RadiusBound (org.apache.druid.collections.spatial.search.RadiusBound), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
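Before moving on, CountAggregatorFactory itself is worth a stand-alone look: its only constructor argument is the name of the output column. A minimal sketch (the combining-factory detail printed at the end is our reading of the Druid source, not documented API):

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;

public class CountAggregatorFactorySketch {
    public static void main(String[] args) {
        // The single constructor argument names the output column.
        CountAggregatorFactory rows = new CountAggregatorFactory("rows");
        System.out.println(rows.getName()); // rows

        // Merging partial results must sum per-segment counts rather than
        // re-count rows; per our reading of the source, the combining factory
        // is therefore a long-sum over the already-computed "rows" column.
        AggregatorFactory combining = rows.getCombiningFactory();
        System.out.println(combining.getClass().getSimpleName());
    }
}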

Example 72 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From class DataGeneratorTest, method testToIndex.

@Test
public void testToIndex() {
    List<GeneratorColumnSchema> schemas = new ArrayList<>();
    schemas.add(GeneratorColumnSchema.makeSequential("dimA", ValueType.STRING, false, 1, null, 10, 20));
    schemas.add(GeneratorColumnSchema.makeEnumeratedSequential("dimB", ValueType.STRING, false, 1, null, Arrays.asList("Hello", "World", "Foo", "Bar")));
    schemas.add(GeneratorColumnSchema.makeSequential("dimC", ValueType.STRING, false, 1, 0.50, 30, 40));
    DataGenerator dataGenerator = new DataGenerator(schemas, 9999, 0, 0, 1000.0);
    DimensionsSpec dimensions = new DimensionsSpec(
            Arrays.asList(
                    new StringDimensionSchema("dimA"),
                    new StringDimensionSchema("dimB"),
                    new StringDimensionSchema("dimC")));
    AggregatorFactory[] metrics = { new CountAggregatorFactory("cnt") };
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
            .withQueryGranularity(Granularities.MINUTE)
            .withDimensionsSpec(dimensions)
            .withMetrics(metrics)
            .withRollup(false)
            .build();
    IncrementalIndex index = new OnheapIncrementalIndex.Builder()
            .setIndexSchema(schema)
            .setSortFacts(false)
            .setMaxRowCount(1_000_000)
            .build();
    dataGenerator.addToIndex(index, 100);
    Assert.assertEquals(100, index.size());
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), ArrayList (java.util.ArrayList), AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), StringDimensionSchema (org.apache.druid.data.input.impl.StringDimensionSchema), DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec), IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
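The DataGenerator can also be sampled directly, without building an index, which helps when eyeballing the generated distributions. A short sketch reusing the schemas from the test above (nextRow() is the generator method this sketch assumes; InputRow is org.apache.druid.data.input.InputRow):

    // Sample a few rows straight from the generator instead of indexing them.
    DataGenerator gen = new DataGenerator(schemas, 9999, 0, 0, 1000.0);
    for (int i = 0; i < 3; i++) {
        InputRow row = gen.nextRow();
        System.out.println(row.getDimension("dimA") + " / " + row.getDimension("dimB"));
    }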

Example 73 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From class SinkTest, method testSwap.

@Test
public void testSwap() throws Exception {
    final DataSchema schema = new DataSchema(
            "test",
            new TimestampSpec(null, null, null),
            DimensionsSpec.EMPTY,
            new AggregatorFactory[] { new CountAggregatorFactory("rows") },
            new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
            null);
    final Interval interval = Intervals.of("2013-01-01/2013-01-02");
    final String version = DateTimes.nowUtc().toString();
    RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
            null, 100, null, null, new Period("P1Y"),
            null, null, null, null, null,
            null, null, null, 0, 0,
            null, null, null, null, null);
    final Sink sink = new Sink(
            interval,
            schema,
            tuningConfig.getShardSpec(),
            version,
            tuningConfig.getAppendableIndexSpec(),
            tuningConfig.getMaxRowsInMemory(),
            tuningConfig.getMaxBytesInMemoryOrDefault(),
            true,
            tuningConfig.getDedupColumn());
    sink.add(new InputRow() {

        @Override
        public List<String> getDimensions() {
            return new ArrayList<>();
        }

        @Override
        public long getTimestampFromEpoch() {
            return DateTimes.of("2013-01-01").getMillis();
        }

        @Override
        public DateTime getTimestamp() {
            return DateTimes.of("2013-01-01");
        }

        @Override
        public List<String> getDimension(String dimension) {
            return new ArrayList<>();
        }

        @Override
        public Number getMetric(String metric) {
            return 0;
        }

        @Override
        public Object getRaw(String dimension) {
            return null;
        }

        @Override
        public int compareTo(Row o) {
            return 0;
        }
    }, false);
    FireHydrant currHydrant = sink.getCurrHydrant();
    Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), currHydrant.getIndex().getInterval());
    FireHydrant swapHydrant = sink.swap();
    sink.add(new InputRow() {

        @Override
        public List<String> getDimensions() {
            return new ArrayList<>();
        }

        @Override
        public long getTimestampFromEpoch() {
            return DateTimes.of("2013-01-01").getMillis();
        }

        @Override
        public DateTime getTimestamp() {
            return DateTimes.of("2013-01-01");
        }

        @Override
        public List<String> getDimension(String dimension) {
            return new ArrayList<>();
        }

        @Override
        public Number getMetric(String metric) {
            return 0;
        }

        @Override
        public Object getRaw(String dimension) {
            return null;
        }

        @Override
        public int compareTo(Row o) {
            return 0;
        }
    }, false);
    Assert.assertEquals(currHydrant, swapHydrant);
    Assert.assertNotSame(currHydrant, sink.getCurrHydrant());
    Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), sink.getCurrHydrant().getIndex().getInterval());
    Assert.assertEquals(2, Iterators.size(sink.iterator()));
}
Also used: Period (org.joda.time.Period), RealtimeTuningConfig (org.apache.druid.segment.indexing.RealtimeTuningConfig), DateTime (org.joda.time.DateTime), DataSchema (org.apache.druid.segment.indexing.DataSchema), UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), InputRow (org.apache.druid.data.input.InputRow), ArrayList (java.util.ArrayList), ImmutableList (com.google.common.collect.ImmutableList), List (java.util.List), Row (org.apache.druid.data.input.Row), FireHydrant (org.apache.druid.segment.realtime.FireHydrant), Interval (org.joda.time.Interval), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
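Both anonymous InputRow classes in testSwap exist only to carry a fixed timestamp; every other method is a stub. As a sketch, the same throwaway row could be built with MapBasedInputRow, which the next example uses (this is our simplification, not the test's actual code):

    // Equivalent stub row: timestamp only, no dimensions, no event values.
    InputRow stub = new MapBasedInputRow(
            DateTimes.of("2013-01-01"),
            ImmutableList.of(),
            ImmutableMap.of());
    sink.add(stub, false);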

Example 74 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From class SinkTest, method testDedup.

@Test
public void testDedup() throws Exception {
    final DataSchema schema = new DataSchema(
            "test",
            new TimestampSpec(null, null, null),
            DimensionsSpec.EMPTY,
            new AggregatorFactory[] { new CountAggregatorFactory("rows") },
            new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null),
            null);
    final Interval interval = Intervals.of("2013-01-01/2013-01-02");
    final String version = DateTimes.nowUtc().toString();
    // Same tuning config as testSwap, except that a dedup column is set.
    RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
            null, 100, null, null, new Period("P1Y"),
            null, null, null, null, null,
            null, null, null, 0, 0,
            null, null, null, null, "dedupColumn");
    final Sink sink = new Sink(
            interval,
            schema,
            tuningConfig.getShardSpec(),
            version,
            tuningConfig.getAppendableIndexSpec(),
            tuningConfig.getMaxRowsInMemory(),
            tuningConfig.getMaxBytesInMemoryOrDefault(),
            true,
            tuningConfig.getDedupColumn());
    int rows = sink.add(
            new MapBasedInputRow(
                    DateTimes.of("2013-01-01"),
                    ImmutableList.of("field", "dedupColumn"),
                    ImmutableMap.of("field1", "value1", "dedupColumn", "v1")),
            false).getRowCount();
    Assert.assertTrue(rows > 0);
    // dedupColumn is absent (null) for this row, so it cannot collide.
    rows = sink.add(
            new MapBasedInputRow(
                    DateTimes.of("2013-01-01"),
                    ImmutableList.of("field", "dedupColumn"),
                    ImmutableMap.of("field1", "value2")),
            false).getRowCount();
    Assert.assertTrue(rows > 0);
    // dedupColumn is absent (null) again; both null-valued rows are accepted.
    rows = sink.add(
            new MapBasedInputRow(
                    DateTimes.of("2013-01-01"),
                    ImmutableList.of("field", "dedupColumn"),
                    ImmutableMap.of("field1", "value3")),
            false).getRowCount();
    Assert.assertTrue(rows > 0);
    rows = sink.add(
            new MapBasedInputRow(
                    DateTimes.of("2013-01-01"),
                    ImmutableList.of("field", "dedupColumn"),
                    ImmutableMap.of("field1", "value4", "dedupColumn", "v2")),
            false).getRowCount();
    Assert.assertTrue(rows > 0);
    // "v1" repeats, so this row is dropped; the test observes -2 as the
    // rejection sentinel.
    rows = sink.add(
            new MapBasedInputRow(
                    DateTimes.of("2013-01-01"),
                    ImmutableList.of("field", "dedupColumn"),
                    ImmutableMap.of("field1", "value5", "dedupColumn", "v1")),
            false).getRowCount();
    Assert.assertTrue(rows == -2);
}
Also used: DataSchema (org.apache.druid.segment.indexing.DataSchema), UniformGranularitySpec (org.apache.druid.segment.indexing.granularity.UniformGranularitySpec), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec), Period (org.joda.time.Period), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), RealtimeTuningConfig (org.apache.druid.segment.indexing.RealtimeTuningConfig), Interval (org.joda.time.Interval), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
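Conceptually, the dedup behavior the assertions observe amounts to a seen-set keyed on the dedup column's value: null values always pass, and a repeated value is rejected. A plain-Java sketch of that idea, as our illustration rather than the Sink implementation:

import java.util.HashSet;
import java.util.Set;

// Our illustration of the dedup rule the test observes, not Druid's code.
public class DedupSketch {
    private final Set<Object> seen = new HashSet<>();

    /** Returns true if the row should be added; false if it is a duplicate. */
    public boolean shouldAdd(Object dedupValue) {
        if (dedupValue == null) {
            return true; // rows with no dedup value always pass, as in the test
        }
        return seen.add(dedupValue); // false once the same value repeats
    }

    public static void main(String[] args) {
        DedupSketch s = new DedupSketch();
        System.out.println(s.shouldAdd("v1")); // true
        System.out.println(s.shouldAdd(null)); // true
        System.out.println(s.shouldAdd(null)); // true
        System.out.println(s.shouldAdd("v2")); // true
        System.out.println(s.shouldAdd("v1")); // false, mirrors the -2 result
    }
}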

Example 75 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From class ClientQuerySegmentWalkerTest, method testGroupByOnGroupByOnTable.

@Test
public void testGroupByOnGroupByOnTable() {
    final GroupByQuery subquery = (GroupByQuery) GroupByQuery.builder()
            .setDataSource(FOO)
            .setGranularity(Granularities.ALL)
            .setInterval(Collections.singletonList(INTERVAL))
            .setDimensions(DefaultDimensionSpec.of("s"))
            .build()
            .withId("queryId");
    final GroupByQuery query = (GroupByQuery) GroupByQuery.builder()
            .setDataSource(new QueryDataSource(subquery))
            .setGranularity(Granularities.ALL)
            .setInterval(Intervals.ONLY_ETERNITY)
            .setAggregatorSpecs(new CountAggregatorFactory("cnt"))
            .build()
            .withId(DUMMY_QUERY_ID);
    // GroupBy handles its own subqueries; only the inner one goes to the
    // cluster, and it is assigned a subquery id ("1.1").
    testQuery(
            query,
            ImmutableList.of(ExpectedQuery.cluster(subquery.withSubQueryId("1.1"))),
            ImmutableList.of(new Object[] { 3L }));
    Assert.assertEquals(1, scheduler.getTotalRun().get());
    Assert.assertEquals(1, scheduler.getTotalPrioritizedAndLaned().get());
    Assert.assertEquals(1, scheduler.getTotalAcquired().get());
    Assert.assertEquals(1, scheduler.getTotalReleased().get());
}
Also used: GroupByQuery (org.apache.druid.query.groupby.GroupByQuery), QueryDataSource (org.apache.druid.query.QueryDataSource), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), Test (org.junit.Test)
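The expected result of { 3L } follows from the two stages: the inner group-by emits one row per distinct value of "s" in the FOO fixture (three in this test's data), and the outer query merely counts those rows. A plain-Java analogue of that reasoning, where fooValues is a hypothetical stand-in for the fixture's "s" column:

import java.util.Arrays;
import java.util.List;

public class NestedGroupBySketch {
    public static void main(String[] args) {
        // Hypothetical "s" column of the FOO fixture: three distinct values.
        List<String> fooValues = Arrays.asList("x", "x", "y", "y", "z");

        // Inner query: group by "s" -> one result row per distinct value.
        long distinct = fooValues.stream().distinct().count();

        // Outer query: count the inner result rows.
        System.out.println(distinct); // 3
    }
}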

Aggregations

CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 300 uses
Test (org.junit.Test): 249 uses
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 103 uses
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 81 uses
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 62 uses
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 50 uses
TableDataSource (org.apache.druid.query.TableDataSource): 44 uses
QueryDataSource (org.apache.druid.query.QueryDataSource): 41 uses
TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest): 40 uses
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 37 uses
Result (org.apache.druid.query.Result): 36 uses
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 35 uses
FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory): 30 uses
FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator): 30 uses
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 28 uses
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 27 uses
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 26 uses
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 26 uses
IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 26 uses
Parameters (junitparams.Parameters): 24 uses