
Example 41 with SelectorDimFilter

Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.

From the class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggValueNotPresent.

@Test
public void testTimeSeriesWithFilteredAggValueNotPresent() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .aggregators(Lists.newArrayList(Iterables.concat(
            aggregatorFactoryList,
            Collections.singletonList(new FilteredAggregatorFactory(
                new CountAggregatorFactory("filteredAgg"),
                new NotDimFilter(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "LolLol", null)))))))
        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
        .descending(descending)
        .context(makeContext())
        .build();
    Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query)).toList();
    List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
        new Result<>(
            DateTimes.of("2011-04-01"),
            new TimeseriesResultValue(ImmutableMap.of(
                "filteredAgg", 26L,
                "addRowsIndexConstant", 12486.361190795898d,
                "index", 12459.361190795898d,
                "uniques", 9.019833517963864d,
                "rows", 26L))));
    assertExpectedResults(expectedResults, actualResults);
}
Also used : FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) NotDimFilter(org.apache.druid.query.filter.NotDimFilter) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) Result(org.apache.druid.query.Result) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
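
A minimal standalone sketch of the core pattern above, hedged: it assumes the druid-core and druid-processing artifacts are on the classpath, and the class name FilteredAggSketch plus the literal dimension name "market" (standing in for QueryRunnerTestHelper.MARKET_DIMENSION) are illustrative, not from the test.

import org.apache.druid.common.config.NullHandling;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.filter.NotDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

public class FilteredAggSketch {
    public static void main(String[] args) throws Exception {
        // SelectorDimFilter normalizes its value through NullHandling, so the
        // static config must be initialized first (the test suites do the same).
        NullHandling.initializeForTests();
        // Count only rows whose "market" dimension is NOT "LolLol".
        FilteredAggregatorFactory filteredAgg = new FilteredAggregatorFactory(
            new CountAggregatorFactory("filteredAgg"),
            new NotDimFilter(new SelectorDimFilter("market", "LolLol", null)));
        // Print the JSON form this aggregator takes in a native query spec.
        System.out.println(new DefaultObjectMapper().writeValueAsString(filteredAgg));
    }
}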

Example 42 with SelectorDimFilter

Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.

From the class TopNQueryRunnerTest, method testTopNOverNullDimensionWithFilter.

@Test
public void testTopNOverNullDimensionWithFilter() {
    TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .dimension("null_column")
        .filters(new SelectorDimFilter("null_column", null, null))
        .metric(QueryRunnerTestHelper.INDEX_METRIC)
        .threshold(4)
        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .aggregators(Lists.newArrayList(Iterables.concat(
            commonAggregators,
            Lists.newArrayList(
                new DoubleMaxAggregatorFactory("maxIndex", "index"),
                new DoubleMinAggregatorFactory("minIndex", "index")))))
        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
        .build();
    Map<String, Object> map = new HashMap<>();
    map.put("null_column", null);
    map.put("rows", 1209L);
    map.put("index", 503332.5071372986D);
    map.put("addRowsIndexConstant", 504542.5071372986D);
    map.put("uniques", QueryRunnerTestHelper.UNIQUES_9);
    map.put("maxIndex", 1870.061029D);
    map.put("minIndex", 59.02102279663086D);
    List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
        new Result<>(
            DateTimes.of("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(Collections.singletonList(map))));
    assertExpectedResults(expectedResults, query);
}
Also used : DoubleMaxAggregatorFactory(org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) HashMap(java.util.HashMap) DoubleMinAggregatorFactory(org.apache.druid.query.aggregation.DoubleMinAggregatorFactory) Result(org.apache.druid.query.Result) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
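
A minimal sketch isolating the null-value selector used above, assuming druid-core and druid-processing are on the classpath; the class name NullSelectorSketch is illustrative.

import org.apache.druid.common.config.NullHandling;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.filter.SelectorDimFilter;

public class NullSelectorSketch {
    public static void main(String[] args) throws Exception {
        NullHandling.initializeForTests();
        // A selector whose value is null matches rows where the dimension has
        // no value (and, in default-value mode, the empty string as well).
        SelectorDimFilter filter = new SelectorDimFilter("null_column", null, null);
        System.out.println(new DefaultObjectMapper().writeValueAsString(filter));
    }
}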

Example 43 with SelectorDimFilter

Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.

From the class NewestSegmentFirstPolicyTest, method testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentFilter.

@Test
public void testIteratorReturnsSegmentsAsSegmentsWasCompactedAndHaveDifferentFilter() throws Exception {
    NullHandling.initializeForTests();
    // Same indexSpec as what is set in the auto compaction config
    Map<String, Object> indexSpec = mapper.convertValue(new IndexSpec(), new TypeReference<Map<String, Object>>() {});
    // Same partitionsSpec as what is set in the auto compaction config
    PartitionsSpec partitionsSpec = NewestSegmentFirstIterator.findPartitionsSpecFromConfig(
        ClientCompactionTaskQueryTuningConfig.from(null, null));
    // Create segments that were compacted (CompactionState != null) and have
    // filter=SelectorDimFilter("dim1", "foo", null) for interval 2017-10-01T00:00:00/2017-10-02T00:00:00,
    // filter=SelectorDimFilter("dim1", "bar", null) for interval 2017-10-02T00:00:00/2017-10-03T00:00:00,
    // filter=null for interval 2017-10-03T00:00:00/2017-10-04T00:00:00 (filter was not set during last compaction)
    // and transformSpec=null for interval 2017-10-04T00:00:00/2017-10-05T00:00:00 (transformSpec was not set during last compaction)
    final VersionedIntervalTimeline<String, DataSegment> timeline = createTimeline(
        new SegmentGenerateSpec(
            Intervals.of("2017-10-01T00:00:00/2017-10-02T00:00:00"),
            new Period("P1D"),
            null,
            new CompactionState(
                partitionsSpec,
                null,
                null,
                mapper.readValue(
                    mapper.writeValueAsString(new TransformSpec(new SelectorDimFilter("dim1", "foo", null), null)),
                    new TypeReference<Map<String, Object>>() {}),
                indexSpec,
                null)),
        new SegmentGenerateSpec(
            Intervals.of("2017-10-02T00:00:00/2017-10-03T00:00:00"),
            new Period("P1D"),
            null,
            new CompactionState(
                partitionsSpec,
                null,
                null,
                mapper.readValue(
                    mapper.writeValueAsString(new TransformSpec(new SelectorDimFilter("dim1", "bar", null), null)),
                    new TypeReference<Map<String, Object>>() {}),
                indexSpec,
                null)),
        new SegmentGenerateSpec(
            Intervals.of("2017-10-03T00:00:00/2017-10-04T00:00:00"),
            new Period("P1D"),
            null,
            new CompactionState(
                partitionsSpec,
                null,
                null,
                mapper.readValue(
                    mapper.writeValueAsString(new TransformSpec(null, null)),
                    new TypeReference<Map<String, Object>>() {}),
                indexSpec,
                null)),
        new SegmentGenerateSpec(
            Intervals.of("2017-10-04T00:00:00/2017-10-05T00:00:00"),
            new Period("P1D"),
            null,
            new CompactionState(partitionsSpec, null, null, null, indexSpec, null)));
    // Auto compaction config sets filter=SelectorDimFilter("dim1", "bar", null)
    CompactionSegmentIterator iterator = policy.reset(
        ImmutableMap.of(
            DATA_SOURCE,
            createCompactionConfig(
                130000,
                new Period("P0D"),
                null,
                null,
                new UserCompactionTaskTransformConfig(new SelectorDimFilter("dim1", "bar", null)),
                null)),
        ImmutableMap.of(DATA_SOURCE, timeline),
        Collections.emptyMap());
    // We should get interval 2017-10-04T00:00:00/2017-10-05T00:00:00 first, then interval
    // 2017-10-03T00:00:00/2017-10-04T00:00:00, then interval 2017-10-01T00:00:00/2017-10-02T00:00:00.
    // (The 2017-10-02/2017-10-03 interval is skipped: its stored filter already matches the config.)
    Assert.assertTrue(iterator.hasNext());
    List<DataSegment> expectedSegmentsToCompact = new ArrayList<>(
        timeline.findNonOvershadowedObjectsInInterval(Intervals.of("2017-10-04T00:00:00/2017-10-05T00:00:00"), Partitions.ONLY_COMPLETE));
    Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));
    Assert.assertTrue(iterator.hasNext());
    expectedSegmentsToCompact = new ArrayList<>(
        timeline.findNonOvershadowedObjectsInInterval(Intervals.of("2017-10-03T00:00:00/2017-10-04T00:00:00"), Partitions.ONLY_COMPLETE));
    Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));
    Assert.assertTrue(iterator.hasNext());
    expectedSegmentsToCompact = new ArrayList<>(
        timeline.findNonOvershadowedObjectsInInterval(Intervals.of("2017-10-01T00:00:00/2017-10-02T00:00:00"), Partitions.ONLY_COMPLETE));
    Assert.assertEquals(ImmutableSet.copyOf(expectedSegmentsToCompact), ImmutableSet.copyOf(iterator.next()));
    // No more
    Assert.assertFalse(iterator.hasNext());
    // Auto compaction config sets filter=null
    iterator = policy.reset(
        ImmutableMap.of(
            DATA_SOURCE,
            createCompactionConfig(130000, new Period("P0D"), null, null, new UserCompactionTaskTransformConfig(null), null)),
        ImmutableMap.of(DATA_SOURCE, timeline),
        Collections.emptyMap());
    // No more
    Assert.assertFalse(iterator.hasNext());
}
Also used : IndexSpec(org.apache.druid.segment.IndexSpec) ArrayList(java.util.ArrayList) Period(org.joda.time.Period) UserCompactionTaskTransformConfig(org.apache.druid.server.coordinator.UserCompactionTaskTransformConfig) DataSegment(org.apache.druid.timeline.DataSegment) TransformSpec(org.apache.druid.segment.transform.TransformSpec) PartitionsSpec(org.apache.druid.indexer.partitions.PartitionsSpec) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) CompactionState(org.apache.druid.timeline.CompactionState) TypeReference(com.fasterxml.jackson.core.type.TypeReference) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) Test(org.junit.Test)
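
The test compares each segment's stored CompactionState against the current auto compaction config, and CompactionState keeps the transformSpec as a generic Map. A minimal sketch of that JSON round trip in isolation (class name CompactionStateFilterSketch is illustrative; assumes the Druid core and processing artifacts on the classpath):

import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.segment.transform.TransformSpec;

public class CompactionStateFilterSketch {
    public static void main(String[] args) throws Exception {
        NullHandling.initializeForTests();
        DefaultObjectMapper mapper = new DefaultObjectMapper();
        // Serialize a TransformSpec carrying the filter, then read it back as
        // the Map form that CompactionState stores, exactly as the test does.
        Map<String, Object> transformSpecAsMap = mapper.readValue(
            mapper.writeValueAsString(new TransformSpec(new SelectorDimFilter("dim1", "foo", null), null)),
            new TypeReference<Map<String, Object>>() {});
        System.out.println(transformSpecAsMap);
    }
}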

Example 44 with SelectorDimFilter

Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.

From the class CompactSegmentsTest, method testCompactWithTransformSpec.

@Test
public void testCompactWithTransformSpec() {
    NullHandling.initializeForTests();
    final HttpIndexingServiceClient mockIndexingServiceClient = Mockito.mock(HttpIndexingServiceClient.class);
    final CompactSegments compactSegments = new CompactSegments(COORDINATOR_CONFIG, JSON_MAPPER, mockIndexingServiceClient);
    final List<DataSourceCompactionConfig> compactionConfigs = new ArrayList<>();
    final String dataSource = DATA_SOURCE_PREFIX + 0;
    compactionConfigs.add(new DataSourceCompactionConfig(
        dataSource,
        0,
        500L,
        null,
        new Period("PT0H"), // smaller than segment interval
        new UserCompactionTaskQueryTuningConfig(null, null, null, null, partitionsSpec, null, null, null, null, null, 3, null, null, null, null, null, null),
        null,
        null,
        null,
        new UserCompactionTaskTransformConfig(new SelectorDimFilter("dim1", "foo", null)),
        null,
        null));
    doCompactSegments(compactSegments, compactionConfigs);
    ArgumentCaptor<ClientCompactionTaskTransformSpec> transformSpecArgumentCaptor = ArgumentCaptor.forClass(ClientCompactionTaskTransformSpec.class);
    Mockito.verify(mockIndexingServiceClient).compactSegments(
        ArgumentMatchers.anyString(),
        ArgumentMatchers.any(),
        ArgumentMatchers.anyInt(),
        ArgumentMatchers.any(),
        ArgumentMatchers.any(),
        ArgumentMatchers.any(),
        ArgumentMatchers.any(),
        transformSpecArgumentCaptor.capture(),
        ArgumentMatchers.any(),
        ArgumentMatchers.any());
    ClientCompactionTaskTransformSpec actual = transformSpecArgumentCaptor.getValue();
    Assert.assertNotNull(actual);
    Assert.assertEquals(new SelectorDimFilter("dim1", "foo", null), actual.getFilter());
}
Also used : HttpIndexingServiceClient(org.apache.druid.client.indexing.HttpIndexingServiceClient) DataSourceCompactionConfig(org.apache.druid.server.coordinator.DataSourceCompactionConfig) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) ArrayList(java.util.ArrayList) Period(org.joda.time.Period) UserCompactionTaskQueryTuningConfig(org.apache.druid.server.coordinator.UserCompactionTaskQueryTuningConfig) UserCompactionTaskTransformConfig(org.apache.druid.server.coordinator.UserCompactionTaskTransformConfig) ClientCompactionTaskTransformSpec(org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec) Test(org.junit.Test)
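
A minimal sketch of the config object whose filter the mock verification above captures. It assumes UserCompactionTaskTransformConfig exposes its filter via a getFilter() accessor (as the assertion pattern in the test suggests), and the class name TransformConfigSketch is illustrative.

import org.apache.druid.common.config.NullHandling;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.server.coordinator.UserCompactionTaskTransformConfig;

public class TransformConfigSketch {
    public static void main(String[] args) {
        NullHandling.initializeForTests();
        // The filter configured here is what the CompactSegments duty forwards
        // to the compaction task's transform spec.
        UserCompactionTaskTransformConfig config =
            new UserCompactionTaskTransformConfig(new SelectorDimFilter("dim1", "foo", null));
        System.out.println(config.getFilter()); // assumed accessor, see lead-in
    }
}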

Example 45 with SelectorDimFilter

Use of org.apache.druid.query.filter.SelectorDimFilter in project druid by druid-io.

From the class DataSourceCompactionConfigTest, method testSerdeTransformSpec.

@Test
public void testSerdeTransformSpec() throws IOException {
    NullHandling.initializeForTests();
    final DataSourceCompactionConfig config = new DataSourceCompactionConfig(
        "dataSource",
        null,
        500L,
        null,
        new Period(3600),
        null,
        null,
        null,
        null,
        new UserCompactionTaskTransformConfig(new SelectorDimFilter("dim1", "foo", null)),
        null,
        ImmutableMap.of("key", "val"));
    final String json = OBJECT_MAPPER.writeValueAsString(config);
    final DataSourceCompactionConfig fromJson = OBJECT_MAPPER.readValue(json, DataSourceCompactionConfig.class);
    Assert.assertEquals(config.getDataSource(), fromJson.getDataSource());
    Assert.assertEquals(25, fromJson.getTaskPriority());
    Assert.assertEquals(config.getInputSegmentSizeBytes(), fromJson.getInputSegmentSizeBytes());
    Assert.assertEquals(config.getMaxRowsPerSegment(), fromJson.getMaxRowsPerSegment());
    Assert.assertEquals(config.getSkipOffsetFromLatest(), fromJson.getSkipOffsetFromLatest());
    Assert.assertEquals(config.getTuningConfig(), fromJson.getTuningConfig());
    Assert.assertEquals(config.getTaskContext(), fromJson.getTaskContext());
    Assert.assertEquals(config.getTransformSpec(), fromJson.getTransformSpec());
}
Also used : SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) Period(org.joda.time.Period) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
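
The serde assertions above rely on DimFilter's polymorphic JSON type info and on SelectorDimFilter implementing equals(). A minimal round-trip sketch (class name FilterSerdeSketch is illustrative; assumes druid-core and druid-processing on the classpath):

import org.apache.druid.common.config.NullHandling;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

public class FilterSerdeSketch {
    public static void main(String[] args) throws Exception {
        NullHandling.initializeForTests();
        DefaultObjectMapper mapper = new DefaultObjectMapper();
        DimFilter original = new SelectorDimFilter("dim1", "foo", null);
        // Serializes as {"type":"selector",...}; the "type" tag lets Jackson
        // deserialize back to the concrete SelectorDimFilter class.
        String json = mapper.writeValueAsString(original);
        DimFilter fromJson = mapper.readValue(json, DimFilter.class);
        System.out.println(original.equals(fromJson)); // expected: true
    }
}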

Aggregations

SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 206
Test (org.junit.Test): 184
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 78
OrDimFilter (org.apache.druid.query.filter.OrDimFilter): 54
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 41
AndDimFilter (org.apache.druid.query.filter.AndDimFilter): 39
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 37
BoundDimFilter (org.apache.druid.query.filter.BoundDimFilter): 34
ArrayList (java.util.ArrayList): 29
Filter (org.apache.druid.query.filter.Filter): 27
Result (org.apache.druid.query.Result): 25
DimFilter (org.apache.druid.query.filter.DimFilter): 24
InDimFilter (org.apache.druid.query.filter.InDimFilter): 24
NotDimFilter (org.apache.druid.query.filter.NotDimFilter): 24
ExpressionDimFilter (org.apache.druid.query.filter.ExpressionDimFilter): 23
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 22
SelectorFilter (org.apache.druid.segment.filter.SelectorFilter): 22
JoinFilterPreAnalysis (org.apache.druid.segment.join.filter.JoinFilterPreAnalysis): 21
LookupExtractionFn (org.apache.druid.query.lookup.LookupExtractionFn): 20
HashMap (java.util.HashMap): 19