
Example 11 with OrDimFilter

Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.

From the class GroupByQueryRunnerTest, method testDimFilterHavingSpec.

@Test
public void testDimFilterHavingSpec() {
    final DimFilterHavingSpec havingSpec = new DimFilterHavingSpec(
        new AndDimFilter(ImmutableList.of(
            new OrDimFilter(ImmutableList.of(
                new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                new SelectorDimFilter("idx", "217", null)
            )),
            new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
        )),
        null
    );
    GroupByQuery.Builder builder = makeQueryBuilder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setInterval("2011-04-02/2011-04-04")
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(
            QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"),
            QueryRunnerTestHelper.INDEX_LONG_MIN, QueryRunnerTestHelper.INDEX_LONG_MAX,
            QueryRunnerTestHelper.INDEX_DOUBLE_MIN, QueryRunnerTestHelper.INDEX_DOUBLE_MAX,
            QueryRunnerTestHelper.INDEX_FLOAT_MIN, QueryRunnerTestHelper.INDEX_FLOAT_MAX
        )
        .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
        .setHavingSpec(havingSpec);
    final GroupByQuery fullQuery = builder.build();
    List<ResultRow> expectedResults = Arrays.asList(
        makeRow(fullQuery, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L, QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 105L, QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 112L, QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 105.735462D, QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 112.987027D, QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 105.73546F, QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 112.98703F),
        makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L, QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 107L, QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1193L, QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 107.047773D, QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1193.556278D, QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 107.047775F, QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1193.5563F),
        makeRow(fullQuery, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L, QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 122L, QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1321L, QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 122.141707D, QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1321.375057D, QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 122.14171F, QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1321.375F)
    );
    TestHelper.assertExpectedObjects(expectedResults, GroupByQueryRunnerTestHelper.runQuery(factory, runner, fullQuery), "dimfilter-havingspec");
}
Also used : DimFilterHavingSpec(org.apache.druid.query.groupby.having.DimFilterHavingSpec) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) AndDimFilter(org.apache.druid.query.filter.AndDimFilter) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Period(org.joda.time.Period) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
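
Across these examples, OrDimFilter is constructed in two equivalent ways: from an explicit List of DimFilters (as in the having spec above and in Example 12) or directly as varargs (as in Examples 13 and 15 below). The short sketch below shows both forms side by side; it is not taken from the Druid test suite, and the "market" dimension and its values are borrowed from Example 12 purely for illustration.

import com.google.common.collect.ImmutableList;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

class OrDimFilterConstructionSketch {
    public static void main(String[] args) {
        // Form 1: wrap an explicit list of filters (market = 'spot' OR market = 'upfront').
        DimFilter fromList = new OrDimFilter(ImmutableList.of(
            new SelectorDimFilter("market", "spot", null),
            new SelectorDimFilter("market", "upfront", null)));
        // Form 2: pass the same filters as varargs; both build the same OR of selectors.
        DimFilter fromVarargs = new OrDimFilter(
            new SelectorDimFilter("market", "spot", null),
            new SelectorDimFilter("market", "upfront", null));
        System.out.println(fromList);
        System.out.println(fromVarargs);
    }
}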

Example 12 with OrDimFilter

Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.

From the class GroupByQueryRunnerTest, method testGroupByTimeExtraction.

@Test
public void testGroupByTimeExtraction() {
    // Cannot vectorize due to extraction dimension spec.
    cannotVectorize();
    GroupByQuery query = makeQueryBuilder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .setDimensions(
            new DefaultDimensionSpec("market", "market"),
            new ExtractionDimensionSpec(ColumnHolder.TIME_COLUMN_NAME, "dayOfWeek", new TimeFormatExtractionFn("EEEE", null, null, null, false))
        )
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM)
        .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT))
        .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
        .setDimFilter(new OrDimFilter(Arrays.asList(new SelectorDimFilter("market", "spot", null), new SelectorDimFilter("market", "upfront", null))))
        .build();
    List<ResultRow> expectedResults = Arrays.asList(
        makeRow(query, "1970-01-01", "dayOfWeek", "Friday", "market", "spot", "index", 13219.574157714844, "rows", 117L, "addRowsIndexConstant", 13337.574157714844),
        makeRow(query, "1970-01-01", "dayOfWeek", "Monday", "market", "spot", "index", 13557.738830566406, "rows", 117L, "addRowsIndexConstant", 13675.738830566406),
        makeRow(query, "1970-01-01", "dayOfWeek", "Saturday", "market", "spot", "index", 13493.751281738281, "rows", 117L, "addRowsIndexConstant", 13611.751281738281),
        makeRow(query, "1970-01-01", "dayOfWeek", "Sunday", "market", "spot", "index", 13585.541015625, "rows", 117L, "addRowsIndexConstant", 13703.541015625),
        makeRow(query, "1970-01-01", "dayOfWeek", "Thursday", "market", "spot", "index", 14279.127197265625, "rows", 126L, "addRowsIndexConstant", 14406.127197265625),
        makeRow(query, "1970-01-01", "dayOfWeek", "Tuesday", "market", "spot", "index", 13199.471435546875, "rows", 117L, "addRowsIndexConstant", 13317.471435546875),
        makeRow(query, "1970-01-01", "dayOfWeek", "Wednesday", "market", "spot", "index", 14271.368591308594, "rows", 126L, "addRowsIndexConstant", 14398.368591308594),
        makeRow(query, "1970-01-01", "dayOfWeek", "Friday", "market", "upfront", "index", 27297.8623046875, "rows", 26L, "addRowsIndexConstant", 27324.8623046875),
        makeRow(query, "1970-01-01", "dayOfWeek", "Monday", "market", "upfront", "index", 27619.58447265625, "rows", 26L, "addRowsIndexConstant", 27646.58447265625),
        makeRow(query, "1970-01-01", "dayOfWeek", "Saturday", "market", "upfront", "index", 27820.83154296875, "rows", 26L, "addRowsIndexConstant", 27847.83154296875),
        makeRow(query, "1970-01-01", "dayOfWeek", "Sunday", "market", "upfront", "index", 24791.223876953125, "rows", 26L, "addRowsIndexConstant", 24818.223876953125),
        makeRow(query, "1970-01-01", "dayOfWeek", "Thursday", "market", "upfront", "index", 28562.748901367188, "rows", 28L, "addRowsIndexConstant", 28591.748901367188),
        makeRow(query, "1970-01-01", "dayOfWeek", "Tuesday", "market", "upfront", "index", 26968.280639648438, "rows", 26L, "addRowsIndexConstant", 26995.280639648438),
        makeRow(query, "1970-01-01", "dayOfWeek", "Wednesday", "market", "upfront", "index", 28985.5751953125, "rows", 28L, "addRowsIndexConstant", 29014.5751953125)
    );
    Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
    TestHelper.assertExpectedObjects(expectedResults, results, "time-extraction");
}
Also used : TimeFormatExtractionFn(org.apache.druid.query.extraction.TimeFormatExtractionFn) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) ExtractionDimensionSpec(org.apache.druid.query.dimension.ExtractionDimensionSpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 13 with OrDimFilter

Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.

From the class CachingClusteredClientTest, method testSingleDimensionPruning.

@Test
public void testSingleDimensionPruning() {
    DimFilter filter = new AndDimFilter(
        new OrDimFilter(
            new SelectorDimFilter("dim1", "a", null),
            new BoundDimFilter("dim1", "from", "to", false, false, false, null, StringComparators.LEXICOGRAPHIC)
        ),
        new AndDimFilter(
            new InDimFilter("dim2", Arrays.asList("a", "c", "e", "g"), null),
            new BoundDimFilter("dim2", "aaa", "hi", false, false, false, null, StringComparators.LEXICOGRAPHIC),
            new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC)
        )
    );
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .filters(filter)
        .granularity(GRANULARITY)
        .intervals(SEG_SPEC)
        .context(CONTEXT)
        .intervals("2011-01-05/2011-01-10")
        .aggregators(RENAMED_AGGS)
        .postAggregators(RENAMED_POST_AGGS);
    TimeseriesQuery query = builder.randomQueryId().build();
    final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
    final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
    final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
    QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
    final DruidServer lastServer = servers[random.nextInt(servers.length)];
    ServerSelector selector1 = makeMockSingleDimensionSelector(lastServer, "dim1", null, "b", 0);
    ServerSelector selector2 = makeMockSingleDimensionSelector(lastServer, "dim1", "e", "f", 1);
    ServerSelector selector3 = makeMockSingleDimensionSelector(lastServer, "dim1", "hi", "zzz", 2);
    ServerSelector selector4 = makeMockSingleDimensionSelector(lastServer, "dim2", "a", "e", 0);
    ServerSelector selector5 = makeMockSingleDimensionSelector(lastServer, "dim2", null, null, 1);
    ServerSelector selector6 = makeMockSingleDimensionSelector(lastServer, "other", "b", null, 0);
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(0, 3, selector1));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(1, 3, selector2));
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(2, 3, selector3));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(0, 2, selector4));
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(1, 2, selector5));
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(0, 1, selector6));
    final Capture<QueryPlus> capture = Capture.newInstance();
    final Capture<ResponseContext> contextCap = Capture.newInstance();
    QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
    EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
    EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
    EasyMock.replay(serverView);
    EasyMock.replay(mockRunner);
    List<SegmentDescriptor> descriptors = new ArrayList<>();
    descriptors.add(new SegmentDescriptor(interval1, "v", 0));
    descriptors.add(new SegmentDescriptor(interval1, "v", 2));
    descriptors.add(new SegmentDescriptor(interval2, "v", 1));
    descriptors.add(new SegmentDescriptor(interval3, "v", 0));
    MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(descriptors);
    runner.run(QueryPlus.wrap(query)).toList();
    Assert.assertEquals(expected, ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec());
}
Also used : MultipleSpecificSegmentSpec(org.apache.druid.query.spec.MultipleSpecificSegmentSpec) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) AndDimFilter(org.apache.druid.query.filter.AndDimFilter) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) ArrayList(java.util.ArrayList) QueryableDruidServer(org.apache.druid.client.selector.QueryableDruidServer) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) ServerSelector(org.apache.druid.client.selector.ServerSelector) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Druids(org.apache.druid.query.Druids) ResponseContext(org.apache.druid.query.context.ResponseContext) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) InDimFilter(org.apache.druid.query.filter.InDimFilter) DimFilter(org.apache.druid.query.filter.DimFilter) Interval(org.joda.time.Interval) QueryPlus(org.apache.druid.query.QueryPlus) Test(org.junit.Test)

Example 14 with OrDimFilter

Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.

From the class NativeQueryMaker, method runQuery.

@Override
public Sequence<Object[]> runQuery(final DruidQuery druidQuery) {
    final Query<?> query = druidQuery.getQuery();
    if (plannerContext.getPlannerConfig().isRequireTimeCondition() && !(druidQuery.getDataSource() instanceof InlineDataSource)) {
        if (Intervals.ONLY_ETERNITY.equals(findBaseDataSourceIntervals(query))) {
            throw new CannotBuildQueryException("requireTimeCondition is enabled, all queries must include a filter condition on the __time column");
        }
    }
    int numFilters = plannerContext.getPlannerConfig().getMaxNumericInFilters();
    // Instead of IN(v1,v2,v3) user should specify IN('v1','v2','v3')
    if (numFilters != PlannerConfig.NUM_FILTER_NOT_USED) {
        if (query.getFilter() instanceof OrDimFilter) {
            OrDimFilter orDimFilter = (OrDimFilter) query.getFilter();
            int numBoundFilters = 0;
            for (DimFilter filter : orDimFilter.getFields()) {
                numBoundFilters += filter instanceof BoundDimFilter ? 1 : 0;
            }
            if (numBoundFilters > numFilters) {
                String dimension = ((BoundDimFilter) (orDimFilter.getFields().get(0))).getDimension();
                throw new UOE(StringUtils.format("The number of values in the IN clause for [%s] in query exceeds configured maxNumericFilter limit of [%s] for INs. Cast [%s] values of IN clause to String", dimension, numFilters, orDimFilter.getFields().size()));
            }
        }
    }
    final List<String> rowOrder;
    if (query instanceof TimeseriesQuery && !druidQuery.getGrouping().getDimensions().isEmpty()) {
        // Hack for timeseries queries: when generating them, DruidQuery.toTimeseriesQuery translates a dimension
        // based on a timestamp_floor expression into a 'granularity'. This is not reflected in the druidQuery's
        // output row signature, so we have to account for it here.
        // TODO: We can remove this once https://github.com/apache/druid/issues/9974 is done.
        final String timeDimension = Iterables.getOnlyElement(druidQuery.getGrouping().getDimensions()).getOutputName();
        rowOrder = druidQuery.getOutputRowSignature().getColumnNames().stream().map(f -> timeDimension.equals(f) ? ColumnHolder.TIME_COLUMN_NAME : f).collect(Collectors.toList());
    } else {
        rowOrder = druidQuery.getOutputRowSignature().getColumnNames();
    }
    final List<SqlTypeName> columnTypes = druidQuery.getOutputRowType().getFieldList().stream().map(f -> f.getType().getSqlTypeName()).collect(Collectors.toList());
    return execute(query, mapColumnList(rowOrder, fieldMapping), mapColumnList(columnTypes, fieldMapping));
}
Also used : Arrays(java.util.Arrays) DimensionHandlerUtils(org.apache.druid.segment.DimensionHandlerUtils) Object2IntOpenHashMap(it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap) IAE(org.apache.druid.java.util.common.IAE) DateTimes(org.apache.druid.java.util.common.DateTimes) Sequence(org.apache.druid.java.util.common.guava.Sequence) Collection(java.util.Collection) StringUtils(org.apache.druid.java.util.common.StringUtils) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) ISE(org.apache.druid.java.util.common.ISE) UUID(java.util.UUID) PlannerConfig(org.apache.druid.sql.calcite.planner.PlannerConfig) Collectors(java.util.stream.Collectors) QuerySegmentSpec(org.apache.druid.query.spec.QuerySegmentSpec) List(java.util.List) DimFilter(org.apache.druid.query.filter.DimFilter) Hook(org.apache.calcite.runtime.Hook) DataSourceAnalysis(org.apache.druid.query.planning.DataSourceAnalysis) QueryLifecycleFactory(org.apache.druid.server.QueryLifecycleFactory) Iterables(com.google.common.collect.Iterables) ComparableList(org.apache.druid.segment.data.ComparableList) Intervals(org.apache.druid.java.util.common.Intervals) CannotBuildQueryException(org.apache.druid.sql.calcite.rel.CannotBuildQueryException) QueryLifecycle(org.apache.druid.server.QueryLifecycle) ArrayList(java.util.ArrayList) AuthenticationResult(org.apache.druid.server.security.AuthenticationResult) Interval(org.joda.time.Interval) ColumnHolder(org.apache.druid.segment.column.ColumnHolder) Query(org.apache.druid.query.Query) Pair(org.apache.calcite.util.Pair) PlannerContext(org.apache.druid.sql.calcite.planner.PlannerContext) ComparableStringArray(org.apache.druid.segment.data.ComparableStringArray) UOE(org.apache.druid.java.util.common.UOE) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) Sequences(org.apache.druid.java.util.common.guava.Sequences) DruidQuery(org.apache.druid.sql.calcite.rel.DruidQuery) RelDataType(org.apache.calcite.rel.type.RelDataType) Access(org.apache.druid.server.security.Access) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) NlsString(org.apache.calcite.util.NlsString) InlineDataSource(org.apache.druid.query.InlineDataSource) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DateTime(org.joda.time.DateTime) QueryToolChest(org.apache.druid.query.QueryToolChest) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) Ints(com.google.common.primitives.Ints) Object2IntMap(it.unimi.dsi.fastutil.objects.Object2IntMap) NullHandling(org.apache.druid.common.config.NullHandling) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) Calcites(org.apache.druid.sql.calcite.planner.Calcites) Evals(org.apache.druid.math.expr.Evals)
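
The maxNumericInFilters guard in runQuery relies on the planner expanding a numeric IN clause into an OrDimFilter whose fields are BoundDimFilters, one per value, which is why it counts BoundDimFilter instances rather than inspecting the SQL directly. The standalone sketch below mirrors just that counting step; it is not part of NativeQueryMaker, and the filter values, the limit of 2, and the helper name countBoundFilters are illustrative assumptions.

import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.UOE;
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.ordering.StringComparators;

class MaxNumericInFiltersSketch {
    // Same counting logic as the loop in runQuery: tally the BoundDimFilter fields of the OR.
    static int countBoundFilters(OrDimFilter orDimFilter) {
        int numBoundFilters = 0;
        for (DimFilter field : orDimFilter.getFields()) {
            numBoundFilters += field instanceof BoundDimFilter ? 1 : 0;
        }
        return numBoundFilters;
    }

    public static void main(String[] args) {
        // Roughly the shape a numeric IN over three values can take: one bound filter per value.
        OrDimFilter planned = new OrDimFilter(
            new BoundDimFilter("m1", "1", "1", false, false, null, null, StringComparators.NUMERIC),
            new BoundDimFilter("m1", "2", "2", false, false, null, null, StringComparators.NUMERIC),
            new BoundDimFilter("m1", "3", "3", false, false, null, null, StringComparators.NUMERIC));
        int limit = 2; // stands in for plannerContext.getPlannerConfig().getMaxNumericInFilters()
        if (countBoundFilters(planned) > limit) {
            throw new UOE(StringUtils.format("IN clause for [%s] exceeds the configured limit of [%s]", "m1", limit));
        }
    }
}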

Example 15 with OrDimFilter

Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.

From the class HashJoinSegmentStorageAdapterTest, method test_makeCursors_factToCountryLeftWithFilterOnJoinableUsingLookup.

@Test
public void test_makeCursors_factToCountryLeftWithFilterOnJoinableUsingLookup() {
    List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.LEFT));
    Filter filter = new OrDimFilter(
        new SelectorDimFilter(FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "k", "DE", null),
        new SelectorDimFilter(FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "v", "Norway", null)
    ).toFilter();
    JoinFilterPreAnalysis joinFilterPreAnalysis = makeDefaultConfigPreAnalysis(filter, joinableClauses, VirtualColumns.EMPTY);
    JoinTestHelper.verifyCursors(
        new HashJoinSegmentStorageAdapter(factSegment.asStorageAdapter(), joinableClauses, joinFilterPreAnalysis)
            .makeCursors(filter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null),
        ImmutableList.of("page", "countryIsoCode", FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "k", FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "v"),
        ImmutableList.of(
            new Object[] { "Diskussion:Sebastian Schulz", "DE", "DE", "Germany" },
            new Object[] { "Алиса в Зазеркалье", "NO", "NO", "Norway" }
        )
    );
}
Also used : JoinFilterPreAnalysis(org.apache.druid.segment.join.filter.JoinFilterPreAnalysis) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) SelectorFilter(org.apache.druid.segment.filter.SelectorFilter) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) ExpressionDimFilter(org.apache.druid.query.filter.ExpressionDimFilter) Filter(org.apache.druid.query.filter.Filter) Test(org.junit.Test)

Aggregations

OrDimFilter (org.apache.druid.query.filter.OrDimFilter): 34
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 25
Test (org.junit.Test): 23
AndDimFilter (org.apache.druid.query.filter.AndDimFilter): 18
DimFilter (org.apache.druid.query.filter.DimFilter): 16
BoundDimFilter (org.apache.druid.query.filter.BoundDimFilter): 15
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 12
ArrayList (java.util.ArrayList): 10
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 8
List (java.util.List): 7
InDimFilter (org.apache.druid.query.filter.InDimFilter): 6
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 5
NotDimFilter (org.apache.druid.query.filter.NotDimFilter): 5
TimeFormatExtractionFn (org.apache.druid.query.extraction.TimeFormatExtractionFn): 4
Filter (org.apache.druid.query.filter.Filter): 4
HashMap (java.util.HashMap): 3
ISE (org.apache.druid.java.util.common.ISE): 3
Result (org.apache.druid.query.Result): 3
ExtractionDimensionSpec (org.apache.druid.query.dimension.ExtractionDimensionSpec): 3
RegexDimFilter (org.apache.druid.query.filter.RegexDimFilter): 3
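
As a compact reference for the counts above, the sketch below wires together the classes that co-occur with OrDimFilter most often on this page: SelectorDimFilter and BoundDimFilter leaves combined under an OrDimFilter, nested inside an AndDimFilter. It is a minimal illustration, not taken from the Druid sources; the dimension names, values, and class name are made up.

import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.ordering.StringComparators;

class OrDimFilterReferenceSketch {
    public static void main(String[] args) {
        // (dim1 = 'a' OR 'from' <= dim1 <= 'to') AND dim2 = 'c'
        DimFilter filter = new AndDimFilter(
            new OrDimFilter(
                new SelectorDimFilter("dim1", "a", null),
                new BoundDimFilter("dim1", "from", "to", false, false, false, null, StringComparators.LEXICOGRAPHIC)),
            new SelectorDimFilter("dim2", "c", null));
        System.out.println(filter);
    }
}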