Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
In the class TimeseriesQueryRunnerTest, the method testTimeseriesWithMultiValueDimFilterAndOr2:
@Test
public void testTimeseriesWithMultiValueDimFilterAndOr2() {
  AndDimFilter andDimFilter = new AndDimFilter(
      new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
      new OrDimFilter(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", "b")
  );
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.DAY_GRAN)
      .filters(andDimFilter)
      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .aggregators(aggregatorFactoryList)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .descending(descending)
      .context(makeContext())
      .build();
  // The same query against the single-value "quality" dimension; the test asserts
  // that filtering the multi-value "placementish" dimension yields identical results.
  AndDimFilter andDimFilter2 = new AndDimFilter(
      new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
      new OrDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", "business")
  );
  TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.DAY_GRAN)
      .filters(andDimFilter2)
      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .aggregators(aggregatorFactoryList)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .descending(descending)
      .context(makeContext())
      .build();
  Iterable<Result<TimeseriesResultValue>> expectedResults = runner.run(QueryPlus.wrap(query2)).toList();
  Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query)).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults);
}
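Both queries use OrDimFilter's string convenience constructor, which takes a dimension name plus two or more values. As a minimal sketch (assuming the constructor simply expands to an OR over selector filters, consistent with how it is called above), the shorthand and the explicit form are interchangeable:

// Shorthand form, as used in the test above.
DimFilter shorthand = new OrDimFilter(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", "b");
// Presumed explicit equivalent (an assumption about the expansion, not verbatim Druid code).
DimFilter explicit = new OrDimFilter(Arrays.asList(
    new SelectorDimFilter(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", null),
    new SelectorDimFilter(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "b", null)));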
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
In the class TimeseriesQueryRunnerTest, the method testTimeseriesWithOtherMultiDimFilter:
@Test
public void testTimeseriesWithOtherMultiDimFilter() {
  AndDimFilter andDimFilter = new AndDimFilter(
      new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
      new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "business", null)
  );
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.DAY_GRAN)
      .filters(andDimFilter)
      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .aggregators(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .descending(descending)
      .context(makeContext())
      .build();
  // Expected values are routed through Float so they carry the same float
  // precision as the underlying index metric.
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          DateTimes.of("2011-04-01"),
          new TimeseriesResultValue(ImmutableMap.of(
              "rows", 1L,
              "index", new Float(118.570340).doubleValue(),
              "addRowsIndexConstant", new Float(120.570340).doubleValue(),
              "uniques", QueryRunnerTestHelper.UNIQUES_1))),
      new Result<>(
          DateTimes.of("2011-04-02"),
          new TimeseriesResultValue(ImmutableMap.of(
              "rows", 1L,
              "index", new Float(112.987027).doubleValue(),
              "addRowsIndexConstant", new Float(114.987027).doubleValue(),
              "uniques", QueryRunnerTestHelper.UNIQUES_1))));
  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
  assertExpectedResults(expectedResults, results);
}
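AndDimFilter appears with two constructor shapes on this page: a varargs form (as in the query above) and a List form (as in the FilterPartitionTest methods below). A quick sketch of the equivalence, using placeholder dimensions:

// Both construct the same conjunction of child filters.
AndDimFilter viaVarargs = new AndDimFilter(
    new SelectorDimFilter("dim0", "a", null),
    new SelectorDimFilter("dim1", "b", null));
AndDimFilter viaList = new AndDimFilter(Arrays.asList(
    new SelectorDimFilter("dim0", "a", null),
    new SelectorDimFilter("dim1", "b", null)));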
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
In the class FilterPartitionTest, the method testBasicPreAndPostFilterWithNulls:
@Test
public void testBasicPreAndPostFilterWithNulls() {
  if (NullHandling.replaceWithDefault()) {
    // In default-value mode, empty strings are read back as nulls, so the
    // null selector on dim1 matches row "0".
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim2", "a", null),
            new NoBitmapSelectorDimFilter("dim1", null, null))),
        ImmutableList.of("0"));
  } else {
    // In SQL-compatible mode, the empty string is distinct from null, so the
    // same filter matches nothing.
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim2", "a", null),
            new NoBitmapSelectorDimFilter("dim1", null, null))),
        ImmutableList.of());
  }
  assertFilterMatches(
      new AndDimFilter(Arrays.asList(
          new SelectorDimFilter("dim1", "10", null),
          new NoBitmapSelectorDimFilter("dim2", null, null))),
      ImmutableList.of("1"));
  assertFilterMatches(
      new AndDimFilter(Arrays.asList(
          new SelectorDimFilter("dim1", "1", null),
          new NoBitmapSelectorDimFilter("dim2", "foo", null))),
      ImmutableList.of("9"));
  assertFilterMatches(
      new AndDimFilter(Arrays.asList(
          new SelectorDimFilter("dim1", "HELLO", null),
          new NoBitmapSelectorDimFilter("dim2", "bar", null))),
      ImmutableList.of());
  assertFilterMatches(
      new AndDimFilter(Arrays.asList(
          new NoBitmapSelectorDimFilter("dim2", "bar", null),
          new SelectorDimFilter("dim1", "NOT_A_VALUE", null))),
      ImmutableList.of());
  if (NullHandling.replaceWithDefault()) {
    // Here the extraction function receives null for row "0"'s empty dim1,
    // producing "super-null".
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
            new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN))),
        ImmutableList.of("0"));
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
            new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN))),
        ImmutableList.of("2"));
  } else {
    // In SQL-compatible mode the extraction function sees the empty string
    // (yielding "super-") for empty values, and null (yielding "super-null")
    // only for genuinely missing values.
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
            new NoBitmapSelectorDimFilter("dim1", "super-", JS_EXTRACTION_FN))),
        ImmutableList.of("0"));
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
            new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN))),
        ImmutableList.of());
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
            new NoBitmapSelectorDimFilter("dim2", "super-", JS_EXTRACTION_FN))),
        ImmutableList.of("2"));
    assertFilterMatches(
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
            new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN))),
        ImmutableList.of());
  }
  assertFilterMatches(
      new AndDimFilter(Arrays.asList(
          new SelectorDimFilter("dim1", "super-10", JS_EXTRACTION_FN),
          new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN))),
      ImmutableList.of("1"));
  assertFilterMatches(
      new AndDimFilter(Arrays.asList(
          new SelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN),
          new NoBitmapSelectorDimFilter("dim2", "super-foo", JS_EXTRACTION_FN))),
      ImmutableList.of("9"));
  assertFilterMatches(
      new AndDimFilter(Arrays.asList(
          new SelectorDimFilter("dim1", "super-HELLO", JS_EXTRACTION_FN),
          new NoBitmapSelectorDimFilter("dim2", "super-bar", JS_EXTRACTION_FN))),
      ImmutableList.of());
}
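JS_EXTRACTION_FN is defined in the shared filter-test base class rather than shown here. Judging from the "super-" prefix in the expected values, it is presumably along these lines (an assumption, not the verbatim definition):

// Assumed shape of JS_EXTRACTION_FN; the exact definition lives elsewhere in the test code.
ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(
    "function(str) { return 'super-' + str; }",  // null concatenates to "super-null"
    false,                                       // not injective
    JavaScriptConfig.getEnabledInstance()
);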
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
In the class FilterPartitionTest, the method testDistributeOrCNF:
@Test
public void testDistributeOrCNF() {
  DimFilter dimFilter1 = new OrDimFilter(Arrays.asList(
      new SelectorDimFilter("dim0", "6", null),
      new AndDimFilter(Arrays.asList(
          new NoBitmapSelectorDimFilter("dim1", "abdef", null),
          new SelectorDimFilter("dim2", "c", null)))));
  Filter filter1 = dimFilter1.toFilter();
  Filter filter1CNF = Filters.toCnf(filter1);
  // Distributing the OR over the inner AND yields a conjunction of two disjunctions.
  Assert.assertEquals(AndFilter.class, filter1CNF.getClass());
  Assert.assertEquals(2, ((AndFilter) filter1CNF).getFilters().size());
  assertFilterMatches(dimFilter1, ImmutableList.of("4", "6"));
  DimFilter dimFilter2 = new OrDimFilter(Arrays.asList(
      new SelectorDimFilter("dim0", "2", null),
      new SelectorDimFilter("dim0", "3", null),
      new AndDimFilter(Arrays.asList(
          new NoBitmapSelectorDimFilter("dim1", "HELLO", null),
          new SelectorDimFilter("dim2", "foo", null)))));
  assertFilterMatches(dimFilter2, ImmutableList.of("2", "3", "7"));
  DimFilter dimFilter3 = new OrDimFilter(Arrays.asList(
      dimFilter1,
      dimFilter2,
      new AndDimFilter(Arrays.asList(
          new NoBitmapSelectorDimFilter("dim1", "1", null),
          new SelectorDimFilter("dim2", "foo", null)))));
  assertFilterMatches(dimFilter3, ImmutableList.of("2", "3", "4", "6", "7", "9"));
}
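The two assertions on filter1CNF encode the distributive law: A OR (B AND C) rewrites to (A OR B) AND (A OR C). Assuming Filters.toCnf performs exactly that distribution here, the CNF of dimFilter1 would have the following shape (a sketch of the expected structure, not output captured from Druid):

// A = dim0 == "6", B = dim1 == "abdef" (no bitmap), C = dim2 == "c"
DimFilter expectedCnfShape = new AndDimFilter(Arrays.asList(
    new OrDimFilter(Arrays.asList(              // A OR B
        new SelectorDimFilter("dim0", "6", null),
        new NoBitmapSelectorDimFilter("dim1", "abdef", null))),
    new OrDimFilter(Arrays.asList(              // A OR C
        new SelectorDimFilter("dim0", "6", null),
        new SelectorDimFilter("dim2", "c", null)))));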
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
In the class CachingClusteredClientTest, the method testTimeSeriesWithFilter:
@Test
public void testTimeSeriesWithFilter() {
  DimFilter filter = new AndDimFilter(
      new OrDimFilter(
          new SelectorDimFilter("dim0", "1", null),
          new BoundDimFilter("dim0", "222", "333", false, false, false, null, StringComparators.LEXICOGRAPHIC)),
      new AndDimFilter(
          new InDimFilter("dim1", Arrays.asList("0", "1", "2", "3", "4"), null),
          new BoundDimFilter("dim1", "0", "3", false, true, false, null, StringComparators.LEXICOGRAPHIC),
          new BoundDimFilter("dim1", "1", "9999", true, false, false, null, StringComparators.LEXICOGRAPHIC)));
  final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
      .dataSource(DATA_SOURCE)
      .intervals(SEG_SPEC)
      .filters(filter)
      .granularity(GRANULARITY)
      .aggregators(AGGS)
      .postAggregators(POST_AGGS)
      .context(CONTEXT);
  QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
  /*
  For dim0 (2011-01-01/2011-01-05), the combined range is {[1,1], [222,333]}, so segments [-inf,1], [1,2], [2,3],
  and [3,4] are needed.
  For dim1 (2011-01-06/2011-01-10), the two bound filters combine to [0,3) intersect (1,9999] = (1,3); intersecting
  that with the IN filter's values {0,1,2,3,4} leaves {[2,2]}, so only segments [1,2] and [2,3] are needed.
  */
  List<Iterable<Result<TimeseriesResultValue>>> expectedResult = Arrays.asList(
      makeTimeResults(
          DateTimes.of("2011-01-01"), 50, 5000,
          DateTimes.of("2011-01-02"), 10, 1252,
          DateTimes.of("2011-01-03"), 20, 6213,
          DateTimes.of("2011-01-04"), 30, 743),
      makeTimeResults(
          DateTimes.of("2011-01-07"), 60, 6020,
          DateTimes.of("2011-01-08"), 70, 250));
  testQueryCachingWithFilter(
      runner,
      3,
      builder.randomQueryId().build(),
      expectedResult,
      Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000),
      Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-02"), 10, 1252),
      Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-03"), 20, 6213),
      Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-04"), 30, 743),
      Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-05"), 40, 6000),
      Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-06"), 50, 425),
      Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020),
      Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-08"), 70, 250),
      Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-09"), 23, 85312),
      Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-10"), 100, 512));
}
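The bound-filter intersection described in the comment can be checked independently with Guava's com.google.common.collect.Range (illustrative only; this is not how Druid's caching layer prunes segments):

// BoundDimFilter("dim1", "0", "3", false, true, ...)   -> [0, 3)
Range<String> lower = Range.closedOpen("0", "3");
// BoundDimFilter("dim1", "1", "9999", true, false, ...) -> (1, 9999]
Range<String> upper = Range.openClosed("1", "9999");
Range<String> combined = lower.intersection(upper);  // (1, 3), compared lexicographically
// Of the IN filter's values {"0","1","2","3","4"}, only "2" survives:
boolean only2 = combined.contains("2") && !combined.contains("1") && !combined.contains("3");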