Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
From the class SearchQueryRunnerTest, method testSearchMultiAndFilter.
@Test
public void testSearchMultiAndFilter() {
List<SearchHit> expectedHits = new ArrayList<>();
expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
DimFilter filter = new AndDimFilter(
    new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
    new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null)
);
checkSearchQuery(
    Druids.newSearchQueryBuilder()
          .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
          .granularity(QueryRunnerTestHelper.ALL_GRAN)
          .filters(filter)
          .dimensions(QueryRunnerTestHelper.QUALITY_DIMENSION)
          .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
          .query("a")
          .build(),
    expectedHits
);
}
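AndDimFilter matches a row only when every child filter matches; it accepts either varargs (as above) or a List<DimFilter> (as in the later examples on this page). A minimal standalone sketch, assuming Druid's processing artifact is on the classpath; the class name and dimension literals are illustrative, not taken from the test:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

public class AndDimFilterJsonSketch
{
  public static void main(String[] args) throws Exception
  {
    // AND of two selector (equality) filters; a row must satisfy both.
    DimFilter filter = new AndDimFilter(
        new SelectorDimFilter("market", "spot", null),
        new SelectorDimFilter("quality", "automotive", null)
    );
    // Druid filters are Jackson-annotated, so this prints the native JSON form,
    // shaped like {"type":"and","fields":[{"type":"selector",...},{"type":"selector",...}]}
    ObjectMapper mapper = new DefaultObjectMapper();
    System.out.println(mapper.writeValueAsString(filter));
  }
}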
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testGroupByWithAllFiltersOnNullDimsWithExtractionFns.
@Test
public void testGroupByWithAllFiltersOnNullDimsWithExtractionFns() {
Map<String, String> extractionMap = new HashMap<>();
extractionMap.put("", "EMPTY");
extractionMap.put(null, "EMPTY");
MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
LookupExtractionFn extractionFn = new LookupExtractionFn(mapLookupExtractor, false, "EMPTY", true, true);
String jsFn = "function(x) { return(x === 'EMPTY') }";
List<DimFilter> superFilterList = new ArrayList<>();
superFilterList.add(new SelectorDimFilter("null_column", "EMPTY", extractionFn));
superFilterList.add(new InDimFilter("null_column", Arrays.asList("NOT-EMPTY", "FOOBAR", "EMPTY"), extractionFn));
superFilterList.add(
    new BoundDimFilter("null_column", "EMPTY", "EMPTY", false, false, true, extractionFn, StringComparators.ALPHANUMERIC)
);
superFilterList.add(new RegexDimFilter("null_column", "EMPTY", extractionFn));
superFilterList.add(new SearchQueryDimFilter("null_column", new ContainsSearchQuerySpec("EMPTY", true), extractionFn));
superFilterList.add(new JavaScriptDimFilter("null_column", jsFn, extractionFn, JavaScriptConfig.getEnabledInstance()));
DimFilter superFilter = new AndDimFilter(superFilterList);
GroupByQuery query = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(new DefaultDimensionSpec("null_column", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .setDimFilter(superFilter)
    .build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(query, "2011-04-01", "alias", null, "rows", 13L, "idx", 6619L),
    makeRow(query, "2011-04-02", "alias", null, "rows", 13L, "idx", 5827L)
);
Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "extraction");
}
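The point of this test is that "null_column" does not exist in the data, yet every filter in the AND list matches: the lookup maps both null and the empty string to "EMPTY", and replaceMissingValueWith fills in "EMPTY" for everything else. A standalone sketch of that behavior (class name and sample inputs are illustrative):

import java.util.HashMap;
import java.util.Map;
import org.apache.druid.query.extraction.MapLookupExtractor;
import org.apache.druid.query.lookup.LookupExtractionFn;

public class NullHandlingLookupSketch
{
  public static void main(String[] args)
  {
    Map<String, String> extractionMap = new HashMap<>();
    extractionMap.put("", "EMPTY");
    extractionMap.put(null, "EMPTY");
    LookupExtractionFn fn = new LookupExtractionFn(
        new MapLookupExtractor(extractionMap, false),
        false,    // retainMissingValue: do not pass unknown values through
        "EMPTY",  // replaceMissingValueWith: unknown values also become "EMPTY"
        true,     // injective
        true      // optimize
    );
    System.out.println(fn.apply(null));       // EMPTY
    System.out.println(fn.apply(""));         // EMPTY
    System.out.println(fn.apply("anything")); // EMPTY (replaced, not retained)
  }
}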
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testBySegmentResultsWithAllFiltersWithExtractionFns.
@Test
public void testBySegmentResultsWithAllFiltersWithExtractionFns() {
String extractionJsFn = "function(str) { return 'super-' + str; }";
String jsFn = "function(x) { return(x === 'super-mezzanine') }";
ExtractionFn extractionFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getEnabledInstance());
List<DimFilter> superFilterList = new ArrayList<>();
superFilterList.add(new SelectorDimFilter("quality", "super-mezzanine", extractionFn));
superFilterList.add(
    new InDimFilter("quality", Arrays.asList("not-super-mezzanine", "FOOBAR", "super-mezzanine"), extractionFn)
);
superFilterList.add(
    new BoundDimFilter("quality", "super-mezzanine", "super-mezzanine", false, false, true, extractionFn, StringComparators.ALPHANUMERIC)
);
superFilterList.add(new RegexDimFilter("quality", "super-mezzanine", extractionFn));
superFilterList.add(new SearchQueryDimFilter("quality", new ContainsSearchQuerySpec("super-mezzanine", true), extractionFn));
superFilterList.add(new JavaScriptDimFilter("quality", jsFn, extractionFn, JavaScriptConfig.getEnabledInstance()));
DimFilter superFilter = new AndDimFilter(superFilterList);
GroupByQuery.Builder builder = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
    .setDimFilter(superFilter)
    .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
final GroupByQuery fullQuery = builder.build();
int segmentCount = 32;
Result<BySegmentResultValue> singleSegmentResult = new Result<>(
    DateTimes.of("2011-01-12T00:00:00.000Z"),
    new BySegmentResultValueClass<>(
        Collections.singletonList(makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L)),
        QueryRunnerTestHelper.SEGMENT_ID.toString(),
        Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
    )
);
List<Result> bySegmentResults = new ArrayList<>();
for (int i = 0; i < segmentCount; i++) {
bySegmentResults.add(singleSegmentResult);
}
QueryToolChest toolChest = factory.getToolchest();
List<QueryRunner<ResultRow>> singleSegmentRunners = new ArrayList<>();
for (int i = 0; i < segmentCount; i++) {
singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
}
ExecutorService exec = Executors.newCachedThreadPool();
// Run the per-segment runners on the shared pool; shutdownNow() below releases it.
QueryRunner theRunner = toolChest.postMergeQueryDecoration(
    new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
        toolChest
    )
);
TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery)), "bySegment-filter");
exec.shutdownNow();
}
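What makes this test distinctive is the QueryContexts.BY_SEGMENT_KEY flag set on the builder: with "bySegment" enabled, results come back wrapped per segment instead of fully merged, which is why the expected output is 32 copies of one single-segment Result. A hedged sketch of enabling the flag on an arbitrary query; the data source, interval, and aggregator here are illustrative:

import com.google.common.collect.ImmutableMap;
import java.util.Collections;
import org.apache.druid.query.Druids;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.timeseries.TimeseriesQuery;

public class BySegmentContextSketch
{
  public static void main(String[] args)
  {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("testing")
        .intervals("2011-04-02/2011-04-04")
        .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
        // Ask for per-segment results: each entry carries its segment id and
        // interval (BySegmentResultValueClass) and is left unmerged.
        .context(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true))
        .build();
    System.out.println(query.getContext());
  }
}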
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testDimFilterHavingSpec.
@Test
public void testDimFilterHavingSpec() {
final DimFilterHavingSpec havingSpec = new DimFilterHavingSpec(
    new AndDimFilter(ImmutableList.of(
        new OrDimFilter(ImmutableList.of(
            new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
            new SelectorDimFilter("idx", "217", null)
        )),
        new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
    )),
    null
);
GroupByQuery.Builder builder = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(
        QueryRunnerTestHelper.ROWS_COUNT,
        new LongSumAggregatorFactory("idx", "index"),
        QueryRunnerTestHelper.INDEX_LONG_MIN,
        QueryRunnerTestHelper.INDEX_LONG_MAX,
        QueryRunnerTestHelper.INDEX_DOUBLE_MIN,
        QueryRunnerTestHelper.INDEX_DOUBLE_MAX,
        QueryRunnerTestHelper.INDEX_FLOAT_MIN,
        QueryRunnerTestHelper.INDEX_FLOAT_MAX
    )
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
    .setHavingSpec(havingSpec);
final GroupByQuery fullQuery = builder.build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(
        fullQuery, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L,
        QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 105L,
        QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 112L,
        QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 105.735462D,
        QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 112.987027D,
        QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 105.73546F,
        QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 112.98703F
    ),
    makeRow(
        fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L,
        QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 107L,
        QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1193L,
        QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 107.047773D,
        QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1193.556278D,
        QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 107.047775F,
        QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1193.5563F
    ),
    makeRow(
        fullQuery, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L,
        QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 122L,
        QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1321L,
        QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 122.141707D,
        QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1321.375057D,
        QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 122.14171F,
        QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1321.375F
    )
);
TestHelper.assertExpectedObjects(expectedResults, GroupByQueryRunnerTestHelper.runQuery(factory, runner, fullQuery), "dimfilter-havingspec");
}
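DimFilterHavingSpec is the HAVING-clause analogue for groupBy: it applies an ordinary DimFilter to the output rows of the query, so aggregator names such as "rows" and "idx", and the "__time" column, act as filterable columns. A simplified standalone variant of the spec above (class and method names are illustrative, and the OR/__time clauses are dropped for brevity):

import com.google.common.collect.ImmutableList;
import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.groupby.having.DimFilterHavingSpec;
import org.apache.druid.query.groupby.having.HavingSpec;
import org.apache.druid.query.ordering.StringComparators;

public class HavingSpecSketch
{
  public static HavingSpec rowsGreaterThanTwoAndIdx217()
  {
    // Keep only result rows with rows > 2 AND idx == 217; the NUMERIC
    // comparator makes the bound compare "rows" as a number, not a string.
    return new DimFilterHavingSpec(
        new AndDimFilter(ImmutableList.of(
            new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
            new SelectorDimFilter("idx", "217", null)
        )),
        null // finalize: null falls back to the query-level default
    );
  }
}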
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
From the class CachingClusteredClientTest, method testSingleDimensionPruning.
@Test
public void testSingleDimensionPruning() {
DimFilter filter = new AndDimFilter(
    new OrDimFilter(
        new SelectorDimFilter("dim1", "a", null),
        new BoundDimFilter("dim1", "from", "to", false, false, false, null, StringComparators.LEXICOGRAPHIC)
    ),
    new AndDimFilter(
        new InDimFilter("dim2", Arrays.asList("a", "c", "e", "g"), null),
        new BoundDimFilter("dim2", "aaa", "hi", false, false, false, null, StringComparators.LEXICOGRAPHIC),
        new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC)
    )
);
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .filters(filter)
    .granularity(GRANULARITY)
    .intervals(SEG_SPEC)
    .context(CONTEXT)
    .intervals("2011-01-05/2011-01-10")
    .aggregators(RENAMED_AGGS)
    .postAggregators(RENAMED_POST_AGGS);
TimeseriesQuery query = builder.randomQueryId().build();
final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
final DruidServer lastServer = servers[random.nextInt(servers.length)];
ServerSelector selector1 = makeMockSingleDimensionSelector(lastServer, "dim1", null, "b", 0);
ServerSelector selector2 = makeMockSingleDimensionSelector(lastServer, "dim1", "e", "f", 1);
ServerSelector selector3 = makeMockSingleDimensionSelector(lastServer, "dim1", "hi", "zzz", 2);
ServerSelector selector4 = makeMockSingleDimensionSelector(lastServer, "dim2", "a", "e", 0);
ServerSelector selector5 = makeMockSingleDimensionSelector(lastServer, "dim2", null, null, 1);
ServerSelector selector6 = makeMockSingleDimensionSelector(lastServer, "other", "b", null, 0);
timeline.add(interval1, "v", new NumberedPartitionChunk<>(0, 3, selector1));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(1, 3, selector2));
timeline.add(interval1, "v", new NumberedPartitionChunk<>(2, 3, selector3));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(0, 2, selector4));
timeline.add(interval2, "v", new NumberedPartitionChunk<>(1, 2, selector5));
timeline.add(interval3, "v", new NumberedPartitionChunk<>(0, 1, selector6));
final Capture<QueryPlus> capture = Capture.newInstance();
final Capture<ResponseContext> contextCap = Capture.newInstance();
QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
EasyMock.replay(serverView);
EasyMock.replay(mockRunner);
List<SegmentDescriptor> descriptors = new ArrayList<>();
descriptors.add(new SegmentDescriptor(interval1, "v", 0));
descriptors.add(new SegmentDescriptor(interval1, "v", 2));
descriptors.add(new SegmentDescriptor(interval2, "v", 1));
descriptors.add(new SegmentDescriptor(interval3, "v", 0));
MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(descriptors);
runner.run(QueryPlus.wrap(query)).toList();
Assert.assertEquals(expected, ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec());
}
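The broker prunes range-partitioned segments whose dimension ranges cannot intersect the filter: here selector2 (dim1 in ["e", "f")) is dropped because the filter needs dim1 == "a" or dim1 in ["from", "to"], while selector5's null bounds mean an unbounded range that can never be pruned. Druid derives the feasible domain per dimension via DimFilter.getDimensionRangeSet(); a hedged, self-contained illustration of the same interval arithmetic using Guava (class name and literals are illustrative):

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class DimensionPruningSketch
{
  public static void main(String[] args)
  {
    // Feasible values of dim1 under the filter: {"a"} union ["from", "to"].
    RangeSet<String> filterDomain = TreeRangeSet.create();
    filterDomain.add(Range.singleton("a"));
    filterDomain.add(Range.closed("from", "to"));

    // selector2's partition covers dim1 in ["e", "f"); lexicographically
    // "f" < "from", so the intersection is empty and the chunk is pruned.
    Range<String> chunkRange = Range.closedOpen("e", "f");
    boolean prunable = filterDomain.subRangeSet(chunkRange).isEmpty();
    System.out.println(prunable); // true
  }
}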