Use of org.apache.druid.query.filter.BoundDimFilter in project druid by druid-io.
From the class FloatAndDoubleFilteringTest, method doTestFloatColumnFiltering:
private void doTestFloatColumnFiltering(final String columnName) {
  assertFilterMatches(new SelectorDimFilter(columnName, "3", null), ImmutableList.of("3"));
  assertFilterMatches(new SelectorDimFilter(columnName, "3.0", null), ImmutableList.of("3"));
  assertFilterMatches(
      new BoundDimFilter(columnName, "2", "5", false, false, null, null, StringComparators.NUMERIC),
      ImmutableList.of("2", "3", "4", "5")
  );
  assertFilterMatches(
      new BoundDimFilter(columnName, "2.0", "5.0", false, false, null, null, StringComparators.NUMERIC),
      ImmutableList.of("2", "3", "4", "5")
  );
  assertFilterMatches(
      new BoundDimFilter(columnName, "1", "4", true, true, null, null, StringComparators.NUMERIC),
      ImmutableList.of("2", "3")
  );
  assertFilterMatches(
      new BoundDimFilter(columnName, "1.0", "4.0", true, true, null, null, StringComparators.NUMERIC),
      ImmutableList.of("2", "3")
  );
  assertFilterMatches(
      new InDimFilter(columnName, Arrays.asList("2", "4", "8"), null),
      ImmutableList.of("2", "4")
  );
  assertFilterMatches(
      new InDimFilter(columnName, Arrays.asList("2.0", "4.0", "8.0"), null),
      ImmutableList.of("2", "4")
  );

  // cross the hashing threshold to test hashset implementation, filter on even values
  List<String> infilterValues = new ArrayList<>(NUM_FILTER_VALUES);
  for (int i = 0; i < NUM_FILTER_VALUES; i++) {
    infilterValues.add(String.valueOf(i * 2));
  }
  assertFilterMatches(new InDimFilter(columnName, infilterValues, null), ImmutableList.of("2", "4", "6"));

  String jsFn = "function(x) { return(x === 3 || x === 5) }";
  assertFilterMatchesSkipVectorize(
      new JavaScriptDimFilter(columnName, jsFn, null, JavaScriptConfig.getEnabledInstance()),
      ImmutableList.of("3", "5")
  );
  String jsFn2 = "function(x) { return(x === 3.0 || x === 5.0) }";
  assertFilterMatchesSkipVectorize(
      new JavaScriptDimFilter(columnName, jsFn2, null, JavaScriptConfig.getEnabledInstance()),
      ImmutableList.of("3", "5")
  );
  assertFilterMatches(new RegexDimFilter(columnName, "4", null), ImmutableList.of("4"));
  assertFilterMatches(new RegexDimFilter(columnName, "4.0", null), ImmutableList.of("4"));
  assertFilterMatches(
      new SearchQueryDimFilter(columnName, new ContainsSearchQuerySpec("2", true), null),
      ImmutableList.of("2")
  );
}
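For reference, the eight-argument BoundDimFilter constructor used throughout these snippets takes, in order: the dimension, the lower and upper bounds, the two strictness flags, the legacy alphaNumeric flag, an optional extraction function, and the string comparator. A minimal annotated sketch of the first bound filter above (the column name "myFloatColumn" is hypothetical):

// Matches rows where 2 <= myFloatColumn <= 5 under numeric ordering.
BoundDimFilter bound = new BoundDimFilter(
    "myFloatColumn",            // dimension to filter on (hypothetical name)
    "2",                        // lower bound; null would leave it unbounded below
    "5",                        // upper bound; null would leave it unbounded above
    false,                      // lowerStrict: true would exclude 2 itself
    false,                      // upperStrict: true would exclude 5 itself
    null,                       // legacy alphaNumeric flag, superseded by the ordering argument
    null,                       // extractionFn: applied to each value before comparison
    StringComparators.NUMERIC   // ordering used to compare values against the bounds
);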
Use of org.apache.druid.query.filter.BoundDimFilter in project druid by druid-io.
From the class BoundFilterTest, method testNumericMatchTooStrict:
@Test
public void testNumericMatchTooStrict() {
  assertFilterMatches(
      new BoundDimFilter("dim1", "2", "2", true, false, false, null, StringComparators.NUMERIC),
      ImmutableList.of()
  );
  assertFilterMatches(
      new BoundDimFilter("dim1", "2", "2", true, true, false, null, StringComparators.NUMERIC),
      ImmutableList.of()
  );
  assertFilterMatches(
      new BoundDimFilter("dim1", "2", "2", false, true, false, null, StringComparators.NUMERIC),
      ImmutableList.of()
  );
}
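All three variants pin both bounds to "2" but mark at least one endpoint strict, so the admissible intervals — (2, 2], (2, 2), and [2, 2) — are all empty and no row can match. Dropping both strict flags yields the only satisfiable variant; a minimal sketch, assuming the same test fixture:

// [2, 2] with both ends inclusive: exact numeric equality with 2.
BoundDimFilter exactlyTwo =
    new BoundDimFilter("dim1", "2", "2", false, false, false, null, StringComparators.NUMERIC);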
Use of org.apache.druid.query.filter.BoundDimFilter in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testGroupByWithHavingSpecOnLongAndFloat:
@Test
public void testGroupByWithHavingSpecOnLongAndFloat() {
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
  }

  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(
          new DefaultDimensionSpec("market", "alias"),
          new DefaultDimensionSpec("qualityLong", "ql_alias", ColumnType.LONG),
          new DefaultDimensionSpec("__time", "time_alias", ColumnType.LONG),
          new DefaultDimensionSpec("index", "index_alias", ColumnType.FLOAT)
      )
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .setHavingSpec(
          new DimFilterHavingSpec(
              new AndDimFilter(
                  Lists.newArrayList(
                      new SelectorDimFilter("ql_alias", "1400", null),
                      new SelectorDimFilter("time_alias", "1301616000000", null),
                      new BoundDimFilter("index_alias", "1310.0", "1320.0", true, true, null, null, StringComparators.NUMERIC)
                  )
              ),
              null
          )
      )
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();

  List<ResultRow> expectedResults = Collections.singletonList(
      makeRow(
          query,
          "2011-04-01",
          "alias", "total_market",
          "time_alias", 1301616000000L,
          "index_alias", 1314.8397,
          "ql_alias", 1400L,
          "rows", 1L
      )
  );

  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "havingspec-long-float");
}
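Note that the having filter operates on the output names of the grouping (ql_alias, time_alias, index_alias), not on the underlying columns, and the bound (1310.0, 1320.0) is exclusive on both ends, so it admits the single row with index_alias 1314.8397. A minimal sketch of that having spec in isolation, assuming the same aliases:

// Exclusive numeric bound on the FLOAT output column: 1310.0 < index_alias < 1320.0.
// The second DimFilterHavingSpec argument is null here, as in the test above.
DimFilterHavingSpec having = new DimFilterHavingSpec(
    new BoundDimFilter("index_alias", "1310.0", "1320.0", true, true, null, null, StringComparators.NUMERIC),
    null
);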
Use of org.apache.druid.query.filter.BoundDimFilter in project druid by druid-io.
From the class GroupByQueryQueryToolChestTest, method testResultLevelCacheKeyWithHavingDimFilterHavingSpec:
@Test
public void testResultLevelCacheKeyWithHavingDimFilterHavingSpec() {
  final DimFilterHavingSpec havingSpec1 = new DimFilterHavingSpec(
      new AndDimFilter(
          ImmutableList.of(
              new OrDimFilter(
                  ImmutableList.of(
                      new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                      new SelectorDimFilter("idx", "217", null)
                  )
              ),
              new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
          )
      ),
      null
  );
  final DimFilterHavingSpec havingSpec2 = new DimFilterHavingSpec(
      new AndDimFilter(
          ImmutableList.of(
              new OrDimFilter(
                  ImmutableList.of(
                      new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                      new SelectorDimFilter("idx", "317", null)
                  )
              ),
              new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
          )
      ),
      null
  );
  final GroupByQuery query1 = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setPostAggregatorSpecs(ImmutableList.of(new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimitSpec(
          new DefaultLimitSpec(
              ImmutableList.of(new OrderByColumnSpec("post", OrderByColumnSpec.Direction.DESCENDING)),
              Integer.MAX_VALUE
          )
      )
      .setHavingSpec(havingSpec1)
      .build();
  final GroupByQuery query2 = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setPostAggregatorSpecs(ImmutableList.of(new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimitSpec(
          new DefaultLimitSpec(
              ImmutableList.of(new OrderByColumnSpec("post", OrderByColumnSpec.Direction.DESCENDING)),
              Integer.MAX_VALUE
          )
      )
      .setHavingSpec(havingSpec2)
      .build();
  final CacheStrategy<ResultRow, Object, GroupByQuery> strategy1 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query1);
  final CacheStrategy<ResultRow, Object, GroupByQuery> strategy2 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query2);
  Assert.assertTrue(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
  Assert.assertFalse(Arrays.equals(strategy1.computeResultLevelCacheKey(query1), strategy2.computeResultLevelCacheKey(query2)));
}
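The two assertions capture the design: a having spec is applied while merging results, after per-segment computation, so it feeds the result-level cache key but not the per-segment key. The same check generalizes to any pair of queries that differ only in post-merge decoration; a hedged sketch using a hypothetical helper built from the calls above:

// Hypothetical helper: verify that two queries share a per-segment cache key
// but diverge at the result level.
static void assertOnlyResultLevelKeyDiffers(GroupByQuery a, GroupByQuery b) {
  final CacheStrategy<ResultRow, Object, GroupByQuery> sa = new GroupByQueryQueryToolChest(null).getCacheStrategy(a);
  final CacheStrategy<ResultRow, Object, GroupByQuery> sb = new GroupByQueryQueryToolChest(null).getCacheStrategy(b);
  Assert.assertArrayEquals(sa.computeCacheKey(a), sb.computeCacheKey(b));
  Assert.assertFalse(Arrays.equals(sa.computeResultLevelCacheKey(a), sb.computeResultLevelCacheKey(b)));
}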
Use of org.apache.druid.query.filter.BoundDimFilter in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testGroupByWithAllFiltersOnNullDimsWithExtractionFns:
@Test
public void testGroupByWithAllFiltersOnNullDimsWithExtractionFns() {
  Map<String, String> extractionMap = new HashMap<>();
  extractionMap.put("", "EMPTY");
  extractionMap.put(null, "EMPTY");
  MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
  LookupExtractionFn extractionFn = new LookupExtractionFn(mapLookupExtractor, false, "EMPTY", true, true);

  String jsFn = "function(x) { return(x === 'EMPTY') }";

  List<DimFilter> superFilterList = new ArrayList<>();
  superFilterList.add(new SelectorDimFilter("null_column", "EMPTY", extractionFn));
  superFilterList.add(new InDimFilter("null_column", Arrays.asList("NOT-EMPTY", "FOOBAR", "EMPTY"), extractionFn));
  superFilterList.add(
      new BoundDimFilter("null_column", "EMPTY", "EMPTY", false, false, true, extractionFn, StringComparators.ALPHANUMERIC)
  );
  superFilterList.add(new RegexDimFilter("null_column", "EMPTY", extractionFn));
  superFilterList.add(new SearchQueryDimFilter("null_column", new ContainsSearchQuerySpec("EMPTY", true), extractionFn));
  superFilterList.add(new JavaScriptDimFilter("null_column", jsFn, extractionFn, JavaScriptConfig.getEnabledInstance()));
  DimFilter superFilter = new AndDimFilter(superFilterList);

  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("null_column", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setDimFilter(superFilter)
      .build();

  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(query, "2011-04-01", "alias", null, "rows", 13L, "idx", 6619L),
      makeRow(query, "2011-04-02", "alias", null, "rows", 13L, "idx", 5827L)
  );

  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "extraction");
}
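All six filters agree here because the extraction function normalizes the column before any comparison: the lookup maps "" (and null) to "EMPTY", and with retainMissingValue = false and replaceMissingValueWith = "EMPTY", every unmapped value also becomes "EMPTY". Even the bound filter's degenerate range ["EMPTY", "EMPTY"] therefore matches the null rows. A minimal sketch of the extraction step in isolation, assuming the same construction as the test (the trailing booleans are passed through exactly as above):

Map<String, String> map = new HashMap<>();
map.put("", "EMPTY");
map.put(null, "EMPTY");
MapLookupExtractor lookup = new MapLookupExtractor(map, false);
// retainMissingValue = false with replaceMissingValueWith = "EMPTY":
// any value absent from the map is rewritten to "EMPTY".
LookupExtractionFn fn = new LookupExtractionFn(lookup, false, "EMPTY", true, true);
fn.apply(null);   // -> "EMPTY"
fn.apply("");     // -> "EMPTY"
fn.apply("foo");  // -> "EMPTY" (not in the map, so the replacement applies)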