Example use of io.druid.query.filter.DimFilter in project druid (druid-io):
class FilterPartitionTest, method testDistributeOrCNFExtractionFn.
@Test
public void testDistributeOrCNFExtractionFn() {
    // OR(selector, AND(selector, selector)) with extraction fns: distributing the OR
    // during CNF conversion should yield an AND of exactly two OR clauses.
    DimFilter dimFilter1 = new OrDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim0", "super-6", JS_EXTRACTION_FN),
        new AndDimFilter(Arrays.<DimFilter>asList(
            new NoBitmapSelectorDimFilter("dim1", "super-def", JS_EXTRACTION_FN),
            new SelectorDimFilter("dim2", "super-c", JS_EXTRACTION_FN)))));
    Filter filter1 = dimFilter1.toFilter();
    Filter filter1CNF = Filters.convertToCNF(filter1);
    Assert.assertEquals(AndFilter.class, filter1CNF.getClass());
    Assert.assertEquals(2, ((AndFilter) filter1CNF).getFilters().size());
    assertFilterMatches(dimFilter1, ImmutableList.of("4", "6"));

    DimFilter dimFilter2 = new OrDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim0", "super-2", JS_EXTRACTION_FN),
        new SelectorDimFilter("dim0", "super-3", JS_EXTRACTION_FN),
        new AndDimFilter(Arrays.<DimFilter>asList(
            new NoBitmapSelectorDimFilter("dim1", "super-HELLO", JS_EXTRACTION_FN),
            new SelectorDimFilter("dim2", "super-foo", JS_EXTRACTION_FN)))));
    assertFilterMatches(dimFilter2, ImmutableList.of("2", "3", "7"));

    DimFilter dimFilter3 = new OrDimFilter(Arrays.<DimFilter>asList(
        dimFilter1,
        dimFilter2,
        new AndDimFilter(Arrays.<DimFilter>asList(
            new NoBitmapSelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN),
            new SelectorDimFilter("dim2", "super-foo", JS_EXTRACTION_FN)))));
    // Smoke-check that the nested filter converts to CNF without throwing.
    // (The original stored the results in unused locals `filter3`/`filter3CNF`
    // and never asserted on them; the dead variables are removed here.)
    Filters.convertToCNF(dimFilter3.toFilter());
    assertFilterMatches(dimFilter3, ImmutableList.of("2", "3", "4", "6", "7", "9"));
}
Example use of io.druid.query.filter.DimFilter in project druid (druid-io):
class FilterPartitionTest, method testBasicPreAndPostFilterWithNulls.
@Test
public void testBasicPreAndPostFilterWithNulls() {
    // Each case ANDs a bitmap-capable selector (pre-filter) with a
    // NoBitmapSelectorDimFilter (forced post-filter), first without an
    // extraction fn and then with JS_EXTRACTION_FN applied to both sides.
    DimFilter nullDim1 = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim2", "a", null),
        new NoBitmapSelectorDimFilter("dim1", null, null)));
    assertFilterMatches(nullDim1, ImmutableList.of("0"));

    DimFilter nullDim2 = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim1", "10", null),
        new NoBitmapSelectorDimFilter("dim2", null, null)));
    assertFilterMatches(nullDim2, ImmutableList.of("1"));

    DimFilter bothPresent = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim1", "1", null),
        new NoBitmapSelectorDimFilter("dim2", "foo", null)));
    assertFilterMatches(bothPresent, ImmutableList.of("9"));

    DimFilter noMatchPre = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim1", "HELLO", null),
        new NoBitmapSelectorDimFilter("dim2", "bar", null)));
    assertFilterMatches(noMatchPre, ImmutableList.<String>of());

    DimFilter noMatchPost = new AndDimFilter(Arrays.<DimFilter>asList(
        new NoBitmapSelectorDimFilter("dim2", "bar", null),
        new SelectorDimFilter("dim1", "NOT_A_VALUE", null)));
    assertFilterMatches(noMatchPost, ImmutableList.<String>of());

    DimFilter extractedNull1 = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
        new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN)));
    assertFilterMatches(extractedNull1, ImmutableList.of("0"));

    DimFilter extractedNull2 = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim1", "super-10", JS_EXTRACTION_FN),
        new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)));
    assertFilterMatches(extractedNull2, ImmutableList.of("1"));

    DimFilter extractedNull3 = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
        new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)));
    assertFilterMatches(extractedNull3, ImmutableList.of("2"));

    DimFilter extractedBoth = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN),
        new NoBitmapSelectorDimFilter("dim2", "super-foo", JS_EXTRACTION_FN)));
    assertFilterMatches(extractedBoth, ImmutableList.of("9"));

    DimFilter extractedNoMatch = new AndDimFilter(Arrays.<DimFilter>asList(
        new SelectorDimFilter("dim1", "super-HELLO", JS_EXTRACTION_FN),
        new NoBitmapSelectorDimFilter("dim2", "super-bar", JS_EXTRACTION_FN)));
    assertFilterMatches(extractedNoMatch, ImmutableList.<String>of());
}
Example use of io.druid.query.filter.DimFilter in project druid (druid-io):
class TopNQueryRunnerTest, method testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue.
@Test
public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue() {
    // Lookup maps the empty string to "NULL"; retainMissingValue=false with a
    // null replacement, injective=true, optimize=false.
    Map<String, String> lookupMap = new HashMap<>();
    lookupMap.put("", "NULL");
    MapLookupExtractor extractor = new MapLookupExtractor(lookupMap, false);
    LookupExtractionFn extractionFn = new LookupExtractionFn(extractor, false, null, true, false);

    // Filter a column that does not exist in the data; its (null) values extract to "NULL".
    DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", extractionFn, null);

    TopNQueryBuilder builder = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(QueryRunnerTestHelper.allGran)
        .dimension("null_column")
        .metric(QueryRunnerTestHelper.indexMetric)
        .threshold(4)
        .intervals(QueryRunnerTestHelper.fullOnInterval)
        .aggregators(Lists.newArrayList(Iterables.concat(
            QueryRunnerTestHelper.commonAggregators,
            Lists.newArrayList(
                new FilteredAggregatorFactory(
                    new DoubleMaxAggregatorFactory("maxIndex", "index"),
                    extractionFilter),
                new DoubleMinAggregatorFactory("minIndex", "index")))))
        .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant));
    TopNQuery query = builder.filters(extractionFilter).build();

    // Expected: a single row for the (null) dimension value covering the whole dataset.
    Map<String, Object> expectedRow = Maps.newHashMap();
    expectedRow.put("null_column", null);
    expectedRow.put("rows", 1209L);
    expectedRow.put("index", 503332.5071372986D);
    expectedRow.put("addRowsIndexConstant", 504542.5071372986D);
    expectedRow.put("uniques", QueryRunnerTestHelper.UNIQUES_9);
    expectedRow.put("maxIndex", 1870.06103515625D);
    expectedRow.put("minIndex", 59.02102279663086D);

    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(Arrays.asList(expectedRow))));
    assertExpectedResults(expectedResults, query);
}
Example use of io.druid.query.filter.DimFilter in project druid (druid-io):
class TimeseriesQueryRunnerTest, method testTimeseriesWithMultiValueDimFilterAndOr1.
@Test
public void testTimeseriesWithMultiValueDimFilterAndOr1() {
    // Filtering the multi-value "placementish" dimension on "a" should be
    // equivalent to filtering the single-value "quality" dimension on "automotive".
    AndDimFilter multiValueFilter = Druids.newAndDimFilterBuilder()
        .fields(Arrays.<DimFilter>asList(
            Druids.newSelectorDimFilterBuilder()
                .dimension(QueryRunnerTestHelper.marketDimension)
                .value("spot")
                .build(),
            Druids.newSelectorDimFilterBuilder()
                .dimension(QueryRunnerTestHelper.placementishDimension)
                .value("a")
                .build()))
        .build();
    TimeseriesQuery actualQuery = Druids.newTimeseriesQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(QueryRunnerTestHelper.dayGran)
        .filters(multiValueFilter)
        .intervals(QueryRunnerTestHelper.firstToThird)
        .aggregators(QueryRunnerTestHelper.commonAggregators)
        .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
        .descending(descending)
        .build();

    AndDimFilter singleValueFilter = Druids.newAndDimFilterBuilder()
        .fields(Arrays.<DimFilter>asList(
            Druids.newSelectorDimFilterBuilder()
                .dimension(QueryRunnerTestHelper.marketDimension)
                .value("spot")
                .build(),
            Druids.newSelectorDimFilterBuilder()
                .dimension(QueryRunnerTestHelper.qualityDimension)
                .value("automotive")
                .build()))
        .build();
    TimeseriesQuery expectedQuery = Druids.newTimeseriesQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(QueryRunnerTestHelper.dayGran)
        .filters(singleValueFilter)
        .intervals(QueryRunnerTestHelper.firstToThird)
        .aggregators(QueryRunnerTestHelper.commonAggregators)
        .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
        .descending(descending)
        .build();

    Iterable<Result<TimeseriesResultValue>> expectedResults = Sequences.toList(
        runner.run(expectedQuery, CONTEXT),
        Lists.<Result<TimeseriesResultValue>>newArrayList());
    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
        runner.run(actualQuery, CONTEXT),
        Lists.<Result<TimeseriesResultValue>>newArrayList());
    TestHelper.assertExpectedResults(expectedResults, actualResults);
}
Example use of io.druid.query.filter.DimFilter in project druid (druid-io):
class DruidSemiJoin, method getLeftRelWithFilter.
/**
 * Returns a copy of the left rel with the filter applied from the right-hand side. This is an expensive operation
 * since it actually executes the right-hand side query.
 *
 * @return the filtered left rel, or null if the right-hand side produced no rows
 * @throws ResourceLimitExceededException if the number of distinct right-hand rows
 *         exceeds {@code maxSemiJoinRowsInMemory}
 */
private DruidRel<?> getLeftRelWithFilter() {
    // Build list of acceptable values from right side.
    final Set<List<String>> valuess = Sets.newHashSet();
    final List<DimFilter> filters = Lists.newArrayList();
    right.runQuery().accumulate(null, new Accumulator<Object, Object[]>() {
        @Override
        public Object accumulate(final Object dummyValue, final Object[] row) {
            final List<String> values = Lists.newArrayListWithCapacity(rightKeys.size());
            for (int i : rightKeys) {
                final Object value = row[i];
                // Nulls are normalized to empty string so they can participate in the key tuple.
                final String stringValue = value != null ? String.valueOf(value) : "";
                values.add(stringValue);
            }
            if (valuess.add(values)) {
                // BUG FIX: the limit guards the number of distinct right-hand ROWS held
                // in memory. The old check compared values.size() (key columns of a
                // single row) against the row limit inside the per-key loop, so the
                // row count was never actually bounded. Check the set size instead,
                // after a new distinct row has been added.
                if (valuess.size() > maxSemiJoinRowsInMemory) {
                    throw new ResourceLimitExceededException(String.format("maxSemiJoinRowsInMemory[%,d] exceeded", maxSemiJoinRowsInMemory));
                }
                // One equality Bound per key column; AND them to match this exact row.
                final List<DimFilter> bounds = Lists.newArrayList();
                for (int i = 0; i < values.size(); i++) {
                    bounds.add(new BoundDimFilter(leftRowExtractions.get(i).getColumn(), values.get(i), values.get(i), false, false, null, leftRowExtractions.get(i).getExtractionFn(), getSourceRowSignature().naturalStringComparator(leftRowExtractions.get(i))));
                }
                filters.add(new AndDimFilter(bounds));
            }
            return null;
        }
    });
    // The dedup set is no longer needed once the filters are built.
    valuess.clear();
    if (!filters.isEmpty()) {
        // Add a filter to the left side. Use OR of singleton Bound filters so they can be simplified later.
        final DimFilter semiJoinFilter = new OrDimFilter(filters);
        // Conjoin with any pre-existing filter on the left query.
        final DimFilter newFilter = left.getQueryBuilder().getFilter() == null ? semiJoinFilter : new AndDimFilter(ImmutableList.of(semiJoinFilter, left.getQueryBuilder().getFilter()));
        return left.withQueryBuilder(left.getQueryBuilder().withFilter(newFilter));
    } else {
        return null;
    }
}
Aggregations