Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class FilterPartitionBenchmark, method readComplexOrFilterCNF.
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readComplexOrFilterCNF(Blackhole blackhole) {
DimFilter dimFilter1 = new OrDimFilter(Arrays.asList(
    new SelectorDimFilter("dimSequential", "199", null),
    new AndDimFilter(Arrays.asList(
        new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null),
        new SelectorDimFilter("dimMultivalEnumerated", "Bar", null)))));
DimFilter dimFilter2 = new OrDimFilter(Arrays.asList(
    new SelectorDimFilter("dimSequential", "299", null),
    new SelectorDimFilter("dimSequential", "399", null),
    new AndDimFilter(Arrays.asList(
        new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null),
        new SelectorDimFilter("dimMultivalEnumerated", "Foo", null)))));
DimFilter dimFilter3 = new OrDimFilter(Arrays.asList(
    dimFilter1,
    dimFilter2,
    new AndDimFilter(Arrays.asList(
        new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null),
        new SelectorDimFilter("dimMultivalEnumerated", "World", null)))));
StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
Sequence<Cursor> cursors = makeCursors(sa, Filters.toCnf(dimFilter3.toFilter()));
readCursors(cursors, blackhole);
}
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class FilterPartitionBenchmark, method readComplexOrFilter.
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readComplexOrFilter(Blackhole blackhole) {
DimFilter dimFilter1 = new OrDimFilter(Arrays.asList(
    new SelectorDimFilter("dimSequential", "199", null),
    new AndDimFilter(Arrays.asList(
        new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Corundum", null),
        new SelectorDimFilter("dimMultivalEnumerated", "Bar", null)))));
DimFilter dimFilter2 = new OrDimFilter(Arrays.asList(
    new SelectorDimFilter("dimSequential", "299", null),
    new SelectorDimFilter("dimSequential", "399", null),
    new AndDimFilter(Arrays.asList(
        new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Xylophone", null),
        new SelectorDimFilter("dimMultivalEnumerated", "Foo", null)))));
DimFilter dimFilter3 = new OrDimFilter(Arrays.asList(
    dimFilter1,
    dimFilter2,
    new AndDimFilter(Arrays.asList(
        new NoBitmapSelectorDimFilter("dimMultivalEnumerated2", "Orange", null),
        new SelectorDimFilter("dimMultivalEnumerated", "World", null)))));
StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
Sequence<Cursor> cursors = makeCursors(sa, dimFilter3.toFilter());
readCursors(cursors, blackhole);
}
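The only difference between readComplexOrFilterCNF and readComplexOrFilter above is that the CNF variant passes the filter through Filters.toCnf before building cursors. Below is a minimal sketch of that conversion on a filter with the same OR-of-AND shape; the class name OrFilterCnfSketch and the dimension names dimA, dimB, dimC are illustrative assumptions, not part of the benchmark schema.

import java.util.Arrays;

import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.segment.filter.Filters;

public class OrFilterCnfSketch {
  public static void main(String[] args) {
    // OR of a selector and an AND of two selectors -- the same shape as dimFilter1 above.
    DimFilter orOfAnd = new OrDimFilter(Arrays.asList(
        new SelectorDimFilter("dimA", "1", null),
        new AndDimFilter(Arrays.asList(
            new SelectorDimFilter("dimB", "x", null),
            new SelectorDimFilter("dimC", "y", null)))));

    // Filters.toCnf rewrites the filter tree into conjunctive normal form;
    // this is what the CNF benchmark variant hands to makeCursors().
    System.out.println(Filters.toCnf(orOfAnd.toFilter()));
  }
}

For a filter of this shape, conjunctive normal form distributes the OR over the AND, giving (dimA=1 OR dimB=x) AND (dimA=1 OR dimC=y).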
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class IncrementalIndexReadBenchmark, method readWithFilters.
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readWithFilters(Blackhole blackhole) {
DimFilter filter = new OrDimFilter(Arrays.asList(
    new BoundDimFilter("dimSequential", "-1", "-1", true, true, null, null, StringComparators.ALPHANUMERIC),
    new JavaScriptDimFilter("dimSequential", "function(x) { return false }", null, JavaScriptConfig.getEnabledInstance()),
    new RegexDimFilter("dimSequential", "X", null),
    new SearchQueryDimFilter("dimSequential", new ContainsSearchQuerySpec("X", false), null),
    new InDimFilter("dimSequential", Collections.singletonList("X"), null)));
IncrementalIndexStorageAdapter sa = new IncrementalIndexStorageAdapter(incIndex);
Sequence<Cursor> cursors = makeCursors(sa, filter);
Cursor cursor = cursors.limit(1).toList().get(0);
List<DimensionSelector> selectors = new ArrayList<>();
selectors.add(makeDimensionSelector(cursor, "dimSequential"));
selectors.add(makeDimensionSelector(cursor, "dimZipf"));
selectors.add(makeDimensionSelector(cursor, "dimUniform"));
selectors.add(makeDimensionSelector(cursor, "dimSequentialHalfNull"));
cursor.reset();
while (!cursor.isDone()) {
  for (DimensionSelector selector : selectors) {
    IndexedInts row = selector.getRow();
    blackhole.consume(selector.lookupName(row.get(0)));
  }
  cursor.advance();
}
}
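The filter above mixes five DimFilter implementations inside a single OrDimFilter, using the constructor signatures shown. A self-contained sketch of the same construction pattern with just two of those filter types follows; the class name MixedOrFilterSketch and the column name "dim" are illustrative assumptions.

import java.util.Arrays;
import java.util.Collections;

import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.query.filter.InDimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.ordering.StringComparators;

public class MixedOrFilterSketch {
  public static void main(String[] args) {
    // Same constructor signatures as the benchmark above; "dim" is an illustrative column name.
    DimFilter bound = new BoundDimFilter("dim", "-1", "-1", true, true, null, null, StringComparators.ALPHANUMERIC);
    DimFilter in = new InDimFilter("dim", Collections.singletonList("X"), null);

    // OrDimFilter accepts any mix of DimFilter implementations.
    DimFilter or = new OrDimFilter(Arrays.asList(bound, in));

    // toFilter() produces the engine-level Filter that makeCursors() consumes.
    Filter engineFilter = or.toFilter();
    System.out.println(engineFilter);
  }
}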
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class HashJoinSegmentStorageAdapterTest, method test_makeCursors_factToCountryLeftWithFilterOnJoinable.
@Test
public void test_makeCursors_factToCountryLeftWithFilterOnJoinable() {
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT));
Filter filter = new OrDimFilter(
    new SelectorDimFilter(FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryIsoCode", "DE", null),
    new SelectorDimFilter(FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryName", "Norway", null),
    new SelectorDimFilter(FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryNumber", "10", null)).toFilter();
JoinFilterPreAnalysis joinFilterPreAnalysis = makeDefaultConfigPreAnalysis(filter, joinableClauses, VirtualColumns.EMPTY);
JoinTestHelper.verifyCursors(
    new HashJoinSegmentStorageAdapter(factSegment.asStorageAdapter(), joinableClauses, joinFilterPreAnalysis)
        .makeCursors(filter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null),
    ImmutableList.of(
        "page",
        "countryIsoCode",
        FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryIsoCode",
        FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryName",
        FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryNumber"),
    ImmutableList.of(
        new Object[] { "Mathis Bolly", "MX", "MX", "Mexico", 10L },
        new Object[] { "Diskussion:Sebastian Schulz", "DE", "DE", "Germany", 3L },
        new Object[] { "Алиса в Зазеркалье", "NO", "NO", "Norway", 11L }));
}
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class SearchQueryRunnerTest, method testSearchWithMultiOrFilter.
@Test
public void testSearchWithMultiOrFilter() {
List<SearchHit> expectedHits = new ArrayList<>();
expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
DimFilter filter = new OrDimFilter(
    new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "total_market", null),
    new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null));
checkSearchQuery(
    Druids.newSearchQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .dimensions(QueryRunnerTestHelper.QUALITY_DIMENSION)
        .filters(filter)
        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .query("a")
        .build(),
    expectedHits);
}
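Across these examples OrDimFilter is built two ways: from an explicit List (the benchmark methods) and via the varargs constructor (this test and the join test). A brief sketch of the two equivalent forms follows; the class name OrDimFilterConstructionSketch is illustrative, and the literal "quality" string is an assumption standing in for QueryRunnerTestHelper.QUALITY_DIMENSION.

import java.util.Arrays;

import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

public class OrDimFilterConstructionSketch {
  public static void main(String[] args) {
    // Varargs form, as in testSearchWithMultiOrFilter above.
    DimFilter viaVarargs = new OrDimFilter(
        new SelectorDimFilter("quality", "total_market", null),
        new SelectorDimFilter("quality", "automotive", null));

    // List form, as in the FilterPartitionBenchmark methods above.
    DimFilter viaList = new OrDimFilter(Arrays.asList(
        new SelectorDimFilter("quality", "total_market", null),
        new SelectorDimFilter("quality", "automotive", null)));

    // Both describe the same OR of the two selector filters.
    System.out.println(viaVarargs);
    System.out.println(viaList);
  }
}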