Use of org.apache.druid.segment.filter.SelectorFilter in project druid by druid-io.
In class HashJoinSegmentStorageAdapterTest, method test_makeCursors_factToCountryLeft_filterExcludesAllLeftRows:
@Test
public void test_makeCursors_factToCountryLeft_filterExcludesAllLeftRows() {
  Filter originalFilter = new SelectorFilter("page", "this matches nothing");
  List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT));
  JoinFilterPreAnalysis joinFilterPreAnalysis =
      makeDefaultConfigPreAnalysis(originalFilter, joinableClauses, VirtualColumns.EMPTY);
  JoinTestHelper.verifyCursors(
      new HashJoinSegmentStorageAdapter(factSegment.asStorageAdapter(), joinableClauses, joinFilterPreAnalysis)
          .makeCursors(originalFilter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null),
      ImmutableList.of(
          "page",
          "countryIsoCode",
          FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryIsoCode",
          FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryName",
          FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryNumber"
      ),
      ImmutableList.of()
  );
}
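For context, the SelectorFilter built in this test is the engine-level form of Druid's "selector" dimension filter. A minimal sketch of the equivalent construction through the query-level DimFilter API, assuming the standard SelectorDimFilter class (the third constructor argument is the optional extraction function):

import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.query.filter.SelectorDimFilter;

// DimFilter is the JSON-serializable query-level form; toFilter() yields the
// engine-level Filter, equivalent to new SelectorFilter("page", "this matches nothing").
DimFilter dimFilter = new SelectorDimFilter("page", "this matches nothing", null);
Filter asFilter = dimFilter.toFilter();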
Use of org.apache.druid.segment.filter.SelectorFilter in project druid by druid-io.
In class FilterPartitionBenchmark, method readAndFilter:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readAndFilter(Blackhole blackhole) {
  Filter andFilter = new AndFilter(ImmutableList.of(
      new SelectorFilter("dimUniform", "199"),
      new NoBitmapSelectorDimFilter("dimUniform", "super-199", JS_EXTRACTION_FN).toFilter()
  ));
  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = makeCursors(sa, andFilter);
  readCursors(cursors, blackhole);
}
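JS_EXTRACTION_FN is defined elsewhere in the benchmark class and is not shown in this snippet. A plausible definition, assuming the usual prefixing JavaScript extraction function found in Druid's benchmark suite (the exact function body here is an assumption):

import org.apache.druid.js.JavaScriptConfig;
import org.apache.druid.query.extraction.ExtractionFn;
import org.apache.druid.query.extraction.JavaScriptExtractionFn;

// Hypothetical definition: maps each value "x" to "super-x", which is why the
// extraction-based selector above matches "super-199" on the same dimension.
private static final String JS_FN = "function(str) { return 'super-' + str; }";
private static final ExtractionFn JS_EXTRACTION_FN =
    new JavaScriptExtractionFn(JS_FN, false, JavaScriptConfig.getEnabledInstance());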
Use of org.apache.druid.segment.filter.SelectorFilter in project druid by druid-io.
In class FilterPartitionBenchmark, method readOrFilterCNF:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readOrFilterCNF(Blackhole blackhole) {
  Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
  Filter filter2 = new AndFilter(Arrays.asList(
      new SelectorFilter("dimMultivalEnumerated2", "Corundum"),
      new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")
  ));
  Filter orFilter = new OrFilter(Arrays.asList(filter, filter2));
  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = makeCursors(sa, Filters.toCnf(orFilter));
  readCursors(cursors, blackhole);
}
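The only difference from readOrFilter below is the Filters.toCnf call. Conceptually, conversion to conjunctive normal form distributes the outer OR over the inner AND. A hand-built sketch of the resulting structure, using the same filter objects as above (the actual output of Filters.toCnf may differ in node ordering and deduplication):

// dimSequential=199 OR (dimMultivalEnumerated2=Corundum AND dimMultivalEnumerated=Bar)
// becomes
// (dimSequential=199 OR dimMultivalEnumerated2=Corundum) AND (dimSequential=199 OR dimMultivalEnumerated=Bar)
Filter cnfByHand = new AndFilter(Arrays.asList(
    new OrFilter(Arrays.asList(filter, new SelectorFilter("dimMultivalEnumerated2", "Corundum"))),
    new OrFilter(Arrays.asList(filter, new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")))
));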
Use of org.apache.druid.segment.filter.SelectorFilter in project druid by druid-io.
In class FilterPartitionBenchmark, method readOrFilter:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readOrFilter(Blackhole blackhole) {
  Filter filter = new NoBitmapSelectorFilter("dimSequential", "199");
  Filter filter2 = new AndFilter(Arrays.asList(
      new SelectorFilter("dimMultivalEnumerated2", "Corundum"),
      new NoBitmapSelectorFilter("dimMultivalEnumerated", "Bar")
  ));
  Filter orFilter = new OrFilter(Arrays.asList(filter, filter2));
  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = makeCursors(sa, orFilter);
  readCursors(cursors, blackhole);
}
Use of org.apache.druid.segment.filter.SelectorFilter in project druid by druid-io.
In class FilterPartitionBenchmark, method readWithPreFilter:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void readWithPreFilter(Blackhole blackhole) {
  Filter filter = new SelectorFilter("dimSequential", "199");
  StorageAdapter sa = new QueryableIndexStorageAdapter(qIndex);
  Sequence<Cursor> cursors = makeCursors(sa, filter);
  readCursors(cursors, blackhole);
}
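The makeCursors(sa, filter) and readCursors helpers used by the benchmark methods above are defined elsewhere in FilterPartitionBenchmark. A minimal sketch of what makeCursors presumably does, assuming the whole data interval and ALL granularity (the interval the real benchmark scans may differ):

private Sequence<Cursor> makeCursors(StorageAdapter sa, Filter filter) {
  // Same StorageAdapter.makeCursors signature used in the join test above:
  // (filter, interval, virtual columns, granularity, descending, query metrics).
  return sa.makeCursors(filter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null);
}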