Use of org.apache.druid.query.filter.Filter in the project druid by druid-io.
The following snippet is from the class GroupByQueryEngineV2, method process.
/**
 * Runs a groupBy query against a single segment, choosing between the vectorized and
 * non-vectorized execution paths, and returns result rows backed by a pooled buffer.
 *
 * @param query                        the groupBy query to execute
 * @param storageAdapter               adapter for the segment being queried; must not be null
 * @param intermediateResultsBufferPool pool supplying the intermediate aggregation buffer
 * @param querySpecificConfig          groupBy configuration already merged with query overrides
 * @return a sequence of result rows; closing it releases the pooled buffer
 * @throws ISE if the storage adapter is null (segment likely unmapped)
 * @throws IAE if the query has anything other than exactly one interval
 */
public static Sequence<ResultRow> process(final GroupByQuery query, @Nullable final StorageAdapter storageAdapter, final NonBlockingPool<ByteBuffer> intermediateResultsBufferPool, final GroupByQueryConfig querySpecificConfig) {
    if (storageAdapter == null) {
        throw new ISE("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }
    final List<Interval> queryIntervals = query.getQuerySegmentSpec().getIntervals();
    if (queryIntervals.size() != 1) {
        throw new IAE("Should only have one interval, got[%s]", queryIntervals);
    }
    // The buffer must be returned to the pool on any failure; on success it travels with
    // the result sequence as baggage and is released when the sequence is closed.
    final ResourceHolder<ByteBuffer> bufferHolder = intermediateResultsBufferPool.take();
    try {
        final String rawFudgeTimestamp = NullHandling.emptyToNullIfNeeded(query.getContextValue(GroupByStrategyV2.CTX_KEY_FUDGE_TIMESTAMP, null));
        final DateTime fudgeTimestamp;
        if (rawFudgeTimestamp == null) {
            fudgeTimestamp = null;
        } else {
            // Context value is a millisecond epoch encoded as a string.
            fudgeTimestamp = DateTimes.utc(Long.parseLong(rawFudgeTimestamp));
        }
        final Filter cnfFilter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getFilter()));
        final Interval soleInterval = Iterables.getOnlyElement(query.getIntervals());
        // Vectorize only when both the query context allows it and the engine supports it.
        final boolean useVectorizedEngine = QueryContexts.getVectorize(query).shouldVectorize(VectorGroupByEngine.canVectorize(query, storageAdapter, cnfFilter));
        final Sequence<ResultRow> rows = useVectorizedEngine
            ? VectorGroupByEngine.process(query, storageAdapter, bufferHolder.get(), fudgeTimestamp, cnfFilter, soleInterval, querySpecificConfig)
            : processNonVectorized(query, storageAdapter, bufferHolder.get(), fudgeTimestamp, querySpecificConfig, cnfFilter, soleInterval);
        return rows.withBaggage(bufferHolder);
    } catch (Throwable e) {
        bufferHolder.close();
        throw e;
    }
}
Use of org.apache.druid.query.filter.Filter in the project druid by druid-io.
The following snippet is from the class GroupByQueryEngine, method process.
/**
 * Executes a groupBy v1 query against a single segment, returning one merged row
 * sequence. The pooled intermediate buffer travels with the sequence as baggage and
 * is released when the sequence is fully consumed or closed.
 *
 * @param query          the groupBy query to execute
 * @param storageAdapter adapter for the segment being queried; must not be null
 * @return a sequence of grouped rows
 * @throws ISE if the storage adapter is null (segment likely unmapped)
 * @throws UOE if multi-value unnesting was disabled (v1 cannot honor that setting)
 * @throws IAE if the query has anything other than exactly one interval
 */
public Sequence<Row> process(final GroupByQuery query, final StorageAdapter storageAdapter) {
    if (storageAdapter == null) {
        throw new ISE("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }
    if (!query.getContextValue(GroupByQueryConfig.CTX_KEY_ENABLE_MULTI_VALUE_UNNESTING, true)) {
        throw new UOE("GroupBy v1 does not support %s as false. Set %s to true or use groupBy v2", GroupByQueryConfig.CTX_KEY_ENABLE_MULTI_VALUE_UNNESTING, GroupByQueryConfig.CTX_KEY_ENABLE_MULTI_VALUE_UNNESTING);
    }
    final List<Interval> queryIntervals = query.getQuerySegmentSpec().getIntervals();
    if (queryIntervals.size() != 1) {
        throw new IAE("Should only have one interval, got[%s]", queryIntervals);
    }
    final Filter cnfFilter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimFilter()));
    final Sequence<Cursor> cursors = storageAdapter.makeCursors(cnfFilter, queryIntervals.get(0), query.getVirtualColumns(), query.getGranularity(), false, null);
    final ResourceHolder<ByteBuffer> bufferHolder = intermediateResultsBufferPool.take();
    // Each cursor yields its own row iterator; concat flattens them and the baggage
    // returns the buffer to the pool once everything has been consumed.
    return Sequences.concat(
        Sequences.withBaggage(
            Sequences.map(
                cursors,
                cursor -> new BaseSequence<>(new BaseSequence.IteratorMaker<Row, RowIterator>() {
                    @Override
                    public RowIterator make() {
                        return new RowIterator(query, cursor, bufferHolder.get(), config.get());
                    }

                    @Override
                    public void cleanup(RowIterator rowIterator) {
                        // Wrap any close failure rather than leaking the iterator's resources.
                        CloseableUtils.closeAndWrapExceptions(rowIterator);
                    }
                })
            ),
            bufferHolder
        )
    );
}
Use of org.apache.druid.query.filter.Filter in the project druid by druid-io.
The following snippet is from the class SearchQueryFilterTest, method testRequiredColumnRewrite.
@Test
public void testRequiredColumnRewrite() {
    // Two filters identical except for the dimension they target.
    final Filter dim0Filter = new SearchQueryDimFilter("dim0", specForValue("a"), null).toFilter();
    final Filter dim1Filter = new SearchQueryDimFilter("dim1", specForValue("a"), null).toFilter();
    Assert.assertTrue(dim0Filter.supportsRequiredColumnRewrite());
    Assert.assertTrue(dim1Filter.supportsRequiredColumnRewrite());

    // Rewriting dim0 -> dim1 must produce a filter equal to one built on dim1 directly.
    final Filter rewritten = dim0Filter.rewriteRequiredColumns(ImmutableMap.of("dim0", "dim1"));
    Assert.assertEquals(dim1Filter, rewritten);

    // A rewrite map that does not mention the filter's dimension must be rejected.
    expectedException.expect(IAE.class);
    expectedException.expectMessage("Received a non-applicable rewrite: {invalidName=dim1}, filter's dimension: dim0");
    dim0Filter.rewriteRequiredColumns(ImmutableMap.of("invalidName", "dim1"));
}
Use of org.apache.druid.query.filter.Filter in the project druid by druid-io.
The following snippet is from the class RegexFilterTest, method testRequiredColumnRewrite.
@Test
public void testRequiredColumnRewrite() {
    // Two regex filters identical except for the dimension they target.
    final Filter dim0Filter = new RegexDimFilter("dim0", ".*", null).toFilter();
    final Filter dim1Filter = new RegexDimFilter("dim1", ".*", null).toFilter();
    Assert.assertTrue(dim0Filter.supportsRequiredColumnRewrite());
    Assert.assertTrue(dim1Filter.supportsRequiredColumnRewrite());

    // Rewriting dim0 -> dim1 must produce a filter equal to one built on dim1 directly.
    final Filter rewritten = dim0Filter.rewriteRequiredColumns(ImmutableMap.of("dim0", "dim1"));
    Assert.assertEquals(dim1Filter, rewritten);

    // A rewrite map that does not mention the filter's dimension must be rejected.
    expectedException.expect(IAE.class);
    expectedException.expectMessage("Received a non-applicable rewrite: {invalidName=dim1}, filter's dimension: dim0");
    dim0Filter.rewriteRequiredColumns(ImmutableMap.of("invalidName", "dim1"));
}
Use of org.apache.druid.query.filter.Filter in the project druid by druid-io.
The following snippet is from the class FilterPartitionTest, method testDistributeOrCNF.
@Test
public void testDistributeOrCNF() {
    // OR(selector, AND(no-bitmap selector, selector)): CNF must distribute the OR over
    // the inner AND, yielding a top-level AND of exactly two clauses.
    final DimFilter orOverAnd = new OrDimFilter(ImmutableList.of(
        new SelectorDimFilter("dim0", "6", null),
        new AndDimFilter(ImmutableList.of(
            new NoBitmapSelectorDimFilter("dim1", "abdef", null),
            new SelectorDimFilter("dim2", "c", null)
        ))
    ));
    final Filter cnf = Filters.toCnf(orOverAnd.toFilter());
    Assert.assertEquals(AndFilter.class, cnf.getClass());
    Assert.assertEquals(2, ((AndFilter) cnf).getFilters().size());
    assertFilterMatches(orOverAnd, ImmutableList.of("4", "6"));

    // A wider OR with two selectors plus an AND branch.
    final DimFilter widerOr = new OrDimFilter(ImmutableList.of(
        new SelectorDimFilter("dim0", "2", null),
        new SelectorDimFilter("dim0", "3", null),
        new AndDimFilter(ImmutableList.of(
            new NoBitmapSelectorDimFilter("dim1", "HELLO", null),
            new SelectorDimFilter("dim2", "foo", null)
        ))
    ));
    assertFilterMatches(widerOr, ImmutableList.of("2", "3", "7"));

    // Nesting the previous two filters inside yet another OR with an AND branch.
    final DimFilter nestedOr = new OrDimFilter(ImmutableList.of(
        orOverAnd,
        widerOr,
        new AndDimFilter(ImmutableList.of(
            new NoBitmapSelectorDimFilter("dim1", "1", null),
            new SelectorDimFilter("dim2", "foo", null)
        ))
    ));
    assertFilterMatches(nestedOr, ImmutableList.of("2", "3", "4", "6", "7", "9"));
}
Aggregations