use of org.apache.druid.segment.StorageAdapter in project druid by druid-io.
the class UseIndexesStrategy method getExecutionPlan.
@Override
public List<SearchQueryExecutor> getExecutionPlan(SearchQuery query, Segment segment)
{
  // Note: 'filter' and 'interval' are fields of the enclosing UseIndexesStrategy class (not shown in this excerpt).
  final ImmutableList.Builder<SearchQueryExecutor> builder = ImmutableList.builder();
  final QueryableIndex index = segment.asQueryableIndex();
  final StorageAdapter adapter = segment.asStorageAdapter();
  final List<DimensionSpec> searchDims = getDimsToSearch(adapter.getAvailableDimensions(), query.getDimensions());

  if (index != null) {
    // pair of bitmap dims and non-bitmap dims
    final Pair<List<DimensionSpec>, List<DimensionSpec>> pair = partitionDimensionList(adapter, searchDims);
    final List<DimensionSpec> bitmapSuppDims = pair.lhs;
    final List<DimensionSpec> nonBitmapSuppDims = pair.rhs;

    if (bitmapSuppDims.size() > 0) {
      final BitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(
          index.getBitmapFactoryForDimensions(),
          VirtualColumns.EMPTY,
          index
      );

      // The index-only plan is used only when no filter is specified or the filter supports bitmap indexes.
      // If some filters support bitmap indexes but others do not, the cursor-based plan is always employed.
      // One possible optimization would be generating a bitmap index from the non-bitmap-support filter,
      // and then using it to compute the filtered result by intersecting bitmaps.
      if (filter == null || filter.supportsBitmapIndex(selector)) {
        final ImmutableBitmap timeFilteredBitmap = makeTimeFilteredBitmap(index, segment, filter, interval);
        builder.add(new IndexOnlyExecutor(query, segment, timeFilteredBitmap, bitmapSuppDims));
      } else {
        // Fall back to cursor-based execution strategy
        nonBitmapSuppDims.addAll(bitmapSuppDims);
      }
    }

    if (nonBitmapSuppDims.size() > 0) {
      builder.add(new CursorBasedExecutor(query, segment, filter, interval, nonBitmapSuppDims));
    }
  } else {
    builder.add(new CursorBasedExecutor(query, segment, filter, interval, searchDims));
  }

  return builder.build();
}
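partitionDimensionList is not shown in this excerpt. A plausible sketch of it, assuming only StorageAdapter.getColumnCapabilities and ColumnCapabilities.hasBitmapIndexes (both real Druid APIs), and not claiming to be the verbatim implementation:

// Sketch: split the search dimensions into those whose columns carry a bitmap index
// (eligible for the index-only plan) and those that require a cursor scan.
static Pair<List<DimensionSpec>, List<DimensionSpec>> partitionDimensionList(
    StorageAdapter adapter,
    List<DimensionSpec> dimensions
)
{
  final List<DimensionSpec> bitmapDims = new ArrayList<>();
  final List<DimensionSpec> nonBitmapDims = new ArrayList<>();

  for (DimensionSpec spec : dimensions) {
    final ColumnCapabilities capabilities = adapter.getColumnCapabilities(spec.getDimension());
    if (capabilities == null) {
      // Unknown column: nothing to search.
      continue;
    }
    if (capabilities.hasBitmapIndexes()) {
      bitmapDims.add(spec);
    } else {
      nonBitmapDims.add(spec);
    }
  }

  return new Pair<>(bitmapDims, nonBitmapDims);
}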
use of org.apache.druid.segment.StorageAdapter in project druid by druid-io.
the class TopNMetricSpecOptimizationsTest method testShouldOptimizeLexicographic.
@Test
public void testShouldOptimizeLexicographic()
{
  // query interval is greater than segment interval, no filters, can ignoreAfterThreshold
  int cardinality = 1234;
  int threshold = 4;
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
      .metric(QueryRunnerTestHelper.INDEX_METRIC)
      .threshold(threshold)
      .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z")
      .aggregators(AGGS)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();
  StorageAdapter adapter = makeFakeStorageAdapter("2018-05-30T00:00:00Z", "2018-05-30T01:00:00Z", cardinality);
  DimensionSelector dimSelector = makeFakeDimSelector(cardinality);
  BaseTopNAlgorithm.AggregatorArrayProvider arrayProviderToTest =
      new BaseTopNAlgorithm.AggregatorArrayProvider(dimSelector, query, cardinality, adapter);
  arrayProviderToTest.ignoreAfterThreshold();
  Pair<Integer, Integer> thePair = arrayProviderToTest.computeStartEnd(cardinality);
  Assert.assertEquals(Integer.valueOf(0), thePair.lhs);
  Assert.assertEquals(Integer.valueOf(threshold), thePair.rhs);
}
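makeFakeStorageAdapter and makeFakeDimSelector are private helpers of TopNMetricSpecOptimizationsTest and are not reproduced here. A hypothetical reconstruction of the adapter half using Mockito (the real helper may instead hand-roll an anonymous StorageAdapter; Intervals and ArgumentMatchers are assumed from Druid's java-util and Mockito respectively), stubbing only the two calls the optimization check reads:

// Hypothetical sketch, not the verbatim test helper: only getInterval() and
// getDimensionCardinality() matter for computeStartEnd's optimization decision.
private StorageAdapter makeFakeStorageAdapter(String start, String end, int cardinality)
{
  StorageAdapter adapter = Mockito.mock(StorageAdapter.class);
  Mockito.when(adapter.getInterval()).thenReturn(Intervals.of(start + "/" + end));
  Mockito.when(adapter.getDimensionCardinality(ArgumentMatchers.anyString())).thenReturn(cardinality);
  return adapter;
}

makeFakeDimSelector would similarly only need to report the given cardinality from getValueCardinality().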
use of org.apache.druid.segment.StorageAdapter in project druid by druid-io.
the class TopNMetricSpecOptimizationsTest method testAlsoShouldNotOptimizeLexicographic.
@Test
public void testAlsoShouldNotOptimizeLexicographic()
{
  // query interval is larger than segment interval, but the query has a filter, so ignoreAfterThreshold cannot apply
  int cardinality = 1234;
  int threshold = 4;
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
      .filters(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment")
      .metric(QueryRunnerTestHelper.INDEX_METRIC)
      .threshold(threshold)
      .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z")
      .aggregators(AGGS)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();
  StorageAdapter adapter = makeFakeStorageAdapter("2018-05-30T00:00:00Z", "2018-05-30T01:00:00Z", cardinality);
  DimensionSelector dimSelector = makeFakeDimSelector(cardinality);
  BaseTopNAlgorithm.AggregatorArrayProvider arrayProviderToTest =
      new BaseTopNAlgorithm.AggregatorArrayProvider(dimSelector, query, cardinality, adapter);
  arrayProviderToTest.ignoreAfterThreshold();
  Pair<Integer, Integer> thePair = arrayProviderToTest.computeStartEnd(cardinality);
  Assert.assertEquals(Integer.valueOf(0), thePair.lhs);
  Assert.assertEquals(Integer.valueOf(cardinality), thePair.rhs);
}
use of org.apache.druid.segment.StorageAdapter in project druid by druid-io.
the class TopNMetricSpecOptimizationsTest method testAlsoShouldOptimizeLexicographic.
@Test
public void testAlsoShouldOptimizeLexicographic()
{
  // query interval is same as segment interval, no filters, can ignoreAfterThreshold
  int cardinality = 1234;
  int threshold = 4;
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
      .metric(QueryRunnerTestHelper.INDEX_METRIC)
      .threshold(threshold)
      .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z")
      .aggregators(AGGS)
      .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
      .build();
  StorageAdapter adapter = makeFakeStorageAdapter("2018-05-30T00:00:00Z", "2018-05-30T01:00:00Z", cardinality);
  DimensionSelector dimSelector = makeFakeDimSelector(cardinality);
  BaseTopNAlgorithm.AggregatorArrayProvider arrayProviderToTest =
      new BaseTopNAlgorithm.AggregatorArrayProvider(dimSelector, query, cardinality, adapter);
  arrayProviderToTest.ignoreAfterThreshold();
  Pair<Integer, Integer> thePair = arrayProviderToTest.computeStartEnd(cardinality);
  Assert.assertEquals(Integer.valueOf(0), thePair.lhs);
  Assert.assertEquals(Integer.valueOf(threshold), thePair.rhs);
}
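Taken together, the three tests pin down when the lexicographic threshold optimization fires: the scan may stop at the query threshold only when ignoreAfterThreshold() was called, the query carries no dimension filter, and a query interval fully covers the segment interval. A simplified sketch of that decision (an illustration of the behavior under test, not the verbatim Druid code; segmentInterval stands in for adapter.getInterval()):

// Simplified sketch of the end index the tests above assert on.
static int expectedEndIndex(TopNQuery query, Interval segmentInterval, int cardinality, boolean ignoreAfterThreshold)
{
  final boolean canStopAtThreshold =
      ignoreAfterThreshold
      && query.getDimensionsFilter() == null
      && query.getIntervals().stream().anyMatch(qi -> qi.contains(segmentInterval));
  return canStopAtThreshold ? Math.min(cardinality, query.getThreshold()) : cardinality;
}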
use of org.apache.druid.segment.StorageAdapter in project druid by druid-io.
the class VectorGroupByEngineIteratorTest method testCreateOneGrouperAndCloseItWhenClose.
@Test
public void testCreateOneGrouperAndCloseItWhenClose() throws IOException
{
  final Interval interval = TestIndex.DATA_INTERVAL;
  final AggregatorFactory factory = new DoubleSumAggregatorFactory("index", "index");
  final GroupByQuery query = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setInterval(interval)
      .setDimensions(new DefaultDimensionSpec("market", null, null))
      .setAggregatorSpecs(factory)
      .build();
  final StorageAdapter storageAdapter = new QueryableIndexStorageAdapter(TestIndex.getMMappedTestIndex());
  final ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[4096]);
  final VectorCursor cursor = storageAdapter.makeVectorCursor(
      Filters.toFilter(query.getDimFilter()),
      interval,
      query.getVirtualColumns(),
      false,
      QueryContexts.getVectorSize(query),
      null
  );
  final List<GroupByVectorColumnSelector> dimensions = query.getDimensions().stream()
      .map(dimensionSpec -> ColumnProcessors.makeVectorProcessor(
          dimensionSpec,
          GroupByVectorColumnProcessorFactory.instance(),
          cursor.getColumnSelectorFactory()
      ))
      .collect(Collectors.toList());
  final MutableObject<VectorGrouper> grouperCaptor = new MutableObject<>();
  final VectorGroupByEngineIterator iterator = new VectorGroupByEngineIterator(
      query,
      new GroupByQueryConfig(),
      storageAdapter,
      cursor,
      interval,
      dimensions,
      byteBuffer,
      null
  )
  {
    @Override
    VectorGrouper makeGrouper()
    {
      // Wrap the real grouper in a Mockito spy so close() can be verified below.
      grouperCaptor.setValue(Mockito.spy(super.makeGrouper()));
      return grouperCaptor.getValue();
    }
  };
  iterator.close();
  Mockito.verify(grouperCaptor.getValue()).close();
}
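One caveat worth flagging when calling makeVectorCursor directly, as this test does: the adapter can return a null cursor (for example, when the queried interval does not overlap the segment), so callers normally guard before dereferencing it. A minimal sketch of such a guard (an illustration, not part of the original test; ISE is Druid's format-string exception from java-util):

// Guard against a null cursor before calling cursor.getColumnSelectorFactory() above.
if (cursor == null) {
  throw new ISE("Could not create vector cursor for interval[%s]", interval);
}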