
Example 1 with QueryableIndex

Use of org.apache.druid.segment.QueryableIndex in project druid by druid-io.

From the class UseIndexesStrategy, the method getExecutionPlan:

@Override
public List<SearchQueryExecutor> getExecutionPlan(SearchQuery query, Segment segment) {
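    // Note: 'filter' and 'interval' used below are instance fields of the strategy, derived from the search query.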
    final ImmutableList.Builder<SearchQueryExecutor> builder = ImmutableList.builder();
    final QueryableIndex index = segment.asQueryableIndex();
    final StorageAdapter adapter = segment.asStorageAdapter();
    final List<DimensionSpec> searchDims = getDimsToSearch(adapter.getAvailableDimensions(), query.getDimensions());
    if (index != null) {
        // pair of bitmap dims and non-bitmap dims
        final Pair<List<DimensionSpec>, List<DimensionSpec>> pair = partitionDimensionList(adapter, searchDims);
        final List<DimensionSpec> bitmapSuppDims = pair.lhs;
        final List<DimensionSpec> nonBitmapSuppDims = pair.rhs;
        if (bitmapSuppDims.size() > 0) {
            final BitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(index.getBitmapFactoryForDimensions(), VirtualColumns.EMPTY, index);
            // Use the index-only plan when there is no filter or the filter supports bitmap indexes.
            // If the filter does not support bitmap indexes, fall back to the cursor-based plan for
            // all of these dimensions, including the ones that do have bitmap indexes.
            if (filter == null || filter.supportsBitmapIndex(selector)) {
                final ImmutableBitmap timeFilteredBitmap = makeTimeFilteredBitmap(index, segment, filter, interval);
                builder.add(new IndexOnlyExecutor(query, segment, timeFilteredBitmap, bitmapSuppDims));
            } else {
                // Fall back to cursor-based execution strategy
                nonBitmapSuppDims.addAll(bitmapSuppDims);
            }
        }
        if (nonBitmapSuppDims.size() > 0) {
            builder.add(new CursorBasedExecutor(query, segment, filter, interval, nonBitmapSuppDims));
        }
    } else {
        builder.add(new CursorBasedExecutor(query, segment, filter, interval, searchDims));
    }
    return builder.build();
}
Also used: DimensionSpec (org.apache.druid.query.dimension.DimensionSpec), ImmutableBitmap (org.apache.druid.collections.bitmap.ImmutableBitmap), ImmutableList (com.google.common.collect.ImmutableList), StorageAdapter (org.apache.druid.segment.StorageAdapter), CursorBasedExecutor (org.apache.druid.query.search.CursorOnlyStrategy.CursorBasedExecutor), ColumnSelectorBitmapIndexSelector (org.apache.druid.segment.ColumnSelectorBitmapIndexSelector), QueryableIndex (org.apache.druid.segment.QueryableIndex), ArrayList (java.util.ArrayList), List (java.util.List), BitmapIndexSelector (org.apache.druid.query.filter.BitmapIndexSelector)
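
To make the branching in getExecutionPlan concrete, here is a minimal sketch (not from the Druid source; the class and method names PlanChoiceSketch and describePlanChoice are made up) of how a Segment exposes either a QueryableIndex, enabling the index-backed path, or only a StorageAdapter, forcing the cursor-based path:

import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.StorageAdapter;

public class PlanChoiceSketch {
    // Describes which execution path a segment can support.
    static String describePlanChoice(Segment segment) {
        final QueryableIndex index = segment.asQueryableIndex();
        if (index != null) {
            // Persisted segments expose a QueryableIndex, so bitmap-index execution is possible.
            return "index-backed (" + index.getNumRows() + " rows)";
        }
        // Segments without a QueryableIndex (e.g. incremental indexes) can only be scanned with cursors.
        final StorageAdapter adapter = segment.asStorageAdapter();
        return "cursor-only (" + adapter.getNumRows() + " rows)";
    }
}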

Example 2 with QueryableIndex

Use of org.apache.druid.segment.QueryableIndex in project druid by druid-io.

From the class AggregationTestHelper, the method createIndex:

public void createIndex(Iterator rows, InputRowParser parser, final AggregatorFactory[] metrics, File outDir, long minTimestamp, Granularity gran, boolean deserializeComplexMetrics, int maxRowCount, boolean rollup) throws Exception {
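    // Note: tempFolder, indexMerger, and indexIO used below are fields of the surrounding AggregationTestHelper.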
    IncrementalIndex index = null;
    List<File> toMerge = new ArrayList<>();
    try {
        index = new OnheapIncrementalIndex.Builder()
                .setIndexSchema(
                    new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(minTimestamp)
                        .withDimensionsSpec(parser.getParseSpec().getDimensionsSpec())
                        .withQueryGranularity(gran)
                        .withMetrics(metrics)
                        .withRollup(rollup)
                        .build()
                )
                .setDeserializeComplexMetrics(deserializeComplexMetrics)
                .setMaxRowCount(maxRowCount)
                .build();
        while (rows.hasNext()) {
            Object row = rows.next();
            if (!index.canAppendRow()) {
                File tmp = tempFolder.newFolder();
                toMerge.add(tmp);
                indexMerger.persist(index, tmp, new IndexSpec(), null);
                index.close();
                index = new OnheapIncrementalIndex.Builder()
                        .setIndexSchema(
                            new IncrementalIndexSchema.Builder()
                                .withMinTimestamp(minTimestamp)
                                .withDimensionsSpec(parser.getParseSpec().getDimensionsSpec())
                                .withQueryGranularity(gran)
                                .withMetrics(metrics)
                                .withRollup(rollup)
                                .build()
                        )
                        .setDeserializeComplexMetrics(deserializeComplexMetrics)
                        .setMaxRowCount(maxRowCount)
                        .build();
            }
            if (row instanceof String && parser instanceof StringInputRowParser) {
                // Note: this is required because StringInputRowParser is InputRowParser<ByteBuffer> as opposed to
                // InputRowsParser<String>
                index.add(((StringInputRowParser) parser).parse((String) row));
            } else {
                index.add(((List<InputRow>) parser.parseBatch(row)).get(0));
            }
        }
        if (toMerge.size() > 0) {
            File tmp = tempFolder.newFolder();
            toMerge.add(tmp);
            indexMerger.persist(index, tmp, new IndexSpec(), null);
            List<QueryableIndex> indexes = new ArrayList<>(toMerge.size());
            for (File file : toMerge) {
                indexes.add(indexIO.loadIndex(file));
            }
            indexMerger.mergeQueryableIndex(indexes, rollup, metrics, outDir, new IndexSpec(), null, -1);
            for (QueryableIndex qi : indexes) {
                qi.close();
            }
        } else {
            indexMerger.persist(index, outDir, new IndexSpec(), null);
        }
    } finally {
        if (index != null) {
            index.close();
        }
    }
}
Also used: IndexSpec (org.apache.druid.segment.IndexSpec), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), ArrayList (java.util.ArrayList), QueryableIndex (org.apache.druid.segment.QueryableIndex), StringInputRowParser (org.apache.druid.data.input.impl.StringInputRowParser), InputRow (org.apache.druid.data.input.InputRow), File (java.io.File), IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema)
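
Once createIndex has written the merged segment to outDir, it can be reopened as an immutable QueryableIndex for querying or inspection. A short sketch, assuming the same indexIO field and an outDir already populated by the method above (the printed summary is illustrative only):

    QueryableIndex merged = indexIO.loadIndex(outDir);
    try {
        System.out.println(merged.getNumRows() + " rows covering " + merged.getDataInterval());
    } finally {
        merged.close();
    }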

Example 3 with QueryableIndex

Use of org.apache.druid.segment.QueryableIndex in project druid by druid-io.

From the class GroupByLimitPushDownInsufficientBufferTest, the method getRunner2:

private List<QueryRunner<ResultRow>> getRunner2() {
    List<QueryRunner<ResultRow>> runners = new ArrayList<>();
    QueryableIndex index2 = groupByIndices.get(1);
    QueryRunner<ResultRow> tooSmallRunner = makeQueryRunner(tooSmallGroupByFactory, SegmentId.dummy(index2.toString()), new QueryableIndexSegment(index2, SegmentId.dummy(index2.toString())));
    runners.add(tooSmallGroupByFactory.getToolchest().preMergeQueryDecoration(tooSmallRunner));
    return runners;
}
Also used: QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment), QueryableIndex (org.apache.druid.segment.QueryableIndex), ArrayList (java.util.ArrayList), QueryRunner (org.apache.druid.query.QueryRunner), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), BySegmentQueryRunner (org.apache.druid.query.BySegmentQueryRunner)
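
The essential step in getRunner2 is wrapping a loaded QueryableIndex in a QueryableIndexSegment so it can be handed to a query runner. A stripped-down sketch of that wrapping (segmentDir, the data source name, and the indexIO instance are assumed here for illustration):

    QueryableIndex index = indexIO.loadIndex(segmentDir);
    QueryableIndexSegment segment = new QueryableIndexSegment(index, SegmentId.dummy("test-datasource"));

SegmentId.dummy() builds a placeholder segment id from just a name, which is sufficient for unit tests like this one.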

Example 4 with QueryableIndex

Use of org.apache.druid.segment.QueryableIndex in project druid by druid-io.

From the class GroupByLimitPushDownInsufficientBufferTest, the method setup:

@Before
public void setup() throws Exception {
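    // Note: tmpDir, incrementalIndices, groupByIndices, INDEX_IO, and INDEX_MERGER_V9 are fields of the test class.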
    tmpDir = FileUtils.createTempDir();
    InputRow row;
    List<String> dimNames = Arrays.asList("dimA", "metA");
    Map<String, Object> event;
    final IncrementalIndex indexA = makeIncIndex(false);
    incrementalIndices.add(indexA);
    event = new HashMap<>();
    event.put("dimA", "hello");
    event.put("metA", 100);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 95);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "world");
    event.put("metA", 75);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "fubaz");
    event.put("metA", 75);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "zortaxx");
    event.put("metA", 999);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "blarg");
    event.put("metA", 125);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "blerg");
    event.put("metA", 130);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexA.add(row);
    final File fileA = INDEX_MERGER_V9.persist(indexA, new File(tmpDir, "A"), new IndexSpec(), OffHeapMemorySegmentWriteOutMediumFactory.instance());
    QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
    final IncrementalIndex indexB = makeIncIndex(false);
    incrementalIndices.add(indexB);
    event = new HashMap<>();
    event.put("dimA", "foo");
    event.put("metA", 200);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "world");
    event.put("metA", 75);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 95);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "zebra");
    event.put("metA", 180);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "blorg");
    event.put("metA", 120);
    row = new MapBasedInputRow(1000, dimNames, event);
    indexB.add(row);
    final File fileB = INDEX_MERGER_V9.persist(indexB, new File(tmpDir, "B"), new IndexSpec(), OffHeapMemorySegmentWriteOutMediumFactory.instance());
    QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);
    groupByIndices = Arrays.asList(qindexA, qindexB);
    resourceCloser = Closer.create();
    setupGroupByFactory();
}
Also used: IndexSpec (org.apache.druid.segment.IndexSpec), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), QueryableIndex (org.apache.druid.segment.QueryableIndex), InputRow (org.apache.druid.data.input.InputRow), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), File (java.io.File), Before (org.junit.Before)
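
Stripped of the repeated event maps, setup is a persist-then-load round trip: rows are added to an in-heap IncrementalIndex, the index is persisted to disk, and the resulting directory is reopened as an immutable QueryableIndex. A condensed sketch reusing the test's makeIncIndex, tmpDir, INDEX_MERGER_V9, and INDEX_IO fixtures (the row values and the "sketch" directory name are illustrative):

    Map<String, Object> event = new HashMap<>();
    event.put("dimA", "hello");
    event.put("metA", 100);
    IncrementalIndex inc = makeIncIndex(false);
    inc.add(new MapBasedInputRow(1000, Arrays.asList("dimA", "metA"), event));
    File dir = INDEX_MERGER_V9.persist(inc, new File(tmpDir, "sketch"), new IndexSpec(), OffHeapMemorySegmentWriteOutMediumFactory.instance());
    QueryableIndex qindex = INDEX_IO.loadIndex(dir);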

Example 5 with QueryableIndex

Use of org.apache.druid.segment.QueryableIndex in project druid by druid-io.

From the class GroupByLimitPushDownMultiNodeMergeTest, the method getRunner1:

private List<QueryRunner<ResultRow>> getRunner1(int qIndexNumber) {
    List<QueryRunner<ResultRow>> runners = new ArrayList<>();
    QueryableIndex index = groupByIndices.get(qIndexNumber);
    QueryRunner<ResultRow> runner = makeQueryRunner(groupByFactory, SegmentId.dummy(index.toString()), new QueryableIndexSegment(index, SegmentId.dummy(index.toString())));
    runners.add(groupByFactory.getToolchest().preMergeQueryDecoration(runner));
    return runners;
}
Also used: QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment), QueryableIndex (org.apache.druid.segment.QueryableIndex), ArrayList (java.util.ArrayList), QueryRunner (org.apache.druid.query.QueryRunner), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), BySegmentQueryRunner (org.apache.druid.query.BySegmentQueryRunner)
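
getRunner1 builds a single decorated runner for the index selected by qIndexNumber. The same pattern generalizes to one runner per loaded index; a hedged sketch, assuming the same groupByFactory, makeQueryRunner, and groupByIndices fixtures used above:

    List<QueryRunner<ResultRow>> all = new ArrayList<>();
    for (QueryableIndex index : groupByIndices) {
        QueryRunner<ResultRow> runner = makeQueryRunner(
            groupByFactory,
            SegmentId.dummy(index.toString()),
            new QueryableIndexSegment(index, SegmentId.dummy(index.toString()))
        );
        // preMergeQueryDecoration applies the toolchest's per-runner decoration before results are merged.
        all.add(groupByFactory.getToolchest().preMergeQueryDecoration(runner));
    }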

Aggregations

QueryableIndex (org.apache.druid.segment.QueryableIndex): 67 uses
File (java.io.File): 29 uses
ArrayList (java.util.ArrayList): 29 uses
IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 18 uses
DataSegment (org.apache.druid.timeline.DataSegment): 18 uses
QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment): 17 uses
IOException (java.io.IOException): 15 uses
LinearShardSpec (org.apache.druid.timeline.partition.LinearShardSpec): 15 uses
IndexSpec (org.apache.druid.segment.IndexSpec): 14 uses
InputRow (org.apache.druid.data.input.InputRow): 13 uses
OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex): 13 uses
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 12 uses
SpecificSegmentsQuerySegmentWalker (org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker): 12 uses
List (java.util.List): 10 uses
Before (org.junit.Before): 10 uses
QueryRunner (org.apache.druid.query.QueryRunner): 9 uses
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 9 uses
IndexBuilder (org.apache.druid.segment.IndexBuilder): 9 uses
Nullable (javax.annotation.Nullable): 8 uses
ISE (org.apache.druid.java.util.common.ISE): 8 uses