
Example 76 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

From class GroupByMergedQueryRunner, method run.

@Override
public Sequence<T> run(final QueryPlus<T> queryPlus, final ResponseContext responseContext) {
    final GroupByQuery query = (GroupByQuery) queryPlus.getQuery();
    final GroupByQueryConfig querySpecificConfig = configSupplier.get().withOverrides(query);
    final boolean isSingleThreaded = querySpecificConfig.isSingleThreaded();
    final Pair<IncrementalIndex, Accumulator<IncrementalIndex, T>> indexAccumulatorPair = GroupByQueryHelper.createIndexAccumulatorPair(query, null, querySpecificConfig);
    final Pair<Queue, Accumulator<Queue, T>> bySegmentAccumulatorPair = GroupByQueryHelper.createBySegmentAccumulatorPair();
    final boolean bySegment = QueryContexts.isBySegment(query);
    final int priority = QueryContexts.getPriority(query);
    final QueryPlus<T> threadSafeQueryPlus = queryPlus.withoutThreadUnsafeState();
    final List<ListenableFuture<Void>> futures = Lists.newArrayList(Iterables.transform(queryables, new Function<QueryRunner<T>, ListenableFuture<Void>>() {

        @Override
        public ListenableFuture<Void> apply(final QueryRunner<T> input) {
            if (input == null) {
                throw new ISE("Null queryRunner! Looks to be some segment unmapping action happening");
            }
            ListenableFuture<Void> future = queryProcessingPool.submitRunnerTask(new AbstractPrioritizedQueryRunnerCallable<Void, T>(priority, input) {

                @Override
                public Void call() {
                    try {
                        if (bySegment) {
                            input.run(threadSafeQueryPlus, responseContext).accumulate(bySegmentAccumulatorPair.lhs, bySegmentAccumulatorPair.rhs);
                        } else {
                            input.run(threadSafeQueryPlus, responseContext).accumulate(indexAccumulatorPair.lhs, indexAccumulatorPair.rhs);
                        }
                        return null;
                    } catch (QueryInterruptedException e) {
                        throw new RuntimeException(e);
                    } catch (Exception e) {
                        log.error(e, "Exception with one of the sequences!");
                        Throwables.propagateIfPossible(e);
                        throw new RuntimeException(e);
                    }
                }
            });
            if (isSingleThreaded) {
                waitForFutureCompletion(query, ImmutableList.of(future), indexAccumulatorPair.lhs);
            }
            return future;
        }
    }));
    if (!isSingleThreaded) {
        waitForFutureCompletion(query, futures, indexAccumulatorPair.lhs);
    }
    if (bySegment) {
        return Sequences.simple(bySegmentAccumulatorPair.lhs);
    }
    return Sequences.withBaggage(Sequences.simple(Iterables.transform(indexAccumulatorPair.lhs.iterableWithPostAggregations(null, query.isDescending()), new Function<Row, T>() {

        @Override
        public T apply(Row input) {
            return (T) input;
        }
    })), indexAccumulatorPair.lhs);
}
Also used: Accumulator (org.apache.druid.java.util.common.guava.Accumulator), Function (com.google.common.base.Function), GroupByQuery (org.apache.druid.query.groupby.GroupByQuery), ISE (org.apache.druid.java.util.common.ISE), Queue (java.util.Queue), GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), TimeoutException (java.util.concurrent.TimeoutException), CancellationException (java.util.concurrent.CancellationException), ExecutionException (java.util.concurrent.ExecutionException), ListenableFuture (com.google.common.util.concurrent.ListenableFuture), Row (org.apache.druid.data.input.Row)
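
The runner above merges per-segment results by accumulating every result row into a shared IncrementalIndex through Druid's Sequence/Accumulator abstraction. As a point of reference, here is a minimal, self-contained sketch of that accumulate pattern using only Sequences.simple and the Accumulator interface that appear in this example; the integer summing is made up for illustration and is not part of the GroupBy code:

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.guava.Accumulator;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;

public class AccumulatorSketch {

    public static void main(String[] args) {
        // A Sequence is Druid's lazy, resource-aware counterpart to an Iterable.
        Sequence<Integer> sequence = Sequences.simple(ImmutableList.of(1, 2, 3));
        // accumulate() folds every element into a running value, the same way
        // GroupByMergedQueryRunner folds result rows into its IncrementalIndex.
        Integer total = sequence.accumulate(0, new Accumulator<Integer, Integer>() {

            @Override
            public Integer accumulate(Integer accumulated, Integer in) {
                return accumulated + in;
            }
        });
        // Prints 6.
        System.out.println(total);
    }
}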

Example 77 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

From class EmptyIndexTest, method testEmptyIndex.

@Test
public void testEmptyIndex() throws Exception {
    File tmpDir = File.createTempFile("emptyIndex", "");
    if (!tmpDir.delete()) {
        throw new IllegalStateException("tmp delete failed");
    }
    if (!tmpDir.mkdir()) {
        throw new IllegalStateException("tmp mkdir failed");
    }
    try {
        IncrementalIndex emptyIndex = new OnheapIncrementalIndex.Builder().setSimpleTestingIndexSchema().setMaxRowCount(1000).build();
        IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter(Intervals.of("2012-08-01/P3D"), emptyIndex, new ConciseBitmapFactory());
        TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory).merge(Collections.singletonList(emptyIndexAdapter), true, new AggregatorFactory[0], tmpDir, new IndexSpec(), -1);
        QueryableIndex emptyQueryableIndex = TestHelper.getTestIndexIO().loadIndex(tmpDir);
        Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions()));
        Assert.assertEquals("getMetricNames", 0, emptyQueryableIndex.getColumnNames().size());
        Assert.assertEquals("getDataInterval", Intervals.of("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval());
        Assert.assertEquals("getReadOnlyTimestamps", 0, emptyQueryableIndex.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
    } finally {
        FileUtils.deleteDirectory(tmpDir);
    }
}
Also used: IncrementalIndexAdapter (org.apache.druid.segment.incremental.IncrementalIndexAdapter), ConciseBitmapFactory (org.apache.druid.collections.bitmap.ConciseBitmapFactory), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), File (java.io.File), Test (org.junit.Test)

Example 78 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

From class IndexBuilder, method buildMMappedIndex.

public QueryableIndex buildMMappedIndex() {
    Preconditions.checkNotNull(indexMerger, "indexMerger");
    Preconditions.checkNotNull(tmpDir, "tmpDir");
    try (final IncrementalIndex incrementalIndex = buildIncrementalIndex()) {
        return indexIO.loadIndex(indexMerger.persist(incrementalIndex, new File(tmpDir, StringUtils.format("testIndex-%s", ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE))), indexSpec, null));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), IOException (java.io.IOException), File (java.io.File)
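
Example 78 relies on instance fields (indexMerger, indexIO, tmpDir, indexSpec) that IndexBuilder wires up elsewhere. The sketch below shows one plausible way to do the same persist-then-load round trip in a standalone test, reusing the TestHelper calls from Example 77. The temporary-directory handling, the added sample row, and the choice of OffHeapMemorySegmentWriteOutMediumFactory are assumptions for illustration, not part of the original snippet:

import java.io.File;
import java.nio.file.Files;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;

public class PersistAndLoadSketch {

    public static void main(String[] args) throws Exception {
        File tmpDir = Files.createTempDirectory("persistAndLoad").toFile();
        try (IncrementalIndex incrementalIndex = new OnheapIncrementalIndex.Builder()
                .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
                .setMaxRowCount(1000)
                .build()) {
            // Add one row so there is something to persist.
            incrementalIndex.add(new MapBasedInputRow(
                    System.currentTimeMillis(),
                    ImmutableList.of("d"),
                    ImmutableMap.<String, Object>of("d", "d1")));
            // Persist the in-memory index to disk, then map it back as a QueryableIndex.
            File persisted = TestHelper
                    .getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance())
                    .persist(incrementalIndex, tmpDir, new IndexSpec(), null);
            QueryableIndex queryableIndex = TestHelper.getTestIndexIO().loadIndex(persisted);
            System.out.println("columns: " + queryableIndex.getColumnNames());
            queryableIndex.close();
        }
    }
}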

Example 79 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

From class IndexBuilder, method buildIncrementalIndexWithRows.

private static IncrementalIndex buildIncrementalIndexWithRows(IncrementalIndexSchema schema, int maxRows, Iterable<InputRow> rows) {
    Preconditions.checkNotNull(schema, "schema");
    final IncrementalIndex incrementalIndex = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(maxRows).build();
    for (InputRow row : rows) {
        try {
            incrementalIndex.add(row);
        } catch (IndexSizeExceededException e) {
            throw new RuntimeException(e);
        }
    }
    return incrementalIndex;
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), InputRow (org.apache.druid.data.input.InputRow), IndexSizeExceededException (org.apache.druid.segment.incremental.IndexSizeExceededException)
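
A caller of a helper like this typically supplies an IncrementalIndexSchema built through IncrementalIndexSchema.Builder plus a list of MapBasedInputRows. The standalone sketch below mirrors that pattern end to end; the withMetrics builder call, the dimension name "d", and the long-sum metric over "val" are assumptions chosen for illustration, not taken from IndexBuilder itself:

import java.util.Arrays;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;

public class SchemaAndRowsSketch {

    public static void main(String[] args) throws Exception {
        // One count metric and one long-sum metric over the "val" field.
        IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
                .withMetrics(new CountAggregatorFactory("count"), new LongSumAggregatorFactory("sum", "val"))
                .build();
        IncrementalIndex index = new OnheapIncrementalIndex.Builder()
                .setIndexSchema(schema)
                .setMaxRowCount(500)
                .build();
        long t = System.currentTimeMillis();
        Iterable<InputRow> rows = Arrays.asList(
                new MapBasedInputRow(t, ImmutableList.of("d"), ImmutableMap.<String, Object>of("d", "d1", "val", 2L)),
                new MapBasedInputRow(t, ImmutableList.of("d"), ImmutableMap.<String, Object>of("d", "d2", "val", 3L)));
        for (InputRow row : rows) {
            index.add(row);
        }
        // Two distinct dimension values, so no rollup collapse: expect 2 stored rows.
        System.out.println("rows: " + index.size());
        index.close();
    }
}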

Example 80 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

From class IndexMergerRollupTest, method testStringFirstLastRollup.

private void testStringFirstLastRollup(AggregatorFactory[] aggregatorFactories) throws Exception {
    List<Map<String, Object>> eventsList = Arrays.asList(new HashMap<String, Object>() {

        {
            put("d", "d1");
            put("m", "m1");
        }
    }, new HashMap<String, Object>() {

        {
            put("d", "d1");
            put("m", "m2");
        }
    });
    final File tempDir = temporaryFolder.newFolder();
    List<QueryableIndex> indexes = new ArrayList<>();
    Instant time = Instant.now();
    for (Map<String, Object> events : eventsList) {
        IncrementalIndex toPersist = IncrementalIndexTest.createIndex(aggregatorFactories);
        toPersist.add(new MapBasedInputRow(time.toEpochMilli(), ImmutableList.of("d"), events));
        indexes.add(indexIO.loadIndex(indexMerger.persist(toPersist, tempDir, indexSpec, null)));
    }
    File indexFile = indexMerger.mergeQueryableIndex(indexes, true, aggregatorFactories, tempDir, indexSpec, null, -1);
    try (QueryableIndex mergedIndex = indexIO.loadIndex(indexFile)) {
        Assert.assertEquals("Number of rows should be 1", 1, mergedIndex.getNumRows());
    }
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), Instant (java.time.Instant), ArrayList (java.util.ArrayList), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), HashMap (java.util.HashMap), Map (java.util.Map), File (java.io.File)
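
The assertion in this test checks that rollup collapses the two events into a single row after the segment merge. The same behavior can be observed directly on an in-memory index, as in this minimal sketch; a count aggregator stands in for the string first/last aggregators, and the dimension and field names mirror the events above:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;

public class RollupSketch {

    public static void main(String[] args) throws Exception {
        IncrementalIndex index = new OnheapIncrementalIndex.Builder()
                .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
                .setMaxRowCount(1000)
                .build();
        long t = System.currentTimeMillis();
        // Same timestamp and same dimension value "d1": with rollup (the default for
        // this schema) both events aggregate into one stored row, with count = 2.
        // The differing "m" values are not dimensions, so they do not block rollup.
        index.add(new MapBasedInputRow(t, ImmutableList.of("d"), ImmutableMap.<String, Object>of("d", "d1", "m", "m1")));
        index.add(new MapBasedInputRow(t, ImmutableList.of("d"), ImmutableMap.<String, Object>of("d", "d1", "m", "m2")));
        System.out.println("stored rows: " + index.size());
        index.close();
    }
}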

Aggregations

IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 109
OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex): 85
File (java.io.File): 59
Test (org.junit.Test): 51
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 46
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 46
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 26
IncrementalIndexTest (org.apache.druid.segment.data.IncrementalIndexTest): 26
ArrayList (java.util.ArrayList): 25
IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema): 25
IndexSpec (org.apache.druid.segment.IndexSpec): 19
QueryableIndex (org.apache.druid.segment.QueryableIndex): 19
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 17
InputRow (org.apache.druid.data.input.InputRow): 15
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 14
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 12
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 11
IOException (java.io.IOException): 10
Before (org.junit.Before): 10
Interval (org.joda.time.Interval): 9