Example 16 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

Class NestedQueryPushDownTest, method setup:

@Before
public void setup() throws Exception {
    tmpDir = FileUtils.createTempDir();
    InputRow row;
    List<String> dimNames = Arrays.asList("dimA", "metA", "dimB", "metB");
    Map<String, Object> event;
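    // Index A: four rows over two "dimA" values, later persisted to tmpDir/A and reloaded as a QueryableIndex.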
    final IncrementalIndex indexA = makeIncIndex();
    incrementalIndices.add(indexA);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 1000L);
    event.put("dimB", "sweet");
    event.put("metB", 10L);
    row = new MapBasedInputRow(1505260888888L, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 1000L);
    event.put("dimB", "sweet");
    event.put("metB", 20L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 1000L);
    event.put("dimB", "sweet");
    event.put("metB", 10L);
    row = new MapBasedInputRow(1505264400000L, dimNames, event);
    indexA.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 1000L);
    event.put("dimB", "sweet");
    event.put("metB", 20L);
    row = new MapBasedInputRow(1505264400400L, dimNames, event);
    indexA.add(row);
    final File fileA = INDEX_MERGER_V9.persist(indexA, new File(tmpDir, "A"), new IndexSpec(), null);
    QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
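    // Index B: four more rows (here "dimB" varies between "sweet" and "sour"), persisted to tmpDir/B.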
    final IncrementalIndex indexB = makeIncIndex();
    incrementalIndices.add(indexB);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 1000L);
    event.put("dimB", "sweet");
    event.put("metB", 10L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 1000L);
    event.put("dimB", "sweet");
    event.put("metB", 20L);
    row = new MapBasedInputRow(1505260800000L, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "pomegranate");
    event.put("metA", 1000L);
    event.put("dimB", "sour");
    event.put("metB", 10L);
    row = new MapBasedInputRow(1505264400000L, dimNames, event);
    indexB.add(row);
    event = new HashMap<>();
    event.put("dimA", "mango");
    event.put("metA", 1000L);
    event.put("dimB", "sour");
    event.put("metB", 20L);
    row = new MapBasedInputRow(1505264400000L, dimNames, event);
    indexB.add(row);
    final File fileB = INDEX_MERGER_V9.persist(indexB, new File(tmpDir, "B"), new IndexSpec(), null);
    QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);
    groupByIndices = Arrays.asList(qindexA, qindexB);
    setupGroupByFactory();
}
Also used: IndexSpec (org.apache.druid.segment.IndexSpec), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), QueryableIndex (org.apache.druid.segment.QueryableIndex), InputRow (org.apache.druid.data.input.InputRow), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), File (java.io.File), Before (org.junit.Before)
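
The makeIncIndex() helper called in setup() is not shown in this snippet. Below is a minimal sketch of what such a helper can look like, reusing the OnheapIncrementalIndex builder pattern from the later examples; the metric names and the row limit are assumptions for illustration, not the actual NestedQueryPushDownTest code.

// Hypothetical makeIncIndex()-style helper (metric names and max row count are assumed).
private static IncrementalIndex makeIncIndex() {
    return new OnheapIncrementalIndex.Builder()
        .setIndexSchema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(
                    new LongSumAggregatorFactory("sumMetA", "metA"),
                    new LongSumAggregatorFactory("sumMetB", "metB"))
                .withRollup(false)
                .build())
        .setMaxRowCount(1000)
        .build();
}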

Example 17 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

Class SpatialFilterBonusTest, method makeIncrementalIndex:

private static IncrementalIndex makeIncrementalIndex() throws IOException {
    IncrementalIndex theIndex = new OnheapIncrementalIndex.Builder()
        .setIndexSchema(
            new IncrementalIndexSchema.Builder()
                .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                .withQueryGranularity(Granularities.DAY)
                .withMetrics(METRIC_AGGS)
                .withDimensionsSpec(
                    DimensionsSpec.builder()
                        .setSpatialDimensions(Collections.singletonList(new SpatialDimensionSchema("dim.geo", new ArrayList<>())))
                        .build())
                .build())
        .setMaxRowCount(NUM_POINTS)
        .build();
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", "val", 17L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", "val", 29L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", "val", 13L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", "val", 91L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", "val", 47L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)));
    // Add a bunch of random points, without replacement
    Set<String> alreadyChosen = new HashSet<>();
    Random rand = ThreadLocalRandom.current();
    for (int i = 6; i < NUM_POINTS; i++) {
        String coord = null;
        while (coord == null) {
            coord = StringUtils.format("%s,%s", (float) (rand.nextFloat() * 10 + 10.0), (float) (rand.nextFloat() * 10 + 10.0));
            if (!alreadyChosen.add(coord)) {
                coord = null;
            }
        }
        theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", coord, "val", i)));
    }
    return theIndex;
}
Also used: Random (java.util.Random), ThreadLocalRandom (java.util.concurrent.ThreadLocalRandom), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), SpatialDimensionSchema (org.apache.druid.data.input.impl.SpatialDimensionSchema), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), HashSet (java.util.HashSet)
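
The in-memory index built above can be queried directly by wrapping it in an IncrementalIndexSegment, which is exactly what the constructorFeeder methods in the following examples do. A minimal sketch, assuming SegmentId.dummy for the segment identifier (the tests above simply pass null):

// Wrap the on-heap index as a queryable Segment; the segment id here is an assumption.
IncrementalIndex index = makeIncrementalIndex();
Segment segment = new IncrementalIndexSegment(index, SegmentId.dummy("spatial-bonus-test"));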

Example 18 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

Class SpatialFilterBonusTest, method constructorFeeder:

@Parameterized.Parameters
public static Collection<?> constructorFeeder() throws IOException {
    List<Object[]> argumentArrays = new ArrayList<>();
    for (SegmentWriteOutMediumFactory segmentWriteOutMediumFactory : SegmentWriteOutMediumFactory.builtInFactories()) {
        IndexMerger indexMerger = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
        IndexIO indexIO = TestHelper.getTestIndexIO();
        final IndexSpec indexSpec = new IndexSpec();
        final IncrementalIndex rtIndex = makeIncrementalIndex();
        final QueryableIndex mMappedTestIndex = makeQueryableIndex(indexSpec, indexMerger, indexIO);
        final QueryableIndex mergedRealtimeIndex = makeMergedQueryableIndex(indexSpec, indexMerger, indexIO);
        argumentArrays.add(new Object[] { new IncrementalIndexSegment(rtIndex, null) });
        argumentArrays.add(new Object[] { new QueryableIndexSegment(mMappedTestIndex, null) });
        argumentArrays.add(new Object[] { new QueryableIndexSegment(mergedRealtimeIndex, null) });
    }
    return argumentArrays;
}
Also used: QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment), IndexMerger (org.apache.druid.segment.IndexMerger), IndexSpec (org.apache.druid.segment.IndexSpec), IndexIO (org.apache.druid.segment.IndexIO), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment), QueryableIndex (org.apache.druid.segment.QueryableIndex), ArrayList (java.util.ArrayList), SegmentWriteOutMediumFactory (org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory)
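
Each Object[] added above becomes one JUnit constructor invocation, so every test in the class runs once per segment flavor and per SegmentWriteOutMediumFactory. A sketch of the matching parameterized plumbing; the field and constructor below are assumptions about the test class, not code copied from it:

// Hypothetical parameterized constructor that receives each Object[] element.
@RunWith(Parameterized.class)
public class SpatialFilterBonusTest {
    private final Segment segment;

    public SpatialFilterBonusTest(Segment segment) {
        this.segment = segment;
    }
    // ... test methods run their queries against this.segment ...
}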

Example 19 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

Class SpatialFilterBonusTest, method makeMergedQueryableIndex:

private static QueryableIndex makeMergedQueryableIndex(final IndexSpec indexSpec, final IndexMerger indexMerger, final IndexIO indexIO) {
    try {
        final IncrementalIndexSchema spatialSchema = new IncrementalIndexSchema.Builder()
            .withMinTimestamp(DATA_INTERVAL.getStartMillis())
            .withQueryGranularity(Granularities.DAY)
            .withMetrics(METRIC_AGGS)
            .withDimensionsSpec(
                DimensionsSpec.builder()
                    .setSpatialDimensions(Collections.singletonList(new SpatialDimensionSchema("dim.geo", new ArrayList<>())))
                    .build())
            .build();
        IncrementalIndex first = new OnheapIncrementalIndex.Builder().setIndexSchema(spatialSchema).setMaxRowCount(NUM_POINTS).build();
        IncrementalIndex second = new OnheapIncrementalIndex.Builder().setIndexSchema(spatialSchema).setMaxRowCount(NUM_POINTS).build();
        IncrementalIndex third = new OnheapIncrementalIndex.Builder().setIndexSchema(spatialSchema).setMaxRowCount(NUM_POINTS).build();
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", "val", 17L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", "val", 29L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", "val", 13L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", "val", 91L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", "val", 47L)));
        // Add a bunch of random points
        Random rand = ThreadLocalRandom.current();
        for (int i = 6; i < NUM_POINTS; i++) {
            third.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", StringUtils.format("%s,%s", (float) (rand.nextFloat() * 10 + 10.0), (float) (rand.nextFloat() * 10 + 10.0)), "val", i)));
        }
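        // createTempFile yields a unique path; the file is deleted below and its name is
        // reused as the parent directory for the per-index persist directories.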
        File tmpFile = File.createTempFile("yay", "who");
        tmpFile.delete();
        File firstFile = new File(tmpFile, "first");
        File secondFile = new File(tmpFile, "second");
        File thirdFile = new File(tmpFile, "third");
        File mergedFile = new File(tmpFile, "merged");
        FileUtils.mkdirp(firstFile);
        FileUtils.mkdirp(secondFile);
        FileUtils.mkdirp(thirdFile);
        FileUtils.mkdirp(mergedFile);
        firstFile.deleteOnExit();
        secondFile.deleteOnExit();
        thirdFile.deleteOnExit();
        mergedFile.deleteOnExit();
        indexMerger.persist(first, DATA_INTERVAL, firstFile, indexSpec, null);
        indexMerger.persist(second, DATA_INTERVAL, secondFile, indexSpec, null);
        indexMerger.persist(third, DATA_INTERVAL, thirdFile, indexSpec, null);
        QueryableIndex mergedRealtime = indexIO.loadIndex(indexMerger.mergeQueryableIndex(Arrays.asList(indexIO.loadIndex(firstFile), indexIO.loadIndex(secondFile), indexIO.loadIndex(thirdFile)), true, METRIC_AGGS, mergedFile, indexSpec, null, -1));
        return mergedRealtime;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), IOException (java.io.IOException), Random (java.util.Random), ThreadLocalRandom (java.util.concurrent.ThreadLocalRandom), QueryableIndex (org.apache.druid.segment.QueryableIndex), SpatialDimensionSchema (org.apache.druid.data.input.impl.SpatialDimensionSchema), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), File (java.io.File)
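
The createTempFile-then-delete idiom above exists only to obtain a unique directory name. A behavior-equivalent sketch using the FileUtils.createTempDir() helper that Example 16 already relies on, assuming the caller (or deleteOnExit) removes the directory afterwards:

// Alternative temp-directory bootstrap; the persisted layout is the same as above.
File tmpDir = FileUtils.createTempDir();
File firstFile = new File(tmpDir, "first");
File secondFile = new File(tmpDir, "second");
File thirdFile = new File(tmpDir, "third");
File mergedFile = new File(tmpDir, "merged");
FileUtils.mkdirp(firstFile);
FileUtils.mkdirp(secondFile);
FileUtils.mkdirp(thirdFile);
FileUtils.mkdirp(mergedFile);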

Example 20 with IncrementalIndex

Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.

Class SpatialFilterTest, method constructorFeeder:

@Parameterized.Parameters
public static Collection<?> constructorFeeder() throws IOException {
    final IndexSpec indexSpec = new IndexSpec();
    final IncrementalIndex rtIndex = makeIncrementalIndex();
    final QueryableIndex mMappedTestIndex = makeQueryableIndex(indexSpec);
    final QueryableIndex mergedRealtimeIndex = makeMergedQueryableIndex(indexSpec);
    return Arrays.asList(new Object[][] { { new IncrementalIndexSegment(rtIndex, null) }, { new QueryableIndexSegment(mMappedTestIndex, null) }, { new QueryableIndexSegment(mergedRealtimeIndex, null) } });
}
Also used: QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment), IndexSpec (org.apache.druid.segment.IndexSpec), IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment), QueryableIndex (org.apache.druid.segment.QueryableIndex)
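
The three parameterizations cover both Segment flavors: IncrementalIndexSegment wraps the in-memory, still-appendable index, while QueryableIndexSegment wraps a persisted, memory-mapped index. A rough usage sketch that iterates the parameter sets produced above (constructorFeeder() throws IOException, so run this from a context that declares it); whether asQueryableIndex() is null for the realtime segment is an assumption about the Druid version in use, not something these tests depend on:

// Inspect each Segment produced by constructorFeeder() (illustrative only).
for (Object args : constructorFeeder()) {
    Segment segment = (Segment) ((Object[]) args)[0];
    System.out.println(segment.getClass().getSimpleName()
        + ": hasQueryableIndex=" + (segment.asQueryableIndex() != null));
}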

Aggregations

IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 109
OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex): 85
File (java.io.File): 59
Test (org.junit.Test): 51
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 46
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 46
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 26
IncrementalIndexTest (org.apache.druid.segment.data.IncrementalIndexTest): 26
ArrayList (java.util.ArrayList): 25
IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema): 25
IndexSpec (org.apache.druid.segment.IndexSpec): 19
QueryableIndex (org.apache.druid.segment.QueryableIndex): 19
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 17
InputRow (org.apache.druid.data.input.InputRow): 15
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 14
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 12
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 11
IOException (java.io.IOException): 10
Before (org.junit.Before): 10
Interval (org.joda.time.Interval): 9