
Example 36 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IndexIOTest, method setUp:

@Before
public void setUp() throws IndexSizeExceededException {
    long timestamp = 0L;
    for (Map<String, Object> event : events1) {
        incrementalIndex1.add(new MapBasedInputRow(timestamp++, Lists.newArrayList(event.keySet()), event));
    }
    timestamp = 0L;
    for (Map<String, Object> event : events2) {
        incrementalIndex2.add(new MapBasedInputRow(timestamp++, Lists.newArrayList(event.keySet()), event));
    }
    adapter2 = new IncrementalIndexAdapter(DEFAULT_INTERVAL, incrementalIndex2, INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory());
    adapter1 = new IncrementalIndexAdapter(DEFAULT_INTERVAL, incrementalIndex1, INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory());
}
Also used: IncrementalIndexAdapter (io.druid.segment.incremental.IncrementalIndexAdapter), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), Before (org.junit.Before)
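The pattern shared by these examples is the three-argument MapBasedInputRow constructor: an epoch-millisecond timestamp, the list of dimension names, and the raw event map. A minimal, self-contained sketch of building and inspecting such a row follows; the dimension names and values are invented for illustration and are not taken from the Druid tests.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedInputRow;

import java.util.List;
import java.util.Map;

public class MapBasedInputRowSketch {
    public static void main(String[] args) {
        // Hypothetical event: keys and values are illustrative only.
        Map<String, Object> event = ImmutableMap.<String, Object>of("dim1", "foo", "dim2", "bar", "val", 17L);
        List<String> dimensions = ImmutableList.of("dim1", "dim2");
        MapBasedInputRow row = new MapBasedInputRow(0L, dimensions, event);
        // getDimension(...) returns the string value(s) for a dimension; getRaw(...) returns the untouched map entry.
        System.out.println(row.getTimestampFromEpoch()); // 0
        System.out.println(row.getDimensions());         // [dim1, dim2]
        System.out.println(row.getDimension("dim1"));    // [foo]
        System.out.println(row.getRaw("val"));           // 17
    }
}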

Example 37 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class StringDimensionHandlerTest, method getAdapters:

private static Pair<IncrementalIndexAdapter, IncrementalIndexAdapter> getAdapters(List<String> dims, Map<String, Object> event1, Map<String, Object> event2) throws Exception {
    IncrementalIndex incrementalIndex1 = new OnheapIncrementalIndex(
            TEST_INTERVAL.getStartMillis(),
            Granularities.NONE,
            true,
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
            new AggregatorFactory[] { new CountAggregatorFactory("count") },
            1000
    );
    IncrementalIndex incrementalIndex2 = new OnheapIncrementalIndex(
            TEST_INTERVAL.getStartMillis(),
            Granularities.NONE,
            true,
            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
            new AggregatorFactory[] { new CountAggregatorFactory("count") },
            1000
    );
    incrementalIndex1.add(new MapBasedInputRow(TEST_INTERVAL.getStartMillis(), dims, event1));
    incrementalIndex2.add(new MapBasedInputRow(TEST_INTERVAL.getStartMillis() + 3, dims, event2));
    IncrementalIndexAdapter adapter1 = new IncrementalIndexAdapter(TEST_INTERVAL, incrementalIndex1, INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory());
    IncrementalIndexAdapter adapter2 = new IncrementalIndexAdapter(TEST_INTERVAL, incrementalIndex2, INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory());
    return new Pair<>(adapter1, adapter2);
}
Also used: IncrementalIndexAdapter (io.druid.segment.incremental.IncrementalIndexAdapter), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), DimensionsSpec (io.druid.data.input.impl.DimensionsSpec), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), Pair (io.druid.java.util.common.Pair)
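A hedged usage sketch for the helper above: the dimension names and event contents below are hypothetical rather than values used by StringDimensionHandlerTest, and the snippet assumes it runs inside that test class so the private getAdapters is visible. Druid's Pair exposes its elements as the public lhs and rhs fields.

// Hypothetical caller; "d1"/"d2" and the event maps are made-up test data.
List<String> dims = Arrays.asList("d1", "d2");
Map<String, Object> event1 = ImmutableMap.<String, Object>of("d1", "a", "d2", "x");
Map<String, Object> event2 = ImmutableMap.<String, Object>of("d1", "b", "d2", "y");
Pair<IncrementalIndexAdapter, IncrementalIndexAdapter> adapters = getAdapters(dims, event1, event2);
IncrementalIndexAdapter adapter1 = adapters.lhs;
IncrementalIndexAdapter adapter2 = adapters.rhs;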

Example 38 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IndexMergerV9WithSpatialIndexTest, method makeMergedQueryableIndex:

private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) {
    try {
        IncrementalIndex first = new OnheapIncrementalIndex(
                new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build(),
                false,
                1000
        );
        IncrementalIndex second = new OnheapIncrementalIndex(
                new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build(),
                false,
                1000
        );
        IncrementalIndex third = new OnheapIncrementalIndex(
                new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build(),
                false,
                NUM_POINTS
        );
        first.add(new MapBasedInputRow(new DateTime("2013-01-01").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, "val", 17L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-02").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, "val", 29L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-03").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, "val", 13L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", "val", 101L)));
        first.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)));
        second.add(new MapBasedInputRow(new DateTime("2013-01-04").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, "val", 91L)));
        second.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, "val", 47L)));
        second.add(new MapBasedInputRow(new DateTime("2013-01-05").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L)));
        // Add a bunch of random points
        Random rand = new Random();
        for (int i = 8; i < NUM_POINTS; i++) {
            third.add(new MapBasedInputRow(new DateTime("2013-01-01").getMillis(), DIMS, ImmutableMap.<String, Object>of("timestamp", new DateTime("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), "val", i)));
        }
        File tmpFile = File.createTempFile("yay", "who");
        tmpFile.delete();
        File firstFile = new File(tmpFile, "first");
        File secondFile = new File(tmpFile, "second");
        File thirdFile = new File(tmpFile, "third");
        File mergedFile = new File(tmpFile, "merged");
        firstFile.mkdirs();
        secondFile.mkdirs();
        thirdFile.mkdirs();
        mergedFile.mkdirs();
        INDEX_MERGER_V9.persist(first, DATA_INTERVAL, firstFile, indexSpec);
        INDEX_MERGER_V9.persist(second, DATA_INTERVAL, secondFile, indexSpec);
        INDEX_MERGER_V9.persist(third, DATA_INTERVAL, thirdFile, indexSpec);
        try {
            QueryableIndex mergedRealtime = INDEX_IO.loadIndex(INDEX_MERGER_V9.mergeQueryableIndex(Arrays.asList(INDEX_IO.loadIndex(firstFile), INDEX_IO.loadIndex(secondFile), INDEX_IO.loadIndex(thirdFile)), true, METRIC_AGGS, mergedFile, indexSpec));
            return mergedRealtime;
        } finally {
            FileUtils.deleteDirectory(firstFile);
            FileUtils.deleteDirectory(secondFile);
            FileUtils.deleteDirectory(thirdFile);
            FileUtils.deleteDirectory(mergedFile);
        }
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}
Also used: IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), IOException (java.io.IOException), DateTime (org.joda.time.DateTime), Random (java.util.Random), SpatialDimensionSchema (io.druid.data.input.impl.SpatialDimensionSchema), DimensionsSpec (io.druid.data.input.impl.DimensionsSpec), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), File (java.io.File)
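The test above builds its working directories by calling File.createTempFile, deleting the file, and reusing the path with mkdirs(). As a sketch only, not part of the Druid test, the same directory layout could be created with java.nio.file.Files.createTempDirectory; the call throws IOException, so it would sit inside the same try block.

// Requires java.nio.file.Files in addition to java.io.File.
// Create a real temporary directory instead of reusing a deleted temp file's path.
File tmpDir = Files.createTempDirectory("index-merge-test").toFile();
File firstFile = new File(tmpDir, "first");
File secondFile = new File(tmpDir, "second");
File thirdFile = new File(tmpDir, "third");
File mergedFile = new File(tmpDir, "merged");
for (File dir : new File[] { firstFile, secondFile, thirdFile, mergedFile }) {
    if (!dir.mkdirs()) {
        throw new IllegalStateException("Could not create " + dir);
    }
}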

Example 39 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IndexMergerTest, method testAddMetricsBothSidesNull:

@Test
public void testAddMetricsBothSidesNull() throws IOException {
    IncrementalIndex index1 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A") });
    closer.closeLater(index1);
    long timestamp = System.currentTimeMillis();
    index1.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2", "A", 5)));
    IncrementalIndex index2 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C") });
    index2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2", "A", 5, "C", 6)));
    closer.closeLater(index2);
    IncrementalIndex index3 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A") });
    index3.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2", "A", 5)));
    Interval interval = new Interval(0, new DateTime().getMillis());
    RoaringBitmapFactory factory = new RoaringBitmapFactory();
    ArrayList<IndexableAdapter> toMerge = Lists.<IndexableAdapter>newArrayList(new IncrementalIndexAdapter(interval, index1, factory), new IncrementalIndexAdapter(interval, index2, factory), new IncrementalIndexAdapter(interval, index3, factory));
    final File tmpDirMerged = temporaryFolder.newFolder();
    File merged = INDEX_MERGER.merge(toMerge, true, new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C") }, tmpDirMerged, indexSpec);
    final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(closer.closeLater(INDEX_IO.loadIndex(merged)));
    Assert.assertEquals(ImmutableSet.of("A", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
Also used: IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory), DateTime (org.joda.time.DateTime), IncrementalIndexAdapter (io.druid.segment.incremental.IncrementalIndexAdapter), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), RoaringBitmapFactory (io.druid.collections.bitmap.RoaringBitmapFactory), File (java.io.File), Interval (org.joda.time.Interval), IncrementalIndexTest (io.druid.segment.data.IncrementalIndexTest), Test (org.junit.Test)
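IncrementalIndexTest.createIndex is a test helper whose body is not part of this listing. The following is only a guess at a minimal equivalent, assuming the six-argument OnheapIncrementalIndex constructor shown in Example 37 and a default count aggregator when the caller passes null; the real helper may differ.

// Hypothetical stand-in for IncrementalIndexTest.createIndex; not the actual helper.
private static IncrementalIndex createIndex(AggregatorFactory[] aggregatorFactories) {
    AggregatorFactory[] metrics = aggregatorFactories != null
            ? aggregatorFactories
            : new AggregatorFactory[] { new CountAggregatorFactory("count") };
    return new OnheapIncrementalIndex(
            0L,                                   // minimum timestamp accepted by the index
            Granularities.NONE,                   // no query-time granularity truncation
            true,                                 // roll up rows with identical dimensions and timestamps
            new DimensionsSpec(null, null, null), // discover dimensions from the incoming rows
            metrics,
            1000                                  // maximum rows held in memory
    );
}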

Example 40 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IndexMergerTest, method testPersistWithDifferentDims:

@Test
public void testPersistWithDifferentDims() throws Exception {
    IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null);
    toPersist.add(new MapBasedInputRow(1, Arrays.asList("dim1", "dim2"), ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2")));
    toPersist.add(new MapBasedInputRow(1, Arrays.asList("dim1"), ImmutableMap.<String, Object>of("dim1", "3")));
    final File tempDir = temporaryFolder.newFolder();
    QueryableIndex index = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist, tempDir, indexSpec)));
    Assert.assertEquals(2, index.getColumn(Column.TIME_COLUMN_NAME).getLength());
    Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index.getAvailableDimensions()));
    Assert.assertEquals(3, index.getColumnNames().size());
    assertDimCompression(index, indexSpec.getDimensionCompression());
    final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(index);
    final List<Rowboat> boatList = ImmutableList.copyOf(adapter.getRows());
    Assert.assertEquals(2, boatList.size());
    // Row 0 is (dim1="1", dim2="2"): dictionary ids 0 for "1" and 1 for "2" (the empty string takes id 0 in dim2).
    Assert.assertArrayEquals(new int[][] { { 0 }, { 1 } }, boatList.get(0).getDims());
    // Row 1 is (dim1="3", dim2 missing): id 1 for "3" and id 0 for the empty dim2 value.
    Assert.assertArrayEquals(new int[][] { { 1 }, { 0 } }, boatList.get(1).getDims());
    checkBitmapIndex(new ArrayList<Integer>(), adapter.getBitmapIndex("dim1", ""));
    checkBitmapIndex(Lists.newArrayList(0), adapter.getBitmapIndex("dim1", "1"));
    checkBitmapIndex(Lists.newArrayList(1), adapter.getBitmapIndex("dim1", "3"));
    checkBitmapIndex(Lists.newArrayList(1), adapter.getBitmapIndex("dim2", ""));
    checkBitmapIndex(Lists.newArrayList(0), adapter.getBitmapIndex("dim2", "2"));
}
Also used: IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), File (java.io.File), IncrementalIndexTest (io.druid.segment.data.IncrementalIndexTest), Test (org.junit.Test)
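The second row persisted above omits dim2 entirely, which is why dim2 resolves to the empty string for that row in the bitmap checks. A small sketch of how MapBasedInputRow itself reports a missing dimension, assuming the MapBasedRow behavior of returning an empty list rather than failing:

// Row with only dim1 present, mirroring the second row persisted above.
MapBasedInputRow sparseRow = new MapBasedInputRow(1L, Arrays.asList("dim1"), ImmutableMap.<String, Object>of("dim1", "3"));
System.out.println(sparseRow.getDimension("dim1")); // [3]
System.out.println(sparseRow.getDimension("dim2")); // [] -- the missing dimension comes back empty, not an error
System.out.println(sparseRow.getRaw("dim2"));       // null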

Aggregations

MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 73
Test (org.junit.Test): 51
DateTime (org.joda.time.DateTime): 38
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 32
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 30
File (java.io.File): 19
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 13
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 12
InputRow (io.druid.data.input.InputRow): 11
IncrementalIndexTest (io.druid.segment.data.IncrementalIndexTest): 11
Interval (org.joda.time.Interval): 11
IOException (java.io.IOException): 10
DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 9
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 8
Row (io.druid.data.input.Row): 7
TaskStatus (io.druid.indexing.common.TaskStatus): 7
TaskToolbox (io.druid.indexing.common.TaskToolbox): 7
TestIndexerMetadataStorageCoordinator (io.druid.indexing.test.TestIndexerMetadataStorageCoordinator): 7
SpatialDimensionSchema (io.druid.data.input.impl.SpatialDimensionSchema): 6
Pair (io.druid.java.util.common.Pair): 6