Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io: class IndexMergerV9WithSpatialIndexTest, method makeIncrementalIndex.
private static IncrementalIndex makeIncrementalIndex() throws IOException {
  IncrementalIndex theIndex = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(
          new IncrementalIndexSchema.Builder()
              .withMinTimestamp(DATA_INTERVAL.getStartMillis())
              .withQueryGranularity(Granularities.DAY)
              .withMetrics(METRIC_AGGS)
              .withDimensionsSpec(
                  DimensionsSpec.builder()
                      .setSpatialDimensions(
                          Arrays.asList(
                              new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                              new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2"))
                          )
                      )
                      .build()
              )
              .build()
      )
      .setMaxRowCount(NUM_POINTS)
      .build();
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-01").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, "val", 17L)
  ));
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-02").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, "val", 29L)
  ));
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-03").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, "val", 13L)
  ));
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-04").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, "val", 91L)
  ));
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-05").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, "val", 47L)
  ));
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-05").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", "val", 101L)
  ));
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-05").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)
  ));
  theIndex.add(new MapBasedInputRow(
      DateTimes.of("2013-01-05").getMillis(), DIMS,
      ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L)
  ));
  // Add a bunch of random points; the eight fixed rows above occupy
  // row numbers 0-7, so the counter starts at 8.
  Random rand = ThreadLocalRandom.current();
  for (int i = 8; i < NUM_POINTS; i++) {
    theIndex.add(new MapBasedInputRow(
        DateTimes.of("2013-01-01").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), "val", i)
    ));
  }
  return theIndex;
}
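The queries that exercise this index are not part of this excerpt, but the "dim.geo" spatial dimension built above is the kind of column a spatial filter targets. A radius-filtered timeseries query against it might look roughly like the sketch below; the data source name, interval, and aggregator are placeholder assumptions, not taken from the test:

// Hypothetical usage sketch: filter on the spatial dimension "dim.geo"
// with a radius bound centered at the origin. Names are placeholders.
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource("test")
    .granularity(Granularities.ALL)
    .intervals("2013-01-01/2013-01-07")
    .filters(new SpatialDimFilter("dim.geo", new RadiusBound(new float[]{0.0f, 0.0f}, 5)))
    .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
    .build();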
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io: class IndexMergerV9WithSpatialIndexTest, method constructorFeeder.
@Parameterized.Parameters
public static Collection<?> constructorFeeder() throws IOException {
  List<Object[]> argumentArrays = new ArrayList<>();
  for (SegmentWriteOutMediumFactory segmentWriteOutMediumFactory : SegmentWriteOutMediumFactory.builtInFactories()) {
    IndexMergerV9 indexMergerV9 = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
    IndexIO indexIO = TestHelper.getTestIndexIO();
    final IndexSpec indexSpec = new IndexSpec();
    final IncrementalIndex rtIndex = makeIncrementalIndex();
    final QueryableIndex mMappedTestIndex = makeQueryableIndex(indexSpec, indexMergerV9, indexIO);
    final QueryableIndex mergedRealtimeIndex = makeMergedQueryableIndex(indexSpec, indexMergerV9, indexIO);
    // Exercise all three segment flavors: realtime (incremental), memory-mapped, and merged.
    argumentArrays.add(new Object[] { new IncrementalIndexSegment(rtIndex, null) });
    argumentArrays.add(new Object[] { new QueryableIndexSegment(mMappedTestIndex, null) });
    argumentArrays.add(new Object[] { new QueryableIndexSegment(mergedRealtimeIndex, null) });
  }
  return argumentArrays;
}
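For reference, JUnit's Parameterized runner hands each Object[] produced by constructorFeeder() to the test-class constructor, so the class presumably declares something like the following sketch (the field name is an assumption):

// Sketch of the matching constructor: one Segment per Object[] above.
private final Segment segment;

public IndexMergerV9WithSpatialIndexTest(Segment segment) {
  this.segment = segment;
}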
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io: class IndexMergerV9WithSpatialIndexTest, method makeMergedQueryableIndex.
private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec, IndexMergerV9 indexMergerV9, IndexIO indexIO) {
  try {
    // All three indexes share one schema; they differ only in max row count and contents.
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
        .withQueryGranularity(Granularities.DAY)
        .withMetrics(METRIC_AGGS)
        .withDimensionsSpec(
            DimensionsSpec.builder()
                .setSpatialDimensions(
                    Arrays.asList(
                        new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                        new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2"))
                    )
                )
                .build()
        )
        .build();
    IncrementalIndex first = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(1000).build();
    IncrementalIndex second = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(1000).build();
    IncrementalIndex third = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(NUM_POINTS).build();
    first.add(new MapBasedInputRow(
        DateTimes.of("2013-01-01").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, "val", 17L)
    ));
    first.add(new MapBasedInputRow(
        DateTimes.of("2013-01-02").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, "val", 29L)
    ));
    first.add(new MapBasedInputRow(
        DateTimes.of("2013-01-03").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, "val", 13L)
    ));
    first.add(new MapBasedInputRow(
        DateTimes.of("2013-01-05").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", "val", 101L)
    ));
    first.add(new MapBasedInputRow(
        DateTimes.of("2013-01-05").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)
    ));
    second.add(new MapBasedInputRow(
        DateTimes.of("2013-01-04").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, "val", 91L)
    ));
    second.add(new MapBasedInputRow(
        DateTimes.of("2013-01-05").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, "val", 47L)
    ));
    second.add(new MapBasedInputRow(
        DateTimes.of("2013-01-05").getMillis(), DIMS,
        ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L)
    ));
    // Add a bunch of random points
    Random rand = ThreadLocalRandom.current();
    for (int i = 8; i < NUM_POINTS; i++) {
      third.add(new MapBasedInputRow(
          DateTimes.of("2013-01-01").getMillis(), DIMS,
          ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), "val", i)
      ));
    }
    // Create a unique temp path, then delete the file so its name can be reused
    // as the parent directory for the per-index output directories.
    File tmpFile = File.createTempFile("yay", "who");
    tmpFile.delete();
    File firstFile = new File(tmpFile, "first");
    File secondFile = new File(tmpFile, "second");
    File thirdFile = new File(tmpFile, "third");
    File mergedFile = new File(tmpFile, "merged");
    FileUtils.mkdirp(firstFile);
    FileUtils.mkdirp(secondFile);
    FileUtils.mkdirp(thirdFile);
    FileUtils.mkdirp(mergedFile);
    indexMergerV9.persist(first, DATA_INTERVAL, firstFile, indexSpec, null);
    indexMergerV9.persist(second, DATA_INTERVAL, secondFile, indexSpec, null);
    indexMergerV9.persist(third, DATA_INTERVAL, thirdFile, indexSpec, null);
    try {
      // Merge the three persisted segments with rollup enabled; -1 leaves the
      // number of columns merged at once unlimited.
      return indexIO.loadIndex(
          indexMergerV9.mergeQueryableIndex(
              Arrays.asList(indexIO.loadIndex(firstFile), indexIO.loadIndex(secondFile), indexIO.loadIndex(thirdFile)),
              true,
              METRIC_AGGS,
              mergedFile,
              indexSpec,
              null,
              -1
          )
      );
    } finally {
      FileUtils.deleteDirectory(firstFile);
      FileUtils.deleteDirectory(secondFile);
      FileUtils.deleteDirectory(thirdFile);
      FileUtils.deleteDirectory(mergedFile);
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
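Stripped of the merge step, the persist-and-load round trip at the heart of this method reduces to a few calls. A minimal sketch reusing the fixtures above (first, DATA_INTERVAL, indexSpec, indexMergerV9, indexIO); the temp-directory helper is an assumption about Druid's FileUtils:

// Minimal persist-and-load round trip, reusing the fixtures above.
File dir = FileUtils.createTempDir(); // assumed helper; any empty directory works
indexMergerV9.persist(first, DATA_INTERVAL, dir, indexSpec, null);
QueryableIndex reloaded = indexIO.loadIndex(dir);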
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io: class TimeseriesQueryRunnerBonusTest, method testOneRowAtATime.
@Test
public void testOneRowAtATime() throws Exception {
  final IncrementalIndex oneRowIndex = new OnheapIncrementalIndex.Builder()
      .setIndexSchema(
          new IncrementalIndexSchema.Builder()
              .withMinTimestamp(DateTimes.of("2012-01-01T00:00:00Z").getMillis())
              .build()
      )
      .setMaxRowCount(1000)
      .build();
  List<Result<TimeseriesResultValue>> results;
  oneRowIndex.add(new MapBasedInputRow(
      DateTimes.of("2012-01-01T00:00:00Z").getMillis(),
      ImmutableList.of("dim1"),
      ImmutableMap.of("dim1", "x")
  ));
  results = runTimeseriesCount(oneRowIndex);
  Assert.assertEquals("index size", 1, oneRowIndex.size());
  Assert.assertEquals("result size", 1, results.size());
  Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp());
  Assert.assertEquals("result count metric", 1, (long) results.get(0).getValue().getLongMetric("rows"));
  oneRowIndex.add(new MapBasedInputRow(
      DateTimes.of("2012-01-01T00:00:00Z").getMillis(),
      ImmutableList.of("dim1"),
      ImmutableMap.of("dim1", "y")
  ));
  results = runTimeseriesCount(oneRowIndex);
  Assert.assertEquals("index size", 2, oneRowIndex.size());
  Assert.assertEquals("result size", 1, results.size());
  Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp());
  Assert.assertEquals("result count metric", 2, (long) results.get(0).getValue().getLongMetric("rows"));
}
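The runTimeseriesCount helper is not shown in this excerpt. Judging from the getLongMetric("rows") assertions, it presumably runs a timeseries query with a single count aggregator named "rows" over the index. A rough sketch, with imports elided as in the excerpts above and the data source, interval, and runner plumbing all assumed:

// Hypothetical sketch of runTimeseriesCount, not taken from the test source.
private static List<Result<TimeseriesResultValue>> runTimeseriesCount(IncrementalIndex index) {
  QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
      new TimeseriesQueryQueryToolChest(),
      new TimeseriesQueryEngine(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER
  );
  QueryRunner<Result<TimeseriesResultValue>> runner =
      factory.createRunner(new IncrementalIndexSegment(index, null));
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test")
      .intervals("2012-01-01T00:00:00Z/P1D")
      .aggregators(Collections.singletonList(new CountAggregatorFactory("rows")))
      .build();
  return runner.run(QueryPlus.wrap(query)).toList();
}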
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io: class CustomSegmentizerFactoryTest, method testCustomSegmentizerPersist.
@Test
public void testCustomSegmentizerPersist() throws IOException {
  IncrementalIndex data = TestIndex.makeRealtimeIndex("druid.sample.numeric.tsv");
  File segment = new File(temporaryFolder.newFolder(), "segment");
  // The final IndexSpec argument supplies the segmentizer factory; the nulls fall back to defaults.
  File persisted = INDEX_MERGER.persist(
      data,
      Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"),
      segment,
      new IndexSpec(null, null, null, null, new CustomSegmentizerFactory()),
      null
  );
  // Persisting writes a factory.json describing how the segment should be loaded.
  File factoryJson = new File(persisted, "factory.json");
  Assert.assertTrue(factoryJson.exists());
  SegmentizerFactory factory = JSON_MAPPER.readValue(factoryJson, SegmentizerFactory.class);
  Assert.assertTrue(factory instanceof CustomSegmentizerFactory);
}
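CustomSegmentizerFactory itself is not shown here. SegmentizerFactory is a Jackson-polymorphic interface, so a minimal custom implementation might look like the sketch below; the JSON type name, the four-argument factorize signature, and the delegation to the standard memory-mapped factory are all assumptions rather than the test's actual code:

// Hypothetical sketch of a custom SegmentizerFactory; details are assumed.
@JsonTypeName("customSegmentFactory")
public class CustomSegmentizerFactory implements SegmentizerFactory {
  @Override
  public Segment factorize(DataSegment segment, File parentDir, boolean lazy, SegmentLazyLoadFailCallback loadFailed) throws SegmentLoadingException {
    // Delegate actual loading to the standard memory-mapped segmentizer.
    return new MMappedQueryableSegmentizerFactory(TestHelper.getTestIndexIO())
        .factorize(segment, parentDir, lazy, loadFailed);
  }
}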