Use of org.apache.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From the class IndexMergerV9, method persist:
@Override
public File persist(
    final IncrementalIndex index,
    final Interval dataInterval,
    File outDir,
    IndexSpec indexSpec,
    ProgressIndicator progress,
    @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory
) throws IOException
{
  if (index.isEmpty()) {
    throw new IAE("Trying to persist an empty index!");
  }
  final DateTime firstTimestamp = index.getMinTime();
  final DateTime lastTimestamp = index.getMaxTime();
  if (!(dataInterval.contains(firstTimestamp) && dataInterval.contains(lastTimestamp))) {
    throw new IAE("interval[%s] does not encapsulate the full range of timestamps[%s, %s]", dataInterval, firstTimestamp, lastTimestamp);
  }
  FileUtils.mkdirp(outDir);
  log.debug("Starting persist for interval[%s], rows[%,d]", dataInterval, index.size());
  // Wrap the in-memory index in an IncrementalIndexAdapter and hand it to the merge pipeline.
  return multiphaseMerge(
      Collections.singletonList(new IncrementalIndexAdapter(dataInterval, index, indexSpec.getBitmapSerdeFactory().getBitmapFactory())),
      // the rollup flag makes no difference while merging a single iterable
      false,
      index.getMetricAggs(), null, outDir, indexSpec, indexSpec, progress, segmentWriteOutMediumFactory, -1
  );
}
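persist() turns an in-memory IncrementalIndex into a segment by wrapping it in an IncrementalIndexAdapter and running a single-input merge. Below is a minimal caller-side sketch, not taken from the Druid source: it reuses the fixtures that appear in the test examples further down (TestHelper, OnheapIncrementalIndex.Builder, a temporaryFolder rule, a segmentWriteOutMediumFactory field) and assumes BaseProgressIndicator as the ProgressIndicator implementation.

// Sketch only: build a tiny on-heap index, add one row, and persist it with the method above.
IncrementalIndex index = new OnheapIncrementalIndex.Builder()
    .setSimpleTestingIndexSchema()
    .setMaxRowCount(1000)
    .build();
// The data interval must contain every row timestamp, otherwise persist() throws the IAE shown above.
Interval dataInterval = Intervals.of("2012-08-01/P3D");
index.add(new MapBasedInputRow(dataInterval.getStartMillis(), Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2")));
File outDir = temporaryFolder.newFolder();
File segmentDir = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory)
                            // null is permitted for the @Nullable SegmentWriteOutMediumFactory parameter
                            .persist(index, dataInterval, outDir, new IndexSpec(), new BaseProgressIndicator(), null);

The EmptyIndexTest below exercises the same adapter through merge() instead of persist().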
Use of org.apache.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From the class EmptyIndexTest, method testEmptyIndex:
@Test
public void testEmptyIndex() throws Exception
{
  File tmpDir = File.createTempFile("emptyIndex", "");
  if (!tmpDir.delete()) {
    throw new IllegalStateException("tmp delete failed");
  }
  if (!tmpDir.mkdir()) {
    throw new IllegalStateException("tmp mkdir failed");
  }
  try {
    IncrementalIndex emptyIndex = new OnheapIncrementalIndex.Builder()
        .setSimpleTestingIndexSchema()
        .setMaxRowCount(1000)
        .build();
    IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter(
        Intervals.of("2012-08-01/P3D"),
        emptyIndex,
        new ConciseBitmapFactory()
    );
    TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory)
              .merge(Collections.singletonList(emptyIndexAdapter), true, new AggregatorFactory[0], tmpDir, new IndexSpec(), -1);
    QueryableIndex emptyQueryableIndex = TestHelper.getTestIndexIO().loadIndex(tmpDir);
    Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions()));
    Assert.assertEquals("getMetricNames", 0, emptyQueryableIndex.getColumnNames().size());
    Assert.assertEquals("getDataInterval", Intervals.of("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval());
    Assert.assertEquals("getReadOnlyTimestamps", 0, emptyQueryableIndex.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
  } finally {
    FileUtils.deleteDirectory(tmpDir);
  }
}
Use of org.apache.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From the class IndexMergerTestBase, method testMismatchedMetricsVarying:
@Test(expected = IAE.class)
public void testMismatchedMetricsVarying() throws IOException
{
  IncrementalIndex index2 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")}
  );
  closer.closeLater(index2);
  IncrementalIndex index5 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{new LongSumAggregatorFactory("C", "C"), new LongSumAggregatorFactory("B", "B")}
  );
  closer.closeLater(index5);
  Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc());
  RoaringBitmapFactory factory = new RoaringBitmapFactory();
  List<IndexableAdapter> toMerge = Collections.singletonList(new IncrementalIndexAdapter(interval, index2, factory));
  final File tmpDirMerged = temporaryFolder.newFolder();
  final File merged = indexMerger.merge(
      toMerge,
      true,
      new AggregatorFactory[]{new LongSumAggregatorFactory("B", "B"), new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("D", "D")},
      tmpDirMerged,
      indexSpec,
      -1
  );
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(closer.closeLater(indexIO.loadIndex(merged)));
  Assert.assertEquals(ImmutableSet.of("A", "B", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
Use of org.apache.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From the class IndexMergerTestBase, method testAddMetricsBothSidesNull:
@Test
public void testAddMetricsBothSidesNull() throws IOException
{
  IncrementalIndex index1 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A")});
  closer.closeLater(index1);
  long timestamp = System.currentTimeMillis();
  index1.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5)));
  IncrementalIndex index2 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")}
  );
  index2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5, "C", 6)));
  closer.closeLater(index2);
  // index1 and index3 carry only metric "A"; only index2 has "C".
  IncrementalIndex index3 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A")});
  index3.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5)));
  Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc());
  RoaringBitmapFactory factory = new RoaringBitmapFactory();
  List<IndexableAdapter> toMerge = Arrays.asList(
      new IncrementalIndexAdapter(interval, index1, factory),
      new IncrementalIndexAdapter(interval, index2, factory),
      new IncrementalIndexAdapter(interval, index3, factory)
  );
  final File tmpDirMerged = temporaryFolder.newFolder();
  File merged = indexMerger.merge(
      toMerge,
      true,
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")},
      tmpDirMerged,
      indexSpec,
      -1
  );
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(closer.closeLater(indexIO.loadIndex(merged)));
  // The merged segment exposes both metrics even though "C" is absent on both sides of index2.
  Assert.assertEquals(ImmutableSet.of("A", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
Use of org.apache.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From the class IndexMergerTestBase, method testAddMetrics:
@Test
public void testAddMetrics() throws IOException
{
  IncrementalIndex index1 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A")});
  closer.closeLater(index1);
  long timestamp = System.currentTimeMillis();
  index1.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5)));
  IncrementalIndex index2 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")}
  );
  index2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5, "C", 6)));
  closer.closeLater(index2);
  Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc());
  RoaringBitmapFactory factory = new RoaringBitmapFactory();
  List<IndexableAdapter> toMerge = Arrays.asList(
      new IncrementalIndexAdapter(interval, index1, factory),
      new IncrementalIndexAdapter(interval, index2, factory)
  );
  final File tmpDirMerged = temporaryFolder.newFolder();
  File merged = indexMerger.merge(
      toMerge,
      true,
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")},
      tmpDirMerged,
      indexSpec,
      -1
  );
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(closer.closeLater(indexIO.loadIndex(merged)));
  Assert.assertEquals(ImmutableSet.of("A", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
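IncrementalIndexTest.createIndex is a helper from Druid's test sources. An equivalent index can be assembled directly with the builder already used in EmptyIndexTest above; the sketch below is a hypothetical stand-in, assuming the builder's setSimpleTestingIndexSchema accepts the metric factories as varargs.

// Hypothetical stand-in for IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A")}).
IncrementalIndex index1 = new OnheapIncrementalIndex.Builder()
    .setSimpleTestingIndexSchema(new LongSumAggregatorFactory("A", "A"))
    .setMaxRowCount(1000)
    .build();

Any such index can then be wrapped in an IncrementalIndexAdapter and passed to merge() exactly as in the tests above.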