Use of io.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
Class TestIndex, method mergedRealtimeIndex.
public static QueryableIndex mergedRealtimeIndex()
{
  synchronized (log) {
    if (mergedRealtime != null) {
      return mergedRealtime;
    }
    try {
      IncrementalIndex top = makeRealtimeIndex("druid.sample.numeric.tsv.top");
      IncrementalIndex bottom = makeRealtimeIndex("druid.sample.numeric.tsv.bottom");

      File tmpFile = File.createTempFile("yay", "who");
      tmpFile.delete();
      File topFile = new File(tmpFile, "top");
      File bottomFile = new File(tmpFile, "bottom");
      File mergedFile = new File(tmpFile, "merged");
      topFile.mkdirs();
      topFile.deleteOnExit();
      bottomFile.mkdirs();
      bottomFile.deleteOnExit();
      mergedFile.mkdirs();
      mergedFile.deleteOnExit();

      INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, indexSpec);
      INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, indexSpec);

      mergedRealtime = INDEX_IO.loadIndex(
          INDEX_MERGER.mergeQueryableIndex(
              Arrays.asList(INDEX_IO.loadIndex(topFile), INDEX_IO.loadIndex(bottomFile)),
              true,
              METRIC_AGGS,
              mergedFile,
              indexSpec
          )
      );
      return mergedRealtime;
    }
    catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }
}
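The File.createTempFile("yay", "who") call followed by delete() above only reserves a unique path to reuse as a scratch directory. Below is a minimal sketch, not the project's code, of the same persist, merge, and load sequence using a java.nio.file temp directory instead; it assumes the same fields and locals from the snippet (INDEX_MERGER, INDEX_IO, METRIC_AGGS, DATA_INTERVAL, indexSpec, top, bottom) and would run inside the same try block.

// Sketch: scratch directories created directly, then persist -> merge -> load.
File baseDir = java.nio.file.Files.createTempDirectory("test-index").toFile();
File topDir = new File(baseDir, "top");
File bottomDir = new File(baseDir, "bottom");
File mergedDir = new File(baseDir, "merged");
topDir.mkdirs();
bottomDir.mkdirs();
mergedDir.mkdirs();

// Persist each half, merge the two on-disk segments, and mmap the result.
INDEX_MERGER.persist(top, DATA_INTERVAL, topDir, indexSpec);
INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomDir, indexSpec);
mergedRealtime = INDEX_IO.loadIndex(
    INDEX_MERGER.mergeQueryableIndex(
        Arrays.asList(INDEX_IO.loadIndex(topDir), INDEX_IO.loadIndex(bottomDir)),
        true,
        METRIC_AGGS,
        mergedDir,
        indexSpec
    )
);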
Use of io.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
Class TestIndex, method getNoRollupMMappedTestIndex.
public static QueryableIndex getNoRollupMMappedTestIndex()
{
  synchronized (log) {
    if (noRollupMmappedIndex != null) {
      return noRollupMmappedIndex;
    }
  }

  IncrementalIndex incrementalIndex = getNoRollupIncrementalTestIndex();
  noRollupMmappedIndex = persistRealtimeAndLoadMMapped(incrementalIndex);

  return noRollupMmappedIndex;
}
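Note that the synchronized block above only guards the null check; the index is built and assigned outside the lock, which is harmless for a test fixture but means two racing threads could each build it once. A sketch of a fully locked variant, using only names from the snippet (the method name here is hypothetical):

public static QueryableIndex getNoRollupMMappedTestIndexLocked()
{
  synchronized (log) {
    // Build and cache under the same lock so the work happens at most once.
    if (noRollupMmappedIndex == null) {
      noRollupMmappedIndex = persistRealtimeAndLoadMMapped(getNoRollupIncrementalTestIndex());
    }
    return noRollupMmappedIndex;
  }
}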
Use of io.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
Class IndexMergerTest, method testMergeSpecChange.
@Test
public void testMergeSpecChange() throws Exception
{
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist1);

  final File tempDir1 = temporaryFolder.newFolder();
  final File mergedDir = temporaryFolder.newFolder();
  final IndexableAdapter incrementalAdapter = new IncrementalIndexAdapter(
      toPersist1.getInterval(),
      toPersist1,
      indexSpec.getBitmapSerdeFactory().getBitmapFactory()
  );

  QueryableIndex index1 = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist1, tempDir1, indexSpec)));
  final IndexableAdapter queryableAdapter = new QueryableIndexIndexableAdapter(index1);
  INDEX_IO.validateTwoSegments(incrementalAdapter, queryableAdapter);

  Assert.assertEquals(2, index1.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions()));
  Assert.assertEquals(3, index1.getColumnNames().size());

  IndexSpec newSpec = new IndexSpec(
      indexSpec.getBitmapSerdeFactory(),
      CompressedObjectStrategy.CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression())
          ? CompressedObjectStrategy.CompressionStrategy.LZF
          : CompressedObjectStrategy.CompressionStrategy.LZ4,
      CompressedObjectStrategy.CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression())
          ? CompressedObjectStrategy.CompressionStrategy.LZF
          : CompressedObjectStrategy.CompressionStrategy.LZ4,
      CompressionFactory.LongEncodingStrategy.LONGS.equals(indexSpec.getLongEncoding())
          ? CompressionFactory.LongEncodingStrategy.AUTO
          : CompressionFactory.LongEncodingStrategy.LONGS
  );
  AggregatorFactory[] mergedAggregators = new AggregatorFactory[] { new CountAggregatorFactory("count") };
  QueryableIndex merged = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.mergeQueryableIndex(ImmutableList.of(index1), true, mergedAggregators, mergedDir, newSpec))
  );

  Assert.assertEquals(2, merged.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(merged.getAvailableDimensions()));
  Assert.assertEquals(3, merged.getColumnNames().size());
  INDEX_IO.validateTwoSegments(tempDir1, mergedDir);
  assertDimCompression(index1, indexSpec.getDimensionCompression());
  assertDimCompression(merged, newSpec.getDimensionCompression());
}
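The new IndexSpec above flips the dimension and metric compression (and the long encoding) so the merged segment is guaranteed to use a different spec than the input segment. A hypothetical helper, not part of the test, that expresses the same LZ4/LZF toggle:

// Hypothetical helper: pick the "other" compression strategy so the merge
// genuinely changes the spec. LZ4 becomes LZF, anything else becomes LZ4.
private static CompressedObjectStrategy.CompressionStrategy flipCompression(
    CompressedObjectStrategy.CompressionStrategy current
)
{
  return CompressedObjectStrategy.CompressionStrategy.LZ4.equals(current)
         ? CompressedObjectStrategy.CompressionStrategy.LZF
         : CompressedObjectStrategy.CompressionStrategy.LZ4;
}

With it, both compression arguments in the new IndexSpec would read flipCompression(indexSpec.getDimensionCompression()).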
Use of io.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
Class IndexMergerTest, method testPersistWithSegmentMetadata.
@Test
public void testPersistWithSegmentMetadata() throws Exception
{
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist);

  Map<String, Object> metadataElems = ImmutableMap.<String, Object>of("key", "value");
  toPersist.getMetadata().putAll(metadataElems);

  final File tempDir = temporaryFolder.newFolder();
  QueryableIndex index = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist, tempDir, indexSpec)));

  Assert.assertEquals(2, index.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index.getAvailableDimensions()));
  Assert.assertEquals(3, index.getColumnNames().size());
  assertDimCompression(index, indexSpec.getDimensionCompression());

  Assert.assertEquals(
      new Metadata()
          .setAggregators(IncrementalIndexTest.getDefaultCombiningAggregatorFactories())
          .setQueryGranularity(Granularities.NONE)
          .setRollup(Boolean.TRUE)
          .putAll(metadataElems),
      index.getMetadata()
  );
}
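The closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(...))) chain recurs in these tests. A hypothetical helper, not in the project, that names the pattern using only fields already visible in the snippets:

// Hypothetical helper: persist an in-memory IncrementalIndex to a directory,
// mmap it back as a QueryableIndex, and register it with the test's Closer.
private QueryableIndex persistAndLoad(IncrementalIndex toPersist, File dir) throws IOException
{
  return closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist, dir, indexSpec)));
}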
Use of io.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
Class IndexMergerTest, method testMergeNumericDims.
@Test
public void testMergeNumericDims() throws Exception
{
  IncrementalIndex toPersist1 = getIndexWithNumericDims();
  IncrementalIndex toPersist2 = getIndexWithNumericDims();

  final File tmpDir = temporaryFolder.newFolder();
  final File tmpDir2 = temporaryFolder.newFolder();
  final File tmpDirMerged = temporaryFolder.newFolder();

  QueryableIndex index1 = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist1, tmpDir, indexSpec)));
  QueryableIndex index2 = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist2, tmpDir2, indexSpec)));
  final QueryableIndex merged = closer.closeLater(
      INDEX_IO.loadIndex(
          INDEX_MERGER.mergeQueryableIndex(
              Arrays.asList(index1, index2),
              true,
              new AggregatorFactory[] { new CountAggregatorFactory("count") },
              tmpDirMerged,
              indexSpec
          )
      )
  );

  final IndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
  Iterable<Rowboat> boats = adapter.getRows();
  List<Rowboat> boatList = Lists.newArrayList(boats);

  Assert.assertEquals(ImmutableList.of("dimA", "dimB", "dimC"), ImmutableList.copyOf(adapter.getDimensionNames()));
  Assert.assertEquals(4, boatList.size());
  Assert.assertArrayEquals(new Object[] { 0L, 0.0f, new int[] { 2 } }, boatList.get(0).getDims());
  Assert.assertArrayEquals(new Object[] { 2L }, boatList.get(0).getMetrics());
  Assert.assertArrayEquals(new Object[] { 72L, 60000.789f, new int[] { 3 } }, boatList.get(1).getDims());
  Assert.assertArrayEquals(new Object[] { 2L }, boatList.get(1).getMetrics());
  Assert.assertArrayEquals(new Object[] { 100L, 4000.567f, new int[] { 1 } }, boatList.get(2).getDims());
  Assert.assertArrayEquals(new Object[] { 2L }, boatList.get(2).getMetrics());
  Assert.assertArrayEquals(new Object[] { 3001L, 1.2345f, new int[] { 0 } }, boatList.get(3).getDims());
  Assert.assertArrayEquals(new Object[] { 2L }, boatList.get(3).getMetrics());
}
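Since both input indexes contain the same four rows and the merge runs with rollup enabled, every merged row carries a count of 2. The per-row metric checks above could equally be written as a loop over the adapter's rows; a short sketch assuming the adapter from the test:

// Each distinct (dimA, dimB, dimC) combination was contributed once by each
// of the two inputs, so rollup sums the "count" metric to 2 for every row.
for (Rowboat boat : adapter.getRows()) {
  Assert.assertArrayEquals(new Object[] { 2L }, boat.getMetrics());
}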