Use of io.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From class StringDimensionHandlerTest, method testValidateSortedEncodedArrays:
@Test
public void testValidateSortedEncodedArrays() throws Exception {
  Map<String, Object> event1 = ImmutableMap.<String, Object>of(
      "penguins", Arrays.asList("adelie", "emperor"),
      "predators", Arrays.asList("seal")
  );
  Map<String, Object> event2 = ImmutableMap.<String, Object>of(
      "penguins", Arrays.asList("adelie", "emperor"),
      "predators", Arrays.asList("seal")
  );
  Pair<IncrementalIndexAdapter, IncrementalIndexAdapter> adapters = getAdapters(dimensions, event1, event2);
  IncrementalIndexAdapter adapter1 = adapters.lhs;
  IncrementalIndexAdapter adapter2 = adapters.rhs;
  validate(adapter1, adapter2);
}
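The getAdapters and validate calls are private helpers of StringDimensionHandlerTest and are not shown in this snippet. As a rough sketch, assuming the helpers ingest each event map via a MapBasedInputRow before wrapping the index (that ingestion step is an assumption; only the IncrementalIndexAdapter constructor mirrors calls that actually appear elsewhere on this page), building one of the two adapters could look like this hypothetical helper:

// Hypothetical sketch, not the actual test helper: ingest one event and wrap the
// resulting in-memory index in an IncrementalIndexAdapter. The MapBasedInputRow
// step is assumed; the adapter constructor matches the IndexMergerTest usages below.
private static IncrementalIndexAdapter adapterFor(Map<String, Object> event) throws Exception {
  IncrementalIndex index = IncrementalIndexTest.createIndex(null);
  index.add(new MapBasedInputRow(
      System.currentTimeMillis(),
      Arrays.asList("penguins", "predators"),
      event
  ));
  return new IncrementalIndexAdapter(index.getInterval(), index, new RoaringBitmapFactory());
}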
Use of io.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From class IndexMergerTest, method testMergeSpecChange:
@Test
public void testMergeSpecChange() throws Exception {
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist1);
  final File tempDir1 = temporaryFolder.newFolder();
  final File mergedDir = temporaryFolder.newFolder();
  final IndexableAdapter incrementalAdapter = new IncrementalIndexAdapter(
      toPersist1.getInterval(),
      toPersist1,
      indexSpec.getBitmapSerdeFactory().getBitmapFactory()
  );
  QueryableIndex index1 = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist1, tempDir1, indexSpec))
  );
  final IndexableAdapter queryableAdapter = new QueryableIndexIndexableAdapter(index1);
  INDEX_IO.validateTwoSegments(incrementalAdapter, queryableAdapter);
  Assert.assertEquals(2, index1.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions()));
  Assert.assertEquals(3, index1.getColumnNames().size());
  IndexSpec newSpec = new IndexSpec(
      indexSpec.getBitmapSerdeFactory(),
      CompressedObjectStrategy.CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression())
          ? CompressedObjectStrategy.CompressionStrategy.LZF
          : CompressedObjectStrategy.CompressionStrategy.LZ4,
      CompressedObjectStrategy.CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression())
          ? CompressedObjectStrategy.CompressionStrategy.LZF
          : CompressedObjectStrategy.CompressionStrategy.LZ4,
      CompressionFactory.LongEncodingStrategy.LONGS.equals(indexSpec.getLongEncoding())
          ? CompressionFactory.LongEncodingStrategy.AUTO
          : CompressionFactory.LongEncodingStrategy.LONGS
  );
  AggregatorFactory[] mergedAggregators = new AggregatorFactory[]{new CountAggregatorFactory("count")};
  QueryableIndex merged = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.mergeQueryableIndex(ImmutableList.of(index1), true, mergedAggregators, mergedDir, newSpec))
  );
  Assert.assertEquals(2, merged.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(merged.getAvailableDimensions()));
  Assert.assertEquals(3, merged.getColumnNames().size());
  INDEX_IO.validateTwoSegments(tempDir1, mergedDir);
  assertDimCompression(index1, indexSpec.getDimensionCompression());
  assertDimCompression(merged, newSpec.getDimensionCompression());
}
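The newSpec built above deliberately inverts the spec the test is parameterized with: LZ4 dimension and metric compression flips to LZF (and vice versa), and LONGS long encoding flips to AUTO (and vice versa), so the merge step has to re-encode the columns. A hypothetical helper, not present in the test, that expresses the same flip more readably:

// Hypothetical refactoring of the inline ternaries above; same behavior, same IndexSpec arguments.
private static IndexSpec flipSpec(IndexSpec spec) {
  CompressedObjectStrategy.CompressionStrategy flippedCompression =
      CompressedObjectStrategy.CompressionStrategy.LZ4.equals(spec.getDimensionCompression())
          ? CompressedObjectStrategy.CompressionStrategy.LZF
          : CompressedObjectStrategy.CompressionStrategy.LZ4;
  CompressionFactory.LongEncodingStrategy flippedEncoding =
      CompressionFactory.LongEncodingStrategy.LONGS.equals(spec.getLongEncoding())
          ? CompressionFactory.LongEncodingStrategy.AUTO
          : CompressionFactory.LongEncodingStrategy.LONGS;
  return new IndexSpec(spec.getBitmapSerdeFactory(), flippedCompression, flippedCompression, flippedEncoding);
}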
Use of io.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From class IndexMergerTest, method testConvertDifferent:
@Test
public void testConvertDifferent() throws Exception {
  final long timestamp = System.currentTimeMillis();
  final AggregatorFactory[] aggregators = new AggregatorFactory[]{
      new LongSumAggregatorFactory("longSum1", "dim1"),
      new LongSumAggregatorFactory("longSum2", "dim2")
  };
  IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(aggregators);
  IncrementalIndexTest.populateIndex(timestamp, toPersist1);
  final File tempDir1 = temporaryFolder.newFolder();
  final File convertDir = temporaryFolder.newFolder();
  final IndexableAdapter incrementalAdapter = new IncrementalIndexAdapter(
      toPersist1.getInterval(),
      toPersist1,
      indexSpec.getBitmapSerdeFactory().getBitmapFactory()
  );
  QueryableIndex index1 = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist1, tempDir1, indexSpec))
  );
  final IndexableAdapter queryableAdapter = new QueryableIndexIndexableAdapter(index1);
  INDEX_IO.validateTwoSegments(incrementalAdapter, queryableAdapter);
  Assert.assertEquals(2, index1.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions()));
  Assert.assertEquals(4, index1.getColumnNames().size());
  IndexSpec newSpec = new IndexSpec(
      indexSpec.getBitmapSerdeFactory(),
      CompressedObjectStrategy.CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression())
          ? CompressedObjectStrategy.CompressionStrategy.LZF
          : CompressedObjectStrategy.CompressionStrategy.LZ4,
      CompressedObjectStrategy.CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression())
          ? CompressedObjectStrategy.CompressionStrategy.LZF
          : CompressedObjectStrategy.CompressionStrategy.LZ4,
      CompressionFactory.LongEncodingStrategy.LONGS.equals(indexSpec.getLongEncoding())
          ? CompressionFactory.LongEncodingStrategy.AUTO
          : CompressionFactory.LongEncodingStrategy.LONGS
  );
  QueryableIndex converted = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.convert(tempDir1, convertDir, newSpec))
  );
  Assert.assertEquals(2, converted.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(converted.getAvailableDimensions()));
  Assert.assertEquals(4, converted.getColumnNames().size());
  INDEX_IO.validateTwoSegments(tempDir1, convertDir);
  assertDimCompression(index1, indexSpec.getDimensionCompression());
  assertDimCompression(converted, newSpec.getDimensionCompression());
  Assert.assertArrayEquals(getCombiningAggregators(aggregators), converted.getMetadata().getAggregators());
}
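Compared with testMergeSpecChange, this test leaves the segment contents alone and only re-encodes them: INDEX_MERGER.convert rewrites the segment in tempDir1 into convertDir under the flipped spec, so row count, dimensions, and column count stay the same, the converted segment picks up the new dimension compression, and its metadata still reports the combining form of the original longSum aggregators.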
Use of io.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From class IndexMergerTest, method testMergeRetainsValues:
@Test
public void testMergeRetainsValues() throws Exception {
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist1);
  final File tempDir1 = temporaryFolder.newFolder();
  final File mergedDir = temporaryFolder.newFolder();
  final IndexableAdapter incrementalAdapter = new IncrementalIndexAdapter(
      toPersist1.getInterval(),
      toPersist1,
      indexSpec.getBitmapSerdeFactory().getBitmapFactory()
  );
  QueryableIndex index1 = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist1, tempDir1, indexSpec))
  );
  final IndexableAdapter queryableAdapter = new QueryableIndexIndexableAdapter(index1);
  INDEX_IO.validateTwoSegments(incrementalAdapter, queryableAdapter);
  Assert.assertEquals(2, index1.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions()));
  Assert.assertEquals(3, index1.getColumnNames().size());
  QueryableIndex merged = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.mergeQueryableIndex(
          ImmutableList.of(index1),
          true,
          new AggregatorFactory[]{new CountAggregatorFactory("count")},
          mergedDir,
          indexSpec
      ))
  );
  Assert.assertEquals(2, merged.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(merged.getAvailableDimensions()));
  Assert.assertEquals(3, merged.getColumnNames().size());
  INDEX_IO.validateTwoSegments(tempDir1, mergedDir);
  assertDimCompression(index1, indexSpec.getDimensionCompression());
  assertDimCompression(merged, indexSpec.getDimensionCompression());
}
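Unlike testMergeSpecChange, this merge reuses the original indexSpec, so the persisted segment and the merged segment are expected to end up with the same dimension compression.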
Use of io.druid.segment.incremental.IncrementalIndexAdapter in project druid by druid-io.
From class IndexMergerTest, method testMismatchedMetrics:
@Test
public void testMismatchedMetrics() throws IOException {
  IncrementalIndex index1 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A")});
  closer.closeLater(index1);
  IncrementalIndex index2 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")});
  closer.closeLater(index2);
  IncrementalIndex index3 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("B", "B")});
  closer.closeLater(index3);
  IncrementalIndex index4 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("C", "C"), new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("B", "B")});
  closer.closeLater(index4);
  IncrementalIndex index5 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{new LongSumAggregatorFactory("C", "C"), new LongSumAggregatorFactory("B", "B")});
  closer.closeLater(index5);
  Interval interval = new Interval(0, new DateTime().getMillis());
  RoaringBitmapFactory factory = new RoaringBitmapFactory();
  ArrayList<IndexableAdapter> toMerge = Lists.<IndexableAdapter>newArrayList(
      new IncrementalIndexAdapter(interval, index1, factory),
      new IncrementalIndexAdapter(interval, index2, factory),
      new IncrementalIndexAdapter(interval, index3, factory),
      new IncrementalIndexAdapter(interval, index4, factory),
      new IncrementalIndexAdapter(interval, index5, factory)
  );
  final File tmpDirMerged = temporaryFolder.newFolder();
  File merged = INDEX_MERGER.merge(
      toMerge,
      true,
      new AggregatorFactory[]{
          new LongSumAggregatorFactory("A", "A"),
          new LongSumAggregatorFactory("B", "B"),
          new LongSumAggregatorFactory("C", "C"),
          new LongSumAggregatorFactory("D", "D")
      },
      tmpDirMerged,
      indexSpec
  );
  // Since D was not present in any of the indices, it is not present in the output
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(closer.closeLater(INDEX_IO.loadIndex(merged)));
  Assert.assertEquals(ImmutableSet.of("A", "B", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
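For reference, the persist-and-validate pattern the IndexMergerTest methods above repeat, condensed into one sketch; INDEX_IO, INDEX_MERGER, indexSpec, temporaryFolder, and closer are the same test fixtures used in the snippets above:

// Condensed from the tests above: wrap an in-memory index in an IncrementalIndexAdapter,
// persist it, reload the persisted segment as a QueryableIndex, and check both views agree.
IncrementalIndex index = IncrementalIndexTest.createIndex(null);
IncrementalIndexTest.populateIndex(System.currentTimeMillis(), index);
IndexableAdapter incremental = new IncrementalIndexAdapter(
    index.getInterval(),
    index,
    indexSpec.getBitmapSerdeFactory().getBitmapFactory()
);
File segmentDir = temporaryFolder.newFolder();
QueryableIndex persisted = closer.closeLater(
    INDEX_IO.loadIndex(INDEX_MERGER.persist(index, segmentDir, indexSpec))
);
INDEX_IO.validateTwoSegments(incremental, new QueryableIndexIndexableAdapter(persisted));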