Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
From the class IndexMergerTestBase, method testPersistEmptyColumn.
@Test
public void testPersistEmptyColumn() throws Exception {
final IncrementalIndex toPersist1 = new OnheapIncrementalIndex.Builder().setSimpleTestingIndexSchema().setMaxRowCount(10).build();
final IncrementalIndex toPersist2 = new OnheapIncrementalIndex.Builder().setSimpleTestingIndexSchema().setMaxRowCount(10).build();
final File tmpDir1 = temporaryFolder.newFolder();
final File tmpDir2 = temporaryFolder.newFolder();
final File tmpDir3 = temporaryFolder.newFolder();
// dim1 is listed as a dimension but given an empty list of values in both rows, so the persisted segments should drop it.
toPersist1.add(new MapBasedInputRow(1L, ImmutableList.of("dim1", "dim2"), ImmutableMap.of("dim1", ImmutableList.of(), "dim2", "foo")));
toPersist2.add(new MapBasedInputRow(1L, ImmutableList.of("dim1", "dim2"), ImmutableMap.of("dim1", ImmutableList.of(), "dim2", "bar")));
final QueryableIndex index1 = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist1, tmpDir1, indexSpec, null)));
final QueryableIndex index2 = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist2, tmpDir2, indexSpec, null)));
final QueryableIndex merged = closer.closeLater(indexIO.loadIndex(indexMerger.mergeQueryableIndex(
    Arrays.asList(index1, index2),
    true,
    new AggregatorFactory[] {},
    tmpDir3,
    indexSpec,
    null,
    -1
)));
Assert.assertEquals(1, index1.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
Assert.assertEquals(ImmutableList.of("dim2"), ImmutableList.copyOf(index1.getAvailableDimensions()));
Assert.assertEquals(1, index2.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
Assert.assertEquals(ImmutableList.of("dim2"), ImmutableList.copyOf(index2.getAvailableDimensions()));
Assert.assertEquals(2, merged.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
Assert.assertEquals(ImmutableList.of("dim2"), ImmutableList.copyOf(merged.getAvailableDimensions()));
assertDimCompression(index1, indexSpec.getDimensionCompression());
assertDimCompression(index2, indexSpec.getDimensionCompression());
assertDimCompression(merged, indexSpec.getDimensionCompression());
}
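For reference, the persist-and-reload pattern these tests repeat can be distilled to a few lines. This is only a sketch, not code from the project: it assumes the indexMerger, indexIO, indexSpec, and temporaryFolder fixtures that IndexMergerTestBase provides, runs inside a test method that declares throws Exception, and uses illustrative row values.

final IncrementalIndex index = new OnheapIncrementalIndex.Builder()
    .setSimpleTestingIndexSchema()
    .setMaxRowCount(10)   // in-memory row limit, same value as the test above
    .build();
// Add a single row: timestamp, dimension order, and the dimension values.
index.add(new MapBasedInputRow(1L, ImmutableList.of("dim2"), ImmutableMap.of("dim2", "foo")));
// Persist to a temporary directory and load the result back as a QueryableIndex.
final File outDir = temporaryFolder.newFolder();
final QueryableIndex queryable = indexIO.loadIndex(indexMerger.persist(index, outDir, indexSpec, null));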
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
From the class IndexMergerTestBase, method testMismatchedMetricsVarying.
@Test(expected = IAE.class)
public void testMismatchedMetricsVarying() throws IOException {
IncrementalIndex index2 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C") });
closer.closeLater(index2);
IncrementalIndex index5 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("C", "C"), new LongSumAggregatorFactory("B", "B") });
closer.closeLater(index5);
Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc());
RoaringBitmapFactory factory = new RoaringBitmapFactory();
List<IndexableAdapter> toMerge = Collections.singletonList(new IncrementalIndexAdapter(interval, index2, factory));
final File tmpDirMerged = temporaryFolder.newFolder();
final File merged = indexMerger.merge(
    toMerge,
    true,
    new AggregatorFactory[] {
        new LongSumAggregatorFactory("B", "B"),
        new LongSumAggregatorFactory("A", "A"),
        new LongSumAggregatorFactory("D", "D")
    },
    tmpDirMerged,
    indexSpec,
    -1
);
final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(closer.closeLater(indexIO.loadIndex(merged)));
Assert.assertEquals(ImmutableSet.of("A", "B", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
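Note that the method is annotated @Test(expected = IAE.class): it only passes when an IAE is thrown, which is expected to surface from merging indexes whose metric definitions disagree. The storage-adapter construction and the assertion on the available metrics are only reached if no exception is thrown.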
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
From the class IndexMergerTestBase, method testPersistNullColumnSkipping.
@Test
public void testPersistNullColumnSkipping() throws Exception {
// check that column d2 is skipped because it only has null values
IncrementalIndex index1 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A") });
index1.add(new MapBasedInputRow(1L, Arrays.asList("d1", "d2"), ImmutableMap.of("d1", "a", "A", 1)));
index1.add(new MapBasedInputRow(1L, Arrays.asList("d1", "d2"), ImmutableMap.of("d1", "b", "A", 1)));
final File tempDir = temporaryFolder.newFolder();
QueryableIndex index = closer.closeLater(indexIO.loadIndex(indexMerger.persist(index1, tempDir, indexSpec, null)));
List<String> expectedColumnNames = Arrays.asList("A", "d1");
List<String> actualColumnNames = Lists.newArrayList(index.getColumnNames());
Collections.sort(expectedColumnNames);
Collections.sort(actualColumnNames);
Assert.assertEquals(expectedColumnNames, actualColumnNames);
SmooshedFileMapper sfm = closer.closeLater(SmooshedFileMapper.load(tempDir));
List<String> expectedFilenames = Arrays.asList("A", "__time", "d1", "index.drd", "metadata.drd");
List<String> actualFilenames = new ArrayList<>(sfm.getInternalFilenames());
Collections.sort(expectedFilenames);
Collections.sort(actualFilenames);
Assert.assertEquals(expectedFilenames, actualFilenames);
}
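The SmooshedFileMapper check above is also useful on its own for inspecting which columns actually made it into a persisted segment. A minimal sketch, assuming segmentDir is a hypothetical directory already produced by indexMerger.persist:

final SmooshedFileMapper mapper = SmooshedFileMapper.load(segmentDir);
try {
    // Internal smoosh entries: one per persisted column (e.g. __time, d1, A) plus index.drd and metadata.drd.
    for (String internalFile : mapper.getInternalFilenames()) {
        System.out.println(internalFile);
    }
} finally {
    mapper.close();
}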
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
From the class IndexMergerTestBase, method testPersistWithSegmentMetadata.
@Test
public void testPersistWithSegmentMetadata() throws Exception {
final long timestamp = System.currentTimeMillis();
IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null);
IncrementalIndexTest.populateIndex(timestamp, toPersist);
Map<String, Object> metadataElems = ImmutableMap.of("key", "value");
toPersist.getMetadata().putAll(metadataElems);
final File tempDir = temporaryFolder.newFolder();
QueryableIndex index = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist, tempDir, indexSpec, null)));
Assert.assertEquals(2, index.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index.getAvailableDimensions()));
Assert.assertEquals(3, index.getColumnNames().size());
assertDimCompression(index, indexSpec.getDimensionCompression());
Assert.assertEquals(new Metadata(metadataElems, IncrementalIndexTest.getDefaultCombiningAggregatorFactories(), null, Granularities.NONE, Boolean.TRUE), index.getMetadata());
}
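The final assertion spells out everything that survives the round trip: the user-supplied key/value map, the combining aggregator factories (IncrementalIndexTest.getDefaultCombiningAggregatorFactories()), a null timestamp spec, Granularities.NONE as the query granularity, and rollup enabled (Boolean.TRUE). If the goal is only to attach custom metadata before persisting, the relevant part of the pattern reduces to the sketch below; it reuses the same test fixtures, and the key/value entry is purely illustrative.

final IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null);
IncrementalIndexTest.populateIndex(System.currentTimeMillis(), toPersist);
toPersist.getMetadata().putAll(ImmutableMap.of("source", "example-job"));   // arbitrary, illustrative entry
final QueryableIndex persisted = indexIO.loadIndex(
    indexMerger.persist(toPersist, temporaryFolder.newFolder(), indexSpec, null));
// persisted.getMetadata() now carries the "source" entry alongside the combining aggregators.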
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
From the class IndexMergerTestBase, method testAddMetricsBothSidesNull.
@Test
public void testAddMetricsBothSidesNull() throws IOException {
IncrementalIndex index1 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A") });
closer.closeLater(index1);
long timestamp = System.currentTimeMillis();
index1.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5)));
// Only index2 defines metric C; index1 and index3 leave it null.
IncrementalIndex index2 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C") });
index2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5, "C", 6)));
closer.closeLater(index2);
IncrementalIndex index3 = IncrementalIndexTest.createIndex(new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A") });
index3.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2", "A", 5)));
Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc());
RoaringBitmapFactory factory = new RoaringBitmapFactory();
List<IndexableAdapter> toMerge = Arrays.asList(
    new IncrementalIndexAdapter(interval, index1, factory),
    new IncrementalIndexAdapter(interval, index2, factory),
    new IncrementalIndexAdapter(interval, index3, factory)
);
final File tmpDirMerged = temporaryFolder.newFolder();
File merged = indexMerger.merge(
    toMerge,
    true,
    new AggregatorFactory[] {
        new LongSumAggregatorFactory("A", "A"),
        new LongSumAggregatorFactory("C", "C")
    },
    tmpDirMerged,
    indexSpec,
    -1
);
final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(closer.closeLater(indexIO.loadIndex(merged)));
Assert.assertEquals(ImmutableSet.of("A", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
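Metric C is defined only in index2, so the rows contributed by index1 and index3 carry no value for it, yet the merged segment still exposes both A and C. Stripped of the test-specific rows, the adapter-based merge pattern used here (and in testMismatchedMetricsVarying above) looks roughly like the sketch below; it reuses the indexMerger, indexIO, indexSpec, and temporaryFolder fixtures, and index1/index2 stand in for any populated IncrementalIndex instances.

final Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc());
final RoaringBitmapFactory bitmaps = new RoaringBitmapFactory();
final List<IndexableAdapter> adapters = Arrays.asList(
    new IncrementalIndexAdapter(interval, index1, bitmaps),
    new IncrementalIndexAdapter(interval, index2, bitmaps)
);
final File mergedDir = indexMerger.merge(
    adapters,
    true,   // rollup flag, as in the tests above
    new AggregatorFactory[] { new LongSumAggregatorFactory("A", "A") },
    temporaryFolder.newFolder(),
    indexSpec,
    -1      // trailing int argument passed as -1, mirroring the calls above
);
final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(indexIO.loadIndex(mergedDir));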