Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
The class IndexMergerTestBase, method persistAndLoad:
private QueryableIndex persistAndLoad(List<DimensionSchema> schema, InputRow... rows) throws IOException
{
  IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null, new DimensionsSpec(schema));
  for (InputRow row : rows) {
    toPersist.add(row);
  }

  final File tempDir = temporaryFolder.newFolder();
  return closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist, tempDir, indexSpec, null)));
}
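A hedged usage sketch of the helper above: it takes an explicit dimension schema plus the rows to index, so a caller inside the test base might invoke it roughly as follows. The test method name, StringDimensionSchema usage, row values, and assertions are illustrative, not taken from the actual tests.

// Hypothetical call site (names and values are made up for illustration).
@Test
public void testPersistAndLoadSingleRow() throws Exception
{
  List<DimensionSchema> dims = Collections.singletonList(new StringDimensionSchema("dimA"));
  QueryableIndex index = persistAndLoad(
      dims,
      new MapBasedInputRow(
          System.currentTimeMillis(),
          Collections.singletonList("dimA"),
          ImmutableMap.of("dimA", "someValue")
      )
  );
  Assert.assertEquals(1, index.getNumRows());
  Assert.assertEquals(Collections.singletonList("dimA"), Lists.newArrayList(index.getAvailableDimensions()));
}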
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
The class IndexMergerTestBase, method testMergeWithDimensionsList:
@Test
public void testMergeWithDimensionsList() throws Exception
{
  IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withDimensionsSpec(new DimensionsSpec(makeDimensionSchemas(Arrays.asList("dimA", "dimB", "dimC"))))
      .withMetrics(new CountAggregatorFactory("count"))
      .build();
  IncrementalIndex toPersist1 = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(1000).build();
  IncrementalIndex toPersist2 = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(1000).build();
  IncrementalIndex toPersist3 = new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(1000).build();

  addDimValuesToIndex(toPersist1, "dimA", Arrays.asList("1", "2"));
  addDimValuesToIndex(toPersist2, "dimA", Arrays.asList("1", "2"));
  addDimValuesToIndex(toPersist3, "dimC", Arrays.asList("1", "2"));

  final File tmpDir = temporaryFolder.newFolder();
  final File tmpDir2 = temporaryFolder.newFolder();
  final File tmpDir3 = temporaryFolder.newFolder();
  final File tmpDirMerged = temporaryFolder.newFolder();

  QueryableIndex index1 = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist1, tmpDir, indexSpec, null)));
  QueryableIndex index2 = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist2, tmpDir2, indexSpec, null)));
  QueryableIndex index3 = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist3, tmpDir3, indexSpec, null)));

  final QueryableIndex merged = closer.closeLater(indexIO.loadIndex(indexMerger.mergeQueryableIndex(
      Arrays.asList(index1, index2, index3),
      true,
      new AggregatorFactory[]{new CountAggregatorFactory("count")},
      tmpDirMerged,
      indexSpec,
      null,
      -1
  )));

  final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
  final List<DebugRow> rowList = RowIteratorHelper.toList(adapter.getRows());

  Assert.assertEquals(ImmutableList.of("dimA", "dimC"), ImmutableList.copyOf(adapter.getDimensionNames()));
  Assert.assertEquals(4, rowList.size());

  Assert.assertEquals(Arrays.asList(null, "1"), rowList.get(0).dimensionValues());
  Assert.assertEquals(Collections.singletonList(1L), rowList.get(0).metricValues());
  Assert.assertEquals(Arrays.asList(null, "2"), rowList.get(1).dimensionValues());
  Assert.assertEquals(Collections.singletonList(1L), rowList.get(1).metricValues());
  Assert.assertEquals(Arrays.asList("1", null), rowList.get(2).dimensionValues());
  Assert.assertEquals(Collections.singletonList(2L), rowList.get(2).metricValues());
  Assert.assertEquals(Arrays.asList("2", null), rowList.get(3).dimensionValues());
  Assert.assertEquals(Collections.singletonList(2L), rowList.get(3).metricValues());

  Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dimA").hasBitmapIndexes());
  Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dimC").hasBitmapIndexes());

  if (useBitmapIndexes) {
    checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimA", null));
    checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("dimA", "1"));
    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("dimA", "2"));
    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dimB", null));
    checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("dimC", null));
    checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dimC", "1"));
    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dimC", "2"));
  }

  checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dimB", ""));
}
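The test above relies on an addDimValuesToIndex helper that is not shown in this snippet. Judging only from its call sites, it presumably adds one row per value under the given dimension name; the following is a plausible sketch under that assumption, not the verbatim implementation from IndexMergerTestBase.

// Plausible shape of the helper used above (assumption based on its call sites).
private void addDimValuesToIndex(IncrementalIndex index, String dimName, List<String> values) throws Exception
{
  for (String val : values) {
    // One row per value, all under the same timestamp, with a single-dimension schema.
    index.add(new MapBasedInputRow(1, Collections.singletonList(dimName), ImmutableMap.of(dimName, val)));
  }
}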
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
The class IndexMergerTestBase, method testNoRollupMergeWithDuplicateRow:
@Test
public void testNoRollupMergeWithDuplicateRow() throws Exception
{
  // d3, d6, d8, and d9 carry the actual data in the two indexes.
  // toPersistA holds two duplicate rows; toPersistB holds one row identical to them plus one different row.
  // This lets us test:
  //   1. an incremental index with duplicate rows
  //   2. an incremental index without duplicate rows
  //   3. merging two indexes that share duplicate rows
  IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
      .withMetrics(new CountAggregatorFactory("count"))
      .withRollup(false)
      .build();

  IncrementalIndex toPersistA = new OnheapIncrementalIndex.Builder().setIndexSchema(indexSchema).setMaxRowCount(1000).build();
  toPersistA.add(new MapBasedInputRow(
      1,
      Arrays.asList("d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.of("d1", "", "d2", "", "d3", "310", "d7", "", "d9", "910")
  ));
  toPersistA.add(new MapBasedInputRow(
      1,
      Arrays.asList("d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.of("d1", "", "d2", "", "d3", "310", "d7", "", "d9", "910")
  ));

  IncrementalIndex toPersistB = new OnheapIncrementalIndex.Builder().setIndexSchema(indexSchema).setMaxRowCount(1000).build();
  toPersistB.add(new MapBasedInputRow(
      1,
      Arrays.asList("d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.of("d1", "", "d2", "", "d3", "310", "d7", "", "d9", "910")
  ));
  toPersistB.add(new MapBasedInputRow(
      4,
      Arrays.asList("d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.of("d5", "", "d6", "621", "d7", "", "d8", "821", "d9", "921")
  ));

  final File tmpDirA = temporaryFolder.newFolder();
  final File tmpDirB = temporaryFolder.newFolder();
  final File tmpDirMerged = temporaryFolder.newFolder();

  QueryableIndex indexA = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersistA, tmpDirA, indexSpec, null)));
  QueryableIndex indexB = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersistB, tmpDirB, indexSpec, null)));

  final QueryableIndex merged = closer.closeLater(indexIO.loadIndex(indexMerger.mergeQueryableIndex(
      Arrays.asList(indexA, indexB),
      false,
      new AggregatorFactory[]{new CountAggregatorFactory("count")},
      tmpDirMerged,
      indexSpec,
      null,
      -1
  )));

  final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
  final List<DebugRow> rowList = RowIteratorHelper.toList(adapter.getRows());

  if (NullHandling.replaceWithDefault()) {
    Assert.assertEquals(ImmutableList.of("d3", "d6", "d8", "d9"), ImmutableList.copyOf(adapter.getDimensionNames()));
  } else {
    Assert.assertEquals(ImmutableList.of("d1", "d2", "d3", "d5", "d6", "d7", "d8", "d9"), ImmutableList.copyOf(adapter.getDimensionNames()));
  }

  Assert.assertEquals(4, rowList.size());

  if (NullHandling.replaceWithDefault()) {
    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(0).dimensionValues());
    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(1).dimensionValues());
    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(2).dimensionValues());
    Assert.assertEquals(Arrays.asList(null, "621", "821", "921"), rowList.get(3).dimensionValues());
  } else {
    Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(0).dimensionValues());
    Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(1).dimensionValues());
    Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(2).dimensionValues());
    Assert.assertEquals(Arrays.asList(null, null, null, "", "621", "", "821", "921"), rowList.get(3).dimensionValues());
  }

  checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d3", null));
  checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d3", "310"));
  checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d6", null));
  checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d6", "621"));
  checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d8", null));
  checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d8", "821"));
  checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", null));
  checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d9", "910"));
  checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d9", "921"));
}
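A minimal sketch of why withRollup(false) matters here: with rollup enabled, the two identical rows added to toPersistA would collapse into a single aggregated row inside the incremental index itself, so the merged segment could never contain four rows. The sketch is illustrative only; it reuses the builder calls from the test and assumes IncrementalIndex.size() reports the current row count.

// With rollup enabled, identical rows aggregate in place instead of being kept separately
// (assumes IncrementalIndex.size() returns the number of rows currently held).
IncrementalIndexSchema rollupSchema = new IncrementalIndexSchema.Builder()
    .withMetrics(new CountAggregatorFactory("count"))
    .withRollup(true)
    .build();
IncrementalIndex withRollup = new OnheapIncrementalIndex.Builder()
    .setIndexSchema(rollupSchema)
    .setMaxRowCount(1000)
    .build();
withRollup.add(new MapBasedInputRow(1, Arrays.asList("d3", "d9"), ImmutableMap.of("d3", "310", "d9", "910")));
withRollup.add(new MapBasedInputRow(1, Arrays.asList("d3", "d9"), ImmutableMap.of("d3", "310", "d9", "910")));
Assert.assertEquals(1, withRollup.size()); // the duplicate collapses; the count metric becomes 2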
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
The class IndexMergerTestBase, method testJointDimMerge:
@Test
public void testJointDimMerge() throws Exception
{
  // d1, d2, d3 appear in only one index; their value patterns are 'empty', 'has null', and 'no null', respectively.
  // d4, d5, d6, d7, d8, d9 appear in both indexes and combine as:
  //   d4: 'empty' join 'empty'
  //   d5: 'empty' join 'has null'
  //   d6: 'empty' join 'no null'
  //   d7: 'has null' join 'has null'
  //   d8: 'has null' join 'no null'
  //   d9: 'no null' join 'no null'
  IncrementalIndexSchema rollupIndexSchema = new IncrementalIndexSchema.Builder()
      .withMetrics(new CountAggregatorFactory("count"))
      .build();
  IncrementalIndexSchema noRollupIndexSchema = new IncrementalIndexSchema.Builder()
      .withMetrics(new CountAggregatorFactory("count"))
      .withRollup(false)
      .build();

  for (IncrementalIndexSchema indexSchema : Arrays.asList(rollupIndexSchema, noRollupIndexSchema)) {
    IncrementalIndex toPersistA = new OnheapIncrementalIndex.Builder().setIndexSchema(indexSchema).setMaxRowCount(1000).build();
    toPersistA.add(new MapBasedInputRow(
        1,
        Arrays.asList("d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9"),
        ImmutableMap.of("d1", "", "d2", "", "d3", "310", "d7", "", "d9", "910")
    ));
    toPersistA.add(new MapBasedInputRow(
        2,
        Arrays.asList("d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9"),
        ImmutableMap.of("d2", "210", "d3", "311", "d7", "710", "d8", "810", "d9", "911")
    ));

    IncrementalIndex toPersistB = new OnheapIncrementalIndex.Builder().setIndexSchema(indexSchema).setMaxRowCount(1000).build();
    toPersistB.add(new MapBasedInputRow(
        3,
        Arrays.asList("d4", "d5", "d6", "d7", "d8", "d9"),
        ImmutableMap.of("d5", "520", "d6", "620", "d7", "720", "d8", "820", "d9", "920")
    ));
    toPersistB.add(new MapBasedInputRow(
        4,
        Arrays.asList("d4", "d5", "d6", "d7", "d8", "d9"),
        ImmutableMap.of("d5", "", "d6", "621", "d7", "", "d8", "821", "d9", "921")
    ));

    final File tmpDirA = temporaryFolder.newFolder();
    final File tmpDirB = temporaryFolder.newFolder();
    final File tmpDirMerged = temporaryFolder.newFolder();

    QueryableIndex indexA = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersistA, tmpDirA, indexSpec, null)));
    QueryableIndex indexB = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersistB, tmpDirB, indexSpec, null)));

    final QueryableIndex merged = closer.closeLater(indexIO.loadIndex(indexMerger.mergeQueryableIndex(
        Arrays.asList(indexA, indexB),
        true,
        new AggregatorFactory[]{new CountAggregatorFactory("count")},
        tmpDirMerged,
        indexSpec,
        null,
        -1
    )));

    final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
    final List<DebugRow> rowList = RowIteratorHelper.toList(adapter.getRows());

    if (NullHandling.replaceWithDefault()) {
      Assert.assertEquals(ImmutableList.of("d2", "d3", "d5", "d6", "d7", "d8", "d9"), ImmutableList.copyOf(adapter.getDimensionNames()));
    } else {
      Assert.assertEquals(ImmutableList.of("d1", "d2", "d3", "d5", "d6", "d7", "d8", "d9"), ImmutableList.copyOf(adapter.getDimensionNames()));
    }

    Assert.assertEquals(4, rowList.size());

    if (NullHandling.replaceWithDefault()) {
      Assert.assertEquals(Arrays.asList(null, "310", null, null, null, null, "910"), rowList.get(0).dimensionValues());
      Assert.assertEquals(Arrays.asList("210", "311", null, null, "710", "810", "911"), rowList.get(1).dimensionValues());
      Assert.assertEquals(Arrays.asList(null, null, "520", "620", "720", "820", "920"), rowList.get(2).dimensionValues());
      Assert.assertEquals(Arrays.asList(null, null, null, "621", null, "821", "921"), rowList.get(3).dimensionValues());

      checkBitmapIndex(Arrays.asList(0, 2, 3), adapter.getBitmapIndex("d2", null));
      checkBitmapIndex(Arrays.asList(0, 1, 3), adapter.getBitmapIndex("d5", null));
      checkBitmapIndex(Arrays.asList(0, 3), adapter.getBitmapIndex("d7", null));
    } else {
      Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(0).dimensionValues());
      Assert.assertEquals(Arrays.asList(null, "210", "311", null, null, "710", "810", "911"), rowList.get(1).dimensionValues());
      Assert.assertEquals(Arrays.asList(null, null, null, "520", "620", "720", "820", "920"), rowList.get(2).dimensionValues());
      Assert.assertEquals(Arrays.asList(null, null, null, "", "621", "", "821", "921"), rowList.get(3).dimensionValues());

      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d2", null));
      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d5", null));
      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d7", null));
    }

    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d2", "210"));
    checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d3", null));
    checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d3", "310"));
    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d3", "311"));
    checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d5", "520"));
    checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d6", null));
    checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d6", "620"));
    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d6", "621"));
    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d7", "710"));
    checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d7", "720"));
    checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d8", null));
    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d8", "810"));
    checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d8", "820"));
    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d8", "821"));
    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", null));
    checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d9", "910"));
    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d9", "911"));
    checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d9", "920"));
    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d9", "921"));
  }
}
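The branching on NullHandling.replaceWithDefault() above reflects Druid's two null-handling modes: in default-value mode empty strings and nulls are interchangeable, so dimensions whose values are all empty (d1, d4) disappear from the merged segment, while in SQL-compatible mode the empty string is a distinct value and d1 survives. A small illustration, assuming NullHandling.emptyToNullIfNeeded coerces "" to null only in default-value mode:

// Illustration of the two modes the assertions above branch on.
if (NullHandling.replaceWithDefault()) {
  // Default-value mode: "" and null are equivalent, so all-empty dimensions like d1 are dropped.
  Assert.assertNull(NullHandling.emptyToNullIfNeeded(""));
} else {
  // SQL-compatible mode: "" is a real value distinct from null, so d1 is kept in the merged index.
  Assert.assertEquals("", NullHandling.emptyToNullIfNeeded(""));
}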
Use of org.apache.druid.segment.incremental.IncrementalIndex in project druid by druid-io.
The class IndexMergerTestBase, method testPersistMerge:
@Test
public void testPersistMerge() throws Exception
{
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist1);

  IncrementalIndex toPersist2 = new OnheapIncrementalIndex.Builder()
      .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
      .setMaxRowCount(1000)
      .build();
  toPersist2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "1", "dim2", "2")));
  toPersist2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"), ImmutableMap.of("dim1", "5", "dim2", "6")));

  final File tempDir1 = temporaryFolder.newFolder();
  final File tempDir2 = temporaryFolder.newFolder();
  final File mergedDir = temporaryFolder.newFolder();

  QueryableIndex index1 = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist1, tempDir1, indexSpec, null)));
  Assert.assertEquals(2, index1.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions()));
  Assert.assertEquals(3, index1.getColumnNames().size());

  QueryableIndex index2 = closer.closeLater(indexIO.loadIndex(indexMerger.persist(toPersist2, tempDir2, indexSpec, null)));
  Assert.assertEquals(2, index2.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index2.getAvailableDimensions()));
  Assert.assertEquals(3, index2.getColumnNames().size());

  AggregatorFactory[] mergedAggregators = new AggregatorFactory[]{new CountAggregatorFactory("count")};
  QueryableIndex merged = closer.closeLater(indexIO.loadIndex(indexMerger.mergeQueryableIndex(
      Arrays.asList(index1, index2),
      true,
      mergedAggregators,
      mergedDir,
      indexSpec,
      null,
      -1
  )));
  Assert.assertEquals(3, merged.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(merged.getAvailableDimensions()));
  Assert.assertEquals(3, merged.getColumnNames().size());

  assertDimCompression(index2, indexSpec.getDimensionCompression());
  assertDimCompression(index1, indexSpec.getDimensionCompression());
  assertDimCompression(merged, indexSpec.getDimensionCompression());

  Assert.assertArrayEquals(getCombiningAggregators(mergedAggregators), merged.getMetadata().getAggregators());
}
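The final assertion compares against getCombiningAggregators(mergedAggregators), another helper not shown in this snippet. Judging from the assertion, it presumably maps each factory to its combining form via AggregatorFactory.getCombiningFactory(); the following is a plausible sketch, not necessarily the exact implementation.

// Plausible shape of the helper (assumption based on the assertion above).
private static AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregators)
{
  final AggregatorFactory[] combining = new AggregatorFactory[aggregators.length];
  for (int i = 0; i < aggregators.length; i++) {
    // The combining factory describes how already-aggregated values merge across segments.
    combining[i] = aggregators[i].getCombiningFactory();
  }
  return combining;
}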