Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class IndexMergerTest, method testAddMetrics:
@Test
public void testAddMetrics() throws IOException {
  IncrementalIndex index1 = IncrementalIndexTest.createIndex(
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A")});
  closer.closeLater(index1);
  long timestamp = System.currentTimeMillis();
  index1.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"),
      ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2", "A", 5)));
  IncrementalIndex index2 = IncrementalIndexTest.createIndex(new AggregatorFactory[]{
      new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")});
  index2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"),
      ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2", "A", 5, "C", 6)));
  closer.closeLater(index2);
  Interval interval = new Interval(0, new DateTime().getMillis());
  RoaringBitmapFactory factory = new RoaringBitmapFactory();
  ArrayList<IndexableAdapter> toMerge = Lists.<IndexableAdapter>newArrayList(
      new IncrementalIndexAdapter(interval, index1, factory),
      new IncrementalIndexAdapter(interval, index2, factory));
  final File tmpDirMerged = temporaryFolder.newFolder();
  File merged = INDEX_MERGER.merge(
      toMerge, true,
      new AggregatorFactory[]{new LongSumAggregatorFactory("A", "A"), new LongSumAggregatorFactory("C", "C")},
      tmpDirMerged, indexSpec);
  final QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(
      closer.closeLater(INDEX_IO.loadIndex(merged)));
  Assert.assertEquals(ImmutableSet.of("A", "C"), ImmutableSet.copyOf(adapter.getAvailableMetrics()));
}
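The point of this test: merging two indexes against the union of their aggregator lists yields a segment exposing every metric, even one absent from an input. For readers new to the class, here is a minimal standalone sketch of the MapBasedInputRow pattern all of these snippets share — a timestamp, a list of dimension names, and an event map. The dimension names and values below are illustrative, not taken from the test:

// Minimal sketch: feed one hand-built row into an on-heap incremental index.
long now = System.currentTimeMillis();
IncrementalIndex index = new OnheapIncrementalIndex(
    0L, Granularities.NONE,
    new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000);
index.add(new MapBasedInputRow(
    now,                                // event timestamp in millis
    Arrays.asList("page", "country"),   // dimension ordering for the row
    ImmutableMap.<String, Object>of("page", "druid", "country", "US")));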
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class IndexMergerTest, method getIndexWithNumericDims:
private IncrementalIndex getIndexWithNumericDims() throws Exception {
  IncrementalIndex index = getIndexWithDimsFromSchemata(Arrays.asList(
      new LongDimensionSchema("dimA"),
      new FloatDimensionSchema("dimB"),
      new StringDimensionSchema("dimC")));
  index.add(new MapBasedInputRow(1, Arrays.asList("dimA", "dimB", "dimC"),
      ImmutableMap.<String, Object>of("dimA", 100L, "dimB", 4000.567, "dimC", "Hello")));
  index.add(new MapBasedInputRow(1, Arrays.asList("dimA", "dimB", "dimC"),
      ImmutableMap.<String, Object>of("dimA", 72L, "dimB", 60000.789, "dimC", "World")));
  index.add(new MapBasedInputRow(1, Arrays.asList("dimA", "dimB", "dimC"),
      ImmutableMap.<String, Object>of("dimA", 3001L, "dimB", 1.2345, "dimC", "Foobar")));
  index.add(new MapBasedInputRow(1, Arrays.asList("dimA", "dimB", "dimC"),
      ImmutableMap.<String, Object>of("dimC", "Nully Row")));
  return index;
}
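Note that the fourth row supplies only dimC; dimA and dimB are absent there, which is what makes it the "Nully Row". getIndexWithDimsFromSchemata itself is a private helper that does not appear on this page; a plausible sketch of its shape, assuming the era's three-argument DimensionsSpec constructor (dimensions, exclusions, spatial dimensions) and the same IncrementalIndexSchema.Builder methods the no-rollup test below uses:

private IncrementalIndex getIndexWithDimsFromSchemata(List<DimensionSchema> dims) {
  // Hypothetical reconstruction of the helper: declare typed dimensions up front
  // so dimA/dimB are stored as long/float columns rather than strings.
  IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(0L)
      .withQueryGranularity(Granularities.NONE)
      .withDimensionsSpec(new DimensionsSpec(dims, null, null))
      .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
      .build();
  return new OnheapIncrementalIndex(schema, true, 1000);
}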
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class IndexMergerTest, method testPersistMerge:
@Test
public void testPersistMerge() throws Exception {
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist1 = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist1);
  IncrementalIndex toPersist2 = new OnheapIncrementalIndex(
      0L, Granularities.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000);
  toPersist2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"),
      ImmutableMap.<String, Object>of("dim1", "1", "dim2", "2")));
  toPersist2.add(new MapBasedInputRow(timestamp, Arrays.asList("dim1", "dim2"),
      ImmutableMap.<String, Object>of("dim1", "5", "dim2", "6")));
  final File tempDir1 = temporaryFolder.newFolder();
  final File tempDir2 = temporaryFolder.newFolder();
  final File mergedDir = temporaryFolder.newFolder();
  QueryableIndex index1 = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist1, tempDir1, indexSpec)));
  Assert.assertEquals(2, index1.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions()));
  Assert.assertEquals(3, index1.getColumnNames().size());
  QueryableIndex index2 = closer.closeLater(
      INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist2, tempDir2, indexSpec)));
  Assert.assertEquals(2, index2.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index2.getAvailableDimensions()));
  Assert.assertEquals(3, index2.getColumnNames().size());
  AggregatorFactory[] mergedAggregators = new AggregatorFactory[]{new CountAggregatorFactory("count")};
  QueryableIndex merged = closer.closeLater(INDEX_IO.loadIndex(
      INDEX_MERGER.mergeQueryableIndex(Arrays.asList(index1, index2), true, mergedAggregators, mergedDir, indexSpec)));
  Assert.assertEquals(3, merged.getColumn(Column.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(merged.getAvailableDimensions()));
  Assert.assertEquals(3, merged.getColumnNames().size());
  assertDimCompression(index2, indexSpec.getDimensionCompression());
  assertDimCompression(index1, indexSpec.getDimensionCompression());
  assertDimCompression(merged, indexSpec.getDimensionCompression());
  Assert.assertArrayEquals(getCombiningAggregators(mergedAggregators), merged.getMetadata().getAggregators());
}
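getCombiningAggregators is a local test helper that is not shown on this page; presumably it maps each factory to its combining form, which is what a merged segment records in its metadata. A sketch under that assumption, using AggregatorFactory.getCombiningFactory():

private static AggregatorFactory[] getCombiningAggregators(AggregatorFactory[] aggregators) {
  // Hypothetical reconstruction: replace each factory with its combining counterpart.
  AggregatorFactory[] combining = new AggregatorFactory[aggregators.length];
  for (int i = 0; i < aggregators.length; i++) {
    combining[i] = aggregators[i].getCombiningFactory();
  }
  return combining;
}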
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class IndexMergerTest, method testNoRollupMergeWithoutDuplicateRow:
@Test
public void testNoRollupMergeWithoutDuplicateRow() throws Exception {
  // (d1, d2, d3) come from only one index, and their dim values are ('empty', 'has null', 'no null')
  // (d4, d5, d6, d7, d8, d9) come from both indexes
  // d4: 'empty' join 'empty'
  // d5: 'empty' join 'has null'
  // d6: 'empty' join 'no null'
  // d7: 'has null' join 'has null'
  // d8: 'has null' join 'no null'
  // d9: 'no null' join 'no null'
  IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(0L)
      .withQueryGranularity(Granularities.NONE)
      .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
      .withRollup(false)
      .build();
  IncrementalIndex toPersistA = new OnheapIncrementalIndex(indexSchema, true, 1000);
  toPersistA.add(new MapBasedInputRow(
      1, Arrays.asList("d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.<String, Object>of("d1", "", "d2", "", "d3", "310", "d7", "", "d9", "910")));
  toPersistA.add(new MapBasedInputRow(
      2, Arrays.asList("d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.<String, Object>of("d2", "210", "d3", "311", "d7", "710", "d8", "810", "d9", "911")));
  IncrementalIndex toPersistB = new OnheapIncrementalIndex(indexSchema, true, 1000);
  toPersistB.add(new MapBasedInputRow(
      3, Arrays.asList("d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.<String, Object>of("d5", "520", "d6", "620", "d7", "720", "d8", "820", "d9", "920")));
  toPersistB.add(new MapBasedInputRow(
      4, Arrays.asList("d4", "d5", "d6", "d7", "d8", "d9"),
      ImmutableMap.<String, Object>of("d5", "", "d6", "621", "d7", "", "d8", "821", "d9", "921")));
  final File tmpDirA = temporaryFolder.newFolder();
  final File tmpDirB = temporaryFolder.newFolder();
  final File tmpDirMerged = temporaryFolder.newFolder();
  QueryableIndex indexA = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersistA, tmpDirA, indexSpec)));
  QueryableIndex indexB = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersistB, tmpDirB, indexSpec)));
  final QueryableIndex merged = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.mergeQueryableIndex(
      Arrays.asList(indexA, indexB), true,
      new AggregatorFactory[]{new CountAggregatorFactory("count")},
      tmpDirMerged, indexSpec)));
  final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
  final List<Rowboat> boatList = ImmutableList.copyOf(adapter.getRows());
  Assert.assertEquals(ImmutableList.of("d2", "d3", "d5", "d6", "d7", "d8", "d9"),
      ImmutableList.copyOf(adapter.getDimensionNames()));
  Assert.assertEquals(4, boatList.size());
  Assert.assertArrayEquals(new int[][]{{0}, {1}, {0}, {0}, {0}, {0}, {0}}, boatList.get(0).getDims());
  Assert.assertArrayEquals(new int[][]{{1}, {2}, {0}, {0}, {1}, {1}, {1}}, boatList.get(1).getDims());
  Assert.assertArrayEquals(new int[][]{{0}, {0}, {1}, {1}, {2}, {2}, {2}}, boatList.get(2).getDims());
  Assert.assertArrayEquals(new int[][]{{0}, {0}, {0}, {2}, {0}, {3}, {3}}, boatList.get(3).getDims());
  checkBitmapIndex(Lists.newArrayList(0, 2, 3), adapter.getBitmapIndex("d2", ""));
  checkBitmapIndex(Lists.newArrayList(1), adapter.getBitmapIndex("d2", "210"));
  checkBitmapIndex(Lists.newArrayList(2, 3), adapter.getBitmapIndex("d3", ""));
  checkBitmapIndex(Lists.newArrayList(0), adapter.getBitmapIndex("d3", "310"));
  checkBitmapIndex(Lists.newArrayList(1), adapter.getBitmapIndex("d3", "311"));
  checkBitmapIndex(Lists.newArrayList(0, 1, 3), adapter.getBitmapIndex("d5", ""));
  checkBitmapIndex(Lists.newArrayList(2), adapter.getBitmapIndex("d5", "520"));
  checkBitmapIndex(Lists.newArrayList(0, 1), adapter.getBitmapIndex("d6", ""));
  checkBitmapIndex(Lists.newArrayList(2), adapter.getBitmapIndex("d6", "620"));
  checkBitmapIndex(Lists.newArrayList(3), adapter.getBitmapIndex("d6", "621"));
  checkBitmapIndex(Lists.newArrayList(0, 3), adapter.getBitmapIndex("d7", ""));
  checkBitmapIndex(Lists.newArrayList(1), adapter.getBitmapIndex("d7", "710"));
  checkBitmapIndex(Lists.newArrayList(2), adapter.getBitmapIndex("d7", "720"));
  checkBitmapIndex(Lists.newArrayList(0), adapter.getBitmapIndex("d8", ""));
  checkBitmapIndex(Lists.newArrayList(1), adapter.getBitmapIndex("d8", "810"));
  checkBitmapIndex(Lists.newArrayList(2), adapter.getBitmapIndex("d8", "820"));
  checkBitmapIndex(Lists.newArrayList(3), adapter.getBitmapIndex("d8", "821"));
  checkBitmapIndex(new ArrayList<Integer>(), adapter.getBitmapIndex("d9", ""));
  checkBitmapIndex(Lists.newArrayList(0), adapter.getBitmapIndex("d9", "910"));
  checkBitmapIndex(Lists.newArrayList(1), adapter.getBitmapIndex("d9", "911"));
  checkBitmapIndex(Lists.newArrayList(2), adapter.getBitmapIndex("d9", "920"));
  checkBitmapIndex(Lists.newArrayList(3), adapter.getBitmapIndex("d9", "921"));
}
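checkBitmapIndex is another local helper not reproduced on this page. A plausible sketch, assuming getBitmapIndex returns an IndexedInts of matching row ids in this Druid generation (the return type has changed across versions):

private void checkBitmapIndex(List<Integer> expected, IndexedInts real) {
  // Hypothetical reconstruction: the bitmap for a (dimension, value) pair
  // should enumerate exactly the expected row ids, in order.
  Assert.assertEquals(expected.size(), real.size());
  int i = 0;
  for (int rowId : real) {
    Assert.assertEquals(expected.get(i++).intValue(), rowId);
  }
}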
Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.
In class SchemalessIndexTest, method getIncrementalIndex:
public static QueryableIndex getIncrementalIndex(int index1, int index2) {
  synchronized (log) {
    if (events.isEmpty()) {
      makeEvents();
    }
    Map<Integer, QueryableIndex> entry = incrementalIndexes.get(index1);
    if (entry != null) {
      QueryableIndex index = entry.get(index2);
      if (index != null) {
        return index;
      }
    } else {
      entry = Maps.<Integer, QueryableIndex>newHashMap();
      incrementalIndexes.put(index1, entry);
    }
    IncrementalIndex theIndex = null;
    int count = 0;
    for (final Map<String, Object> event : events) {
      if (count != index1 && count != index2) {
        count++;
        continue;
      }
      final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis();
      if (theIndex == null) {
        theIndex = new OnheapIncrementalIndex(timestamp, Granularities.MINUTE, METRIC_AGGS, 1000);
      }
      final List<String> dims = Lists.newArrayList();
      for (final Map.Entry<String, Object> val : event.entrySet()) {
        if (!val.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(val.getKey())) {
          dims.add(val.getKey());
        }
      }
      try {
        theIndex.add(new MapBasedInputRow(timestamp, dims, event));
      } catch (IndexSizeExceededException e) {
        throw Throwables.propagate(e);
      }
      count++;
    }
    QueryableIndex retVal = TestIndex.persistRealtimeAndLoadMMapped(theIndex);
    entry.put(index2, retVal);
    return retVal;
  }
}
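Assuming the static state referenced above (events, incrementalIndexes, METRIC_AGGS, METRICS, TIMESTAMP, log) is initialized elsewhere in SchemalessIndexTest, a caller would use the method roughly as below; the two indices simply pick which of the canned schemaless events end up in the persisted segment:

// Hypothetical usage: build (and cache) a QueryableIndex over events 1 and 2,
// then wrap it for querying, as the tests above do with merged segments.
QueryableIndex index = SchemalessIndexTest.getIncrementalIndex(1, 2);
QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(index);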