Example usage of io.druid.segment.incremental.IncrementalIndex in the project druid by druid-io.
From the class QueryableIndexIndexableAdapterTest, method testGetBitmapIndex.
@Test
public void testGetBitmapIndex() throws Exception {
    // Persist a populated incremental index, reload it memory-mapped, and wrap
    // it in an IndexableAdapter so we can inspect its bitmap indexes.
    final long timestamp = System.currentTimeMillis();
    IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null);
    IncrementalIndexTest.populateIndex(timestamp, toPersist);
    final File tempDir = temporaryFolder.newFolder();
    QueryableIndex index = closer.closeLater(INDEX_IO.loadIndex(INDEX_MERGER.persist(toPersist, tempDir, INDEX_SPEC)));
    IndexableAdapter adapter = new QueryableIndexIndexableAdapter(index);
    String dimension = "dim1";
    // null is added to all dimensions with value, so every dictionary entry for
    // the dimension (including the implicit null) should map to exactly one row.
    // FIX: the original fetched getBitmapIndex(dimension, 0) into a variable
    // before the loop and immediately overwrote it on the first iteration —
    // that dead assignment is removed and the variable made loop-local.
    for (int i = 0; i < adapter.getDimValueLookup(dimension).size(); i++) {
        IndexedInts indexedInts = adapter.getBitmapIndex(dimension, i);
        Assert.assertEquals(1, indexedInts.size());
    }
}
Example usage of io.druid.segment.incremental.IncrementalIndex in the project druid by druid-io.
From the class SchemalessIndexTest, method getIncrementalIndex.
/**
 * Builds (or returns from cache) a persisted, memory-mapped QueryableIndex
 * containing only the events at positions {@code index1} and {@code index2}
 * of the shared event list. Results are cached per (index1, index2) pair.
 */
public static QueryableIndex getIncrementalIndex(int index1, int index2) {
    synchronized (log) {
        if (events.isEmpty()) {
            makeEvents();
        }

        // Return the cached index for this (index1, index2) pair if present.
        Map<Integer, QueryableIndex> entry = incrementalIndexes.get(index1);
        if (entry != null) {
            QueryableIndex index = entry.get(index2);
            if (index != null) {
                return index;
            }
        } else {
            entry = Maps.<Integer, QueryableIndex>newHashMap();
            incrementalIndexes.put(index1, entry);
        }

        // Build an incremental index from just the two selected event positions.
        // NOTE(review): if neither position exists in `events`, theIndex stays
        // null and persistRealtimeAndLoadMMapped would receive null — assumed
        // callers always pass valid positions; confirm.
        IncrementalIndex theIndex = null;
        int count = 0;
        for (final Map<String, Object> event : events) {
            if (count != index1 && count != index2) {
                count++;
                continue;
            }
            final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis();
            if (theIndex == null) {
                // Root the index at the first selected event's timestamp.
                theIndex = new OnheapIncrementalIndex(timestamp, Granularities.MINUTE, METRIC_AGGS, 1000);
            }
            // Every key that is neither the timestamp nor a metric is a dimension.
            final List<String> dims = Lists.newArrayList();
            for (final Map.Entry<String, Object> val : event.entrySet()) {
                if (!val.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(val.getKey())) {
                    dims.add(val.getKey());
                }
            }
            try {
                theIndex.add(new MapBasedInputRow(timestamp, dims, event));
            } catch (IndexSizeExceededException e) {
                // FIX: the original called Throwables.propagate(e) without
                // `throw`. propagate() always throws, so behavior was the same,
                // but the bare call reads as a discarded result and is
                // inconsistent with the `throw Throwables.propagate(e)` style
                // used by the sibling methods in this class.
                throw Throwables.propagate(e);
            }
            count++;
        }

        QueryableIndex retVal = TestIndex.persistRealtimeAndLoadMMapped(theIndex);
        entry.put(index2, retVal);
        return retVal;
    }
}
Example usage of io.druid.segment.incremental.IncrementalIndex in the project druid by druid-io.
From the class SchemalessIndexTest, method makeIncrementalIndex.
/**
 * Loads a JSON sample file from the classpath and builds an on-heap
 * IncrementalIndex from its events using the supplied aggregators.
 */
private static IncrementalIndex makeIncrementalIndex(final String resourceFilename, AggregatorFactory[] aggs) {
    // Locate the sample-data resource on the classpath and log what we load.
    URL resourceUrl = TestIndex.class.getClassLoader().getResource(resourceFilename);
    log.info("Realtime loading resource[%s]", resourceUrl);
    String path = resourceUrl.getFile();
    log.info("Realtime loading index file[%s]", path);

    // Fresh on-heap index rooted at a fixed start timestamp with MINUTE granularity.
    final IncrementalIndex incIndex = new OnheapIncrementalIndex(new DateTime("2011-01-12T00:00:00.000Z").getMillis(), Granularities.MINUTE, aggs, 1000);
    try {
        final List<Object> rawEvents = jsonMapper.readValue(new File(path), List.class);
        for (Object raw : rawEvents) {
            final Map<String, Object> event = jsonMapper.convertValue(raw, Map.class);
            // Every key that is neither the timestamp nor a metric is a dimension.
            final List<String> dimensions = Lists.newArrayList();
            for (Map.Entry<String, Object> field : event.entrySet()) {
                if (!field.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(field.getKey())) {
                    dimensions.add(field.getKey());
                }
            }
            incIndex.add(new MapBasedInputRow(new DateTime(event.get(TIMESTAMP)).getMillis(), dimensions, event));
        }
    } catch (IOException e) {
        // NOTE(review): presumably clears a shared cached-index field on
        // failure before propagating — confirm what `index` refers to.
        index = null;
        throw Throwables.propagate(e);
    }
    return incIndex;
}
Example usage of io.druid.segment.incremental.IncrementalIndex in the project druid by druid-io.
From the class SchemalessIndexTest, method getMergedIncrementalIndex.
/**
 * Lazily builds (and caches) a QueryableIndex produced by persisting two
 * sample incremental indexes and merging the persisted segments on disk.
 */
public static QueryableIndex getMergedIncrementalIndex() {
    synchronized (log) {
        // Serve the cached merged index when it has already been built.
        if (mergedIndex != null) {
            return mergedIndex;
        }
        try {
            final IncrementalIndex top = makeIncrementalIndex("druid.sample.json.top", METRIC_AGGS);
            final IncrementalIndex bottom = makeIncrementalIndex("druid.sample.json.bottom", METRIC_AGGS);

            // Turn a fresh temp-file name into a scratch directory tree with
            // one subdirectory per persisted half plus one for the merge output.
            final File scratchDir = File.createTempFile("yay", "who");
            scratchDir.delete();
            final File topDir = new File(scratchDir, "top");
            final File bottomDir = new File(scratchDir, "bottom");
            final File mergedDir = new File(scratchDir, "merged");
            topDir.mkdirs();
            topDir.deleteOnExit();
            bottomDir.mkdirs();
            bottomDir.deleteOnExit();
            mergedDir.mkdirs();
            mergedDir.deleteOnExit();

            // Persist both halves, merge the persisted segments, and mmap the
            // result. NOTE(review): the boolean flag passed to
            // mergeQueryableIndex is presumably rollup — confirm against the
            // INDEX_MERGER API before relying on that reading.
            INDEX_MERGER.persist(top, topDir, indexSpec);
            INDEX_MERGER.persist(bottom, bottomDir, indexSpec);
            mergedIndex = INDEX_IO.loadIndex(
                INDEX_MERGER.mergeQueryableIndex(
                    Arrays.asList(INDEX_IO.loadIndex(topDir), INDEX_IO.loadIndex(bottomDir)),
                    true,
                    METRIC_AGGS,
                    mergedDir,
                    indexSpec
                )
            );
            return mergedIndex;
        } catch (IOException e) {
            // Drop any partially-built state so a later call retries from scratch.
            mergedIndex = null;
            throw Throwables.propagate(e);
        }
    }
}
Example usage of io.druid.segment.incremental.IncrementalIndex in the project druid by druid-io.
From the class SchemalessIndexTest, method makeRowPersistedIndexes.
/**
 * Persists every shared event as its own single-row segment on disk, then
 * loads each segment memory-mapped into rowPersistedIndexes.
 */
private static void makeRowPersistedIndexes() {
    synchronized (log) {
        try {
            if (events.isEmpty()) {
                makeEvents();
            }
            for (final Map<String, Object> event : events) {
                final long rowTimestamp = new DateTime(event.get(TIMESTAMP)).getMillis();

                // Keys that are neither the timestamp nor a metric become dimensions.
                final List<String> dimensions = Lists.newArrayList();
                for (Map.Entry<String, Object> field : event.entrySet()) {
                    if (!field.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(field.getKey())) {
                        dimensions.add(field.getKey());
                    }
                }

                // One-row index rooted at this event's own timestamp.
                final IncrementalIndex singleRowIndex = new OnheapIncrementalIndex(rowTimestamp, Granularities.MINUTE, METRIC_AGGS, 1000);
                singleRowIndex.add(new MapBasedInputRow(rowTimestamp, dimensions, event));

                // Reuse a fresh temp-file name as a scratch directory for the segment.
                final File segmentDir = File.createTempFile("billy", "yay");
                segmentDir.delete();
                segmentDir.mkdirs();
                segmentDir.deleteOnExit();
                INDEX_MERGER.persist(singleRowIndex, segmentDir, indexSpec);
                rowPersistedIndexes.add(INDEX_IO.loadIndex(segmentDir));
            }
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
}
Aggregations