Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.
From class SchemalessIndexTest, method makeIncrementalIndex.
private static IncrementalIndex makeIncrementalIndex(final String resourceFilename, AggregatorFactory[] aggs)
{
  URL resource = TestIndex.class.getClassLoader().getResource(resourceFilename);
  log.info("Realtime loading resource[%s]", resource);
  String filename = resource.getFile();
  log.info("Realtime loading index file[%s]", filename);
  final IncrementalIndex retVal = new OnheapIncrementalIndex(
      new DateTime("2011-01-12T00:00:00.000Z").getMillis(),
      Granularities.MINUTE,
      aggs,
      1000
  );
  try {
    final List<Object> events = jsonMapper.readValue(new File(filename), List.class);
    for (Object obj : events) {
      final Map<String, Object> event = jsonMapper.convertValue(obj, Map.class);
      // every key that is neither the timestamp nor a known metric is treated as a dimension
      final List<String> dims = Lists.newArrayList();
      for (Map.Entry<String, Object> entry : event.entrySet()) {
        if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(entry.getKey())) {
          dims.add(entry.getKey());
        }
      }
      retVal.add(new MapBasedInputRow(new DateTime(event.get(TIMESTAMP)).getMillis(), dims, event));
    }
  }
  catch (IOException e) {
    // index is a class-level field of SchemalessIndexTest; reset it on failure
    index = null;
    throw Throwables.propagate(e);
  }
  return retVal;
}
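As a point of reference, the four-argument constructor used above (minimum timestamp, query granularity, aggregators, max row count) can be exercised on its own. A minimal sketch with made-up values; the helper name, dimension, and timestamps are hypothetical, not from the test:

// hypothetical helper, assuming the same Druid version as the snippet above
private static IncrementalIndex makeSmallIndex() throws IndexSizeExceededException
{
  IncrementalIndex index = new OnheapIncrementalIndex(
      new DateTime("2011-01-12T00:00:00.000Z").getMillis(),  // rows older than this are rejected
      Granularities.MINUTE,                                  // timestamps are truncated to the minute for rollup
      new AggregatorFactory[]{new CountAggregatorFactory("count")},
      1000                                                   // maximum number of rows held on heap
  );
  index.add(new MapBasedInputRow(
      new DateTime("2011-01-12T01:00:00.000Z").getMillis(),
      Lists.newArrayList("dim1"),
      ImmutableMap.<String, Object>of("dim1", "value1")
  ));
  return index;
}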
Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.
From class SchemalessIndexTest, method makeRowPersistedIndexes.
private static void makeRowPersistedIndexes()
{
  synchronized (log) {
    try {
      if (events.isEmpty()) {
        makeEvents();
      }
      for (final Map<String, Object> event : events) {
        final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis();
        final List<String> dims = Lists.newArrayList();
        for (Map.Entry<String, Object> entry : event.entrySet()) {
          if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(entry.getKey())) {
            dims.add(entry.getKey());
          }
        }
        // build a one-row index, persist it as a segment, and reload it as a QueryableIndex
        final IncrementalIndex rowIndex = new OnheapIncrementalIndex(timestamp, Granularities.MINUTE, METRIC_AGGS, 1000);
        rowIndex.add(new MapBasedInputRow(timestamp, dims, event));
        // createTempFile reserves a unique name; the file is then deleted and recreated as a directory
        File tmpFile = File.createTempFile("billy", "yay");
        tmpFile.delete();
        tmpFile.mkdirs();
        tmpFile.deleteOnExit();
        INDEX_MERGER.persist(rowIndex, tmpFile, indexSpec);
        rowPersistedIndexes.add(INDEX_IO.loadIndex(tmpFile));
      }
    }
    catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }
}
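The per-row persist-and-reload inside the loop reduces to a short round trip. A sketch assuming INDEX_MERGER, INDEX_IO, indexSpec, and rowIndex are wired up as in the test, with Guava's Files.createTempDir used instead of the createTempFile/delete/mkdirs sequence:

// sketch: persist an in-heap index as a segment directory, then memory-map it back
File segmentDir = Files.createTempDir();
segmentDir.deleteOnExit();
INDEX_MERGER.persist(rowIndex, segmentDir, indexSpec);
QueryableIndex persisted = INDEX_IO.loadIndex(segmentDir);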
Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.
From class IndexMergerTest, method getIndexD3.
private IncrementalIndex getIndexD3() throws Exception
{
  IncrementalIndex toPersist1 = new OnheapIncrementalIndex(
      0L, Granularities.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000
  );
  // three rows share a timestamp but differ in dimension values, so no rollup occurs
  toPersist1.add(new MapBasedInputRow(1, Arrays.asList("d3", "d1", "d2"), ImmutableMap.<String, Object>of("d1", "100", "d2", "4000", "d3", "30000")));
  toPersist1.add(new MapBasedInputRow(1, Arrays.asList("d3", "d1", "d2"), ImmutableMap.<String, Object>of("d1", "300", "d2", "2000", "d3", "40000")));
  toPersist1.add(new MapBasedInputRow(1, Arrays.asList("d3", "d1", "d2"), ImmutableMap.<String, Object>of("d1", "200", "d2", "3000", "d3", "50000")));
  return toPersist1;
}
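For contrast, a sketch with hypothetical values showing what happens when rows do collide on timestamp and dimensions (an IncrementalIndex is iterable as Rows):

// two identical rows roll up into one aggregated row
IncrementalIndex index = new OnheapIncrementalIndex(
    0L, Granularities.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000
);
index.add(new MapBasedInputRow(1, Arrays.asList("d1"), ImmutableMap.<String, Object>of("d1", "a")));
index.add(new MapBasedInputRow(1, Arrays.asList("d1"), ImmutableMap.<String, Object>of("d1", "a")));
for (Row row : index) {
  // a single row comes back, with "count" aggregated to 2
  System.out.println(row.getLongMetric("count"));
}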
Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.
From class IndexGeneratorJob, method makeIncrementalIndex.
private static IncrementalIndex makeIncrementalIndex(Bucket theBucket, AggregatorFactory[] aggs, HadoopDruidIndexerConfig config, Iterable<String> oldDimOrder, Map<String, ColumnCapabilitiesImpl> oldCapabilities)
{
  final HadoopTuningConfig tuningConfig = config.getSchema().getTuningConfig();
  final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(theBucket.time.getMillis())
      .withTimestampSpec(config.getSchema().getDataSchema().getParser().getParseSpec().getTimestampSpec())
      .withDimensionsSpec(config.getSchema().getDataSchema().getParser())
      .withQueryGranularity(config.getSchema().getDataSchema().getGranularitySpec().getQueryGranularity())
      .withMetrics(aggs)
      .withRollup(config.getSchema().getDataSchema().getGranularitySpec().isRollup())
      .build();
  OnheapIncrementalIndex newIndex = new OnheapIncrementalIndex(indexSchema, !tuningConfig.isIgnoreInvalidRows(), tuningConfig.getRowFlushBoundary());
  if (oldDimOrder != null && !indexSchema.getDimensionsSpec().hasCustomDimensions()) {
    // carry over the dimension order and column capabilities of earlier persists
    newIndex.loadDimensionIterable(oldDimOrder, oldCapabilities);
  }
  return newIndex;
}
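Stripped of the Hadoop config lookups, the schema-based construction boils down to the builder chain below. The timestamp, granularity, metric, and row limit are hypothetical stand-ins:

IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
    .withMinTimestamp(new DateTime("2011-01-01T00:00:00.000Z").getMillis())
    .withQueryGranularity(Granularities.MINUTE)
    .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
    .withRollup(true)
    .build();
// the boolean mirrors !tuningConfig.isIgnoreInvalidRows() above; the int is the row flush boundary
OnheapIncrementalIndex index = new OnheapIncrementalIndex(schema, true, 75000);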
Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.
From class MultiValuedDimensionTest, method setupClass.
@BeforeClass
public static void setupClass() throws Exception
{
  incrementalIndex = new OnheapIncrementalIndex(
      0, Granularities.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, true, true, true, 5000
  );
  // the "tags" column is multi-valued: its values are split on the tab list delimiter
  StringInputRowParser parser = new StringInputRowParser(
      new CSVParseSpec(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags")), null, null),
          "\t",
          ImmutableList.of("timestamp", "product", "tags")
      ),
      "UTF-8"
  );
  String[] rows = new String[]{ "2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3", "2011-01-13T00:00:00.000Z,product_2,t3\tt4\tt5", "2011-01-14T00:00:00.000Z,product_3,t5\tt6\tt7", "2011-01-14T00:00:00.000Z,product_4" };
  for (String row : rows) {
    incrementalIndex.add(parser.parse(row));
  }
  persistedSegmentDir = Files.createTempDir();
  TestHelper.getTestIndexMerger().persist(incrementalIndex, persistedSegmentDir, new IndexSpec());
  queryableIndex = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDir);
}
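Given the parser configured above, parsing a single row makes the multi-value behavior concrete; the local variable names are hypothetical:

InputRow parsed = parser.parse("2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3");
List<String> tags = parsed.getDimension("tags");        // ["t1", "t2", "t3"], split on the "\t" list delimiter
List<String> product = parsed.getDimension("product");  // ["product_1"], a single-valued dimension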