
Example 31 with OnheapIncrementalIndex

Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.

From the class SchemalessIndexTest, the method makeIncrementalIndex:

private static IncrementalIndex makeIncrementalIndex(final String resourceFilename, AggregatorFactory[] aggs) {
    URL resource = TestIndex.class.getClassLoader().getResource(resourceFilename);
    log.info("Realtime loading resource[%s]", resource);
    String filename = resource.getFile();
    log.info("Realtime loading index file[%s]", filename);
    // In-heap index: fixed minimum timestamp, minute query granularity, and a 1000-row limit.
    final IncrementalIndex retVal = new OnheapIncrementalIndex(new DateTime("2011-01-12T00:00:00.000Z").getMillis(), Granularities.MINUTE, aggs, 1000);
    try {
        final List<Object> events = jsonMapper.readValue(new File(filename), List.class);
        for (Object obj : events) {
            final Map<String, Object> event = jsonMapper.convertValue(obj, Map.class);
            // Schemaless ingestion: every key that is not the timestamp or a known metric becomes a dimension.
            final List<String> dims = Lists.newArrayList();
            for (Map.Entry<String, Object> entry : event.entrySet()) {
                if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(entry.getKey())) {
                    dims.add(entry.getKey());
                }
            }
            retVal.add(new MapBasedInputRow(new DateTime(event.get(TIMESTAMP)).getMillis(), dims, event));
        }
    } catch (IOException e) {
        // Clear the test class's shared static index field before rethrowing.
        index = null;
        throw Throwables.propagate(e);
    }
    return retVal;
}
Also used: IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), IOException (java.io.IOException), URL (java.net.URL), DateTime (org.joda.time.DateTime), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), File (java.io.File), Map (java.util.Map)
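
A condensed, standalone sketch of the same schemaless pattern may make the flow easier to follow. It assumes a Druid 0.10-era classpath (in particular the io.druid.java.util.common.granularity.Granularities import path); the class name, the buildIndex method, and the TIMESTAMP/METRICS constants are hypothetical stand-ins for the test class's static fields.

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.druid.data.input.MapBasedInputRow;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.OnheapIncrementalIndex;
import org.joda.time.DateTime;

import java.util.List;
import java.util.Map;
import java.util.Set;

public class SchemalessIngestSketch {
    // Hypothetical stand-ins for the test class's TIMESTAMP key and METRICS set.
    private static final String TIMESTAMP = "timestamp";
    private static final Set<String> METRICS = Sets.newHashSet("count");

    public static IncrementalIndex buildIndex(List<Map<String, Object>> events) throws Exception {
        // Same constructor arguments as above: minimum timestamp, query granularity, aggregators, max in-memory rows.
        final IncrementalIndex index = new OnheapIncrementalIndex(
                new DateTime("2011-01-12T00:00:00.000Z").getMillis(),
                Granularities.MINUTE,
                new AggregatorFactory[]{new CountAggregatorFactory("count")},
                1000);
        for (Map<String, Object> event : events) {
            // Schemaless: any key that is not the timestamp or a known metric becomes a dimension.
            final List<String> dims = Lists.newArrayList();
            for (String key : event.keySet()) {
                if (!key.equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(key)) {
                    dims.add(key);
                }
            }
            index.add(new MapBasedInputRow(new DateTime(event.get(TIMESTAMP)).getMillis(), dims, event));
        }
        return index;
    }
}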

Example 32 with OnheapIncrementalIndex

Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.

From the class SchemalessIndexTest, the method makeRowPersistedIndexes:

private static void makeRowPersistedIndexes() {
    // The logger object doubles as the lock for this one-time setup of the shared static collections.
    synchronized (log) {
        try {
            if (events.isEmpty()) {
                makeEvents();
            }
            for (final Map<String, Object> event : events) {
                final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis();
                // Schemaless ingestion: every key that is not the timestamp or a known metric becomes a dimension.
                final List<String> dims = Lists.newArrayList();
                for (Map.Entry<String, Object> entry : event.entrySet()) {
                    if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(entry.getKey())) {
                        dims.add(entry.getKey());
                    }
                }
                // One single-row in-heap index per event.
                final IncrementalIndex rowIndex = new OnheapIncrementalIndex(timestamp, Granularities.MINUTE, METRIC_AGGS, 1000);
                rowIndex.add(new MapBasedInputRow(timestamp, dims, event));
                // Turn a unique temp-file path into a temp directory for the persisted segment.
                File tmpFile = File.createTempFile("billy", "yay");
                tmpFile.delete();
                tmpFile.mkdirs();
                tmpFile.deleteOnExit();
                INDEX_MERGER.persist(rowIndex, tmpFile, indexSpec);
                rowPersistedIndexes.add(INDEX_IO.loadIndex(tmpFile));
            }
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
}
Also used: IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), IOException (java.io.IOException), DateTime (org.joda.time.DateTime), MapBasedInputRow (io.druid.data.input.MapBasedInputRow), Map (java.util.Map), File (java.io.File)
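
The persist-and-reload round trip in the middle of this method can also be sketched in isolation. This is an illustration only: it borrows the test-only TestHelper.getTestIndexMerger()/getTestIndexIO() helpers used in Example 35 below (assumed here to live in io.druid.segment), and the class and method names are hypothetical.

import com.google.common.collect.ImmutableMap;
import com.google.common.io.Files;
import io.druid.data.input.MapBasedInputRow;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.segment.IndexSpec;
import io.druid.segment.QueryableIndex;
import io.druid.segment.TestHelper;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.OnheapIncrementalIndex;

import java.io.File;
import java.util.Arrays;

public class PersistRoundTripSketch {
    public static QueryableIndex persistAndReload() throws Exception {
        // A one-row in-heap index, mirroring the per-event indexes built above.
        final IncrementalIndex rowIndex = new OnheapIncrementalIndex(
                0L,
                Granularities.NONE,
                new AggregatorFactory[]{new CountAggregatorFactory("count")},
                1000);
        rowIndex.add(new MapBasedInputRow(1L, Arrays.asList("d1"),
                ImmutableMap.<String, Object>of("d1", "some value")));

        // Persist the in-memory index to a temp directory, then load it back as a QueryableIndex.
        final File segmentDir = Files.createTempDir();
        segmentDir.deleteOnExit();
        TestHelper.getTestIndexMerger().persist(rowIndex, segmentDir, new IndexSpec());
        return TestHelper.getTestIndexIO().loadIndex(segmentDir);
    }
}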

Example 33 with OnheapIncrementalIndex

Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.

From the class IndexMergerTest, the method getIndexD3:

private IncrementalIndex getIndexD3() throws Exception {
    IncrementalIndex toPersist1 = new OnheapIncrementalIndex(0L, Granularities.NONE, new AggregatorFactory[] { new CountAggregatorFactory("count") }, 1000);
    // Three rows share timestamp 1 and the explicit dimension order d3, d1, d2, but their values differ, so rollup keeps them as separate rows.
    toPersist1.add(new MapBasedInputRow(1, Arrays.asList("d3", "d1", "d2"), ImmutableMap.<String, Object>of("d1", "100", "d2", "4000", "d3", "30000")));
    toPersist1.add(new MapBasedInputRow(1, Arrays.asList("d3", "d1", "d2"), ImmutableMap.<String, Object>of("d1", "300", "d2", "2000", "d3", "40000")));
    toPersist1.add(new MapBasedInputRow(1, Arrays.asList("d3", "d1", "d2"), ImmutableMap.<String, Object>of("d1", "200", "d2", "3000", "d3", "50000")));
    return toPersist1;
}
Also used: CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), IncrementalIndex (io.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), MapBasedInputRow (io.druid.data.input.MapBasedInputRow)

Example 34 with OnheapIncrementalIndex

Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.

From the class IndexGeneratorJob, the method makeIncrementalIndex:

private static IncrementalIndex makeIncrementalIndex(Bucket theBucket, AggregatorFactory[] aggs, HadoopDruidIndexerConfig config, Iterable<String> oldDimOrder, Map<String, ColumnCapabilitiesImpl> oldCapabilities) {
    final HadoopTuningConfig tuningConfig = config.getSchema().getTuningConfig();
    // Derive the index schema from the ingestion spec: bucket start time, parser specs, query granularity, metrics, and rollup flag.
    final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
            .withMinTimestamp(theBucket.time.getMillis())
            .withTimestampSpec(config.getSchema().getDataSchema().getParser().getParseSpec().getTimestampSpec())
            .withDimensionsSpec(config.getSchema().getDataSchema().getParser())
            .withQueryGranularity(config.getSchema().getDataSchema().getGranularitySpec().getQueryGranularity())
            .withMetrics(aggs)
            .withRollup(config.getSchema().getDataSchema().getGranularitySpec().isRollup())
            .build();
    OnheapIncrementalIndex newIndex = new OnheapIncrementalIndex(indexSchema, !tuningConfig.isIgnoreInvalidRows(), tuningConfig.getRowFlushBoundary());
    if (oldDimOrder != null && !indexSchema.getDimensionsSpec().hasCustomDimensions()) {
        newIndex.loadDimensionIterable(oldDimOrder, oldCapabilities);
    }
    return newIndex;
}
Also used: OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), IncrementalIndexSchema (io.druid.segment.incremental.IncrementalIndexSchema)
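
For comparison with the simpler constructor in the earlier examples, here is a minimal sketch of driving the schema-based constructor directly, using only the Builder calls that appear above (timestamp spec and dimensions spec are left at their defaults). The class and method names are hypothetical, and the boolean argument simply mirrors the !tuningConfig.isIgnoreInvalidRows() flag from the example.

import io.druid.java.util.common.granularity.Granularities;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.incremental.OnheapIncrementalIndex;

public class SchemaBasedIndexSketch {
    public static OnheapIncrementalIndex build() {
        // Assemble the schema with the same Builder methods used in Example 34.
        final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
                .withMinTimestamp(0L)
                .withQueryGranularity(Granularities.NONE)
                .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
                .withRollup(true)
                .build();
        // Same three-argument form as Example 34: schema, the boolean passed there as !isIgnoreInvalidRows(), and the max row count.
        return new OnheapIncrementalIndex(schema, true, 5000);
    }
}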

Example 35 with OnheapIncrementalIndex

Use of io.druid.segment.incremental.OnheapIncrementalIndex in project druid by druid-io.

From the class MultiValuedDimensionTest, the method setupClass:

@BeforeClass
public static void setupClass() throws Exception {
    incrementalIndex = new OnheapIncrementalIndex(0, Granularities.NONE, new AggregatorFactory[] { new CountAggregatorFactory("count") }, true, true, true, 5000);
    // CSV parser whose "tags" column is multi-valued; "\t" is the list delimiter used to split it.
    StringInputRowParser parser = new StringInputRowParser(
            new CSVParseSpec(
                    new TimestampSpec("timestamp", "iso", null),
                    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("product", "tags")), null, null),
                    "\t",
                    ImmutableList.of("timestamp", "product", "tags")),
            "UTF-8");
    String[] rows = new String[] {
            "2011-01-12T00:00:00.000Z,product_1,t1\tt2\tt3",
            "2011-01-13T00:00:00.000Z,product_2,t3\tt4\tt5",
            "2011-01-14T00:00:00.000Z,product_3,t5\tt6\tt7",
            // the last row has no tags, exercising the empty multi-value case
            "2011-01-14T00:00:00.000Z,product_4" };
    for (String row : rows) {
        incrementalIndex.add(parser.parse(row));
    }
    persistedSegmentDir = Files.createTempDir();
    TestHelper.getTestIndexMerger().persist(incrementalIndex, persistedSegmentDir, new IndexSpec());
    queryableIndex = TestHelper.getTestIndexIO().loadIndex(persistedSegmentDir);
}
Also used: IndexSpec (io.druid.segment.IndexSpec), CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory), CSVParseSpec (io.druid.data.input.impl.CSVParseSpec), OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex), StringInputRowParser (io.druid.data.input.impl.StringInputRowParser), TimestampSpec (io.druid.data.input.impl.TimestampSpec), DimensionsSpec (io.druid.data.input.impl.DimensionsSpec), AggregatorFactory (io.druid.query.aggregation.AggregatorFactory), BeforeClass (org.junit.BeforeClass)

Aggregations

OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 41 usages
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 33 usages
MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 27 usages
File (java.io.File): 17 usages
DateTime (org.joda.time.DateTime): 17 usages
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 16 usages
Test (org.junit.Test): 16 usages
DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 15 usages
IncrementalIndexSchema (io.druid.segment.incremental.IncrementalIndexSchema): 12 usages
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 8 usages
IOException (java.io.IOException): 8 usages
SpatialDimensionSchema (io.druid.data.input.impl.SpatialDimensionSchema): 7 usages
IncrementalIndexTest (io.druid.segment.data.IncrementalIndexTest): 7 usages
TimestampSpec (io.druid.data.input.impl.TimestampSpec): 6 usages
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 6 usages
Random (java.util.Random): 6 usages
StringInputRowParser (io.druid.data.input.impl.StringInputRowParser): 5 usages
IndexSpec (io.druid.segment.IndexSpec): 5 usages
Result (io.druid.query.Result): 4 usages
InputRow (io.druid.data.input.InputRow): 3 usages