
Example 16 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IncrementalIndexStorageAdapterTest, method testObjectColumnSelectorOnVaryingColumnSchema.

@Test
public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(DateTimes.of("2014-09-01T00:00:00"), Collections.singletonList("billy"), ImmutableMap.of("billy", "hi")));
    index.add(new MapBasedInputRow(DateTimes.of("2014-09-01T01:00:00"), Lists.newArrayList("billy", "sally"), ImmutableMap.of("billy", "hip", "sally", "hop")));
    try (CloseableStupidPool<ByteBuffer> pool = new CloseableStupidPool<>("GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocate(50000))) {
        final GroupByQueryEngine engine = new GroupByQueryEngine(Suppliers.ofInstance(new GroupByQueryConfig() {

            @Override
            public int getMaxIntermediateRows() {
                return 5;
            }
        }), pool);
        final Sequence<Row> rows = engine.process(
            GroupByQuery.builder()
                        .setDataSource("test")
                        .setGranularity(Granularities.ALL)
                        .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))
                        .addDimension("billy")
                        .addDimension("sally")
                        .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                        .addAggregator(new JavaScriptAggregatorFactory(
                            "fieldLength",
                            Arrays.asList("sally", "billy"),
                            "function(current, s, b) { return current + (s == null ? 0 : s.length) + (b == null ? 0 : b.length); }",
                            "function() { return 0; }",
                            "function(a,b) { return a + b; }",
                            JavaScriptConfig.getEnabledInstance()
                        ))
                        .build(),
            new IncrementalIndexStorageAdapter(index)
        );
        final List<Row> results = rows.toList();
        Assert.assertEquals(2, results.size());
        MapBasedRow row = (MapBasedRow) results.get(0);
        Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L, "fieldLength", 2.0), row.getEvent());
        row = (MapBasedRow) results.get(1);
        Assert.assertEquals(ImmutableMap.of("billy", "hip", "sally", "hop", "cnt", 1L, "fieldLength", 6.0), row.getEvent());
    }
}
Also used: GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig), LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory), JavaScriptAggregatorFactory(org.apache.druid.query.aggregation.JavaScriptAggregatorFactory), CloseableStupidPool(org.apache.druid.collections.CloseableStupidPool), ByteBuffer(java.nio.ByteBuffer), MapBasedRow(org.apache.druid.data.input.MapBasedRow), MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow), Row(org.apache.druid.data.input.Row), GroupByQueryEngine(org.apache.druid.query.groupby.GroupByQueryEngine), Interval(org.joda.time.Interval), InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest), Test(org.junit.Test)
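
For orientation, here is a minimal, self-contained sketch of constructing and reading back a MapBasedInputRow on its own, outside a test fixture. It is not taken from the Druid test suite (the class name is arbitrary); it assumes only the constructor shown in the examples above and the accessors inherited from Druid's Row/InputRow interfaces (getTimestamp, getDimensions, getDimension, getEvent), with the expected output indicated in the comments.

import java.util.Collections;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.java.util.common.DateTimes;

public class MapBasedInputRowSketch {
    public static void main(String[] args) {
        // Same construction pattern as the tests above: a timestamp, the list of
        // dimension names, and the event map holding the actual values.
        MapBasedInputRow row = new MapBasedInputRow(
            DateTimes.of("2014-09-01T00:00:00"),
            Collections.singletonList("billy"),
            ImmutableMap.of("billy", "hi")
        );
        // Read the pieces back through the Row/InputRow accessors.
        System.out.println(row.getTimestamp());        // e.g. 2014-09-01T00:00:00.000Z
        System.out.println(row.getDimensions());       // [billy]
        System.out.println(row.getDimension("billy")); // [hi]
        System.out.println(row.getEvent());            // {billy=hi}
    }
}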

Example 17 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IncrementalIndexStorageAdapterTest, method testSanity.

@Test
public void testSanity() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Collections.singletonList("billy"), ImmutableMap.of("billy", "hi")));
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Collections.singletonList("sally"), ImmutableMap.of("sally", "bo")));
    try (CloseableStupidPool<ByteBuffer> pool = new CloseableStupidPool<>("GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocate(50000))) {
        final GroupByQueryEngine engine = new GroupByQueryEngine(Suppliers.ofInstance(new GroupByQueryConfig() {

            @Override
            public int getMaxIntermediateRows() {
                return 5;
            }
        }), pool);
        final Sequence<Row> rows = engine.process(
            GroupByQuery.builder()
                        .setDataSource("test")
                        .setGranularity(Granularities.ALL)
                        .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))
                        .addDimension("billy")
                        .addDimension("sally")
                        .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                        .build(),
            new IncrementalIndexStorageAdapter(index)
        );
        final List<Row> results = rows.toList();
        Assert.assertEquals(2, results.size());
        MapBasedRow row = (MapBasedRow) results.get(0);
        Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1L), row.getEvent());
        row = (MapBasedRow) results.get(1);
        Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
    }
}
Also used: GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig), LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory), CloseableStupidPool(org.apache.druid.collections.CloseableStupidPool), ByteBuffer(java.nio.ByteBuffer), MapBasedRow(org.apache.druid.data.input.MapBasedRow), MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow), Row(org.apache.druid.data.input.Row), GroupByQueryEngine(org.apache.druid.query.groupby.GroupByQueryEngine), Interval(org.joda.time.Interval), InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest), Test(org.junit.Test)

Example 18 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IncrementalIndexTest, method sameRow.

@Test
public void sameRow() throws IndexSizeExceededException {
    MapBasedInputRow row = new MapBasedInputRow(System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B"));
    IncrementalIndex index = indexCreator.createIndex();
    index.add(row);
    index.add(row);
    index.add(row);
    Assert.assertEquals(1, index.size());
}
Also used: MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow), InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest), Test(org.junit.Test)
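
The sameRow test above relies on rollup: the one row object is added three times with an identical timestamp and identical dimension values, so the adds collapse into a single index entry. As a contrast, the following hypothetical companion test is a sketch, not code from the Druid repository; it reuses the same indexCreator fixture and index.size() call seen above, pins the timestamp, and varies only one dimension value, so the two rows remain separate regardless of rollup.

@Test
public void distinctValuesStaySeparate() throws IndexSizeExceededException {
    IncrementalIndex index = indexCreator.createIndex();
    // Fix the timestamp so the only difference between the two rows is the value of "billy".
    long timestamp = System.currentTimeMillis() - 1;
    index.add(new MapBasedInputRow(timestamp, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B")));
    index.add(new MapBasedInputRow(timestamp, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "C", "joe", "B")));
    // Distinct dimension-value combinations are never merged, so the index holds two rows.
    Assert.assertEquals(2, index.size());
}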

Example 19 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IncrementalIndexTest, method controlTest.

@Test
public void controlTest() throws IndexSizeExceededException {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B")));
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "C", "joe", "B")));
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B")));
}
Also used: MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow), InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest), Test(org.junit.Test)

Example 20 with MapBasedInputRow

Use of org.apache.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class IncrementalIndexTest, method testDuplicateDimensions.

@Test(expected = ISE.class)
public void testDuplicateDimensions() throws IndexSizeExceededException {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B")));
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe", "joe"), ImmutableMap.of("billy", "A", "joe", "B")));
}
Also used: MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow), InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest), Test(org.junit.Test)

Aggregations

MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 114
Test (org.junit.Test): 77
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 46
IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 42
OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex): 38
InputRow (org.apache.druid.data.input.InputRow): 31
File (java.io.File): 24
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 21
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 20
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 19
ArrayList (java.util.ArrayList): 17
HashMap (java.util.HashMap): 15
DateTime (org.joda.time.DateTime): 15
TimestampSpec (org.apache.druid.data.input.impl.TimestampSpec): 14
IncrementalIndexTest (org.apache.druid.segment.data.IncrementalIndexTest): 14
Interval (org.joda.time.Interval): 14
IOException (java.io.IOException): 13
DoubleDimensionSchema (org.apache.druid.data.input.impl.DoubleDimensionSchema): 13
IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema): 12
ImmutableMap (com.google.common.collect.ImmutableMap): 11