
Example 1 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

From class StringColumnAggregationTest, method testGroupBy.

@Test
public void testGroupBy() {
    GroupByQuery query = new GroupByQuery.Builder()
        .setDataSource("test")
        .setGranularity(Granularities.ALL)
        .setInterval("1970/2050")
        .setAggregatorSpecs(
            new DoubleSumAggregatorFactory("singleDoubleSum", singleValue),
            new DoubleSumAggregatorFactory("multiDoubleSum", multiValue),
            new DoubleMaxAggregatorFactory("singleDoubleMax", singleValue),
            new DoubleMaxAggregatorFactory("multiDoubleMax", multiValue),
            new DoubleMinAggregatorFactory("singleDoubleMin", singleValue),
            new DoubleMinAggregatorFactory("multiDoubleMin", multiValue),
            new FloatSumAggregatorFactory("singleFloatSum", singleValue),
            new FloatSumAggregatorFactory("multiFloatSum", multiValue),
            new FloatMaxAggregatorFactory("singleFloatMax", singleValue),
            new FloatMaxAggregatorFactory("multiFloatMax", multiValue),
            new FloatMinAggregatorFactory("singleFloatMin", singleValue),
            new FloatMinAggregatorFactory("multiFloatMin", multiValue),
            new LongSumAggregatorFactory("singleLongSum", singleValue),
            new LongSumAggregatorFactory("multiLongSum", multiValue),
            new LongMaxAggregatorFactory("singleLongMax", singleValue),
            new LongMaxAggregatorFactory("multiLongMax", multiValue),
            new LongMinAggregatorFactory("singleLongMin", singleValue),
            new LongMinAggregatorFactory("multiLongMin", multiValue),
            new LongSumAggregatorFactory("count", "count"))
        .build();
    Sequence<ResultRow> seq = aggregationTestHelper.runQueryOnSegmentsObjs(segments, query);
    Row result = Iterables.getOnlyElement(seq.toList()).toMapBasedRow(query);
    Assert.assertEquals(numRows, result.getMetric("count").longValue());
    Assert.assertEquals(singleValueSum, result.getMetric("singleDoubleSum").doubleValue(), 0.0001d);
    Assert.assertEquals(multiValueSum, result.getMetric("multiDoubleSum").doubleValue(), 0.0001d);
    Assert.assertEquals(singleValueMax, result.getMetric("singleDoubleMax").doubleValue(), 0.0001d);
    Assert.assertEquals(multiValueMax, result.getMetric("multiDoubleMax").doubleValue(), 0.0001d);
    Assert.assertEquals(singleValueMin, result.getMetric("singleDoubleMin").doubleValue(), 0.0001d);
    Assert.assertEquals(multiValueMin, result.getMetric("multiDoubleMin").doubleValue(), 0.0001d);
    Assert.assertEquals(singleValueSum, result.getMetric("singleFloatSum").floatValue(), 0.0001f);
    Assert.assertEquals(multiValueSum, result.getMetric("multiFloatSum").floatValue(), 0.0001f);
    Assert.assertEquals(singleValueMax, result.getMetric("singleFloatMax").floatValue(), 0.0001f);
    Assert.assertEquals(multiValueMax, result.getMetric("multiFloatMax").floatValue(), 0.0001f);
    Assert.assertEquals(singleValueMin, result.getMetric("singleFloatMin").floatValue(), 0.0001f);
    Assert.assertEquals(multiValueMin, result.getMetric("multiFloatMin").floatValue(), 0.0001f);
    Assert.assertEquals((long) singleValueSum, result.getMetric("singleLongSum").longValue());
    Assert.assertEquals((long) multiValueSum, result.getMetric("multiLongSum").longValue());
    Assert.assertEquals((long) singleValueMax, result.getMetric("singleLongMax").longValue());
    Assert.assertEquals((long) multiValueMax, result.getMetric("multiLongMax").longValue());
    Assert.assertEquals((long) singleValueMin, result.getMetric("singleLongMin").longValue());
    Assert.assertEquals((long) multiValueMin, result.getMetric("multiLongMin").longValue());
}
Also used: ResultRow(org.apache.druid.query.groupby.ResultRow) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) Row(org.apache.druid.data.input.Row) InputRow(org.apache.druid.data.input.InputRow) Test(org.junit.Test)
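
For context, Row is a read-only view over a timestamped event map: getMetric(name) coerces the stored value to a Number, and toMapBasedRow(query) above produces exactly this kind of row from a ResultRow. A minimal sketch of building a MapBasedRow directly and reading metrics back (the RowSketch class name is ours, not part of the Druid tests):

import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.java.util.common.DateTimes;

public class RowSketch {
    public static void main(String[] args) {
        // The event map holds dimensions and metrics alike, keyed by name.
        Row row = new MapBasedRow(
                DateTimes.of("2020-01-01"),
                ImmutableMap.of("count", 3L, "singleDoubleSum", 1.5d));
        // getMetric returns a Number; the caller picks the numeric width.
        System.out.println(row.getMetric("count").longValue());             // prints 3
        System.out.println(row.getMetric("singleDoubleSum").doubleValue()); // prints 1.5
    }
}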

Example 2 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

From class DoubleMeanAggregationTest, method testVectorAggretatorUsingGroupByQueryOnDoubleColumn.

@Test
@Parameters(method = "doVectorize")
public void testVectorAggretatorUsingGroupByQueryOnDoubleColumn(boolean doVectorize) throws Exception {
    GroupByQuery query = new GroupByQuery.Builder()
        .setDataSource("test")
        .setGranularity(Granularities.ALL)
        .setInterval("1970/2050")
        .setAggregatorSpecs(new DoubleMeanAggregatorFactory("meanOnDouble", SimpleTestIndex.DOUBLE_COL))
        .setContext(Collections.singletonMap(QueryContexts.VECTORIZE_KEY, doVectorize))
        .build();
    // Round-trip the query through JSON serialization and deserialization to ensure there are no serde issues.
    ObjectMapper jsonMapper = groupByQueryTestHelper.getObjectMapper();
    query = (GroupByQuery) jsonMapper.readValue(jsonMapper.writeValueAsString(query), Query.class);
    Sequence<ResultRow> seq = groupByQueryTestHelper.runQueryOnSegmentsObjs(segments, query);
    Row result = Iterables.getOnlyElement(seq.toList()).toMapBasedRow(query);
    Assert.assertEquals(6.2d, result.getMetric("meanOnDouble").doubleValue(), 0.0001d);
}
Also used: ResultRow(org.apache.druid.query.groupby.ResultRow) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) Row(org.apache.druid.data.input.Row) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Parameters(junitparams.Parameters) Test(org.junit.Test)
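
The serde round-trip in this test is a generic Jackson pattern worth naming: serialize, deserialize, and use the copy, so that anything lost in translation surfaces as a test failure. A hedged sketch of the same idea as a standalone helper (the SerdeSketch and roundTrip names are ours):

import com.fasterxml.jackson.databind.ObjectMapper;

public class SerdeSketch {
    // Writes the value to JSON and reads it back; comparing or running the
    // copy instead of the original catches (de)serialization regressions.
    static <T> T roundTrip(ObjectMapper mapper, T value, Class<T> clazz) throws Exception {
        return mapper.readValue(mapper.writeValueAsString(value), clazz);
    }
}

Note that the test deserializes against Query.class rather than GroupByQuery.class, which also exercises Jackson's polymorphic type resolution before the cast.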

Example 3 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

From class IncrementalIndexStorageAdapterTest, method testFilterByNull.

@Test
public void testFilterByNull() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Collections.singletonList("billy"), ImmutableMap.of("billy", "hi")));
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Collections.singletonList("sally"), ImmutableMap.of("sally", "bo")));
    try (CloseableStupidPool<ByteBuffer> pool = new CloseableStupidPool<>("GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocate(50000))) {
        final GroupByQueryEngine engine = new GroupByQueryEngine(Suppliers.ofInstance(new GroupByQueryConfig() {

            @Override
            public int getMaxIntermediateRows() {
                return 5;
            }
        }), pool);
        final Sequence<Row> rows = engine.process(
            GroupByQuery.builder()
                .setDataSource("test")
                .setGranularity(Granularities.ALL)
                .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))
                .addDimension("billy")
                .addDimension("sally")
                .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                .setDimFilter(DimFilters.dimEquals("sally", (String) null))
                .build(),
            new IncrementalIndexStorageAdapter(index));
        final List<Row> results = rows.toList();
        Assert.assertEquals(1, results.size());
        MapBasedRow row = (MapBasedRow) results.get(0);
        Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
    }
}
Also used: GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) CloseableStupidPool(org.apache.druid.collections.CloseableStupidPool) ByteBuffer(java.nio.ByteBuffer) MapBasedRow(org.apache.druid.data.input.MapBasedRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) Row(org.apache.druid.data.input.Row) GroupByQueryEngine(org.apache.druid.query.groupby.GroupByQueryEngine) Interval(org.joda.time.Interval) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
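
Filtering a dimension against null matches rows where that dimension is absent, which is why only the "billy" row survives here. A construction-only sketch of the equivalent filter built directly with SelectorDimFilter (assuming default null handling; the holder class name is ours):

import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

public class NullFilterSketch {
    // Matches rows whose "sally" dimension is missing or null; the third
    // argument is an optional extraction function, unused here.
    static final DimFilter SALLY_IS_NULL = new SelectorDimFilter("sally", null, null);
}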

Example 4 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

From class IncrementalIndexStorageAdapterTest, method testObjectColumnSelectorOnVaryingColumnSchema.

@Test
public void testObjectColumnSelectorOnVaryingColumnSchema() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(DateTimes.of("2014-09-01T00:00:00"), Collections.singletonList("billy"), ImmutableMap.of("billy", "hi")));
    index.add(new MapBasedInputRow(DateTimes.of("2014-09-01T01:00:00"), Lists.newArrayList("billy", "sally"), ImmutableMap.of("billy", "hip", "sally", "hop")));
    try (CloseableStupidPool<ByteBuffer> pool = new CloseableStupidPool<>("GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocate(50000))) {
        final GroupByQueryEngine engine = new GroupByQueryEngine(Suppliers.ofInstance(new GroupByQueryConfig() {

            @Override
            public int getMaxIntermediateRows() {
                return 5;
            }
        }), pool);
        final Sequence<Row> rows = engine.process(
            GroupByQuery.builder()
                .setDataSource("test")
                .setGranularity(Granularities.ALL)
                .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))
                .addDimension("billy")
                .addDimension("sally")
                .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                .addAggregator(new JavaScriptAggregatorFactory(
                    "fieldLength",
                    Arrays.asList("sally", "billy"),
                    "function(current, s, b) { return current + (s == null ? 0 : s.length) + (b == null ? 0 : b.length); }",
                    "function() { return 0; }",
                    "function(a,b) { return a + b; }",
                    JavaScriptConfig.getEnabledInstance()))
                .build(),
            new IncrementalIndexStorageAdapter(index));
        final List<Row> results = rows.toList();
        Assert.assertEquals(2, results.size());
        MapBasedRow row = (MapBasedRow) results.get(0);
        Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L, "fieldLength", 2.0), row.getEvent());
        row = (MapBasedRow) results.get(1);
        Assert.assertEquals(ImmutableMap.of("billy", "hip", "sally", "hop", "cnt", 1L, "fieldLength", 6.0), row.getEvent());
    }
}
Also used: GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) JavaScriptAggregatorFactory(org.apache.druid.query.aggregation.JavaScriptAggregatorFactory) CloseableStupidPool(org.apache.druid.collections.CloseableStupidPool) ByteBuffer(java.nio.ByteBuffer) MapBasedRow(org.apache.druid.data.input.MapBasedRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) Row(org.apache.druid.data.input.Row) GroupByQueryEngine(org.apache.druid.query.groupby.GroupByQueryEngine) Interval(org.joda.time.Interval) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
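
The three script arguments to JavaScriptAggregatorFactory are easy to misread inside a long builder chain. Isolated below with their roles labeled (a sketch; the holder class name is ours, and JavaScript aggregation must be enabled in the runtime config, as getEnabledInstance() does for tests):

import java.util.Arrays;
import org.apache.druid.js.JavaScriptConfig;
import org.apache.druid.query.aggregation.JavaScriptAggregatorFactory;

public class JsAggSketch {
    static final JavaScriptAggregatorFactory FIELD_LENGTH = new JavaScriptAggregatorFactory(
            "fieldLength",
            Arrays.asList("sally", "billy"),
            // fold one row into the running value, counting missing fields as length 0
            "function(current, s, b) { return current + (s == null ? 0 : s.length) + (b == null ? 0 : b.length); }",
            // reset: the initial accumulator value
            "function() { return 0; }",
            // combine two partial results
            "function(a, b) { return a + b; }",
            JavaScriptConfig.getEnabledInstance());
}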

Example 5 with Row

Use of org.apache.druid.data.input.Row in project druid by druid-io.

From class IncrementalIndexStorageAdapterTest, method testSanity.

@Test
public void testSanity() throws Exception {
    IncrementalIndex index = indexCreator.createIndex();
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Collections.singletonList("billy"), ImmutableMap.of("billy", "hi")));
    index.add(new MapBasedInputRow(System.currentTimeMillis() - 1, Collections.singletonList("sally"), ImmutableMap.of("sally", "bo")));
    try (CloseableStupidPool<ByteBuffer> pool = new CloseableStupidPool<>("GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocate(50000))) {
        final GroupByQueryEngine engine = new GroupByQueryEngine(Suppliers.ofInstance(new GroupByQueryConfig() {

            @Override
            public int getMaxIntermediateRows() {
                return 5;
            }
        }), pool);
        final Sequence<Row> rows = engine.process(
            GroupByQuery.builder()
                .setDataSource("test")
                .setGranularity(Granularities.ALL)
                .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))
                .addDimension("billy")
                .addDimension("sally")
                .addAggregator(new LongSumAggregatorFactory("cnt", "cnt"))
                .build(),
            new IncrementalIndexStorageAdapter(index));
        final List<Row> results = rows.toList();
        Assert.assertEquals(2, results.size());
        MapBasedRow row = (MapBasedRow) results.get(0);
        Assert.assertEquals(ImmutableMap.of("sally", "bo", "cnt", 1L), row.getEvent());
        row = (MapBasedRow) results.get(1);
        Assert.assertEquals(ImmutableMap.of("billy", "hi", "cnt", 1L), row.getEvent());
    }
}
Also used: GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) CloseableStupidPool(org.apache.druid.collections.CloseableStupidPool) ByteBuffer(java.nio.ByteBuffer) MapBasedRow(org.apache.druid.data.input.MapBasedRow) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow) Row(org.apache.druid.data.input.Row) GroupByQueryEngine(org.apache.druid.query.groupby.GroupByQueryEngine) Interval(org.joda.time.Interval) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
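
For contrast with the query-side Row, the rows ingested above are built with MapBasedInputRow, which pairs an event timestamp, the list of dimension names, and the raw event map. A minimal sketch (the holder class and method names are ours):

import java.util.Collections;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;

public class InputRowSketch {
    static InputRow sampleRow() {
        return new MapBasedInputRow(
                System.currentTimeMillis(),             // event timestamp in millis
                Collections.singletonList("billy"),     // dimension names present in this row
                ImmutableMap.of("billy", "hi"));        // the raw event
    }
}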

Aggregations

Usage counts for Row (org.apache.druid.data.input.Row) and the classes that most often appear alongside it:

Row (org.apache.druid.data.input.Row): 54
Test (org.junit.Test): 44
ArrayList (java.util.ArrayList): 32
MapBasedRow (org.apache.druid.data.input.MapBasedRow): 21
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 21
File (java.io.File): 18
Firehose (org.apache.druid.data.input.Firehose): 15
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 15
HashMap (java.util.HashMap): 13
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 13
MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow): 12
DimensionSpec (org.apache.druid.query.dimension.DimensionSpec): 11
List (java.util.List): 10
LongMeanAveragerFactory (org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory): 9
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 8
InputRow (org.apache.druid.data.input.InputRow): 8
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 7
IOException (java.io.IOException): 6
GroupByQueryConfig (org.apache.druid.query.groupby.GroupByQueryConfig): 6
Function (com.google.common.base.Function): 5