Example 6 with AggregationTestHelper

Use of org.apache.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From the class SketchAggregationWithSimpleDataTest, method testTopNQueryWithSketchConstant.

@Test
public void testTopNQueryWithSketchConstant() throws Exception {
    // Build a TopN-query test helper using the sketch module's Jackson modules.
    AggregationTestHelper topNQueryAggregationTestHelper = AggregationTestHelper.createTopNQueryAggregationTestHelper(sm.getJacksonModules(), tempFolder);
    // Run the TopN query defined in topn_query_sketch_const.json over the two pre-built segments.
    Sequence seq = topNQueryAggregationTestHelper.runQueryOnSegments(ImmutableList.of(s1, s2), (Query) SketchAggregationTest.readQueryFromClasspath("topn_query_sketch_const.json", topNQueryAggregationTestHelper.getObjectMapper(), vectorize));
    Result<TopNResultValue> result = (Result<TopNResultValue>) Iterables.getOnlyElement(seq.toList());
    Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp());
    DimensionAndMetricValueExtractor value1 = Iterables.get(result.getValue().getValue(), 0);
    Assert.assertEquals(38.0, value1.getDoubleMetric("sketch_count"), 0.01);
    Assert.assertEquals(38.0, value1.getDoubleMetric("sketchEstimatePostAgg"), 0.01);
    Assert.assertEquals(2.0, value1.getDoubleMetric("sketchEstimatePostAggForSketchConstant"), 0.01);
    Assert.assertEquals(39.0, value1.getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
    Assert.assertEquals(1.0, value1.getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
    Assert.assertEquals(37.0, value1.getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
    Assert.assertEquals("product_3", value1.getDimensionValue("product"));
    DimensionAndMetricValueExtractor value2 = Iterables.get(result.getValue().getValue(), 1);
    Assert.assertEquals(42.0, value2.getDoubleMetric("sketch_count"), 0.01);
    Assert.assertEquals(42.0, value2.getDoubleMetric("sketchEstimatePostAgg"), 0.01);
    Assert.assertEquals(2.0, value2.getDoubleMetric("sketchEstimatePostAggForSketchConstant"), 0.01);
    Assert.assertEquals(42.0, value2.getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
    Assert.assertEquals(2.0, value2.getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
    Assert.assertEquals(40.0, value2.getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
    Assert.assertEquals("product_1", value2.getDimensionValue("product"));
    DimensionAndMetricValueExtractor value3 = Iterables.get(result.getValue().getValue(), 2);
    Assert.assertEquals(42.0, value3.getDoubleMetric("sketch_count"), 0.01);
    Assert.assertEquals(42.0, value3.getDoubleMetric("sketchEstimatePostAgg"), 0.01);
    Assert.assertEquals(2.0, value3.getDoubleMetric("sketchEstimatePostAggForSketchConstant"), 0.01);
    Assert.assertEquals(42.0, value3.getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
    Assert.assertEquals(2.0, value3.getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
    Assert.assertEquals(40.0, value3.getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
    Assert.assertEquals("product_2", value3.getDimensionValue("product"));
}
Also used: TopNResultValue (org.apache.druid.query.topn.TopNResultValue), AggregationTestHelper (org.apache.druid.query.aggregation.AggregationTestHelper), Sequence (org.apache.druid.java.util.common.guava.Sequence), DimensionAndMetricValueExtractor (org.apache.druid.query.topn.DimensionAndMetricValueExtractor), Result (org.apache.druid.query.Result), GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
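
The TopN query itself is loaded from topn_query_sketch_const.json, which is not reproduced on this page. Purely as a hypothetical sketch (the structure below is illustrative, not the contents of that file), a post-aggregation that intersects a query-time theta sketch with a sketch constant in the datasketches extension generally takes a shape like this:

{
  "type": "thetaSketchEstimate",
  "name": "sketchIntersectionPostAggEstimate",
  "field": {
    "type": "thetaSketchSetOp",
    "name": "sketchIntersection",
    "func": "INTERSECT",
    "fields": [
      { "type": "fieldAccess", "fieldName": "sketch_count" },
      { "type": "thetaSketchConstant", "name": "sketchConstant", "value": "<Base64-encoded theta sketch>" }
    ]
  }
}

The assertions above then compare the estimates produced by the union, intersection, and A-not-B post-aggregators for each value of the product dimension.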

Example 7 with AggregationTestHelper

Use of org.apache.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From the class HyperUniquesAggregationTest, method testIngestAndQueryPrecomputedHll.

@Test
public void testIngestAndQueryPrecomputedHll() throws Exception {
    try (final AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(Collections.singletonList(new AggregatorsModule()), config, tempFolder)) {
        String metricSpec = "[{" + "\"type\": \"hyperUnique\"," + "\"name\": \"index_hll\"," + "\"fieldName\": \"preComputedHll\"," + "\"isInputHyperUnique\": true" + "}]";
        String parseSpec = "{" + "\"type\" : \"string\"," + "\"parseSpec\" : {" + "    \"format\" : \"tsv\"," + "    \"timestampSpec\" : {" + "        \"column\" : \"timestamp\"," + "        \"format\" : \"auto\"" + "}," + "    \"dimensionsSpec\" : {" + "        \"dimensions\": []," + "        \"dimensionExclusions\" : []," + "        \"spatialDimensions\" : []" + "    }," + "    \"columns\": [\"timestamp\", \"market\", \"preComputedHll\"]" + "  }" + "}";
        String query = "{" + "\"queryType\": \"groupBy\"," + "\"dataSource\": \"test_datasource\"," + "\"granularity\": \"ALL\"," + "\"dimensions\": []," + "\"aggregations\": [" + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }" + "]," + "\"postAggregations\": [" + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }" + "]," + "\"intervals\": [ \"1970/2050\" ]" + "}";
        // Ingest the TSV sample file, build a segment with the hyperUnique metric, and run the groupBy query against it.
        Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(new File(this.getClass().getClassLoader().getResource("druid.hll.sample.tsv").getFile()), parseSpec, metricSpec, 0, Granularities.DAY, 50000, query);
        final ResultRow resultRow = seq.toList().get(0);
        Assert.assertEquals("index_hll", 4.0, ((Number) resultRow.get(0)).floatValue(), 0.1);
        Assert.assertEquals("index_unique_count", 4.0, ((Number) resultRow.get(1)).floatValue(), 0.1);
    }
}
Also used: AggregatorsModule (org.apache.druid.jackson.AggregatorsModule), ResultRow (org.apache.druid.query.groupby.ResultRow), AggregationTestHelper (org.apache.druid.query.aggregation.AggregationTestHelper), File (java.io.File), Test (org.junit.Test), GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest)
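
For readability, the groupBy query assembled from the string literal above corresponds to the following JSON (a direct reconstruction, only reformatted): it re-aggregates the ingested index_hll column with a hyperUnique aggregator and exposes the estimate through a hyperUniqueCardinality post-aggregator.

{
  "queryType": "groupBy",
  "dataSource": "test_datasource",
  "granularity": "ALL",
  "dimensions": [],
  "aggregations": [
    { "type": "hyperUnique", "name": "index_hll", "fieldName": "index_hll" }
  ],
  "postAggregations": [
    { "type": "hyperUniqueCardinality", "name": "index_unique_count", "fieldName": "index_hll" }
  ],
  "intervals": [ "1970/2050" ]
}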

Example 8 with AggregationTestHelper

Use of org.apache.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From the class FinalizingFieldAccessPostAggregatorTest, method testIngestAndQueryWithArithmeticPostAggregator.

@Test
public void testIngestAndQueryWithArithmeticPostAggregator() throws Exception {
    try (final AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(Collections.singletonList(new AggregatorsModule()), GroupByQueryRunnerTest.testConfigs().get(0), tempFoler)) {
        String metricSpec = "[{\"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"market\"}," + "{\"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"quality\"}]";
        String parseSpec = "{" + "\"type\" : \"string\"," + "\"parseSpec\" : {" + "    \"format\" : \"tsv\"," + "    \"timestampSpec\" : {" + "        \"column\" : \"timestamp\"," + "        \"format\" : \"auto\"" + "}," + "    \"dimensionsSpec\" : {" + "        \"dimensions\": []," + "        \"dimensionExclusions\" : []," + "        \"spatialDimensions\" : []" + "    }," + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]" + "  }" + "}";
        String query = "{" + "\"queryType\": \"groupBy\"," + "\"dataSource\": \"test_datasource\"," + "\"granularity\": \"ALL\"," + "\"dimensions\": []," + "\"aggregations\": [" + "  { \"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"hll_market\" }," + "  { \"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"hll_quality\" }" + "]," + "\"postAggregations\": [" + "  { \"type\": \"arithmetic\", \"name\": \"uniq_add\", \"fn\": \"+\", \"fields\":[" + "    { \"type\": \"finalizingFieldAccess\", \"name\": \"uniq_market\", \"fieldName\": \"hll_market\" }," + "    { \"type\": \"finalizingFieldAccess\", \"name\": \"uniq_quality\", \"fieldName\": \"hll_quality\" }]" + "  }" + "]," + "\"intervals\": [ \"1970/2050\" ]" + "}";
        // Ingest the TSV sample, index it with the two hyperUnique metrics, and run the groupBy query with the arithmetic post-aggregator.
        Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()), parseSpec, metricSpec, 0, Granularities.NONE, 50000, query);
        final ResultRow resultRow = seq.toList().get(0);
        Assert.assertEquals("hll_market", 3.0, ((Number) resultRow.get(0)).floatValue(), 0.1);
        Assert.assertEquals("hll_quality", 9.0, ((Number) resultRow.get(1)).floatValue(), 0.1);
        Assert.assertEquals("uniq_add", 12.0, ((Number) resultRow.get(2)).floatValue(), 0.1);
    }
}
Also used: AggregatorsModule (org.apache.druid.jackson.AggregatorsModule), ResultRow (org.apache.druid.query.groupby.ResultRow), AggregationTestHelper (org.apache.druid.query.aggregation.AggregationTestHelper), File (java.io.File), GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
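
Reformatted for readability, the query string above corresponds to this JSON: two hyperUnique aggregations whose finalized estimates are summed by an arithmetic post-aggregator over finalizingFieldAccess fields, which is what the uniq_add assertion checks.

{
  "queryType": "groupBy",
  "dataSource": "test_datasource",
  "granularity": "ALL",
  "dimensions": [],
  "aggregations": [
    { "type": "hyperUnique", "name": "hll_market", "fieldName": "hll_market" },
    { "type": "hyperUnique", "name": "hll_quality", "fieldName": "hll_quality" }
  ],
  "postAggregations": [
    { "type": "arithmetic", "name": "uniq_add", "fn": "+", "fields": [
      { "type": "finalizingFieldAccess", "name": "uniq_market", "fieldName": "hll_market" },
      { "type": "finalizingFieldAccess", "name": "uniq_quality", "fieldName": "hll_quality" }
    ]}
  ],
  "intervals": [ "1970/2050" ]
}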

Aggregations

AggregationTestHelper (org.apache.druid.query.aggregation.AggregationTestHelper): 8 usages
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 7 usages
Test (org.junit.Test): 7 usages
File (java.io.File): 5 usages
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 5 usages
Sequence (org.apache.druid.java.util.common.guava.Sequence): 4 usages
Result (org.apache.druid.query.Result): 4 usages
ResultRow (org.apache.druid.query.groupby.ResultRow): 4 usages
AggregatorsModule (org.apache.druid.jackson.AggregatorsModule): 3 usages
DimensionAndMetricValueExtractor (org.apache.druid.query.topn.DimensionAndMetricValueExtractor): 3 usages
TopNResultValue (org.apache.druid.query.topn.TopNResultValue): 3 usages
TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue): 2 usages
Before (org.junit.Before): 2 usages
ImmutableList (com.google.common.collect.ImmutableList): 1 usage
ImmutableMap (com.google.common.collect.ImmutableMap): 1 usage
Iterables (com.google.common.collect.Iterables): 1 usage
Files (com.google.common.io.Files): 1 usage
IOException (java.io.IOException): 1 usage
StandardCharsets (java.nio.charset.StandardCharsets): 1 usage
ArrayList (java.util.ArrayList): 1 usage