Search in sources:

Example 1 with AggregationTestHelper

use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From class SketchAggregationTestWithSimpleData, method testSimpleDataIngestAndGpByQuery.

@Test
public void testSimpleDataIngestAndGpByQuery() throws Exception {
    // Run the groupBy query from simple_test_data_group_by_query.json over the two
    // pre-built segments (s1, s2) and check one result row per product.
    AggregationTestHelper gpByQueryAggregationTestHelper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(sm.getJacksonModules(), config, tempFolder);
    Sequence seq = gpByQueryAggregationTestHelper.runQueryOnSegments(ImmutableList.of(s1, s2), readFileFromClasspathAsString("simple_test_data_group_by_query.json"));
    List<Row> results = Sequences.toList(seq, Lists.<Row>newArrayList());
    Assert.assertEquals(5, results.size());
    Assert.assertEquals(
        ImmutableList.of(
            expectedSimpleDataRow("product_3", 38.0),
            expectedSimpleDataRow("product_1", 42.0),
            expectedSimpleDataRow("product_2", 42.0),
            expectedSimpleDataRow("product_4", 42.0),
            expectedSimpleDataRow("product_5", 42.0)
        ),
        results
    );
}

/**
 * Builds the expected groupBy result row for a single product. Every expected row in
 * this test shares the same shape: the sketch count and all sketch estimate
 * post-aggregators carry the same value, while the A-not-B estimate and the
 * aggregation over a non-existent column are expected to be 0.
 *
 * @param product     expected value of the "product" dimension
 * @param sketchValue expected value for the sketch count and all estimate post-aggs
 * @return the expected row, timestamped 2014-10-19T00:00:00.000Z like the test data
 */
private static MapBasedRow expectedSimpleDataRow(String product, double sketchValue) {
    return new MapBasedRow(
        DateTime.parse("2014-10-19T00:00:00.000Z"),
        ImmutableMap.<String, Object>builder()
            .put("product", product)
            .put("sketch_count", sketchValue)
            .put("sketchEstimatePostAgg", sketchValue)
            .put("sketchUnionPostAggEstimate", sketchValue)
            .put("sketchIntersectionPostAggEstimate", sketchValue)
            .put("sketchAnotBPostAggEstimate", 0.0)
            .put("non_existing_col_validation", 0.0)
            .build()
    );
}
Also used : MapBasedRow(io.druid.data.input.MapBasedRow) AggregationTestHelper(io.druid.query.aggregation.AggregationTestHelper) Sequence(io.druid.java.util.common.guava.Sequence) Row(io.druid.data.input.Row) MapBasedRow(io.druid.data.input.MapBasedRow) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)

Example 2 with AggregationTestHelper

use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From class SketchAggregationTestWithSimpleData, method testSimpleDataIngestAndSelectQuery.

@Test
public void testSimpleDataIngestAndSelectQuery() throws Exception {
    // Register the sketch Jackson modules before constructing the helper.
    SketchModule sketchModule = new SketchModule();
    sketchModule.configure(null);
    AggregationTestHelper selectHelper = AggregationTestHelper.createSelectQueryAggregationTestHelper(sketchModule.getJacksonModules(), tempFolder);

    // select_query.json over both pre-built segments should produce exactly
    // one result holding a page of 100 events.
    Sequence resultSequence = selectHelper.runQueryOnSegments(ImmutableList.of(s1, s2), readFileFromClasspathAsString("select_query.json"));
    Result<SelectResultValue> onlyResult = (Result<SelectResultValue>) Iterables.getOnlyElement(Sequences.toList(resultSequence, Lists.newArrayList()));

    Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), onlyResult.getTimestamp());
    Assert.assertEquals(100, onlyResult.getValue().getEvents().size());
    // The raw sketch column comes back base64-encoded; pin the first event's value.
    Assert.assertEquals("AgMDAAAazJMCAAAAAACAPzz9j7pWTMdROWGf15uY1nI=", onlyResult.getValue().getEvents().get(0).getEvent().get("pty_country"));
}
Also used : SelectResultValue(io.druid.query.select.SelectResultValue) AggregationTestHelper(io.druid.query.aggregation.AggregationTestHelper) Sequence(io.druid.java.util.common.guava.Sequence) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)

Example 3 with AggregationTestHelper

use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From class SketchAggregationTestWithSimpleData, method testSimpleDataIngestAndTopNQuery.

@Test
public void testSimpleDataIngestAndTopNQuery() throws Exception {
    AggregationTestHelper topNHelper = AggregationTestHelper.createTopNQueryAggregationTestHelper(sm.getJacksonModules(), tempFolder);

    // topn_query.json over both pre-built segments is expected to yield a single
    // result containing a single dimension/metric row.
    Sequence resultSequence = topNHelper.runQueryOnSegments(ImmutableList.of(s1, s2), readFileFromClasspathAsString("topn_query.json"));
    Result<TopNResultValue> onlyResult = (Result<TopNResultValue>) Iterables.getOnlyElement(Sequences.toList(resultSequence, Lists.newArrayList()));
    Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), onlyResult.getTimestamp());

    DimensionAndMetricValueExtractor topRow = Iterables.getOnlyElement(onlyResult.getValue().getValue());
    // The sketch count and all estimate post-aggregators should agree at 38 for the
    // top product; the A-not-B estimate and the aggregation over a column that does
    // not exist should both be 0.
    Assert.assertEquals(38.0, topRow.getDoubleMetric("sketch_count"), 0.01);
    Assert.assertEquals(38.0, topRow.getDoubleMetric("sketchEstimatePostAgg"), 0.01);
    Assert.assertEquals(38.0, topRow.getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
    Assert.assertEquals(38.0, topRow.getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
    Assert.assertEquals(0.0, topRow.getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
    Assert.assertEquals(0.0, topRow.getDoubleMetric("non_existing_col_validation"), 0.01);
    Assert.assertEquals("product_3", topRow.getDimensionValue("product"));
}
Also used : TopNResultValue(io.druid.query.topn.TopNResultValue) AggregationTestHelper(io.druid.query.aggregation.AggregationTestHelper) Sequence(io.druid.java.util.common.guava.Sequence) DateTime(org.joda.time.DateTime) DimensionAndMetricValueExtractor(io.druid.query.topn.DimensionAndMetricValueExtractor) Result(io.druid.query.Result) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)

Example 4 with AggregationTestHelper

use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From class HyperUniquesAggregationTest, method testIngestAndQuery.

@Test
public void testIngestAndQuery() throws Exception {
    AggregationTestHelper groupByHelper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
        Lists.newArrayList(new AggregatorsModule()), config, tempFolder);

    // Ingest-time metric: build an HLL sketch over the "market" column.
    String metricSpec = "[{"
        + "\"type\": \"hyperUnique\","
        + "\"name\": \"index_hll\","
        + "\"fieldName\": \"market\""
        + "}]";

    // TSV parse spec for druid.sample.tsv; no dimensions are ingested.
    String parseSpec = "{"
        + "\"type\" : \"string\","
        + "\"parseSpec\" : {"
        + "    \"format\" : \"tsv\","
        + "    \"timestampSpec\" : {"
        + "        \"column\" : \"timestamp\","
        + "        \"format\" : \"auto\""
        + "},"
        + "    \"dimensionsSpec\" : {"
        + "        \"dimensions\": [],"
        + "        \"dimensionExclusions\" : [],"
        + "        \"spatialDimensions\" : []"
        + "    },"
        + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
        + "  }"
        + "}";

    // groupBy query re-aggregating the sketch plus a cardinality post-aggregator.
    String query = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"postAggregations\": ["
        + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";

    Sequence resultSequence = groupByHelper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
        parseSpec, metricSpec, 0, Granularities.NONE, 50000, query);

    MapBasedRow firstRow = (MapBasedRow) Sequences.toList(resultSequence, Lists.newArrayList()).get(0);
    // HLL is approximate, hence the 0.1 delta; expected cardinality here is 3.
    Assert.assertEquals(3.0, firstRow.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(3.0, firstRow.getFloatMetric("index_unique_count"), 0.1);
}
Also used : AggregatorsModule(io.druid.jackson.AggregatorsModule) MapBasedRow(io.druid.data.input.MapBasedRow) AggregationTestHelper(io.druid.query.aggregation.AggregationTestHelper) Sequence(io.druid.java.util.common.guava.Sequence) File(java.io.File) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)

Example 5 with AggregationTestHelper

use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.

From class HyperUniquesAggregationTest, method testIngestAndQueryPrecomputedHll.

@Test
public void testIngestAndQueryPrecomputedHll() throws Exception {
    AggregationTestHelper groupByHelper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
        Lists.newArrayList(new AggregatorsModule()), config, tempFolder);

    // Ingest-time metric: the input column already contains serialized HLL
    // sketches, flagged via isInputHyperUnique.
    String metricSpec = "[{"
        + "\"type\": \"hyperUnique\","
        + "\"name\": \"index_hll\","
        + "\"fieldName\": \"preComputedHll\","
        + "\"isInputHyperUnique\": true"
        + "}]";

    // TSV parse spec for druid.hll.sample.tsv; no dimensions are ingested.
    String parseSpec = "{"
        + "\"type\" : \"string\","
        + "\"parseSpec\" : {"
        + "    \"format\" : \"tsv\","
        + "    \"timestampSpec\" : {"
        + "        \"column\" : \"timestamp\","
        + "        \"format\" : \"auto\""
        + "},"
        + "    \"dimensionsSpec\" : {"
        + "        \"dimensions\": [],"
        + "        \"dimensionExclusions\" : [],"
        + "        \"spatialDimensions\" : []"
        + "    },"
        + "    \"columns\": [\"timestamp\", \"market\", \"preComputedHll\"]"
        + "  }"
        + "}";

    // groupBy query re-aggregating the sketch plus a cardinality post-aggregator.
    String query = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"postAggregations\": ["
        + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";

    Sequence resultSequence = groupByHelper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.hll.sample.tsv").getFile()),
        parseSpec, metricSpec, 0, Granularities.DAY, 50000, query);

    MapBasedRow firstRow = (MapBasedRow) Sequences.toList(resultSequence, Lists.newArrayList()).get(0);
    // HLL is approximate, hence the 0.1 delta; expected cardinality here is 4.
    Assert.assertEquals(4.0, firstRow.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(4.0, firstRow.getFloatMetric("index_unique_count"), 0.1);
}
Also used : AggregatorsModule(io.druid.jackson.AggregatorsModule) MapBasedRow(io.druid.data.input.MapBasedRow) AggregationTestHelper(io.druid.query.aggregation.AggregationTestHelper) Sequence(io.druid.java.util.common.guava.Sequence) File(java.io.File) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)

Aggregations

AggregationTestHelper (io.druid.query.aggregation.AggregationTestHelper)8 Sequence (io.druid.java.util.common.guava.Sequence)7 GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)7 Test (org.junit.Test)7 MapBasedRow (io.druid.data.input.MapBasedRow)4 File (java.io.File)4 AggregatorsModule (io.druid.jackson.AggregatorsModule)3 Result (io.druid.query.Result)3 DateTime (org.joda.time.DateTime)3 Row (io.druid.data.input.Row)1 SelectResultValue (io.druid.query.select.SelectResultValue)1 TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue)1 DimensionAndMetricValueExtractor (io.druid.query.topn.DimensionAndMetricValueExtractor)1 TopNResultValue (io.druid.query.topn.TopNResultValue)1 Before (org.junit.Before)1