Use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.
From the class SketchAggregationTestWithSimpleData, method testSimpleDataIngestAndGpByQuery.
@Test
public void testSimpleDataIngestAndGpByQuery() throws Exception
{
  AggregationTestHelper gpByQueryAggregationTestHelper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
      sm.getJacksonModules(),
      config,
      tempFolder
  );

  Sequence seq = gpByQueryAggregationTestHelper.runQueryOnSegments(
      ImmutableList.of(s1, s2),
      readFileFromClasspathAsString("simple_test_data_group_by_query.json")
  );

  List<Row> results = Sequences.toList(seq, Lists.<Row>newArrayList());
  Assert.assertEquals(5, results.size());

  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedRow(
              DateTime.parse("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_3").put("sketch_count", 38.0)
                  .put("sketchEstimatePostAgg", 38.0).put("sketchUnionPostAggEstimate", 38.0)
                  .put("sketchIntersectionPostAggEstimate", 38.0).put("sketchAnotBPostAggEstimate", 0.0)
                  .put("non_existing_col_validation", 0.0).build()
          ),
          new MapBasedRow(
              DateTime.parse("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_1").put("sketch_count", 42.0)
                  .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                  .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                  .put("non_existing_col_validation", 0.0).build()
          ),
          new MapBasedRow(
              DateTime.parse("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_2").put("sketch_count", 42.0)
                  .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                  .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                  .put("non_existing_col_validation", 0.0).build()
          ),
          new MapBasedRow(
              DateTime.parse("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_4").put("sketch_count", 42.0)
                  .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                  .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                  .put("non_existing_col_validation", 0.0).build()
          ),
          new MapBasedRow(
              DateTime.parse("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_5").put("sketch_count", 42.0)
                  .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                  .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                  .put("non_existing_col_validation", 0.0).build()
          )
      ),
      results
  );
}
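The method above refers to several fixtures (sm, config, tempFolder, s1 and s2) that live on the test class rather than in the method itself. The excerpt does not show that setup, and the field types below are assumptions inferred from the calls above (the real SketchAggregationTestWithSimpleData in Druid may declare them differently); a minimal sketch of such fixtures could look like:

  // Sketch of the class-level fixtures assumed by the method above; names match the excerpt,
  // types and setup are guesses, not the authoritative Druid test code.
  @Rule
  public final TemporaryFolder tempFolder = new TemporaryFolder();  // scratch space for segments

  private SketchModule sm;            // registers the sketch aggregator Jackson modules
  private GroupByQueryConfig config;  // groupBy engine configuration handed to the helper
  private File s1;                    // first pre-built segment directory
  private File s2;                    // second pre-built segment directory

  @Before
  public void setUp()
  {
    sm = new SketchModule();
    sm.configure(null);               // same initialization as in the select-query test below
    config = new GroupByQueryConfig();
    // s1 and s2 are presumably produced here by ingesting the simple test data
    // through AggregationTestHelper; that code is not part of the excerpt.
  }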
Use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.
From the class SketchAggregationTestWithSimpleData, method testSimpleDataIngestAndSelectQuery.
@Test
public void testSimpleDataIngestAndSelectQuery() throws Exception
{
  SketchModule sm = new SketchModule();
  sm.configure(null);
  AggregationTestHelper selectQueryAggregationTestHelper = AggregationTestHelper.createSelectQueryAggregationTestHelper(
      sm.getJacksonModules(),
      tempFolder
  );

  Sequence seq = selectQueryAggregationTestHelper.runQueryOnSegments(
      ImmutableList.of(s1, s2),
      readFileFromClasspathAsString("select_query.json")
  );

  Result<SelectResultValue> result = (Result<SelectResultValue>) Iterables.getOnlyElement(
      Sequences.toList(seq, Lists.newArrayList())
  );
  Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp());
  Assert.assertEquals(100, result.getValue().getEvents().size());
  Assert.assertEquals(
      "AgMDAAAazJMCAAAAAACAPzz9j7pWTMdROWGf15uY1nI=",
      result.getValue().getEvents().get(0).getEvent().get("pty_country")
  );
}
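readFileFromClasspathAsString is a small utility of the test class, not part of AggregationTestHelper, and its body is not shown in the excerpt. One plausible implementation, assuming Guava is on the classpath (it is already used here via ImmutableList and Lists), would be:

  // Hypothetical implementation of the utility used above; the real method in the Druid test
  // class may differ, but it only needs to load a classpath resource as text.
  // Requires com.google.common.io.Resources and com.google.common.base.Charsets.
  private static String readFileFromClasspathAsString(String fileName) throws IOException
  {
    return Resources.toString(Resources.getResource(fileName), Charsets.UTF_8);
  }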
Use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.
From the class SketchAggregationTestWithSimpleData, method testSimpleDataIngestAndTopNQuery.
@Test
public void testSimpleDataIngestAndTopNQuery() throws Exception
{
  AggregationTestHelper topNQueryAggregationTestHelper = AggregationTestHelper.createTopNQueryAggregationTestHelper(
      sm.getJacksonModules(),
      tempFolder
  );

  Sequence seq = topNQueryAggregationTestHelper.runQueryOnSegments(
      ImmutableList.of(s1, s2),
      readFileFromClasspathAsString("topn_query.json")
  );

  Result<TopNResultValue> result = (Result<TopNResultValue>) Iterables.getOnlyElement(
      Sequences.toList(seq, Lists.newArrayList())
  );
  Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp());

  DimensionAndMetricValueExtractor value = Iterables.getOnlyElement(result.getValue().getValue());
  Assert.assertEquals(38.0, value.getDoubleMetric("sketch_count"), 0.01);
  Assert.assertEquals(38.0, value.getDoubleMetric("sketchEstimatePostAgg"), 0.01);
  Assert.assertEquals(38.0, value.getDoubleMetric("sketchUnionPostAggEstimate"), 0.01);
  Assert.assertEquals(38.0, value.getDoubleMetric("sketchIntersectionPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, value.getDoubleMetric("sketchAnotBPostAggEstimate"), 0.01);
  Assert.assertEquals(0.0, value.getDoubleMetric("non_existing_col_validation"), 0.01);
  Assert.assertEquals("product_3", value.getDimensionValue("product"));
}
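Iterables.getOnlyElement on the topN value list implies that topn_query.json (not shown in the excerpt) returns exactly one entry, i.e. presumably a threshold of 1 over the product dimension. A query with a larger threshold would be consumed by iterating the entries instead; an illustrative sketch:

  // Illustrative only: walking a topN result that carries more than one entry per time bucket.
  for (DimensionAndMetricValueExtractor entry : result.getValue().getValue()) {
    Object product = entry.getDimensionValue("product");
    double estimate = entry.getDoubleMetric("sketchEstimatePostAgg");
    // assert or log the per-product sketch estimate here
  }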
Use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.
From the class HyperUniquesAggregationTest, method testIngestAndQuery.
@Test
public void testIngestAndQuery() throws Exception
{
  AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
      Lists.newArrayList(new AggregatorsModule()),
      config,
      tempFolder
  );

  String metricSpec = "[{"
      + "\"type\": \"hyperUnique\","
      + "\"name\": \"index_hll\","
      + "\"fieldName\": \"market\""
      + "}]";

  String parseSpec = "{"
      + "\"type\" : \"string\","
      + "\"parseSpec\" : {"
      + " \"format\" : \"tsv\","
      + " \"timestampSpec\" : {" + " \"column\" : \"timestamp\"," + " \"format\" : \"auto\"" + "},"
      + " \"dimensionsSpec\" : {" + " \"dimensions\": []," + " \"dimensionExclusions\" : []," + " \"spatialDimensions\" : []" + " },"
      + " \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
      + " }"
      + "}";

  String query = "{"
      + "\"queryType\": \"groupBy\","
      + "\"dataSource\": \"test_datasource\","
      + "\"granularity\": \"ALL\","
      + "\"dimensions\": [],"
      + "\"aggregations\": [" + " { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }" + "],"
      + "\"postAggregations\": [" + " { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }" + "],"
      + "\"intervals\": [ \"1970/2050\" ]"
      + "}";

  Sequence seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
      parseSpec,
      metricSpec,
      0,
      Granularities.NONE,
      50000,
      query
  );

  MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
  Assert.assertEquals(3.0, row.getFloatMetric("index_hll"), 0.1);
  Assert.assertEquals(3.0, row.getFloatMetric("index_unique_count"), 0.1);
}
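The positional arguments to createIndexAndRunQueryOnSegment are easy to misread in-line. The annotated call below is just a reading of the call site above; the parameter meanings are inferred, and AggregationTestHelper itself is the authoritative reference:

  // Same call as above, with each argument labelled (inferred meanings).
  Sequence seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),  // raw TSV input to ingest
      parseSpec,           // JSON string: how to parse the timestamp, dimensions and columns
      metricSpec,          // JSON string: aggregators applied at ingest time (here, the hyperUnique metric)
      0,                   // minimum timestamp for the generated segment
      Granularities.NONE,  // rollup granularity used when building the index
      50000,               // max rows held in memory before persisting
      query                // the groupBy query JSON to run against the freshly built segment
  );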
Use of io.druid.query.aggregation.AggregationTestHelper in project druid by druid-io.
From the class HyperUniquesAggregationTest, method testIngestAndQueryPrecomputedHll.
@Test
public void testIngestAndQueryPrecomputedHll() throws Exception
{
  AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
      Lists.newArrayList(new AggregatorsModule()),
      config,
      tempFolder
  );

  String metricSpec = "[{"
      + "\"type\": \"hyperUnique\","
      + "\"name\": \"index_hll\","
      + "\"fieldName\": \"preComputedHll\","
      + "\"isInputHyperUnique\": true"
      + "}]";

  String parseSpec = "{"
      + "\"type\" : \"string\","
      + "\"parseSpec\" : {"
      + " \"format\" : \"tsv\","
      + " \"timestampSpec\" : {" + " \"column\" : \"timestamp\"," + " \"format\" : \"auto\"" + "},"
      + " \"dimensionsSpec\" : {" + " \"dimensions\": []," + " \"dimensionExclusions\" : []," + " \"spatialDimensions\" : []" + " },"
      + " \"columns\": [\"timestamp\", \"market\", \"preComputedHll\"]"
      + " }"
      + "}";

  String query = "{"
      + "\"queryType\": \"groupBy\","
      + "\"dataSource\": \"test_datasource\","
      + "\"granularity\": \"ALL\","
      + "\"dimensions\": [],"
      + "\"aggregations\": [" + " { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }" + "],"
      + "\"postAggregations\": [" + " { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }" + "],"
      + "\"intervals\": [ \"1970/2050\" ]"
      + "}";

  Sequence seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("druid.hll.sample.tsv").getFile()),
      parseSpec,
      metricSpec,
      0,
      Granularities.DAY,
      50000,
      query
  );

  MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
  Assert.assertEquals(4.0, row.getFloatMetric("index_hll"), 0.1);
  Assert.assertEquals(4.0, row.getFloatMetric("index_unique_count"), 0.1);
}
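The key difference from testIngestAndQuery is the ingest-time metric spec: as the test name and the isInputHyperUnique flag suggest, the preComputedHll column already contains serialized HyperLogLog sketches, so the aggregator merges them instead of hashing raw column values. Side by side (both specs copied from the two tests above):

  // testIngestAndQuery: "market" holds raw values that are hashed into a fresh HLL sketch at ingest time.
  String rawInputMetricSpec =
      "[{\"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"market\"}]";

  // testIngestAndQueryPrecomputedHll: "preComputedHll" already holds serialized sketches, so
  // isInputHyperUnique tells the aggregator to merge them rather than hash the column values.
  String precomputedMetricSpec =
      "[{\"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"preComputedHll\", \"isInputHyperUnique\": true}]";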