
Example 1 with AggregatorsModule

Use of io.druid.jackson.AggregatorsModule in project druid by druid-io.

From the class HyperUniquesAggregationTest, method testIngestAndQuery.

@Test
public void testIngestAndQuery() throws Exception {
    AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
        Lists.newArrayList(new AggregatorsModule()), config, tempFolder);

    // Ingest-time metric: build an HLL sketch ("index_hll") from the raw "market" column.
    String metricSpec = "[{"
        + "\"type\": \"hyperUnique\","
        + "\"name\": \"index_hll\","
        + "\"fieldName\": \"market\""
        + "}]";

    // TSV parse spec: no dimensions, columns as laid out in druid.sample.tsv.
    String parseSpec = "{"
        + "\"type\" : \"string\","
        + "\"parseSpec\" : {"
        + "    \"format\" : \"tsv\","
        + "    \"timestampSpec\" : {"
        + "        \"column\" : \"timestamp\","
        + "        \"format\" : \"auto\""
        + "},"
        + "    \"dimensionsSpec\" : {"
        + "        \"dimensions\": [],"
        + "        \"dimensionExclusions\" : [],"
        + "        \"spatialDimensions\" : []"
        + "    },"
        + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
        + "  }"
        + "}";

    // Query time: merge the ingested sketches, then extract the cardinality
    // estimate with a hyperUniqueCardinality post-aggregator.
    String query = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"postAggregations\": ["
        + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";

    Sequence seq = helper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
        parseSpec, metricSpec, 0, Granularities.NONE, 50000, query);

    // The HLL estimate is approximate, hence the 0.1 delta on the expected cardinality of 3.
    MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
    Assert.assertEquals(3.0, row.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(3.0, row.getFloatMetric("index_unique_count"), 0.1);
}
Also used: AggregatorsModule (io.druid.jackson.AggregatorsModule), MapBasedRow (io.druid.data.input.MapBasedRow), AggregationTestHelper (io.druid.query.aggregation.AggregationTestHelper), Sequence (io.druid.java.util.common.guava.Sequence), File (java.io.File), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
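
What AggregatorsModule contributes to these tests is Jackson wiring: it registers the "hyperUnique" and "hyperUniqueCardinality" subtypes that let the JSON specs above deserialize into Druid's aggregator classes. Below is a minimal sketch of that round trip, assuming a plain ObjectMapper is sufficient; the class name AggregatorsModuleSketch is ours, not from the project.

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.AggregatorsModule;
import io.druid.query.aggregation.AggregatorFactory;

public class AggregatorsModuleSketch {
    public static void main(String[] args) throws Exception {
        // Registering the module adds the "hyperUnique" subtype, so the
        // metricSpec JSON used in the test can round-trip through Jackson.
        ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(new AggregatorsModule());

        AggregatorFactory[] factories = mapper.readValue(
            "[{\"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"market\"}]",
            AggregatorFactory[].class);
        System.out.println(factories[0].getName()); // expected: "index_hll"
    }
}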

Example 2 with AggregatorsModule

Use of io.druid.jackson.AggregatorsModule in project druid by druid-io.

From the class HyperUniquesAggregationTest, method testIngestAndQueryPrecomputedHll.

@Test
public void testIngestAndQueryPrecomputedHll() throws Exception {
    AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
        Lists.newArrayList(new AggregatorsModule()), config, tempFolder);

    // "isInputHyperUnique": true marks the input column as already containing
    // serialized HLL sketches, so values are merged rather than hashed.
    String metricSpec = "[{"
        + "\"type\": \"hyperUnique\","
        + "\"name\": \"index_hll\","
        + "\"fieldName\": \"preComputedHll\","
        + "\"isInputHyperUnique\": true"
        + "}]";

    String parseSpec = "{"
        + "\"type\" : \"string\","
        + "\"parseSpec\" : {"
        + "    \"format\" : \"tsv\","
        + "    \"timestampSpec\" : {"
        + "        \"column\" : \"timestamp\","
        + "        \"format\" : \"auto\""
        + "},"
        + "    \"dimensionsSpec\" : {"
        + "        \"dimensions\": [],"
        + "        \"dimensionExclusions\" : [],"
        + "        \"spatialDimensions\" : []"
        + "    },"
        + "    \"columns\": [\"timestamp\", \"market\", \"preComputedHll\"]"
        + "  }"
        + "}";

    // The query side is identical to Example 1: merge the sketches, then
    // extract the estimate with a hyperUniqueCardinality post-aggregator.
    String query = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"hyperUnique\", \"name\": \"index_hll\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"postAggregations\": ["
        + "  { \"type\": \"hyperUniqueCardinality\", \"name\": \"index_unique_count\", \"fieldName\": \"index_hll\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";

    // Note the DAY segment granularity here, versus NONE in Example 1.
    Sequence seq = helper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.hll.sample.tsv").getFile()),
        parseSpec, metricSpec, 0, Granularities.DAY, 50000, query);

    MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
    Assert.assertEquals(4.0, row.getFloatMetric("index_hll"), 0.1);
    Assert.assertEquals(4.0, row.getFloatMetric("index_unique_count"), 0.1);
}
Also used: AggregatorsModule (io.druid.jackson.AggregatorsModule), MapBasedRow (io.druid.data.input.MapBasedRow), AggregationTestHelper (io.druid.query.aggregation.AggregationTestHelper), Sequence (io.druid.java.util.common.guava.Sequence), File (java.io.File), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
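
Compared with Example 1, the differences are the isInputHyperUnique flag, which tells the hyperUnique aggregator that preComputedHll already holds sketches to merge rather than raw values to hash, and the DAY segment granularity. As a hedged illustration of where such a column could come from, the sketch below builds one collector with Druid's HyperLogLogCollector; the murmur3_128 hash and Base64 encoding are our assumptions about how druid.hll.sample.tsv was produced, not something the test shows.

import com.google.common.hash.Hashing;
import io.druid.hll.HyperLogLogCollector;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;

public class PrecomputedHllSketch {
    public static void main(String[] args) {
        // Hash each raw value with murmur3_128 (the hash family Druid's
        // hyperUnique path uses) and fold it into one HLL collector.
        HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
        for (String market : Arrays.asList("spot", "total_market", "upfront")) {
            collector.add(Hashing.murmur3_128().hashString(market, StandardCharsets.UTF_8).asBytes());
        }
        // Assumption: the sample file stores each sketch Base64-encoded.
        System.out.println(Base64.getEncoder().encodeToString(collector.toByteArray()));
    }
}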

Example 3 with AggregatorsModule

Use of io.druid.jackson.AggregatorsModule in project druid by druid-io.

From the class FinalizingFieldAccessPostAggregatorTest, method testIngestAndQueryWithArithmeticPostAggregator.

@Test
public void testIngestAndQueryWithArithmeticPostAggregator() throws Exception {
    AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
        Lists.newArrayList(new AggregatorsModule()),
        GroupByQueryRunnerTest.testConfigs().get(0),
        tempFoler);

    // Two independent sketches at ingest time: one over "market", one over "quality".
    String metricSpec = "[{\"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"market\"},"
        + "{\"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"quality\"}]";

    String parseSpec = "{"
        + "\"type\" : \"string\","
        + "\"parseSpec\" : {"
        + "    \"format\" : \"tsv\","
        + "    \"timestampSpec\" : {"
        + "        \"column\" : \"timestamp\","
        + "        \"format\" : \"auto\""
        + "},"
        + "    \"dimensionsSpec\" : {"
        + "        \"dimensions\": [],"
        + "        \"dimensionExclusions\" : [],"
        + "        \"spatialDimensions\" : []"
        + "    },"
        + "    \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
        + "  }"
        + "}";

    // finalizingFieldAccess turns each sketch into its numeric estimate before
    // the arithmetic post-aggregator adds the two estimates together.
    String query = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"hyperUnique\", \"name\": \"hll_market\", \"fieldName\": \"hll_market\" },"
        + "  { \"type\": \"hyperUnique\", \"name\": \"hll_quality\", \"fieldName\": \"hll_quality\" }"
        + "],"
        + "\"postAggregations\": ["
        + "  { \"type\": \"arithmetic\", \"name\": \"uniq_add\", \"fn\": \"+\", \"fields\":["
        + "    { \"type\": \"finalizingFieldAccess\", \"name\": \"uniq_market\", \"fieldName\": \"hll_market\" },"
        + "    { \"type\": \"finalizingFieldAccess\", \"name\": \"uniq_quality\", \"fieldName\": \"hll_quality\" }]"
        + "  }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";

    Sequence seq = helper.createIndexAndRunQueryOnSegment(
        new File(this.getClass().getClassLoader().getResource("druid.sample.tsv").getFile()),
        parseSpec, metricSpec, 0, Granularities.NONE, 50000, query);

    // uniq_add is the sum of the two finalized estimates: 3 markets + 9 qualities.
    MapBasedRow row = (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
    Assert.assertEquals(3.0, row.getFloatMetric("hll_market"), 0.1);
    Assert.assertEquals(9.0, row.getFloatMetric("hll_quality"), 0.1);
    Assert.assertEquals(12.0, row.getFloatMetric("uniq_add"), 0.1);
}
Also used: AggregatorsModule (io.druid.jackson.AggregatorsModule), MapBasedRow (io.druid.data.input.MapBasedRow), AggregationTestHelper (io.druid.query.aggregation.AggregationTestHelper), Sequence (io.druid.java.util.common.guava.Sequence), File (java.io.File), Test (org.junit.Test), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest)
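
The reason for finalizingFieldAccess rather than a plain fieldAccess: the arithmetic post-aggregator needs numbers, and a plain field access would hand it the raw, unfinalized HLL object. Finalizing each sketch to its cardinality estimate first is why 3.0 + 9.0 sums to the asserted 12.0. AggregatorsModule registers these post-aggregator subtypes as well; below is a minimal sketch of that deserialization, under the same plain-ObjectMapper assumption as the sketch after Example 1.

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.AggregatorsModule;
import io.druid.query.aggregation.PostAggregator;

public class PostAggregatorSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(new AggregatorsModule());

        // "finalizingFieldAccess" is one of the PostAggregator subtypes the
        // module registers, alongside "arithmetic" and "hyperUniqueCardinality".
        PostAggregator postAgg = mapper.readValue(
            "{\"type\": \"finalizingFieldAccess\", \"name\": \"uniq_market\", \"fieldName\": \"hll_market\"}",
            PostAggregator.class);
        System.out.println(postAgg.getName()); // expected: "uniq_market"
    }
}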

Aggregations

MapBasedRow (io.druid.data.input.MapBasedRow): 3 usages
AggregatorsModule (io.druid.jackson.AggregatorsModule): 3 usages
Sequence (io.druid.java.util.common.guava.Sequence): 3 usages
AggregationTestHelper (io.druid.query.aggregation.AggregationTestHelper): 3 usages
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest): 3 usages
File (java.io.File): 3 usages
Test (org.junit.Test): 3 usages