Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io: class MapVirtualColumnGroupByTest, method testWithSubColumn.
@Test
public void testWithSubColumn()
{
  final GroupByQuery query = new GroupByQuery(
      new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
      new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
      VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
      null,
      Granularities.ALL,
      ImmutableList.of(new DefaultDimensionSpec("params.key3", "params.key3")),
      ImmutableList.of(new CountAggregatorFactory("count")),
      null,
      null,
      null,
      null,
      null
  );

  final List<ResultRow> result = runner.run(QueryPlus.wrap(query)).toList();
  final List<ResultRow> expected = ImmutableList
      .of(
          new MapBasedRow(
              DateTimes.of("2011-01-12T00:00:00.000Z"),
              MapVirtualColumnTestBase.mapOf("count", 1L, "params.key3", "value3")
          ),
          new MapBasedRow(
              DateTimes.of("2011-01-12T00:00:00.000Z"),
              MapVirtualColumnTestBase.mapOf("count", 2L)
          )
      )
      .stream()
      .map(row -> ResultRow.fromLegacyRow(row, query))
      .collect(Collectors.toList());

  Assert.assertEquals(expected, result);
}
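The test above uses the positional GroupByQuery constructor. As a point of comparison, here is a minimal sketch of the same query assembled with GroupByQuery.Builder; the builder method names are assumed to match the Druid version these tests target.

// Hedged sketch: equivalent query via GroupByQuery.Builder (method names assumed to
// exist in the Druid version used by these tests).
final GroupByQuery builderQuery = GroupByQuery
    .builder()
    .setDataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
    .setQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))))
    .setVirtualColumns(new MapVirtualColumn("keys", "values", "params"))
    .setGranularity(Granularities.ALL)
    .setDimensions(new DefaultDimensionSpec("params.key3", "params.key3"))
    .setAggregatorSpecs(new CountAggregatorFactory("count"))
    .build();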
Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io: class SketchAggregationTest, method testRetentionDataIngestAndGpByQuery.
@Test
public void testRetentionDataIngestAndGpByQuery() throws Exception
{
  final GroupByQuery groupByQuery = readQueryFromClasspath(
      "retention_test_data_group_by_query.json",
      helper.getObjectMapper(),
      vectorize
  );

  final Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("retention_test_data.tsv").getFile()),
      readFileFromClasspathAsString("simple_test_data_record_parser.json"),
      readFileFromClasspathAsString("simple_test_data_aggregators.json"),
      0,
      Granularities.NONE,
      5,
      groupByQuery
  );

  List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());
  Assert.assertEquals(
      ImmutableList.of(
          new MapBasedRow(
              DateTimes.of("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_1")
                  .put("p1_unique_country_day_1", 20.0)
                  .put("p1_unique_country_day_2", 20.0)
                  .put("p1_unique_country_day_3", 10.0)
                  .put("sketchEstimatePostAgg", 20.0)
                  .put("sketchIntersectionPostAggEstimate1", 10.0)
                  .put("sketchIntersectionPostAggEstimate2", 5.0)
                  .put("non_existing_col_validation", 0.0)
                  .build()
          )
      ).stream().map(row -> ResultRow.fromLegacyRow(row, groupByQuery)).collect(Collectors.toList()),
      results
  );
}
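If map-based assertions are easier to read than positional ones, each ResultRow can also be converted back to a MapBasedRow, as the SketchAggregationWithSimpleDataTest example further down does. A short sketch against the expected row above:

// Sketch: convert the positional ResultRow back to a MapBasedRow for inspection
// (same toMapBasedRow call used in SketchAggregationWithSimpleDataTest below);
// belongs inside the test method above, where results and groupByQuery are in scope.
final MapBasedRow firstRow = results.get(0).toMapBasedRow(groupByQuery);
Assert.assertEquals("product_1", firstRow.getEvent().get("product"));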
Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io: class SketchAggregationTest, method testEmptySketchAggregateCombine.
@Test
public void testEmptySketchAggregateCombine() throws Exception
{
  final GroupByQuery groupByQuery = readQueryFromClasspath(
      "empty_sketch_group_by_query.json",
      helper.getObjectMapper(),
      vectorize
  );

  final Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(SketchAggregationTest.class.getClassLoader().getResource("empty_sketch_data.tsv").getFile()),
      readFileFromClasspathAsString("empty_sketch_data_record_parser.json"),
      readFileFromClasspathAsString("empty_sketch_test_data_aggregators.json"),
      0,
      Granularities.NONE,
      5,
      groupByQuery
  );

  List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());
  Assert.assertEquals(
      ResultRow.fromLegacyRow(
          new MapBasedRow(
              DateTimes.of("2019-07-14T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("product", "product_b")
                  .put("sketch_count", 0.0)
                  .build()
          ),
          groupByQuery
      ),
      results.get(0)
  );
}
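The vectorize argument threaded through readQueryFromClasspath controls how the query is executed. A minimal sketch of the same idea applied to an already-built query, assuming the standard "vectorize" context key and a GroupByQuery-typed withOverriddenContext:

// Hedged sketch: override the query context to disable vectorized execution
// (assumes the standard "vectorize" context key; "false" falls back to the
// non-vectorized engine).
final GroupByQuery nonVectorized =
    groupByQuery.withOverriddenContext(ImmutableMap.of("vectorize", "false"));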
Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io: class SketchAggregationWithSimpleDataTest, method testSimpleDataIngestAndGpByQuery.
@Test
public void testSimpleDataIngestAndGpByQuery() throws Exception
{
  try (final AggregationTestHelper gpByQueryAggregationTestHelper =
           AggregationTestHelper.createGroupByQueryAggregationTestHelper(sm.getJacksonModules(), config, tempFolder)) {
    final GroupByQuery groupByQuery = SketchAggregationTest.readQueryFromClasspath(
        "simple_test_data_group_by_query.json",
        gpByQueryAggregationTestHelper.getObjectMapper(),
        vectorize
    );

    Sequence<ResultRow> seq = gpByQueryAggregationTestHelper.runQueryOnSegments(ImmutableList.of(s1, s2), groupByQuery);
    List<MapBasedRow> results = seq.map(row -> row.toMapBasedRow(groupByQuery)).toList();
    Assert.assertEquals(5, results.size());
    Assert.assertEquals(
        ImmutableList.of(
            new MapBasedRow(
                DateTimes.of("2014-10-19T00:00:00.000Z"),
                ImmutableMap.<String, Object>builder()
                    .put("product", "product_3").put("sketch_count", 38.0)
                    .put("sketchEstimatePostAgg", 38.0).put("sketchUnionPostAggEstimate", 38.0)
                    .put("sketchIntersectionPostAggEstimate", 38.0).put("sketchAnotBPostAggEstimate", 0.0)
                    .put("non_existing_col_validation", 0.0).build()
            ),
            new MapBasedRow(
                DateTimes.of("2014-10-19T00:00:00.000Z"),
                ImmutableMap.<String, Object>builder()
                    .put("product", "product_1").put("sketch_count", 42.0)
                    .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                    .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                    .put("non_existing_col_validation", 0.0).build()
            ),
            new MapBasedRow(
                DateTimes.of("2014-10-19T00:00:00.000Z"),
                ImmutableMap.<String, Object>builder()
                    .put("product", "product_2").put("sketch_count", 42.0)
                    .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                    .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                    .put("non_existing_col_validation", 0.0).build()
            ),
            new MapBasedRow(
                DateTimes.of("2014-10-19T00:00:00.000Z"),
                ImmutableMap.<String, Object>builder()
                    .put("product", "product_4").put("sketch_count", 42.0)
                    .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                    .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                    .put("non_existing_col_validation", 0.0).build()
            ),
            new MapBasedRow(
                DateTimes.of("2014-10-19T00:00:00.000Z"),
                ImmutableMap.<String, Object>builder()
                    .put("product", "product_5").put("sketch_count", 42.0)
                    .put("sketchEstimatePostAgg", 42.0).put("sketchUnionPostAggEstimate", 42.0)
                    .put("sketchIntersectionPostAggEstimate", 42.0).put("sketchAnotBPostAggEstimate", 0.0)
                    .put("non_existing_col_validation", 0.0).build()
            )
        ),
        results
    );
  }
}
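Because the results here are already MapBasedRow instances, individual columns can also be spot-checked straight from the underlying event map. A short sketch using the values asserted above:

// Sketch: spot-check one column of the first expected row (values taken from the
// assertion above); belongs inside the try block, where results is in scope.
final MapBasedRow firstRow = results.get(0);
Assert.assertEquals("product_3", firstRow.getEvent().get("product"));
Assert.assertEquals(38.0, ((Number) firstRow.getEvent().get("sketch_count")).doubleValue(), 0.0);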
Use of org.apache.druid.query.groupby.GroupByQuery in project druid by druid-io: class OldApiSketchAggregationTest, method testSketchDataIngestAndQuery.
@Test
public void testSketchDataIngestAndQuery() throws Exception
{
  final String groupByQueryString = readFileFromClasspathAsString("oldapi/old_sketch_test_data_group_by_query.json");
  final GroupByQuery groupByQuery = (GroupByQuery) helper.getObjectMapper().readValue(groupByQueryString, Query.class);

  final Sequence seq = helper.createIndexAndRunQueryOnSegment(
      new File(OldApiSketchAggregationTest.class.getClassLoader().getResource("sketch_test_data.tsv").getFile()),
      readFileFromClasspathAsString("sketch_test_data_record_parser.json"),
      readFileFromClasspathAsString("oldapi/old_sketch_test_data_aggregators.json"),
      0,
      Granularities.NONE,
      1000,
      groupByQueryString
  );

  List results = seq.toList();
  Assert.assertEquals(1, results.size());
  Assert.assertEquals(
      ResultRow.fromLegacyRow(
          new MapBasedRow(
              DateTimes.of("2014-10-19T00:00:00.000Z"),
              ImmutableMap.<String, Object>builder()
                  .put("sids_sketch_count", 50.0)
                  .put("sketchEstimatePostAgg", 50.0)
                  .put("sketchUnionPostAggEstimate", 50.0)
                  .put("sketchIntersectionPostAggEstimate", 50.0)
                  .put("sketchAnotBPostAggEstimate", 0.0)
                  .put("non_existing_col_validation", 0.0)
                  .build()
          ),
          groupByQuery
      ),
      results.get(0)
  );
}
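Since this query is deserialized from JSON with the helper's ObjectMapper, a Jackson round trip is a quick way to confirm that the query's JSON mapping is lossless. A minimal sketch using standard ObjectMapper calls (com.fasterxml.jackson.databind.ObjectMapper assumed imported; not part of the original test):

// Hedged sketch: serialize the deserialized query back to JSON and read it again,
// then rely on GroupByQuery equality to verify the round trip.
final ObjectMapper mapper = helper.getObjectMapper();
final String reserialized = mapper.writeValueAsString(groupByQuery);
Assert.assertEquals(groupByQuery, mapper.readValue(reserialized, Query.class));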