Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
From the class GroupByTypeInterfaceBenchmark, method querySingleQueryableIndexFloatOnly.
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndexFloatOnly(Blackhole blackhole) {
QueryRunner<ResultRow> runner = QueryBenchmarkUtil.makeQueryRunner(
    factory,
    Q_INDEX_SEGMENT_ID,
    new QueryableIndexSegment(queryableIndexes.get(0), Q_INDEX_SEGMENT_ID)
);
List<ResultRow> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, floatQuery);
for (ResultRow result : results) {
blackhole.consume(result);
}
}
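The static runQuery helper is defined elsewhere in the benchmark class and is not shown on this page. A minimal sketch of how such a helper is typically written against Druid's query-runner API follows; the body here is an assumption, and the project's actual helper may decorate the runner differently.
private static <T> List<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query) {
  // Merge and finalize results through the factory's toolchest, as Druid benchmark helpers usually do.
  QueryToolChest toolChest = factory.getToolchest();
  QueryRunner<T> mergedRunner = new FinalizeResultsQueryRunner<>(toolChest.mergeResults(runner), toolChest);
  // Materialize the lazy Sequence into a List so the caller can iterate the rows.
  Sequence<T> queryResult = mergedRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
  return queryResult.toList();
}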
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
From the class GroupByTypeInterfaceBenchmark, method querySingleQueryableIndexNumericThenString.
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndexNumericThenString(Blackhole blackhole) {
QueryRunner<ResultRow> runner = QueryBenchmarkUtil.makeQueryRunner(
    factory,
    Q_INDEX_SEGMENT_ID,
    new QueryableIndexSegment(queryableIndexes.get(0), Q_INDEX_SEGMENT_ID)
);
List<ResultRow> results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, longFloatQuery);
for (ResultRow result : results) {
blackhole.consume(result);
}
runner = QueryBenchmarkUtil.makeQueryRunner(
    factory,
    Q_INDEX_SEGMENT_ID,
    new QueryableIndexSegment(queryableIndexes.get(0), Q_INDEX_SEGMENT_ID)
);
results = GroupByTypeInterfaceBenchmark.runQuery(factory, runner, stringQuery);
for (ResultRow result : results) {
blackhole.consume(result);
}
}
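Both benchmark methods rely on JMH state initialized in the class's @Setup phase. The outline below is a hypothetical sketch of the fields the snippets above assume; the names match the code, but the declarations and initialization shown here are illustrative, not the project's actual setup.
@State(Scope.Benchmark)
public class GroupByTypeInterfaceBenchmark {
  // Illustrative declarations; the real class populates these from generated segments and benchmark parameters.
  private static final SegmentId Q_INDEX_SEGMENT_ID = SegmentId.dummy("qIndex");
  private GroupByQueryRunnerFactory factory;      // built from a GroupByQueryConfig
  private List<QueryableIndex> queryableIndexes;  // generated test segments
  private GroupByQuery floatQuery;                // groups on a float dimension
  private GroupByQuery stringQuery;               // groups on a string dimension
  private GroupByQuery longFloatQuery;            // groups on a long, then a float dimension
}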
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
From the class TimestampGroupByAggregationTest, method testSimpleDataIngestionAndGroupByTest.
@Test
public void testSimpleDataIngestionAndGroupByTest() throws Exception {
String recordParser = "{\n"
    + " \"type\": \"string\",\n"
    + " \"parseSpec\": {\n"
    + " \"format\": \"tsv\",\n"
    + " \"timestampSpec\": {\n"
    + " \"column\": \"timestamp\",\n"
    + " \"format\": \"auto\"\n"
    + " },\n"
    + " \"dimensionsSpec\": {\n"
    + " \"dimensions\": [\n"
    + " \"product\"\n"
    + " ],\n"
    + " \"dimensionExclusions\": [],\n"
    + " \"spatialDimensions\": []\n"
    + " },\n"
    + " \"columns\": [\n"
    + " \"timestamp\",\n"
    + " \"cat\",\n"
    + " \"product\",\n"
    + " \"prefer\",\n"
    + " \"prefer2\",\n"
    + " \"pty_country\"\n"
    + " ]\n"
    + " }\n"
    + "}";
String aggregator = "[\n"
    + " {\n"
    + " \"type\": \"" + aggType + "\",\n"
    + " \"name\": \"" + aggField + "\",\n"
    + " \"fieldName\": \"timestamp\"\n"
    + " }\n"
    + "]";
String groupBy = "{\n"
    + " \"queryType\": \"groupBy\",\n"
    + " \"dataSource\": \"test_datasource\",\n"
    + " \"granularity\": \"MONTH\",\n"
    + " \"dimensions\": [\"product\"],\n"
    + " \"aggregations\": [\n"
    + " {\n"
    + " \"type\": \"" + aggType + "\",\n"
    + " \"name\": \"" + groupByField + "\",\n"
    + " \"fieldName\": \"" + aggField + "\"\n"
    + " }\n"
    + " ],\n"
    + " \"intervals\": [\n"
    + " \"2011-01-01T00:00:00.000Z/2011-05-01T00:00:00.000Z\"\n"
    + " ]\n"
    + "}";
ZipFile zip = new ZipFile(new File(this.getClass().getClassLoader().getResource("druid.sample.tsv.zip").toURI()));
Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
    zip.getInputStream(zip.getEntry("druid.sample.tsv")),
    recordParser,
    aggregator,
    0,
    Granularities.MONTH,
    100,
    groupBy
);
int groupByFieldNumber = ((GroupByQuery) helper.readQuery(groupBy)).getResultRowSignature().indexOf(groupByField);
List<ResultRow> results = seq.toList();
Assert.assertEquals(36, results.size());
Assert.assertEquals(expected, results.get(0).get(groupByFieldNumber));
}
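The helper field is an AggregationTestHelper. Below is a hedged sketch of how such a helper is commonly constructed for group-by tests; the module list, config, and tempFolder shown here are assumptions, since the test's actual setup is not on this page.
AggregationTestHelper helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
    new TimestampMinMaxModule().getJacksonModules(),  // Jackson modules registering the timestamp aggregators under test
    config,                                           // a GroupByQueryConfig variant (tests are often parameterized over these)
    tempFolder                                        // a JUnit TemporaryFolder rule for segment files
);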
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
From the class MaterializedViewQueryQueryToolChestTest, method testDecorateObjectMapperMaterializedViewQuery.
@Test
public void testDecorateObjectMapperMaterializedViewQuery() throws IOException {
GroupByQuery realQuery = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false))
    .build();
MaterializedViewQuery materializedViewQuery = new MaterializedViewQuery(realQuery, null);
QueryToolChest materializedViewQueryQueryToolChest = new MaterializedViewQueryQueryToolChest(
    new MapQueryToolChestWarehouse(
        ImmutableMap.<Class<? extends Query>, QueryToolChest>builder()
            .put(GroupByQuery.class, new GroupByQueryQueryToolChest(null))
            .build()
    )
);
ObjectMapper objectMapper = materializedViewQueryQueryToolChest.decorateObjectMapper(JSON_MAPPER, materializedViewQuery);
List<ResultRow> results = Arrays.asList(
    GroupByQueryRunnerTestHelper.createExpectedRow(realQuery, "2011-04-01", "alias", "automotive", "rows", 1L, "idx", 135L),
    GroupByQueryRunnerTestHelper.createExpectedRow(realQuery, "2011-04-01", "alias", "business", "rows", 1L, "idx", 118L)
);
List<MapBasedRow> expectedResults = results.stream().map(resultRow -> resultRow.toMapBasedRow(realQuery)).collect(Collectors.toList());
Assert.assertEquals("decorate-object-mapper", JSON_MAPPER.writerFor(new TypeReference<List<MapBasedRow>>() {
}).writeValueAsString(expectedResults), objectMapper.writeValueAsString(results));
}
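The assertion passes because the decorated mapper honors the query context: with CTX_KEY_ARRAY_RESULT_ROWS set to false, each ResultRow is written in the MapBasedRow shape rather than as a compact positional array. An illustrative round-trip, not part of the test:
String json = objectMapper.writeValueAsString(results);
// Each row serializes as a MapBasedRow-style object, {"version":"v1","timestamp":...,"event":{"alias":...,"rows":...,"idx":...}},
// instead of a flat array of [timestamp, dimensions, aggregators] in positional order.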
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
From the class MomentsSketchAggregatorTest, method buildingSketchesAtIngestionTime.
@Test
public void buildingSketchesAtIngestionTime() throws Exception {
Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
    new File(this.getClass().getClassLoader().getResource("doubles_build_data.tsv").getFile()),
    String.join(
        "\n",
        "{",
        " \"type\": \"string\",",
        " \"parseSpec\": {",
        " \"format\": \"tsv\",",
        " \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},",
        " \"dimensionsSpec\": {",
        " \"dimensions\": [\"product\"],",
        " \"dimensionExclusions\": [ \"sequenceNumber\"],",
        " \"spatialDimensions\": []",
        " },",
        " \"columns\": [\"timestamp\", \"sequenceNumber\", \"product\", \"value\", \"valueWithNulls\"]",
        " }",
        "}"
    ),
    "["
        + "{\"type\": \"momentSketch\", \"name\": \"sketch\", \"fieldName\": \"value\", \"k\": 10, \"compress\": true},"
        + "{\"type\": \"momentSketch\", \"name\": \"sketchWithNulls\", \"fieldName\": \"valueWithNulls\", \"k\": 10, \"compress\": true}"
        + "]",
    0, // minTimestamp
    Granularities.NONE,
    10, // maxRowCount
    String.join(
        "\n",
        "{",
        " \"queryType\": \"groupBy\",",
        " \"dataSource\": \"test_datasource\",",
        " \"granularity\": \"ALL\",",
        " \"dimensions\": [],",
        " \"aggregations\": [",
        " {\"type\": \"momentSketchMerge\", \"name\": \"sketch\", \"fieldName\": \"sketch\", \"k\": 10, \"compress\": true},",
        " {\"type\": \"momentSketchMerge\", \"name\": \"sketchWithNulls\", \"fieldName\": \"sketchWithNulls\", \"k\": 10, \"compress\": true}",
        " ],",
        " \"postAggregations\": [",
        " {\"type\": \"momentSketchSolveQuantiles\", \"name\": \"quantiles\", \"fractions\": [0, 0.5, 1], \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},",
        " {\"type\": \"momentSketchMin\", \"name\": \"min\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},",
        " {\"type\": \"momentSketchMax\", \"name\": \"max\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},",
        " {\"type\": \"momentSketchSolveQuantiles\", \"name\": \"quantilesWithNulls\", \"fractions\": [0, 0.5, 1], \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketchWithNulls\"}},",
        " {\"type\": \"momentSketchMin\", \"name\": \"minWithNulls\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketchWithNulls\"}},",
        " {\"type\": \"momentSketchMax\", \"name\": \"maxWithNulls\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketchWithNulls\"}}",
        " ],",
        " \"intervals\": [\"2016-01-01T00:00:00.000Z/2016-01-31T00:00:00.000Z\"]",
        "}"
    )
);
List<ResultRow> results = seq.toList();
Assert.assertEquals(1, results.size());
ResultRow row = results.get(0);
// "sketch"
MomentSketchWrapper sketchObject = (MomentSketchWrapper) row.get(0);
// 400 total products since this is pre-rollup
Assert.assertEquals(400.0, sketchObject.getPowerSums()[0], 1e-10);
// "sketchWithNulls"
MomentSketchWrapper sketchObjectWithNulls = (MomentSketchWrapper) row.get(1);
// 23 null values (377 when nulls are not replaced with default)
Assert.assertEquals(NullHandling.replaceWithDefault() ? 400.0 : 377.0, sketchObjectWithNulls.getPowerSums()[0], 1e-10);
// "quantiles"
double[] quantilesArray = (double[]) row.get(2);
Assert.assertEquals(0, quantilesArray[0], 0.05);
Assert.assertEquals(.5, quantilesArray[1], 0.05);
Assert.assertEquals(1.0, quantilesArray[2], 0.05);
// "min"
Double minValue = (Double) row.get(3);
Assert.assertEquals(0.0011, minValue, 0.0001);
// "max"
Double maxValue = (Double) row.get(4);
Assert.assertEquals(0.9969, maxValue, 0.0001);
// "quantilesWithNulls"
double[] quantilesArrayWithNulls = (double[]) row.get(5);
Assert.assertEquals(NullHandling.replaceWithDefault() ? 0.0 : 5.0, quantilesArrayWithNulls[0], 0.05);
Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.721400294818661d : 7.57, quantilesArrayWithNulls[1], 0.05);
Assert.assertEquals(10.0, quantilesArrayWithNulls[2], 0.05);
// "minWithNulls"
Double minValueWithNulls = (Double) row.get(6);
Assert.assertEquals(NullHandling.replaceWithDefault() ? 0.0 : 5.0164, minValueWithNulls, 0.0001);
// "maxWithNulls"
Double maxValueWithNulls = (Double) row.get(7);
Assert.assertEquals(9.9788, maxValueWithNulls, 0.0001);
}
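The positional indices above work because the query uses granularity ALL with no dimensions, so the result row signature begins with the two aggregators, followed by the six post-aggregators in declaration order. An equivalent name-based lookup, illustrative only, with groupByJson standing in for the query string built above:
GroupByQuery query = (GroupByQuery) helper.readQuery(groupByJson);
int sketchPosition = query.getResultRowSignature().indexOf("sketch");  // resolves to 0 for this query
MomentSketchWrapper sketch = (MomentSketchWrapper) results.get(0).get(sketchPosition);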