Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
In class HllSketchAggregatorTest, method roundMergeSketch:
@Test
public void roundMergeSketch() throws Exception {
  Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("hll/hll_sketches.tsv").getFile()),
      buildParserJson(Arrays.asList("dim", "multiDim"), Arrays.asList("timestamp", "dim", "multiDim", "sketch")),
      buildAggregatorJson("HLLSketchMerge", "sketch", ROUND),
      // minTimestamp
      0,
      Granularities.NONE,
      // maxRowCount
      200,
      buildGroupByQueryJson("HLLSketchMerge", "sketch", ROUND)
  );
  List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());
  ResultRow row = results.get(0);
  Assert.assertEquals(200L, (long) row.get(0));
}
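The buildAggregatorJson helper is not shown in these snippets. As a rough illustration only, it is presumed to assemble a DataSketches HLL aggregator spec along the lines below; this is a minimal sketch based on Druid's documented HLLSketchMerge/HLLSketchBuild aggregator options, and the actual test fixture may set additional fields.

// Hypothetical sketch of a helper like buildAggregatorJson (not the actual test code):
// returns a one-element aggregator list; the "round" flag controls whether the
// HLL estimate is rounded to a whole number.
private static String buildAggregatorJson(String aggregatorType, String fieldName, boolean round)
{
  return "[{"
      + "\"type\": \"" + aggregatorType + "\", "
      + "\"name\": \"sketch\", "
      + "\"fieldName\": \"" + fieldName + "\", "
      + "\"round\": " + round
      + "}]";
}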
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
In class HllSketchAggregatorTest, method buildSketchesAtQueryTimeMultiValue:
@Test
public void buildSketchesAtQueryTimeMultiValue() throws Exception {
  Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("hll/hll_raw.tsv").getFile()),
      buildParserJson(Arrays.asList("dim", "multiDim", "id"), Arrays.asList("timestamp", "dim", "multiDim", "id")),
      // no ingestion-time aggregators; sketches are built at query time
      "[]",
      // minTimestamp
      0,
      Granularities.NONE,
      // maxRowCount
      200,
      buildGroupByQueryJson("HLLSketchBuild", "multiDim", !ROUND)
  );
  List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());
  ResultRow row = results.get(0);
  Assert.assertEquals(14, (double) row.get(0), 0.1);
}
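Similarly, buildGroupByQueryJson is presumed to wrap the same aggregator spec in a groupBy query over the whole segment. A minimal sketch under that assumption follows; the data source name and interval are placeholders, not values taken from the actual test.

// Hypothetical sketch of a helper like buildGroupByQueryJson (not the actual test code):
// a groupBy query with no dimensions, so the single result row carries only the
// aggregated HLL estimate read back via row.get(0).
private static String buildGroupByQueryJson(String aggregatorType, String fieldName, boolean round)
{
  return "{"
      + "\"queryType\": \"groupBy\", "
      + "\"dataSource\": \"test_datasource\", "
      + "\"granularity\": \"ALL\", "
      + "\"dimensions\": [], "
      + "\"aggregations\": [{\"type\": \"" + aggregatorType + "\", \"name\": \"sketch\", "
      + "\"fieldName\": \"" + fieldName + "\", \"round\": " + round + "}], "
      + "\"intervals\": [\"2017-01-01T00:00:00.000Z/2017-01-31T00:00:00.000Z\"]"
      + "}";
}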
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
In class HllSketchAggregatorTest, method ingestSketches:
@Test
public void ingestSketches() throws Exception {
  Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("hll/hll_sketches.tsv").getFile()),
      buildParserJson(Arrays.asList("dim", "multiDim"), Arrays.asList("timestamp", "dim", "multiDim", "sketch")),
      buildAggregatorJson("HLLSketchMerge", "sketch", !ROUND),
      // minTimestamp
      0,
      Granularities.NONE,
      // maxRowCount
      200,
      buildGroupByQueryJson("HLLSketchMerge", "sketch", !ROUND)
  );
  List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());
  ResultRow row = results.get(0);
  Assert.assertEquals(200, (double) row.get(0), 0.1);
}
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
In class HllSketchAggregatorTest, method roundBuildSketch:
@Test
public void roundBuildSketch() throws Exception {
  Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("hll/hll_raw.tsv").getFile()),
      buildParserJson(Arrays.asList("dim", "multiDim", "id"), Arrays.asList("timestamp", "dim", "multiDim", "id")),
      // no ingestion-time aggregators; sketches are built at query time
      "[]",
      // minTimestamp
      0,
      Granularities.NONE,
      // maxRowCount
      200,
      buildGroupByQueryJson("HLLSketchBuild", "id", ROUND)
  );
  List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());
  ResultRow row = results.get(0);
  Assert.assertEquals(200L, (long) row.get(0));
}
Use of org.apache.druid.query.groupby.ResultRow in project druid by druid-io.
In class HllSketchAggregatorTest, method buildSketchesAtIngestionTime:
@Test
public void buildSketchesAtIngestionTime() throws Exception {
  Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
      new File(this.getClass().getClassLoader().getResource("hll/hll_raw.tsv").getFile()),
      buildParserJson(Collections.singletonList("dim"), Arrays.asList("timestamp", "dim", "multiDim", "id")),
      buildAggregatorJson("HLLSketchBuild", "id", !ROUND),
      // minTimestamp
      0,
      Granularities.NONE,
      // maxRowCount
      200,
      buildGroupByQueryJson("HLLSketchMerge", "sketch", !ROUND)
  );
  List<ResultRow> results = seq.toList();
  Assert.assertEquals(1, results.size());
  ResultRow row = results.get(0);
  Assert.assertEquals(200, (double) row.get(0), 0.1);
}
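Across these tests, the round flag also determines how the estimate is read back: rounded estimates are asserted as longs, unrounded ones as doubles with a small delta. The ROUND constant below is an assumed fixture of the test class, shown only to make that pattern explicit.

// Assumed test fixture: a boolean toggled per test via ROUND and !ROUND.
private static final boolean ROUND = true;

// With round = true the estimate is a whole number, so the tests cast to long:
//   Assert.assertEquals(200L, (long) row.get(0));
// With round = false the estimate is a raw double, so the tests allow a 0.1 delta:
//   Assert.assertEquals(200, (double) row.get(0), 0.1);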