Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io, from the class ArrayOfDoublesSketchAggregationTest, method buildingSketchesAtQueryTimeUsingNumericalTimeseries:
@Test
public void buildingSketchesAtQueryTimeUsingNumericalTimeseries() throws Exception {
Sequence<Result<TimeseriesResultValue>> seq = tsHelper.createIndexAndRunQueryOnSegment(
    new File(this.getClass().getClassLoader().getResource("tuple/array_of_doubles_build_data.tsv").getFile()),
    String.join(
        "\n",
        "{",
        "  \"type\": \"string\",",
        "  \"parseSpec\": {",
        "    \"format\": \"tsv\",",
        "    \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},",
        "    \"dimensionsSpec\": {",
        "      \"dimensions\": [\"product\", \"key\", {\"type\": \"long\", \"name\": \"key_num\"}],",
        "      \"dimensionExclusions\": [],",
        "      \"spatialDimensions\": []",
        "    },",
        "    \"columns\": [\"timestamp\", \"product\", \"key\", \"key_num\", \"value\"]",
        "  }",
        "}"
    ),
    String.join(
        "\n",
        "[",
        "  {\"type\": \"doubleSum\", \"name\": \"value\", \"fieldName\": \"value\"}",
        "]"
    ),
    // minTimestamp
    0,
    Granularities.NONE,
    // maxRowCount
    40,
    String.join(
        "\n",
        "{",
        "  \"queryType\": \"timeseries\",",
        "  \"dataSource\": \"test_datasource\",",
        "  \"granularity\": \"ALL\",",
        "  \"aggregations\": [",
        "    {\"type\": \"arrayOfDoublesSketch\", \"name\": \"sketch\", \"fieldName\": \"key_num\", \"metricColumns\": [\"value\"], \"nominalEntries\": 1024},",
        "    {\"type\": \"count\", \"name\": \"cnt\"}",
        "  ],",
        "  \"postAggregations\": [",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"estimate\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},",
        "    {\"type\": \"arrayOfDoublesSketchToQuantilesSketch\", \"name\": \"quantiles-sketch\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"union\", \"field\": {",
        "      \"type\": \"arrayOfDoublesSketchSetOp\",",
        "      \"name\": \"union\",",
        "      \"operation\": \"UNION\",",
        "      \"nominalEntries\": 1024,",
        "      \"fields\": [{\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}, {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}]",
        "    }},",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"intersection\", \"field\": {",
        "      \"type\": \"arrayOfDoublesSketchSetOp\",",
        "      \"name\": \"intersection\",",
        "      \"operation\": \"INTERSECT\",",
        "      \"nominalEntries\": 1024,",
        "      \"fields\": [{\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}, {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}]",
        "    }},",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"anotb\", \"field\": {",
        "      \"type\": \"arrayOfDoublesSketchSetOp\",",
        "      \"name\": \"anotb\",",
        "      \"operation\": \"NOT\",",
        "      \"nominalEntries\": 1024,",
        "      \"fields\": [{\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}, {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}]",
        "    }}",
        "  ],",
        "  \"intervals\": [\"2015-01-01T00:00:00.000Z/2015-01-31T00:00:00.000Z\"]",
        "}"
    )
);
List<Result<TimeseriesResultValue>> results = seq.toList();
Assert.assertEquals(1, results.size());
TimeseriesResultValue row = results.get(0).getValue();
Assert.assertEquals("cnt", 40.0, row.getDoubleMetric("cnt"), 0);
Assert.assertEquals("sketch", 40.0, row.getDoubleMetric("sketch"), 0);
Assert.assertEquals("estimate", 40.0, row.getDoubleMetric("estimate"), 0);
Assert.assertEquals("union", 40.0, row.getDoubleMetric("union"), 0);
Assert.assertEquals("intersection", 40.0, row.getDoubleMetric("intersection"), 0);
Assert.assertEquals("anotb", 0, row.getDoubleMetric("anotb"), 0);
// quantiles-sketch
Object obj = row.getMetric("quantiles-sketch");
Assert.assertTrue(obj instanceof DoublesSketch);
DoublesSketch ds = (DoublesSketch) obj;
Assert.assertEquals(40, ds.getN());
Assert.assertEquals(1.0, ds.getMinValue(), 0);
Assert.assertEquals(1.0, ds.getMaxValue(), 0);
}
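The last three assertions read the "quantiles-sketch" post-aggregation back as a DataSketches DoublesSketch. As a minimal, self-contained sketch of that API (assuming the classic org.apache.datasketches.quantiles package implied by the cast above; the class name below is illustrative), the same n/min/max figures can be reproduced outside of Druid:

import org.apache.datasketches.quantiles.DoublesSketch;
import org.apache.datasketches.quantiles.UpdateDoublesSketch;

public class QuantilesSketchDemo
{
  public static void main(String[] args)
  {
    // Build a quantiles sketch directly and feed it the same shape of data as
    // the test fixture: 40 rows whose metric value is always 1.0.
    UpdateDoublesSketch sketch = DoublesSketch.builder().build();
    for (int i = 0; i < 40; i++) {
      sketch.update(1.0);
    }
    System.out.println(sketch.getN());           // 40
    System.out.println(sketch.getMinValue());    // 1.0
    System.out.println(sketch.getMaxValue());    // 1.0
    System.out.println(sketch.getQuantile(0.5)); // 1.0: every tracked value is 1.0
  }
}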
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io, from the class ArrayOfDoublesSketchAggregationTest, method buildingSketchesAtQueryTimeTimeseries:
@Test
public void buildingSketchesAtQueryTimeTimeseries() throws Exception {
Sequence<Result<TimeseriesResultValue>> seq = tsHelper.createIndexAndRunQueryOnSegment(
    new File(this.getClass().getClassLoader().getResource("tuple/array_of_doubles_build_data.tsv").getFile()),
    String.join(
        "\n",
        "{",
        "  \"type\": \"string\",",
        "  \"parseSpec\": {",
        "    \"format\": \"tsv\",",
        "    \"timestampSpec\": {\"column\": \"timestamp\", \"format\": \"yyyyMMddHH\"},",
        "    \"dimensionsSpec\": {",
        "      \"dimensions\": [\"product\", \"key\", {\"type\": \"long\", \"name\": \"key_num\"}],",
        "      \"dimensionExclusions\": [],",
        "      \"spatialDimensions\": []",
        "    },",
        "    \"columns\": [\"timestamp\", \"product\", \"key\", \"key_num\", \"value\"]",
        "  }",
        "}"
    ),
    String.join(
        "\n",
        "[",
        "  {\"type\": \"doubleSum\", \"name\": \"value\", \"fieldName\": \"value\"}",
        "]"
    ),
    // minTimestamp
    0,
    Granularities.NONE,
    // maxRowCount
    40,
    String.join(
        "\n",
        "{",
        "  \"queryType\": \"timeseries\",",
        "  \"dataSource\": \"test_datasource\",",
        "  \"granularity\": \"ALL\",",
        "  \"aggregations\": [",
        "    {\"type\": \"arrayOfDoublesSketch\", \"name\": \"sketch\", \"fieldName\": \"key\", \"metricColumns\": [\"value\"], \"nominalEntries\": 1024},",
        "    {\"type\": \"count\", \"name\": \"cnt\"}",
        "  ],",
        "  \"postAggregations\": [",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"estimate\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},",
        "    {\"type\": \"arrayOfDoublesSketchToQuantilesSketch\", \"name\": \"quantiles-sketch\", \"field\": {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}},",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"union\", \"field\": {",
        "      \"type\": \"arrayOfDoublesSketchSetOp\",",
        "      \"name\": \"union\",",
        "      \"operation\": \"UNION\",",
        "      \"nominalEntries\": 1024,",
        "      \"fields\": [{\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}, {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}]",
        "    }},",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"intersection\", \"field\": {",
        "      \"type\": \"arrayOfDoublesSketchSetOp\",",
        "      \"name\": \"intersection\",",
        "      \"operation\": \"INTERSECT\",",
        "      \"nominalEntries\": 1024,",
        "      \"fields\": [{\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}, {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}]",
        "    }},",
        "    {\"type\": \"arrayOfDoublesSketchToEstimate\", \"name\": \"anotb\", \"field\": {",
        "      \"type\": \"arrayOfDoublesSketchSetOp\",",
        "      \"name\": \"anotb\",",
        "      \"operation\": \"NOT\",",
        "      \"nominalEntries\": 1024,",
        "      \"fields\": [{\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}, {\"type\": \"fieldAccess\", \"fieldName\": \"sketch\"}]",
        "    }}",
        "  ],",
        "  \"intervals\": [\"2015-01-01T00:00:00.000Z/2015-01-31T00:00:00.000Z\"]",
        "}"
    )
);
List<Result<TimeseriesResultValue>> results = seq.toList();
Assert.assertEquals(1, results.size());
TimeseriesResultValue row = results.get(0).getValue();
Assert.assertEquals("cnt", 40.0, row.getDoubleMetric("cnt"), 0);
Assert.assertEquals("sketch", 40.0, row.getDoubleMetric("sketch"), 0);
Assert.assertEquals("estimate", 40.0, row.getDoubleMetric("estimate"), 0);
Assert.assertEquals("union", 40.0, row.getDoubleMetric("union"), 0);
Assert.assertEquals("intersection", 40.0, row.getDoubleMetric("intersection"), 0);
Assert.assertEquals("anotb", 0, row.getDoubleMetric("anotb"), 0);
// quantiles-sketch
Object obj = row.getMetric("quantiles-sketch");
Assert.assertTrue(obj instanceof DoublesSketch);
DoublesSketch ds = (DoublesSketch) obj;
Assert.assertEquals(40, ds.getN());
Assert.assertEquals(1.0, ds.getMinValue(), 0);
Assert.assertEquals(1.0, ds.getMaxValue(), 0);
}
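Both tests read metrics out of the result row with getDoubleMetric, which coerces whatever Number is stored under that name to a Double. A minimal sketch of that coercion, assuming only the public TimeseriesResultValue and Result constructors (the metric names and values below are illustrative, not taken from the test fixture):

import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.query.Result;
import org.apache.druid.query.timeseries.TimeseriesResultValue;

public class TimeseriesResultValueDemo
{
  public static void main(String[] args)
  {
    // A TimeseriesResultValue is a named map of metric values for one timestamp.
    TimeseriesResultValue value = new TimeseriesResultValue(
        ImmutableMap.<String, Object>of("cnt", 40L, "estimate", 40.0)
    );
    Result<TimeseriesResultValue> result = new Result<>(DateTimes.of("2015-01-01"), value);

    // getDoubleMetric coerces the stored Number, so the long "cnt" reads back as 40.0.
    System.out.println(result.getValue().getDoubleMetric("cnt"));      // 40.0
    System.out.println(result.getValue().getDoubleMetric("estimate")); // 40.0
  }
}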
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io, from the class CachingClusteredClientBenchmark, method timeseriesQuery:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void timeseriesQuery(Blackhole blackhole) {
query = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals(basicSchemaIntervalSpec)
    .aggregators(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"))
    .granularity(Granularity.fromString(queryGranularity))
    .context(ImmutableMap.of(
        QueryContexts.BROKER_PARALLEL_MERGE_KEY, parallelCombine,
        QueryContexts.BROKER_PARALLELISM, parallelism
    ))
    .build();
final List<Result<TimeseriesResultValue>> results = runQuery();
for (Result<TimeseriesResultValue> result : results) {
blackhole.consume(result);
}
}
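The builder chain above is the standard way to assemble a TimeseriesQuery programmatically. A self-contained sketch of the same chain, with illustrative literals standing in for the benchmark's fields (DATA_SOURCE, basicSchemaIntervalSpec, and the broker-parallelism context are all replaced with placeholders):

import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.timeseries.TimeseriesQuery;

public class TimeseriesQueryBuilderDemo
{
  public static void main(String[] args)
  {
    // Same builder shape as the benchmark: data source, interval, one
    // aggregator, a granularity, and an optional query context.
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("example_datasource")
        .intervals("2015-01-01/2015-01-31")
        .aggregators(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"))
        .granularity(Granularities.ALL)
        .context(ImmutableMap.<String, Object>of("timeout", 60_000))
        .build();
    System.out.println(query);
  }
}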
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io, from the class FilteredAggregatorBenchmark, method querySingleQueryableIndex:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleQueryableIndex(Blackhole blackhole, QueryableIndexState state) {
final QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
    factory,
    SegmentId.dummy("qIndex"),
    new QueryableIndexSegment(state.qIndex, SegmentId.dummy("qIndex"))
);
List<Result<TimeseriesResultValue>> results = FilteredAggregatorBenchmark.runQuery(factory, runner, query, vectorize);
for (Result<TimeseriesResultValue> result : results) {
blackhole.consume(result);
}
}
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io, from the class FilteredAggregatorBenchmark, method querySingleIncrementalIndex:
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void querySingleIncrementalIndex(Blackhole blackhole, IncrementalIndexState state) {
QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(
    factory,
    SegmentId.dummy("incIndex"),
    new IncrementalIndexSegment(state.incIndex, SegmentId.dummy("incIndex"))
);
List<Result<TimeseriesResultValue>> results = FilteredAggregatorBenchmark.runQuery(factory, runner, query, vectorize);
for (Result<TimeseriesResultValue> result : results) {
blackhole.consume(result);
}
}
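Both benchmark methods delegate to a static FilteredAggregatorBenchmark.runQuery helper that is not shown in this listing. A hedged sketch of the usual shape of such a helper in Druid's benchmark code follows; the real helper also receives the vectorize flag (presumably to set it in the query context before running), and its exact signature and generics may differ:

import java.util.List;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.query.FinalizeResultsQueryRunner;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryToolChest;
import org.apache.druid.query.context.ResponseContext;

public class RunQuerySketch
{
  // A sketch only, not the actual FilteredAggregatorBenchmark implementation.
  static <T> List<T> runQuery(QueryRunnerFactory<T, Query<T>> factory, QueryRunner<T> runner, Query<T> query)
  {
    QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    // Merge and finalize results the same way a full query stack would.
    QueryRunner<T> theRunner = new FinalizeResultsQueryRunner<>(
        toolChest.mergeResults(runner),
        toolChest
    );
    Sequence<T> results = theRunner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
    return results.toList();
  }
}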