Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io: class VarianceTimeseriesQueryTest, method testEmptyTimeseries.
@Test
public void testEmptyTimeseries() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .intervals(QueryRunnerTestHelper.EMPTY_INTERVAL)
      .aggregators(
          Arrays.asList(
              QueryRunnerTestHelper.ROWS_COUNT,
              QueryRunnerTestHelper.INDEX_DOUBLE_SUM,
              new VarianceAggregatorFactory("variance", "index", null, null)
          )
      )
      .descending(true)
      .context(BaseCalciteQueryTest.QUERY_CONTEXT_DEFAULT)
      .build();

  // Even over an empty interval, the query is expected to emit one result row
  // containing the aggregators' "empty" values (0 rows, default doubles).
  Map<String, Object> expectedRow = new HashMap<>();
  expectedRow.put("rows", 0L);
  expectedRow.put("index", NullHandling.defaultDoubleValue());
  expectedRow.put("variance", NullHandling.defaultDoubleValue());

  List<Result<TimeseriesResultValue>> expectedResults = ImmutableList.of(
      new Result<>(DateTimes.of("2020-04-02"), new TimeseriesResultValue(expectedRow))
  );

  Iterable<Result<TimeseriesResultValue>> actualResults =
      runner.run(QueryPlus.wrap(query)).toList();
  TestHelper.assertExpectedResults(expectedResults, actualResults);
}
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io: class VarianceTimeseriesQueryTest, method testTimeseriesWithNullFilterOnNonExistentDimension.
@Test
public void testTimeseriesWithNullFilterOnNonExistentDimension() {
  // A null-filter on a dimension that does not exist should match every row,
  // so the results are identical to an unfiltered query.
  TimeseriesQuery query = queryBuilder
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.DAY_GRAN)
      .filters("bobby", null)
      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .aggregators(VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS)
      .postAggregators(
          QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
          VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR
      )
      .descending(descending)
      .build();

  // NOTE(review): the variance/stddev expectations differ by scan direction —
  // presumably because variance accumulates in row order and floating-point
  // addition is not associative; confirm against the aggregator implementation.
  Result<TimeseriesResultValue> day1 = new Result<>(
      DateTimes.of("2011-04-01"),
      new TimeseriesResultValue(
          QueryRunnerTestHelper.of(
              "rows", 13L,
              "index", 6626.151596069336,
              "addRowsIndexConstant", 6640.151596069336,
              "uniques", QueryRunnerTestHelper.UNIQUES_9,
              "index_var", descending ? 368885.6897238851 : 368885.689155086,
              "index_stddev", descending ? 607.3596049490657 : 607.35960448081
          )
      )
  );
  Result<TimeseriesResultValue> day2 = new Result<>(
      DateTimes.of("2011-04-02"),
      new TimeseriesResultValue(
          QueryRunnerTestHelper.of(
              "rows", 13L,
              "index", 5833.2095947265625,
              "addRowsIndexConstant", 5847.2095947265625,
              "uniques", QueryRunnerTestHelper.UNIQUES_9,
              "index_var", descending ? 259061.6037088883 : 259061.60216419376,
              "index_stddev", descending ? 508.9809463122252 : 508.98094479478675
          )
      )
  );
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(day1, day2);

  Iterable<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
  assertExpectedResults(expectedResults, results);
}
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io: class StringLastTimeseriesQueryTest, method testTimeseriesQuery.
@Test
public void testTimeseriesQuery() {
  TimeseriesQueryEngine engine = new TimeseriesQueryEngine();

  // Exercise stringLast over a normal column, a pre-folded column, a column
  // that does not exist, and a numeric column.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .granularity(QueryRunnerTestHelper.ALL_GRAN)
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .aggregators(
          ImmutableList.of(
              new StringLastAggregatorFactory("nonfolding", CLIENT_TYPE, null, 1024),
              new StringLastAggregatorFactory("folding", LAST_CLIENT_TYPE, null, 1024),
              new StringLastAggregatorFactory("nonexistent", "nonexistent", null, 1024),
              new StringLastAggregatorFactory("numeric", "cnt", null, 1024)
          )
      )
      .build();

  // Missing/numeric columns are expected to yield a pair at DateTimes.MIN with a null value.
  List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
      new Result<>(
          TIME1,
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                  .put("nonfolding", new SerializablePairLongString(TIME2.getMillis(), "android"))
                  .put("folding", new SerializablePairLongString(TIME2.getMillis(), "android"))
                  .put("nonexistent", new SerializablePairLongString(DateTimes.MIN.getMillis(), null))
                  .put("numeric", new SerializablePairLongString(DateTimes.MIN.getMillis(), null))
                  .build()
          )
      )
  );

  // Run the same query against both storage adapter implementations.
  final Iterable<Result<TimeseriesResultValue>> iiResults =
      engine.process(query, new IncrementalIndexStorageAdapter(incrementalIndex)).toList();
  final Iterable<Result<TimeseriesResultValue>> qiResults =
      engine.process(query, new QueryableIndexStorageAdapter(queryableIndex)).toList();

  TestHelper.assertExpectedResults(expectedResults, iiResults, "incremental index");
  TestHelper.assertExpectedResults(expectedResults, qiResults, "queryable index");
}
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io: class DoubleMeanAggregationTest, method testAggretatorUsingTimeseriesQuery.
@Test
@Parameters(method = "doVectorize")
public void testAggretatorUsingTimeseriesQuery(boolean doVectorize) throws Exception {
  // NOTE(review): "Aggretator" is a typo for "Aggregator", but the method name is
  // the test's public identifier, so it is left unchanged here.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test")
      .granularity(Granularities.ALL)
      .intervals("1970/2050")
      .aggregators(
          new DoubleMeanAggregatorFactory("meanOnDouble", SimpleTestIndex.DOUBLE_COL),
          new DoubleMeanAggregatorFactory("meanOnString", SimpleTestIndex.SINGLE_VALUE_DOUBLE_AS_STRING_DIM),
          new DoubleMeanAggregatorFactory("meanOnMultiValue", SimpleTestIndex.MULTI_VALUE_DOUBLE_AS_STRING_DIM)
      )
      .context(ImmutableMap.of(QueryContexts.VECTORIZE_KEY, doVectorize))
      .build();

  // Round-trip the query through JSON to ensure there are no serde issues.
  ObjectMapper jsonMapper = timeseriesQueryTestHelper.getObjectMapper();
  query = (TimeseriesQuery) jsonMapper.readValue(jsonMapper.writeValueAsString(query), Query.class);

  Sequence seq = timeseriesQueryTestHelper.runQueryOnSegmentsObjs(segments, query);
  TimeseriesResultValue result =
      ((Result<TimeseriesResultValue>) Iterables.getOnlyElement(seq.toList())).getValue();

  // The mean should be identical whether the doubles come from a real double
  // column or a single-value string column; multi-value strings average all values.
  Assert.assertEquals(6.2d, result.getDoubleMetric("meanOnDouble").doubleValue(), 0.0001d);
  Assert.assertEquals(6.2d, result.getDoubleMetric("meanOnString").doubleValue(), 0.0001d);
  Assert.assertEquals(4.1333d, result.getDoubleMetric("meanOnMultiValue").doubleValue(), 0.0001d);
}
Use of org.apache.druid.query.timeseries.TimeseriesResultValue in project druid by druid-io: class SpecificSegmentQueryRunnerTest, method testRetry2.
@SuppressWarnings("unchecked")
@Test
public void testRetry2() throws Exception {
  final ObjectMapper mapper = new DefaultObjectMapper();
  SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0);

  // Build a single timeseries result row with rows=1.
  TimeseriesResultBuilder builder = new TimeseriesResultBuilder(DateTimes.of("2012-01-01T00:00:00Z"));
  CountAggregator rows = new CountAggregator();
  rows.aggregate();
  builder.addMetric("rows", rows.get());
  final Result<TimeseriesResultValue> value = builder.build();

  // The inner runner yields the value first, then throws SegmentMissingException
  // as a side effect — i.e. the segment goes missing mid-stream.
  final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {
    @Override
    public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
      return Sequences.withEffect(Sequences.simple(Collections.singletonList(value)), new Runnable() {
        @Override
        public void run() {
          throw new SegmentMissingException("FAILSAUCE");
        }
      }, Execs.directExecutor());
    }
  }, new SpecificSegmentSpec(descriptor));

  final ResponseContext responseContext = ResponseContext.createEmpty();
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("foo")
      .granularity(Granularities.ALL)
      .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D")))
      .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
      .build();

  Sequence results = queryRunner.run(QueryPlus.wrap(query), responseContext);
  List<Result<TimeseriesResultValue>> res = results.toList();

  // The partial result emitted before the failure must still be returned.
  Assert.assertEquals(1, res.size());
  Result<TimeseriesResultValue> theVal = res.get(0);
  // Fixed: assertEquals instead of assertTrue(1L == ...) — reports both values
  // on failure instead of a bare "expected true".
  Assert.assertEquals(1L, theVal.getValue().getLongMetric("rows").longValue());

  // The missing segment should have been recorded in the response context.
  validate(mapper, descriptor, responseContext);
}
Aggregations