Example usage of io.druid.query.timeseries.TimeseriesQuery in the druid project (druid-io):
the testFilteredTimeSeries2 method of the AppendTest class.
@Test
public void testFilteredTimeSeries2() {
  // Expected aggregate values for the single 2011-01-12 bucket produced by
  // running the filtered timeseries query against segment2.
  final Map<String, Object> expectedMetrics = ImmutableMap.<String, Object>builder()
      .put("rows", 4L)
      .put("index", 400.0D)
      .put("addRowsIndexConstant", 405.0D)
      .put("uniques", 0.0D)
      .put("maxIndex", 100.0D)
      .put("minIndex", 100.0D)
      .build();
  final List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<TimeseriesResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TimeseriesResultValue(expectedMetrics)
      )
  );

  TimeseriesQuery query = makeFilteredTimeseriesQuery();
  QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment2);
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
}
Example usage of io.druid.query.timeseries.TimeseriesQuery in the druid project (druid-io):
the toTimeseriesQuery method of the DruidQueryBuilder class.
/**
 * Return this query as a Timeseries query, or null if this query is not compatible with Timeseries.
 *
 * @param dataSource data source to query
 * @param sourceRowSignature row signature of the dataSource
 * @param context query context
 *
 * @return query or null
 */
public TimeseriesQuery toTimeseriesQuery(final DataSource dataSource, final RowSignature sourceRowSignature, final Map<String, Object> context) {
  // Timeseries needs a grouping and cannot express HAVING clauses.
  if (grouping == null || having != null) {
    return null;
  }

  final List<DimensionSpec> dimensionSpecs = grouping.getDimensions();
  final Granularity queryGranularity;

  if (dimensionSpecs.isEmpty()) {
    // No grouping dimensions: everything falls into one ALL-granularity bucket.
    queryGranularity = Granularities.ALL;
  } else if (dimensionSpecs.size() == 1) {
    final DimensionSpec soleDimension = Iterables.getOnlyElement(dimensionSpecs);
    final Granularity extractedGranularity = ExtractionFns.toQueryGranularity(soleDimension.getExtractionFn());

    // Timeseries only applies if the single dimension is granular __time.
    if (extractedGranularity == null || !soleDimension.getDimension().equals(Column.TIME_COLUMN_NAME)) {
      return null;
    }

    // Timeseries only applies if sort is null, or if the first sort field is the time dimension.
    final boolean sortingOnTime =
        limitSpec == null
        || limitSpec.getColumns().isEmpty()
        || (limitSpec.getLimit() == Integer.MAX_VALUE
            && limitSpec.getColumns().get(0).getDimension().equals(soleDimension.getOutputName()));
    if (!sortingOnTime) {
      return null;
    }
    queryGranularity = extractedGranularity;
  } else {
    // Two or more dimensions cannot be expressed as a Timeseries query.
    return null;
  }

  final Filtration filtration = Filtration.create(filter).optimize(sourceRowSignature);

  // Descend only when the first sort column is explicitly DESCENDING.
  final boolean descending =
      limitSpec != null
      && !limitSpec.getColumns().isEmpty()
      && limitSpec.getColumns().get(0).getDirection() == OrderByColumnSpec.Direction.DESCENDING;

  // Skip empty buckets by default, but let any caller-supplied context entry override it.
  final Map<String, Object> mergedContext = Maps.newHashMap();
  mergedContext.put("skipEmptyBuckets", true);
  mergedContext.putAll(context);

  return new TimeseriesQuery(dataSource, filtration.getQuerySegmentSpec(), descending, VirtualColumns.EMPTY, filtration.getDimFilter(), queryGranularity, grouping.getAggregatorFactories(), grouping.getPostAggregators(), mergedContext);
}
Aggregations