Example 16 with Result

Use of io.druid.query.Result in project druid by druid-io.

From the class RealtimeIndexTaskTest, method sumMetric:

public long sumMetric(final Task task, final String metric) throws Exception {
    // Run an all-time timeseries query that sums the given metric.
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("test_ds")
        .aggregators(ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory(metric, metric)))
        .granularity(Granularities.ALL)
        .intervals("2000/3000")
        .build();
    ArrayList<Result<TimeseriesResultValue>> results = Sequences.toList(
        task.getQueryRunner(query).run(query, ImmutableMap.<String, Object>of()),
        Lists.<Result<TimeseriesResultValue>>newArrayList());
    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric(metric);
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) Result(io.druid.query.Result)
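
A minimal usage sketch, assuming a running task from the same test; the "rows" metric name and the expected value are illustrative, not from the source:

// Hypothetical assertion: the task has indexed two rows so far.
// "task" and the "rows" metric name are assumptions for illustration.
Assert.assertEquals(2, sumMetric(task, "rows"));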

Example 17 with Result

Use of io.druid.query.Result in project druid by druid-io.

From the class SearchBinaryFn, method apply:

@Override
public Result<SearchResultValue> apply(Result<SearchResultValue> arg1, Result<SearchResultValue> arg2) {
    if (arg1 == null) {
        return arg2;
    }
    if (arg2 == null) {
        return arg1;
    }
    // The limit only applies when merging across the whole result set
    // (ALL granularity); per-bucket merges must not truncate early.
    final int limit = gran instanceof AllGranularity ? this.limit : -1;
    SearchResultValue arg1Vals = arg1.getValue();
    SearchResultValue arg2Vals = arg2.getValue();
    // Both inputs are sorted, so a sorted merge keeps equal hits adjacent.
    Iterable<SearchHit> merged = Iterables.mergeSorted(Arrays.asList(arg1Vals, arg2Vals), searchSortSpec.getComparator());
    int maxSize = arg1Vals.getValue().size() + arg2Vals.getValue().size();
    if (limit > 0) {
        maxSize = Math.min(limit, maxSize);
    }
    List<SearchHit> results = Lists.newArrayListWithExpectedSize(maxSize);
    SearchHit prev = null;
    for (SearchHit searchHit : merged) {
        if (prev == null) {
            prev = searchHit;
            continue;
        }
        if (prev.equals(searchHit)) {
            // Collapse adjacent equal hits, summing their counts when both carry one.
            if (prev.getCount() != null && searchHit.getCount() != null) {
                prev = new SearchHit(prev.getDimension(), prev.getValue(), prev.getCount() + searchHit.getCount());
            } else {
                prev = new SearchHit(prev.getDimension(), prev.getValue());
            }
        } else {
            results.add(prev);
            prev = searchHit;
            if (limit > 0 && results.size() >= limit) {
                break;
            }
        }
    }
    if (prev != null && (limit < 0 || results.size() < limit)) {
        results.add(prev);
    }
    final DateTime timestamp = gran instanceof AllGranularity ? arg1.getTimestamp() : gran.bucketStart(arg1.getTimestamp());
    return new Result<SearchResultValue>(timestamp, new SearchResultValue(results));
}
Also used : SearchHit(io.druid.query.search.search.SearchHit) AllGranularity(io.druid.java.util.common.granularity.AllGranularity) DateTime(org.joda.time.DateTime) Result(io.druid.query.Result)
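
A hedged sketch of the merge semantics, assuming a binaryFn built elsewhere with AllGranularity and an effectively unbounded limit; time, "dim", and "a" are illustrative values, not from the source:

// Two sorted results that contain an equal hit; "binaryFn" and "time" are assumed in scope.
Result<SearchResultValue> r1 = new Result<>(time, new SearchResultValue(Lists.newArrayList(new SearchHit("dim", "a", 1))));
Result<SearchResultValue> r2 = new Result<>(time, new SearchResultValue(Lists.newArrayList(new SearchHit("dim", "a", 2))));
Result<SearchResultValue> merged = binaryFn.apply(r1, r2);
// Equal hits collapse into a single entry whose count is the sum: ("dim", "a", 3).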

Example 18 with Result

Use of io.druid.query.Result in project druid by druid-io.

From the class TopNQueryEngine, method query:

public Sequence<Result<TopNResultValue>> query(final TopNQuery query, final StorageAdapter adapter) {
    if (adapter == null) {
        throw new SegmentMissingException("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }
    final List<Interval> queryIntervals = query.getQuerySegmentSpec().getIntervals();
    final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
    final Granularity granularity = query.getGranularity();
    final Function<Cursor, Result<TopNResultValue>> mapFn = getMapFn(query, adapter);
    Preconditions.checkArgument(queryIntervals.size() == 1, "Can only handle a single interval, got[%s]", queryIntervals);
    return Sequences.filter(
        Sequences.map(
            adapter.makeCursors(filter, queryIntervals.get(0), query.getVirtualColumns(), granularity, query.isDescending()),
            new Function<Cursor, Result<TopNResultValue>>() {
                @Override
                public Result<TopNResultValue> apply(Cursor input) {
                    log.debug("Running over cursor[%s], time[%s]", adapter.getInterval(), input.getTime());
                    return mapFn.apply(input);
                }
            }
        ),
        Predicates.<Result<TopNResultValue>>notNull()
    );
}
Also used : Function(com.google.common.base.Function) Filter(io.druid.query.filter.Filter) SegmentMissingException(io.druid.segment.SegmentMissingException) Granularity(io.druid.java.util.common.granularity.Granularity) Cursor(io.druid.segment.Cursor) Interval(org.joda.time.Interval) Result(io.druid.query.Result)
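
A hedged sketch of a query this engine could serve; the data source, dimension, and metric names are illustrative, and engine and segment are assumed to be in scope:

// Top 10 "dim" values by "count" over all time; all names here are assumptions.
TopNQuery query = new TopNQueryBuilder()
    .dataSource("test_ds")
    .dimension("dim")
    .metric("count")
    .threshold(10)
    .intervals("2000/3000")
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("count")))
    .build();
Sequence<Result<TopNResultValue>> results = engine.query(query, segment.asStorageAdapter());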

Example 19 with Result

Use of io.druid.query.Result in project druid by druid-io.

From the class TopNQueryQueryToolChest, method makePostComputeManipulatorFn:

@Override
public Function<Result<TopNResultValue>, Result<TopNResultValue>> makePostComputeManipulatorFn(final TopNQuery query, final MetricManipulationFn fn) {
    return new Function<Result<TopNResultValue>, Result<TopNResultValue>>() {

        private String dimension = query.getDimensionSpec().getOutputName();

        private final AggregatorFactory[] aggregatorFactories = query.getAggregatorSpecs().toArray(new AggregatorFactory[0]);

        private final String[] aggFactoryNames = extractFactoryName(query.getAggregatorSpecs());

        private final PostAggregator[] postAggregators = query.getPostAggregatorSpecs().toArray(new PostAggregator[0]);

        @Override
        public Result<TopNResultValue> apply(Result<TopNResultValue> result) {
            List<Map<String, Object>> serializedValues = Lists.newArrayList(Iterables.transform(
                result.getValue(),
                new Function<DimensionAndMetricValueExtractor, Map<String, Object>>() {

                @Override
                public Map<String, Object> apply(DimensionAndMetricValueExtractor input) {
                    final Map<String, Object> values = Maps.newHashMapWithExpectedSize(aggregatorFactories.length + query.getPostAggregatorSpecs().size() + 1);
                    // First pass: copy the raw aggregator values; the post-aggregators
                    // below may read them.
                    for (int i = 0; i < aggFactoryNames.length; ++i) {
                        final String name = aggFactoryNames[i];
                        values.put(name, input.getMetric(name));
                    }
                    for (PostAggregator postAgg : postAggregators) {
                        Object calculatedPostAgg = input.getMetric(postAgg.getName());
                        if (calculatedPostAgg != null) {
                            values.put(postAgg.getName(), calculatedPostAgg);
                        } else {
                            values.put(postAgg.getName(), postAgg.compute(values));
                        }
                    }
                    // Second pass: overwrite the raw aggregator values with their
                    // manipulated (for example, finalized) form.
                    for (int i = 0; i < aggFactoryNames.length; ++i) {
                        final String name = aggFactoryNames[i];
                        values.put(name, fn.manipulate(aggregatorFactories[i], input.getMetric(name)));
                    }
                    values.put(dimension, input.getDimensionValue(dimension));
                    return values;
                }
            }));
            return new Result<>(result.getTimestamp(), new TopNResultValue(serializedValues));
        }
    };
}
Also used : Function(com.google.common.base.Function) PostAggregator(io.druid.query.aggregation.PostAggregator) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) Map(java.util.Map) Result(io.druid.query.Result)
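
A hedged sketch of applying the returned function, for example to finalize complex aggregations after merging; toolChest, query, and result are assumed to be in scope:

// Finalize aggregator values (e.g. hyperUnique sketches into plain numbers) per result row.
Function<Result<TopNResultValue>, Result<TopNResultValue>> postFn =
    toolChest.makePostComputeManipulatorFn(query, MetricManipulatorFns.finalizing());
Result<TopNResultValue> finalized = postFn.apply(result);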

Example 20 with Result

Use of io.druid.query.Result in project druid by druid-io.

From the class SelectQueryEngine, method process:

public Sequence<Result<SelectResultValue>> process(final SelectQuery query, final Segment segment) {
    final StorageAdapter adapter = segment.asStorageAdapter();
    if (adapter == null) {
        throw new ISE("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }
    // At the point where this code is called, only one data source should exist.
    String dataSource = Iterables.getOnlyElement(query.getDataSource().getNames());
    final Iterable<DimensionSpec> dims;
    if (query.getDimensions() == null || query.getDimensions().isEmpty()) {
        dims = DefaultDimensionSpec.toSpec(adapter.getAvailableDimensions());
    } else {
        dims = query.getDimensions();
    }
    final Iterable<String> metrics;
    if (query.getMetrics() == null || query.getMetrics().isEmpty()) {
        metrics = adapter.getAvailableMetrics();
    } else {
        metrics = query.getMetrics();
    }
    List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
    Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got[%s]", intervals);
    // The segment identifier should be rewritten with the given interval.
    final String segmentId = DataSegmentUtils.withInterval(dataSource, segment.getIdentifier(), intervals.get(0));
    final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
    return QueryRunnerHelper.makeCursorBasedQuery(
        adapter,
        query.getQuerySegmentSpec().getIntervals(),
        filter,
        query.getVirtualColumns(),
        query.isDescending(),
        query.getGranularity(),
        new Function<Cursor, Result<SelectResultValue>>() {

        @Override
        public Result<SelectResultValue> apply(Cursor cursor) {
            final SelectResultValueBuilder builder = new SelectResultValueBuilder(cursor.getTime(), query.getPagingSpec(), query.isDescending());
            final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME);
            final List<ColumnSelectorPlus<SelectColumnSelectorStrategy>> selectorPlusList = Arrays.asList(
                DimensionHandlerUtils.createColumnSelectorPluses(STRATEGY_FACTORY, Lists.newArrayList(dims), cursor));
            for (DimensionSpec dimSpec : dims) {
                builder.addDimension(dimSpec.getOutputName());
            }
            final Map<String, ObjectColumnSelector> metSelectors = Maps.newHashMap();
            for (String metric : metrics) {
                final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric);
                metSelectors.put(metric, metricSelector);
                builder.addMetric(metric);
            }
            // Resume from this segment's paging offset and emit events until the
            // cursor is exhausted or the page threshold is reached.
            final PagingOffset offset = query.getPagingOffset(segmentId);
            cursor.advanceTo(offset.startDelta());
            int lastOffset = offset.startOffset();
            for (; !cursor.isDone() && offset.hasNext(); cursor.advance(), offset.next()) {
                final Map<String, Object> theEvent = singleEvent(EventHolder.timestampKey, timestampColumnSelector, selectorPlusList, metSelectors);
                builder.addEntry(new EventHolder(segmentId, lastOffset = offset.current(), theEvent));
            }
            builder.finished(segmentId, lastOffset);
            return builder.build();
        }
    });
}
Also used : DimensionSpec(io.druid.query.dimension.DimensionSpec) DefaultDimensionSpec(io.druid.query.dimension.DefaultDimensionSpec) StorageAdapter(io.druid.segment.StorageAdapter) Cursor(io.druid.segment.Cursor) Result(io.druid.query.Result) Filter(io.druid.query.filter.Filter) ISE(io.druid.java.util.common.ISE) LongColumnSelector(io.druid.segment.LongColumnSelector) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) Interval(org.joda.time.Interval) ObjectColumnSelector(io.druid.segment.ObjectColumnSelector)
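
A hedged sketch of a first-page select query against this engine; the data source and page size are illustrative, and engine and segment are assumed to be in scope:

// A null paging-identifiers map means "start from the beginning"; 3 is the page threshold.
SelectQuery query = Druids.newSelectQueryBuilder()
    .dataSource("test_ds")
    .intervals("2000/3000")
    .granularity(Granularities.ALL)
    .pagingSpec(new PagingSpec(null, 3))
    .build();
Sequence<Result<SelectResultValue>> page = engine.process(query, segment);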

Aggregations (usage counts across the project)

Result (io.druid.query.Result): 236
Test (org.junit.Test): 164
DateTime (org.joda.time.DateTime): 153
HashMap (java.util.HashMap): 66
PostAggregator (io.druid.query.aggregation.PostAggregator): 51
QueryRunner (io.druid.query.QueryRunner): 49
Interval (org.joda.time.Interval): 44
TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue): 40
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 38
TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery): 37
DoubleMaxAggregatorFactory (io.druid.query.aggregation.DoubleMaxAggregatorFactory): 36
DoubleMinAggregatorFactory (io.druid.query.aggregation.DoubleMinAggregatorFactory): 36
HyperUniqueFinalizingPostAggregator (io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator): 35
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 34
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 31
ExtractionDimensionSpec (io.druid.query.dimension.ExtractionDimensionSpec): 31
TopNResultValue (io.druid.query.topn.TopNResultValue): 30
Map (java.util.Map): 27
Benchmark (org.openjdk.jmh.annotations.Benchmark): 27
BenchmarkMode (org.openjdk.jmh.annotations.BenchmarkMode): 27