
Example 1 with HistogramFactory

Use of org.opensearch.search.aggregations.bucket.histogram.HistogramFactory in project OpenSearch by opensearch-project.

From class CumulativeSumPipelineAggregator, method reduce:

@Override
public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
    InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket>) aggregation;
    List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
    HistogramFactory factory = (HistogramFactory) histo;
    List<Bucket> newBuckets = new ArrayList<>(buckets.size());
    double sum = 0;
    for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
        Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], GapPolicy.INSERT_ZEROS);
        // Only increment the sum if it's a finite value, otherwise "increment by zero" is correct
        if (thisBucketValue != null && thisBucketValue.isInfinite() == false && thisBucketValue.isNaN() == false) {
            sum += thisBucketValue;
        }
        List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> (InternalAggregation) p).collect(Collectors.toList());
        aggs.add(new InternalSimpleValue(name(), sum, formatter, metadata()));
        Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
        newBuckets.add(newBucket);
    }
    return factory.createAggregation(newBuckets);
}
Also used : StreamInput(org.opensearch.common.io.stream.StreamInput), HistogramFactory(org.opensearch.search.aggregations.bucket.histogram.HistogramFactory), DocValueFormat(org.opensearch.search.DocValueFormat), ReduceContext(org.opensearch.search.aggregations.InternalAggregation.ReduceContext), InternalMultiBucketAggregation(org.opensearch.search.aggregations.InternalMultiBucketAggregation), StreamOutput(org.opensearch.common.io.stream.StreamOutput), IOException(java.io.IOException), GapPolicy(org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy), Collectors(java.util.stream.Collectors), ArrayList(java.util.ArrayList), List(java.util.List), BucketHelpers.resolveBucketValue(org.opensearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue), InternalAggregations(org.opensearch.search.aggregations.InternalAggregations), Map(java.util.Map), StreamSupport(java.util.stream.StreamSupport), Bucket(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket), InternalAggregation(org.opensearch.search.aggregations.InternalAggregation)
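
Stripped of the bucket and factory plumbing, the reduce loop above is a running sum that skips non-finite values. A minimal, dependency-free sketch of that logic over plain Double values (CumulativeSumSketch and cumulativeSum are hypothetical names, not OpenSearch types):

import java.util.ArrayList;
import java.util.List;

public class CumulativeSumSketch {

    // Same rule as the reduce loop above: only finite, non-null values move the running total,
    // but every bucket still receives the current sum.
    static List<Double> cumulativeSum(List<Double> bucketValues) {
        List<Double> result = new ArrayList<>(bucketValues.size());
        double sum = 0;
        for (Double value : bucketValues) {
            if (value != null && !value.isNaN() && !value.isInfinite()) {
                sum += value;
            }
            result.add(sum);
        }
        return result;
    }

    public static void main(String[] args) {
        // The NaN bucket keeps the previous running total: [1.0, 3.0, 3.0, 7.0]
        System.out.println(cumulativeSum(List.of(1.0, 2.0, Double.NaN, 4.0)));
    }
}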

Example 2 with HistogramFactory

Use of org.opensearch.search.aggregations.bucket.histogram.HistogramFactory in project OpenSearch by opensearch-project.

From class MovAvgPipelineAggregator, method reduce:

@Override
public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
    InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket>) aggregation;
    List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
    HistogramFactory factory = (HistogramFactory) histo;
    List<Bucket> newBuckets = new ArrayList<>();
    EvictingQueue<Double> values = new EvictingQueue<>(this.window);
    Number lastValidKey = 0;
    int lastValidPosition = 0;
    int counter = 0;
    // Do we need to fit the model parameters to the data?
    if (minimize) {
        assert (model.canBeMinimized());
        model = minimize(buckets, histo, model);
    }
    for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
        Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy);
        // Default is to reuse existing bucket. Simplifies the rest of the logic,
        // since we only change newBucket if we can add to it
        Bucket newBucket = bucket;
        if ((thisBucketValue == null || thisBucketValue.equals(Double.NaN)) == false) {
            // Some models (e.g. HoltWinters) have certain preconditions that must be met
            if (model.hasValue(values.size())) {
                double movavg = model.next(values);
                List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> (InternalAggregation) p).collect(Collectors.toList());
                aggs.add(new InternalSimpleValue(name(), movavg, formatter, metadata()));
                newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
            }
            if (predict > 0) {
                lastValidKey = factory.getKey(bucket);
                lastValidPosition = counter;
            }
            values.offer(thisBucketValue);
        }
        counter += 1;
        newBuckets.add(newBucket);
    }
    if (buckets.size() > 0 && predict > 0) {
        double[] predictions = model.predict(values, predict);
        for (int i = 0; i < predictions.length; i++) {
            List<InternalAggregation> aggs;
            Number newKey = factory.nextKey(lastValidKey);
            if (lastValidPosition + i + 1 < newBuckets.size()) {
                Bucket bucket = newBuckets.get(lastValidPosition + i + 1);
                // Get the existing aggs in the bucket so we don't clobber data
                aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> (InternalAggregation) p).collect(Collectors.toList());
                aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, metadata()));
                Bucket newBucket = factory.createBucket(newKey, bucket.getDocCount(), InternalAggregations.from(aggs));
                // Overwrite the existing bucket with the new version
                newBuckets.set(lastValidPosition + i + 1, newBucket);
            } else {
                // Not seen before, create fresh
                aggs = new ArrayList<>();
                aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, metadata()));
                Bucket newBucket = factory.createBucket(newKey, 0, InternalAggregations.from(aggs));
                // Since this is a new bucket, simply append it
                newBuckets.add(newBucket);
            }
            lastValidKey = newKey;
        }
    }
    return factory.createAggregation(newBuckets);
}
Also used : StreamInput(org.opensearch.common.io.stream.StreamInput), HistogramFactory(org.opensearch.search.aggregations.bucket.histogram.HistogramFactory), ListIterator(java.util.ListIterator), DocValueFormat(org.opensearch.search.DocValueFormat), ReduceContext(org.opensearch.search.aggregations.InternalAggregation.ReduceContext), InternalMultiBucketAggregation(org.opensearch.search.aggregations.InternalMultiBucketAggregation), EvictingQueue(org.opensearch.common.collect.EvictingQueue), StreamOutput(org.opensearch.common.io.stream.StreamOutput), IOException(java.io.IOException), MultiBucketsAggregation(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation), GapPolicy(org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy), Collectors(java.util.stream.Collectors), ArrayList(java.util.ArrayList), List(java.util.List), BucketHelpers.resolveBucketValue(org.opensearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue), InternalAggregations(org.opensearch.search.aggregations.InternalAggregations), Map(java.util.Map), StreamSupport(java.util.stream.StreamSupport), Bucket(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket), InternalAggregation(org.opensearch.search.aggregations.InternalAggregation)
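
The core of the loop above is an evicting window: emit the model's value for what is already in the window, then offer the current bucket value. A rough standalone sketch using a simple average in place of the pluggable MovAvgModel (ArrayDeque stands in for OpenSearch's EvictingQueue; the names and the prediction-free behaviour are simplifications, not the actual implementation):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class MovingAverageSketch {

    // For each position, emit the average of the values currently in the window,
    // then offer the new value; the oldest entry is evicted once the window is full.
    static List<Double> movingAverage(List<Double> values, int window) {
        Deque<Double> buffer = new ArrayDeque<>(window);
        List<Double> result = new ArrayList<>(values.size());
        for (Double value : values) {
            result.add(buffer.isEmpty()
                ? Double.NaN // nothing to average yet, akin to model.hasValue(...) failing
                : buffer.stream().mapToDouble(Double::doubleValue).average().orElse(Double.NaN));
            if (value != null && !value.isNaN()) {
                if (buffer.size() == window) {
                    buffer.pollFirst(); // evict the oldest value
                }
                buffer.addLast(value);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // window = 2 -> [NaN, 1.0, 1.5, 2.5]
        System.out.println(movingAverage(List.of(1.0, 2.0, 3.0, 4.0), 2));
    }
}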

Example 3 with HistogramFactory

Use of org.opensearch.search.aggregations.bucket.histogram.HistogramFactory in project OpenSearch by opensearch-project.

From class MovFnPipelineAggregator, method reduce:

@Override
public InternalAggregation reduce(InternalAggregation aggregation, InternalAggregation.ReduceContext reduceContext) {
    InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket>) aggregation;
    List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
    HistogramFactory factory = (HistogramFactory) histo;
    List<MultiBucketsAggregation.Bucket> newBuckets = new ArrayList<>();
    // Initialize the script
    MovingFunctionScript.Factory scriptFactory = reduceContext.scriptService().compile(script, MovingFunctionScript.CONTEXT);
    Map<String, Object> vars = new HashMap<>();
    if (script.getParams() != null) {
        vars.putAll(script.getParams());
    }
    MovingFunctionScript executableScript = scriptFactory.newInstance();
    List<Double> values = buckets.stream().map(b -> resolveBucketValue(histo, b, bucketsPaths()[0], gapPolicy)).filter(v -> v != null && v.isNaN() == false).collect(Collectors.toList());
    int index = 0;
    for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
        Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy);
        // Default is to reuse existing bucket. Simplifies the rest of the logic,
        // since we only change newBucket if we can add to it
        MultiBucketsAggregation.Bucket newBucket = bucket;
        if (thisBucketValue != null && thisBucketValue.isNaN() == false) {
            // The custom context mandates that the script returns a double (not Double) so we
            // don't need null checks, etc.
            int fromIndex = clamp(index - window + shift, values);
            int toIndex = clamp(index + shift, values);
            double movavg = executableScript.execute(vars, values.subList(fromIndex, toIndex).stream().mapToDouble(Double::doubleValue).toArray());
            List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map(InternalAggregation.class::cast).collect(Collectors.toList());
            aggs.add(new InternalSimpleValue(name(), movavg, formatter, metadata()));
            newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
            index++;
        }
        newBuckets.add(newBucket);
    }
    return factory.createAggregation(newBuckets);
}
Also used : StreamInput(org.opensearch.common.io.stream.StreamInput), HistogramFactory(org.opensearch.search.aggregations.bucket.histogram.HistogramFactory), DocValueFormat(org.opensearch.search.DocValueFormat), Script(org.opensearch.script.Script), InternalMultiBucketAggregation(org.opensearch.search.aggregations.InternalMultiBucketAggregation), StreamOutput(org.opensearch.common.io.stream.StreamOutput), IOException(java.io.IOException), HashMap(java.util.HashMap), MultiBucketsAggregation(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation), Collectors(java.util.stream.Collectors), ArrayList(java.util.ArrayList), List(java.util.List), BucketHelpers.resolveBucketValue(org.opensearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue), LegacyESVersion(org.opensearch.LegacyESVersion), InternalAggregations(org.opensearch.search.aggregations.InternalAggregations), Map(java.util.Map), StreamSupport(java.util.stream.StreamSupport), InternalAggregation(org.opensearch.search.aggregations.InternalAggregation)
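
The subtle part of that reduce method is the slice arithmetic: position index sees values[clamp(index - window + shift) .. clamp(index + shift)). Below is a hedged, self-contained sketch with a pluggable function standing in for the compiled MovingFunctionScript; it assumes a dense array of finite values and ignores gap policy, and all names are illustrative rather than OpenSearch APIs:

import java.util.Arrays;
import java.util.function.ToDoubleFunction;

public class MovingFunctionSketch {

    // For position i the function sees values[clamp(i - window + shift) .. clamp(i + shift)),
    // the same slice bounds as fromIndex/toIndex in the reduce method above.
    static double[] movingFunction(double[] values, int window, int shift, ToDoubleFunction<double[]> fn) {
        double[] result = new double[values.length];
        for (int i = 0; i < values.length; i++) {
            int from = clamp(i - window + shift, values.length);
            int to = clamp(i + shift, values.length);
            result[i] = fn.applyAsDouble(Arrays.copyOfRange(values, from, to));
        }
        return result;
    }

    static int clamp(int index, int length) {
        return Math.max(0, Math.min(index, length));
    }

    public static void main(String[] args) {
        // Windowed max, window = 2, shift = 0 -> [NaN, 3.0, 3.0, 4.0, 4.0]
        double[] out = movingFunction(new double[] { 3, 1, 4, 1, 5 }, 2, 0,
            slice -> Arrays.stream(slice).max().orElse(Double.NaN));
        System.out.println(Arrays.toString(out));
    }
}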

Example 4 with HistogramFactory

Use of org.opensearch.search.aggregations.bucket.histogram.HistogramFactory in project OpenSearch by opensearch-project.

From class SerialDiffPipelineAggregator, method reduce:

@Override
public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
    InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket>) aggregation;
    List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
    HistogramFactory factory = (HistogramFactory) histo;
    List<Bucket> newBuckets = new ArrayList<>();
    EvictingQueue<Double> lagWindow = new EvictingQueue<>(lag);
    int counter = 0;
    for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
        Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy);
        Bucket newBucket = bucket;
        counter += 1;
        // Still under the initial lag period, add nothing and move on
        Double lagValue;
        if (counter <= lag) {
            lagValue = Double.NaN;
        } else {
            // Peek here, because we rely on add'ing to always move the window
            lagValue = lagWindow.peek();
        }
        // Normalize null's to NaN
        if (thisBucketValue == null) {
            thisBucketValue = Double.NaN;
        }
        // Both have values, calculate diff and replace the "empty" bucket
        if (!Double.isNaN(thisBucketValue) && !Double.isNaN(lagValue)) {
            double diff = thisBucketValue - lagValue;
            List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> (InternalAggregation) p).collect(Collectors.toList());
            aggs.add(new InternalSimpleValue(name(), diff, formatter, metadata()));
            newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
        }
        newBuckets.add(newBucket);
        lagWindow.add(thisBucketValue);
    }
    return factory.createAggregation(newBuckets);
}
Also used : StreamInput(org.opensearch.common.io.stream.StreamInput), HistogramFactory(org.opensearch.search.aggregations.bucket.histogram.HistogramFactory), DocValueFormat(org.opensearch.search.DocValueFormat), ReduceContext(org.opensearch.search.aggregations.InternalAggregation.ReduceContext), InternalMultiBucketAggregation(org.opensearch.search.aggregations.InternalMultiBucketAggregation), EvictingQueue(org.opensearch.common.collect.EvictingQueue), StreamOutput(org.opensearch.common.io.stream.StreamOutput), IOException(java.io.IOException), GapPolicy(org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy), Collectors(java.util.stream.Collectors), Nullable(org.opensearch.common.Nullable), ArrayList(java.util.ArrayList), List(java.util.List), BucketHelpers.resolveBucketValue(org.opensearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue), InternalAggregations(org.opensearch.search.aggregations.InternalAggregations), Map(java.util.Map), StreamSupport(java.util.stream.StreamSupport), Bucket(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket), InternalAggregation(org.opensearch.search.aggregations.InternalAggregation)
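
Without the histogram plumbing, serial differencing reduces to value[i] - value[i - lag], with NaN until a full lag window has been seen. A minimal sketch under that assumption, using plain double arrays instead of buckets:

import java.util.Arrays;

public class SerialDiffSketch {

    // diff[i] = value[i] - value[i - lag]; positions before a full lag window are NaN,
    // matching the lagWindow.peek() behaviour above.
    static double[] serialDiff(double[] values, int lag) {
        double[] result = new double[values.length];
        for (int i = 0; i < values.length; i++) {
            result[i] = (i < lag) ? Double.NaN : values[i] - values[i - lag];
        }
        return result;
    }

    public static void main(String[] args) {
        // lag = 1 is a first difference -> [NaN, 1.0, 2.0, 4.0]
        System.out.println(Arrays.toString(serialDiff(new double[] { 1, 2, 4, 8 }, 1)));
    }
}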

Example 5 with HistogramFactory

Use of org.opensearch.search.aggregations.bucket.histogram.HistogramFactory in project OpenSearch by opensearch-project.

From class DerivativePipelineAggregator, method reduce:

@Override
public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
    InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket> histo = (InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket>) aggregation;
    List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
    HistogramFactory factory = (HistogramFactory) histo;
    List<Bucket> newBuckets = new ArrayList<>();
    Number lastBucketKey = null;
    Double lastBucketValue = null;
    for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
        Number thisBucketKey = factory.getKey(bucket);
        Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy);
        if (lastBucketValue != null && thisBucketValue != null) {
            double gradient = thisBucketValue - lastBucketValue;
            double xDiff = -1;
            if (xAxisUnits != null) {
                xDiff = (thisBucketKey.doubleValue() - lastBucketKey.doubleValue()) / xAxisUnits;
            }
            final List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> {
                return (InternalAggregation) p;
            }).collect(Collectors.toList());
            aggs.add(new InternalDerivative(name(), gradient, xDiff, formatter, metadata()));
            Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
            newBuckets.add(newBucket);
        } else {
            newBuckets.add(bucket);
        }
        lastBucketKey = thisBucketKey;
        lastBucketValue = thisBucketValue;
    }
    return factory.createAggregation(newBuckets);
}
Also used : StreamInput(org.opensearch.common.io.stream.StreamInput), HistogramFactory(org.opensearch.search.aggregations.bucket.histogram.HistogramFactory), DocValueFormat(org.opensearch.search.DocValueFormat), ReduceContext(org.opensearch.search.aggregations.InternalAggregation.ReduceContext), InternalMultiBucketAggregation(org.opensearch.search.aggregations.InternalMultiBucketAggregation), StreamOutput(org.opensearch.common.io.stream.StreamOutput), IOException(java.io.IOException), GapPolicy(org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy), Collectors(java.util.stream.Collectors), ArrayList(java.util.ArrayList), List(java.util.List), BucketHelpers.resolveBucketValue(org.opensearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue), InternalAggregations(org.opensearch.search.aggregations.InternalAggregations), Map(java.util.Map), StreamSupport(java.util.stream.StreamSupport), Bucket(org.opensearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket), InternalAggregation(org.opensearch.search.aggregations.InternalAggregation)
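
The derivative reduce step is a first difference of neighbouring bucket values; when xAxisUnits is set, the xDiff passed to InternalDerivative lets the result be read as a per-unit rate (gradient / xDiff). A simplified standalone sketch that returns only that single number per position, with hypothetical names and plain arrays in place of the OpenSearch types:

import java.util.Arrays;

public class DerivativeSketch {

    // gradient[i] = value[i] - value[i - 1]; when xAxisUnits is non-null the gradient is
    // divided by xDiff = (key[i] - key[i - 1]) / xAxisUnits to give a per-unit rate.
    static double[] derivative(double[] keys, double[] values, Double xAxisUnits) {
        double[] result = new double[values.length];
        result[0] = Double.NaN; // the first bucket has no predecessor
        for (int i = 1; i < values.length; i++) {
            double gradient = values[i] - values[i - 1];
            if (xAxisUnits == null) {
                result[i] = gradient;
            } else {
                double xDiff = (keys[i] - keys[i - 1]) / xAxisUnits;
                result[i] = gradient / xDiff;
            }
        }
        return result;
    }

    public static void main(String[] args) {
        double[] keys = { 0, 3_600_000, 7_200_000 };   // hourly bucket keys in epoch millis
        double[] values = { 10, 40, 100 };
        // Per-minute rate (xAxisUnits = 60_000 ms) -> [NaN, 0.5, 1.0]
        System.out.println(Arrays.toString(derivative(keys, values, 60_000.0)));
    }
}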

Aggregations

IOException (java.io.IOException): 5
ArrayList (java.util.ArrayList): 5
List (java.util.List): 5
Map (java.util.Map): 5
Collectors (java.util.stream.Collectors): 5
StreamSupport (java.util.stream.StreamSupport): 5
StreamInput (org.opensearch.common.io.stream.StreamInput): 5
StreamOutput (org.opensearch.common.io.stream.StreamOutput): 5
DocValueFormat (org.opensearch.search.DocValueFormat): 5
InternalAggregation (org.opensearch.search.aggregations.InternalAggregation): 5
InternalAggregations (org.opensearch.search.aggregations.InternalAggregations): 5
InternalMultiBucketAggregation (org.opensearch.search.aggregations.InternalMultiBucketAggregation): 5
HistogramFactory (org.opensearch.search.aggregations.bucket.histogram.HistogramFactory): 5
BucketHelpers.resolveBucketValue (org.opensearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue): 5
ReduceContext (org.opensearch.search.aggregations.InternalAggregation.ReduceContext): 4
Bucket (org.opensearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket): 4
GapPolicy (org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy): 4
EvictingQueue (org.opensearch.common.collect.EvictingQueue): 2
MultiBucketsAggregation (org.opensearch.search.aggregations.bucket.MultiBucketsAggregation): 2
HashMap (java.util.HashMap): 1