Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder in project elasticsearch by elastic.
From the class SearchModule, method registerAggregations:
private void registerAggregations(List<SearchPlugin> plugins) {
    registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder::parse).addResultReader(InternalAvg::new));
    registerAggregation(new AggregationSpec(SumAggregationBuilder.NAME, SumAggregationBuilder::new, SumAggregationBuilder::parse).addResultReader(InternalSum::new));
    registerAggregation(new AggregationSpec(MinAggregationBuilder.NAME, MinAggregationBuilder::new, MinAggregationBuilder::parse).addResultReader(InternalMin::new));
    registerAggregation(new AggregationSpec(MaxAggregationBuilder.NAME, MaxAggregationBuilder::new, MaxAggregationBuilder::parse).addResultReader(InternalMax::new));
    registerAggregation(new AggregationSpec(StatsAggregationBuilder.NAME, StatsAggregationBuilder::new, StatsAggregationBuilder::parse).addResultReader(InternalStats::new));
    registerAggregation(new AggregationSpec(ExtendedStatsAggregationBuilder.NAME, ExtendedStatsAggregationBuilder::new, ExtendedStatsAggregationBuilder::parse).addResultReader(InternalExtendedStats::new));
    registerAggregation(new AggregationSpec(ValueCountAggregationBuilder.NAME, ValueCountAggregationBuilder::new, ValueCountAggregationBuilder::parse).addResultReader(InternalValueCount::new));
    registerAggregation(new AggregationSpec(PercentilesAggregationBuilder.NAME, PercentilesAggregationBuilder::new, PercentilesAggregationBuilder::parse)
            .addResultReader(InternalTDigestPercentiles.NAME, InternalTDigestPercentiles::new)
            .addResultReader(InternalHDRPercentiles.NAME, InternalHDRPercentiles::new));
    registerAggregation(new AggregationSpec(PercentileRanksAggregationBuilder.NAME, PercentileRanksAggregationBuilder::new, PercentileRanksAggregationBuilder::parse)
            .addResultReader(InternalTDigestPercentileRanks.NAME, InternalTDigestPercentileRanks::new)
            .addResultReader(InternalHDRPercentileRanks.NAME, InternalHDRPercentileRanks::new));
    registerAggregation(new AggregationSpec(CardinalityAggregationBuilder.NAME, CardinalityAggregationBuilder::new, CardinalityAggregationBuilder::parse).addResultReader(InternalCardinality::new));
    registerAggregation(new AggregationSpec(GlobalAggregationBuilder.NAME, GlobalAggregationBuilder::new, GlobalAggregationBuilder::parse).addResultReader(InternalGlobal::new));
    registerAggregation(new AggregationSpec(MissingAggregationBuilder.NAME, MissingAggregationBuilder::new, MissingAggregationBuilder::parse).addResultReader(InternalMissing::new));
    registerAggregation(new AggregationSpec(FilterAggregationBuilder.NAME, FilterAggregationBuilder::new, FilterAggregationBuilder::parse).addResultReader(InternalFilter::new));
    registerAggregation(new AggregationSpec(FiltersAggregationBuilder.NAME, FiltersAggregationBuilder::new, FiltersAggregationBuilder::parse).addResultReader(InternalFilters::new));
    registerAggregation(new AggregationSpec(AdjacencyMatrixAggregationBuilder.NAME, AdjacencyMatrixAggregationBuilder::new, AdjacencyMatrixAggregationBuilder.getParser()).addResultReader(InternalAdjacencyMatrix::new));
    registerAggregation(new AggregationSpec(SamplerAggregationBuilder.NAME, SamplerAggregationBuilder::new, SamplerAggregationBuilder::parse)
            .addResultReader(InternalSampler.NAME, InternalSampler::new)
            .addResultReader(UnmappedSampler.NAME, UnmappedSampler::new));
    registerAggregation(new AggregationSpec(DiversifiedAggregationBuilder.NAME, DiversifiedAggregationBuilder::new, DiversifiedAggregationBuilder::parse));
    registerAggregation(new AggregationSpec(TermsAggregationBuilder.NAME, TermsAggregationBuilder::new, TermsAggregationBuilder::parse)
            .addResultReader(StringTerms.NAME, StringTerms::new)
            .addResultReader(UnmappedTerms.NAME, UnmappedTerms::new)
            .addResultReader(LongTerms.NAME, LongTerms::new)
            .addResultReader(DoubleTerms.NAME, DoubleTerms::new));
    registerAggregation(new AggregationSpec(SignificantTermsAggregationBuilder.NAME, SignificantTermsAggregationBuilder::new, SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry))
            .addResultReader(SignificantStringTerms.NAME, SignificantStringTerms::new)
            .addResultReader(SignificantLongTerms.NAME, SignificantLongTerms::new)
            .addResultReader(UnmappedSignificantTerms.NAME, UnmappedSignificantTerms::new));
    registerAggregation(new AggregationSpec(RangeAggregationBuilder.NAME, RangeAggregationBuilder::new, RangeAggregationBuilder::parse).addResultReader(InternalRange::new));
    registerAggregation(new AggregationSpec(DateRangeAggregationBuilder.NAME, DateRangeAggregationBuilder::new, DateRangeAggregationBuilder::parse).addResultReader(InternalDateRange::new));
    registerAggregation(new AggregationSpec(IpRangeAggregationBuilder.NAME, IpRangeAggregationBuilder::new, IpRangeAggregationBuilder::parse).addResultReader(InternalBinaryRange::new));
    registerAggregation(new AggregationSpec(HistogramAggregationBuilder.NAME, HistogramAggregationBuilder::new, HistogramAggregationBuilder::parse).addResultReader(InternalHistogram::new));
    registerAggregation(new AggregationSpec(DateHistogramAggregationBuilder.NAME, DateHistogramAggregationBuilder::new, DateHistogramAggregationBuilder::parse).addResultReader(InternalDateHistogram::new));
    registerAggregation(new AggregationSpec(GeoDistanceAggregationBuilder.NAME, GeoDistanceAggregationBuilder::new, GeoDistanceAggregationBuilder::parse).addResultReader(InternalGeoDistance::new));
    registerAggregation(new AggregationSpec(GeoGridAggregationBuilder.NAME, GeoGridAggregationBuilder::new, GeoGridAggregationBuilder::parse).addResultReader(InternalGeoHashGrid::new));
    registerAggregation(new AggregationSpec(NestedAggregationBuilder.NAME, NestedAggregationBuilder::new, NestedAggregationBuilder::parse).addResultReader(InternalNested::new));
    registerAggregation(new AggregationSpec(ReverseNestedAggregationBuilder.NAME, ReverseNestedAggregationBuilder::new, ReverseNestedAggregationBuilder::parse).addResultReader(InternalReverseNested::new));
    registerAggregation(new AggregationSpec(TopHitsAggregationBuilder.NAME, TopHitsAggregationBuilder::new, TopHitsAggregationBuilder::parse).addResultReader(InternalTopHits::new));
    registerAggregation(new AggregationSpec(GeoBoundsAggregationBuilder.NAME, GeoBoundsAggregationBuilder::new, GeoBoundsAggregationBuilder::parse).addResultReader(InternalGeoBounds::new));
    registerAggregation(new AggregationSpec(GeoCentroidAggregationBuilder.NAME, GeoCentroidAggregationBuilder::new, GeoCentroidAggregationBuilder::parse).addResultReader(InternalGeoCentroid::new));
    registerAggregation(new AggregationSpec(ScriptedMetricAggregationBuilder.NAME, ScriptedMetricAggregationBuilder::new, ScriptedMetricAggregationBuilder::parse).addResultReader(InternalScriptedMetric::new));
    registerAggregation(new AggregationSpec(ChildrenAggregationBuilder.NAME, ChildrenAggregationBuilder::new, ChildrenAggregationBuilder::parse).addResultReader(InternalChildren::new));
    registerFromPlugin(plugins, SearchPlugin::getAggregations, this::registerAggregation);
}
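The final registerFromPlugin call is the hook that lets plugins extend this table: SearchPlugin.getAggregations() returns the same kind of AggregationSpec entries that the core code registers above. A minimal sketch of a plugin contributing one aggregation follows; MyAggregationBuilder, InternalMyAggregation, and the "my_agg" name are hypothetical placeholders, and only the registration pattern is taken from the method above (the shaded package prefix used elsewhere on this page is omitted for brevity).

// Sketch of a plugin-side registration, mirroring the AggregationSpec pattern above.
// MyAggregationBuilder, InternalMyAggregation and "my_agg" are hypothetical.
import static java.util.Collections.singletonList;

import java.util.List;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;

public class MyAggregationPlugin extends Plugin implements SearchPlugin {

    @Override
    public List<AggregationSpec> getAggregations() {
        // Same shape as the core registrations: name, stream reader, parser, result reader.
        return singletonList(new AggregationSpec("my_agg", MyAggregationBuilder::new, MyAggregationBuilder::parse)
                .addResultReader(InternalMyAggregation::new));
    }
}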
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder in project vertexium by visallo.
From the class ElasticsearchSearchQueryBase, method getElasticsearchHistogramAggregations:
protected List<AggregationBuilder> getElasticsearchHistogramAggregations(HistogramAggregation agg) {
    List<AggregationBuilder> aggs = new ArrayList<>();
    PropertyDefinition propertyDefinition = getPropertyDefinition(agg.getFieldName());
    if (propertyDefinition == null) {
        throw new VertexiumException("Could not find mapping for property: " + agg.getFieldName());
    }
    Class propertyDataType = propertyDefinition.getDataType();
    for (String propertyName : getPropertyNames(agg.getFieldName())) {
        String visibilityHash = getSearchIndex().getPropertyVisibilityHashFromPropertyName(propertyName);
        String aggName = createAggregationName(agg.getAggregationName(), visibilityHash);
        if (propertyDataType == Date.class) {
            DateHistogramAggregationBuilder dateAgg = AggregationBuilders.dateHistogram(aggName);
            dateAgg.field(propertyName);
            String interval = agg.getInterval();
            if (Pattern.matches("^[0-9\\.]+$", interval)) {
                interval += "ms";
            }
            dateAgg.dateHistogramInterval(new DateHistogramInterval(interval));
            dateAgg.minDocCount(1L);
            if (agg.getMinDocumentCount() != null) {
                dateAgg.minDocCount(agg.getMinDocumentCount());
            }
            if (agg.getExtendedBounds() != null) {
                HistogramAggregation.ExtendedBounds<?> bounds = agg.getExtendedBounds();
                if (bounds.getMinMaxType().isAssignableFrom(Long.class)) {
                    dateAgg.extendedBounds(new ExtendedBounds((Long) bounds.getMin(), (Long) bounds.getMax()));
                } else if (bounds.getMinMaxType().isAssignableFrom(Date.class)) {
                    dateAgg.extendedBounds(new ExtendedBounds(new DateTime(bounds.getMin()).toString(), new DateTime(bounds.getMax()).toString()));
                } else if (bounds.getMinMaxType().isAssignableFrom(String.class)) {
                    dateAgg.extendedBounds(new ExtendedBounds((String) bounds.getMin(), (String) bounds.getMax()));
                } else {
                    throw new VertexiumException("Unhandled extended bounds type. Expected Long, String, or Date. Found: " + bounds.getMinMaxType().getName());
                }
            }
            for (AggregationBuilder subAgg : getElasticsearchAggregations(agg.getNestedAggregations())) {
                dateAgg.subAggregation(subAgg);
            }
            aggs.add(dateAgg);
        } else {
            HistogramAggregationBuilder histogramAgg = AggregationBuilders.histogram(aggName);
            histogramAgg.field(propertyName);
            histogramAgg.interval(Long.parseLong(agg.getInterval()));
            histogramAgg.minDocCount(1L);
            if (agg.getMinDocumentCount() != null) {
                histogramAgg.minDocCount(agg.getMinDocumentCount());
            }
            if (agg.getExtendedBounds() != null) {
                HistogramAggregation.ExtendedBounds<?> bounds = agg.getExtendedBounds();
                if (bounds.getMinMaxType().isAssignableFrom(Long.class)) {
                    histogramAgg.extendedBounds((Long) bounds.getMin(), (Long) bounds.getMax());
                } else {
                    throw new VertexiumException("Unhandled extended bounds type. Expected Long. Found: " + bounds.getMinMaxType().getName());
                }
            }
            for (AggregationBuilder subAgg : getElasticsearchAggregations(agg.getNestedAggregations())) {
                histogramAgg.subAggregation(subAgg);
            }
            aggs.add(histogramAgg);
        }
    }
    return aggs;
}
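The builders this method returns are plain Elasticsearch AggregationBuilder instances, so the caller simply attaches them to the search request it is assembling. A minimal sketch of that step, written as if from a subclass (the method is protected): the transport Client in client, the "vertexium" index name, and the HistogramAggregation agg are assumptions, not part of the method above.

// Sketch only: client, the "vertexium" index name, and agg are assumed to exist in the caller.
SearchRequestBuilder search = client.prepareSearch("vertexium").setSize(0);
for (AggregationBuilder builder : getElasticsearchHistogramAggregations(agg)) {
    search.addAggregation(builder);
}
SearchResponse response = search.get();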
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder in project elasticsearch by elastic.
From the class DateHistogramTests, method createTestAggregatorBuilder:
@Override
protected DateHistogramAggregationBuilder createTestAggregatorBuilder() {
    DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder("foo");
    factory.field(INT_FIELD_NAME);
    if (randomBoolean()) {
        factory.interval(randomIntBetween(1, 100000));
    } else {
        if (randomBoolean()) {
            factory.dateHistogramInterval(randomFrom(DateHistogramInterval.YEAR, DateHistogramInterval.QUARTER,
                    DateHistogramInterval.MONTH, DateHistogramInterval.WEEK, DateHistogramInterval.DAY,
                    DateHistogramInterval.HOUR, DateHistogramInterval.MINUTE, DateHistogramInterval.SECOND));
        } else {
            int branch = randomInt(4);
            switch (branch) {
                case 0:
                    factory.dateHistogramInterval(DateHistogramInterval.seconds(randomIntBetween(1, 1000)));
                    break;
                case 1:
                    factory.dateHistogramInterval(DateHistogramInterval.minutes(randomIntBetween(1, 1000)));
                    break;
                case 2:
                    factory.dateHistogramInterval(DateHistogramInterval.hours(randomIntBetween(1, 1000)));
                    break;
                case 3:
                    factory.dateHistogramInterval(DateHistogramInterval.days(randomIntBetween(1, 1000)));
                    break;
                case 4:
                    factory.dateHistogramInterval(DateHistogramInterval.weeks(randomIntBetween(1, 1000)));
                    break;
                default:
                    throw new IllegalStateException("invalid branch: " + branch);
            }
        }
    }
    if (randomBoolean()) {
        factory.extendedBounds(ExtendedBoundsTests.randomExtendedBounds());
    }
    if (randomBoolean()) {
        factory.format("###.##");
    }
    if (randomBoolean()) {
        factory.keyed(randomBoolean());
    }
    if (randomBoolean()) {
        factory.minDocCount(randomIntBetween(0, 100));
    }
    if (randomBoolean()) {
        factory.missing(randomIntBetween(0, 10));
    }
    if (randomBoolean()) {
        factory.offset(randomIntBetween(0, 100000));
    }
    if (randomBoolean()) {
        int branch = randomInt(5);
        switch (branch) {
            case 0:
                factory.order(Order.COUNT_ASC);
                break;
            case 1:
                factory.order(Order.COUNT_DESC);
                break;
            case 2:
                factory.order(Order.KEY_ASC);
                break;
            case 3:
                factory.order(Order.KEY_DESC);
                break;
            case 4:
                factory.order(Order.aggregation("foo", true));
                break;
            case 5:
                factory.order(Order.aggregation("foo", false));
                break;
        }
    }
    return factory;
}
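Stripped of the randomization, this test is essentially a catalogue of the builder's setters. A fixed-value sketch of the same API follows; the aggregation name, field, interval, bounds, format, and offset values are invented for illustration only.

// Fixed-value sketch of the setters exercised randomly above; names and values are illustrative only.
DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("per_hour");
histo.field("timestamp");
histo.dateHistogramInterval(DateHistogramInterval.HOUR);   // calendar interval, vs. interval(long) for fixed milliseconds
histo.extendedBounds(new ExtendedBounds("2017-01-01T00:00:00Z", "2017-01-02T00:00:00Z"));
histo.format("yyyy-MM-dd'T'HH:mm:ssZ");
histo.keyed(false);
histo.minDocCount(0);
histo.offset("+30m");
histo.order(Order.KEY_ASC);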
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder in project graylog2-server by Graylog2.
From the class ESTimeHandler, method doCreateAggregation:
@Nonnull
@Override
public Optional<AggregationBuilder> doCreateAggregation(String name, Pivot pivot, Time timeSpec, ESPivot searchTypeHandler, ESGeneratedQueryContext esGeneratedQueryContext, Query query) {
    final DateHistogramInterval dateHistogramInterval = new DateHistogramInterval(timeSpec.interval().toDateInterval(query.effectiveTimeRange(pivot)).toString());
    final Optional<BucketOrder> ordering = orderForPivot(pivot, timeSpec, esGeneratedQueryContext);
    final DateHistogramAggregationBuilder builder = AggregationBuilders.dateHistogram(name)
            .dateHistogramInterval(dateHistogramInterval)
            .field(timeSpec.field())
            .order(ordering.orElse(BucketOrder.key(true)))
            .format("date_time");
    record(esGeneratedQueryContext, pivot, timeSpec, name, DateHistogramAggregation.class);
    return Optional.of(builder);
}
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder in project graylog2-server by Graylog2.
From the class ESTimeHandler, a variant of method doCreateAggregation that applies the interval via setInterval and records ParsedDateHistogram as the result type:
@Nonnull
@Override
public Optional<AggregationBuilder> doCreateAggregation(String name, Pivot pivot, Time timeSpec, ESPivot searchTypeHandler, ESGeneratedQueryContext esGeneratedQueryContext, Query query) {
    final DateHistogramInterval dateHistogramInterval = new DateHistogramInterval(timeSpec.interval().toDateInterval(query.effectiveTimeRange(pivot)).toString());
    final Optional<BucketOrder> ordering = orderForPivot(pivot, timeSpec, esGeneratedQueryContext);
    final DateHistogramAggregationBuilder builder = AggregationBuilders.dateHistogram(name)
            .field(timeSpec.field())
            .order(ordering.orElse(BucketOrder.key(true)))
            .format("date_time");
    setInterval(builder, dateHistogramInterval);
    record(esGeneratedQueryContext, pivot, timeSpec, name, ParsedDateHistogram.class);
    return Optional.of(builder);
}
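This second variant records ParsedDateHistogram as the expected result type, which is the class the aggregation is parsed into on the response side. A minimal sketch of reading it back, assuming a SearchResponse named response obtained by executing the query built above and the same name used when the aggregation was created:

// Sketch only: response and name are assumed from the surrounding query execution.
ParsedDateHistogram histogram = response.getAggregations().get(name);
for (Histogram.Bucket bucket : histogram.getBuckets()) {
    // With format("date_time") the bucket key renders as an ISO-8601 timestamp.
    String timestamp = bucket.getKeyAsString();
    long count = bucket.getDocCount();
    // ... fold (timestamp, count) into the pivot rows ...
}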