Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval in project vertexium by visallo.
In class ElasticsearchSearchQueryBase, method getElasticsearchHistogramAggregations:
protected List<AggregationBuilder> getElasticsearchHistogramAggregations(HistogramAggregation agg) {
    List<AggregationBuilder> aggs = new ArrayList<>();
    PropertyDefinition propertyDefinition = getPropertyDefinition(agg.getFieldName());
    if (propertyDefinition == null) {
        throw new VertexiumException("Could not find mapping for property: " + agg.getFieldName());
    }
    Class propertyDataType = propertyDefinition.getDataType();
    for (String propertyName : getPropertyNames(agg.getFieldName())) {
        String visibilityHash = getSearchIndex().getPropertyVisibilityHashFromPropertyName(propertyName);
        String aggName = createAggregationName(agg.getAggregationName(), visibilityHash);
        if (propertyDataType == Date.class) {
            // Date-typed properties become a date_histogram aggregation.
            DateHistogramAggregationBuilder dateAgg = AggregationBuilders.dateHistogram(aggName);
            dateAgg.field(propertyName);
            String interval = agg.getInterval();
            if (Pattern.matches("^[0-9\\.]+$", interval)) {
                // A purely numeric interval is interpreted as milliseconds.
                interval += "ms";
            }
            dateAgg.dateHistogramInterval(new DateHistogramInterval(interval));
            dateAgg.minDocCount(1L);
            if (agg.getMinDocumentCount() != null) {
                dateAgg.minDocCount(agg.getMinDocumentCount());
            }
            if (agg.getExtendedBounds() != null) {
                HistogramAggregation.ExtendedBounds<?> bounds = agg.getExtendedBounds();
                if (bounds.getMinMaxType().isAssignableFrom(Long.class)) {
                    dateAgg.extendedBounds(new ExtendedBounds((Long) bounds.getMin(), (Long) bounds.getMax()));
                } else if (bounds.getMinMaxType().isAssignableFrom(Date.class)) {
                    dateAgg.extendedBounds(new ExtendedBounds(new DateTime(bounds.getMin()).toString(), new DateTime(bounds.getMax()).toString()));
                } else if (bounds.getMinMaxType().isAssignableFrom(String.class)) {
                    dateAgg.extendedBounds(new ExtendedBounds((String) bounds.getMin(), (String) bounds.getMax()));
                } else {
                    throw new VertexiumException("Unhandled extended bounds type. Expected Long, String, or Date. Found: " + bounds.getMinMaxType().getName());
                }
            }
            for (AggregationBuilder subAgg : getElasticsearchAggregations(agg.getNestedAggregations())) {
                dateAgg.subAggregation(subAgg);
            }
            aggs.add(dateAgg);
        } else {
            // All other property types become a numeric histogram aggregation.
            HistogramAggregationBuilder histogramAgg = AggregationBuilders.histogram(aggName);
            histogramAgg.field(propertyName);
            histogramAgg.interval(Long.parseLong(agg.getInterval()));
            histogramAgg.minDocCount(1L);
            if (agg.getMinDocumentCount() != null) {
                histogramAgg.minDocCount(agg.getMinDocumentCount());
            }
            if (agg.getExtendedBounds() != null) {
                HistogramAggregation.ExtendedBounds<?> bounds = agg.getExtendedBounds();
                if (bounds.getMinMaxType().isAssignableFrom(Long.class)) {
                    histogramAgg.extendedBounds((Long) bounds.getMin(), (Long) bounds.getMax());
                } else {
                    throw new VertexiumException("Unhandled extended bounds type. Expected Long. Found: " + bounds.getMinMaxType().getName());
                }
            }
            for (AggregationBuilder subAgg : getElasticsearchAggregations(agg.getNestedAggregations())) {
                histogramAgg.subAggregation(subAgg);
            }
            aggs.add(histogramAgg);
        }
    }
    return aggs;
}
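The date branch above normalizes a purely numeric interval string to milliseconds before wrapping it in a DateHistogramInterval. Below is a minimal, self-contained sketch of just that normalization, using the unshaded Elasticsearch 6 client packages (the shaded path in the heading wraps the same classes) and hypothetical aggregation and field names ("histogram_agg", "publishedDate"):

import java.util.regex.Pattern;

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class IntervalNormalizationSketch {
    public static void main(String[] args) {
        String interval = "5000"; // hypothetical caller-supplied interval
        // A bare number is interpreted as milliseconds; unit-suffixed values ("1d", "1h") pass through unchanged.
        if (Pattern.matches("^[0-9\\.]+$", interval)) {
            interval += "ms"; // "5000" -> "5000ms"
        }
        DateHistogramAggregationBuilder dateAgg = AggregationBuilders
                .dateHistogram("histogram_agg")   // hypothetical aggregation name
                .field("publishedDate")           // hypothetical date field
                .dateHistogramInterval(new DateHistogramInterval(interval))
                .minDocCount(1L);
        System.out.println(dateAgg.getName() + " uses interval " + interval);
    }
}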
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval in project sonarqube by SonarSource.
In class IssueIndex, method getCreatedAtFacet:
private Optional<AggregationBuilder> getCreatedAtFacet(IssueQuery query, Map<String, QueryBuilder> filters, QueryBuilder esQuery) {
    long startTime;
    Date createdAfter = query.createdAfter();
    if (createdAfter == null) {
        Optional<Long> minDate = getMinCreatedAt(filters, esQuery);
        if (!minDate.isPresent()) {
            return Optional.empty();
        }
        startTime = minDate.get();
    } else {
        startTime = createdAfter.getTime();
    }
    Date createdBefore = query.createdBefore();
    long endTime = createdBefore == null ? system.now() : createdBefore.getTime();
    Duration timeSpan = new Duration(startTime, endTime);
    DateHistogramInterval bucketSize = DateHistogramInterval.YEAR;
    if (timeSpan.isShorterThan(TWENTY_DAYS)) {
        bucketSize = DateHistogramInterval.DAY;
    } else if (timeSpan.isShorterThan(TWENTY_WEEKS)) {
        bucketSize = DateHistogramInterval.WEEK;
    } else if (timeSpan.isShorterThan(TWENTY_MONTHS)) {
        bucketSize = DateHistogramInterval.MONTH;
    }
    // from GMT to server TZ
    int offsetInSeconds = -system.getDefaultTimeZone().getRawOffset() / 1_000;
    AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(PARAM_CREATED_AT)
        .field(IssueIndexDefinition.FIELD_ISSUE_FUNC_CREATED_AT)
        .interval(bucketSize)
        .minDocCount(0L)
        .format(DateUtils.DATETIME_FORMAT)
        .timeZone(TimeZone.getTimeZone("GMT").getID())
        .offset(offsetInSeconds + "s")
        .extendedBounds(startTime, endTime - 1_000L);
    dateHistogram = addEffortAggregationIfNeeded(query, dateHistogram);
    return Optional.of(dateHistogram);
}
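The bucket-size selection above picks a coarser calendar interval as the time span grows. A compact sketch of the same decision, assuming Joda-Time for Duration and illustrative values for the TWENTY_DAYS/TWENTY_WEEKS/TWENTY_MONTHS thresholds (the real constants are defined in IssueIndex):

import org.joda.time.Duration;

import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class BucketSizeSketch {
    // Illustrative thresholds; the real constants live in IssueIndex.
    private static final Duration TWENTY_DAYS = Duration.standardDays(20);
    private static final Duration TWENTY_WEEKS = Duration.standardDays(20 * 7);
    private static final Duration TWENTY_MONTHS = Duration.standardDays(20 * 30);

    static DateHistogramInterval bucketSizeFor(long startTime, long endTime) {
        Duration timeSpan = new Duration(startTime, endTime);
        if (timeSpan.isShorterThan(TWENTY_DAYS)) {
            return DateHistogramInterval.DAY;   // short ranges: daily buckets
        }
        if (timeSpan.isShorterThan(TWENTY_WEEKS)) {
            return DateHistogramInterval.WEEK;
        }
        if (timeSpan.isShorterThan(TWENTY_MONTHS)) {
            return DateHistogramInterval.MONTH;
        }
        return DateHistogramInterval.YEAR;      // everything longer: yearly buckets
    }
}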
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval in project sonarqube by SonarSource.
In class IssueIndex, method getCreatedAtFacet (second example):
private Optional<AggregationBuilder> getCreatedAtFacet(IssueQuery query, TopAggregationHelper aggregationHelper, AllFilters allFilters) {
    long startTime;
    boolean startInclusive;
    PeriodStart createdAfter = query.createdAfter();
    if (createdAfter == null) {
        OptionalLong minDate = getMinCreatedAt(allFilters);
        if (!minDate.isPresent()) {
            return Optional.empty();
        }
        startTime = minDate.getAsLong();
        startInclusive = true;
    } else {
        startTime = createdAfter.date().getTime();
        startInclusive = createdAfter.inclusive();
    }
    Date createdBefore = query.createdBefore();
    long endTime = createdBefore == null ? system.now() : createdBefore.getTime();
    Duration timeSpan = new Duration(startTime, endTime);
    DateHistogramInterval bucketSize = computeDateHistogramBucketSize(timeSpan);
    FilterAggregationBuilder topAggregation = aggregationHelper.buildTopAggregation(
        CREATED_AT.getName(), CREATED_AT.getTopAggregationDef(), NO_EXTRA_FILTER,
        t -> {
            AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(CREATED_AT.getName())
                .field(CREATED_AT.getFieldName())
                .dateHistogramInterval(bucketSize)
                .minDocCount(0L)
                .format(DateUtils.DATETIME_FORMAT)
                .timeZone(Optional.ofNullable(query.timeZone()).orElse(system.getDefaultTimeZone().toZoneId()))
                .extendedBounds(new LongBounds(startInclusive ? startTime : (startTime + 1), endTime - 1L));
            addEffortAggregationIfNeeded(query, dateHistogram);
            t.subAggregation(dateHistogram);
        });
    return Optional.of(topAggregation);
}
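This second variant pins the histogram to an explicit range with LongBounds: the lower bound is shifted forward one millisecond when the start is exclusive, and the upper bound ends one millisecond before endTime. A minimal sketch of that bounds construction, with hypothetical aggregation and field names and a hard-coded weekly interval in place of computeDateHistogramBucketSize(...):

import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds;

public class CreatedAtBoundsSketch {
    static AggregationBuilder createdAtHistogram(long startTime, boolean startInclusive, long endTime) {
        // Exclusive start: move the lower bound past startTime by 1 ms.
        // The upper bound always ends 1 ms before endTime.
        LongBounds bounds = new LongBounds(
                startInclusive ? startTime : (startTime + 1),
                endTime - 1L);
        return AggregationBuilders.dateHistogram("createdAt")   // hypothetical aggregation name
                .field("issueCreatedAt")                        // hypothetical field name
                .dateHistogramInterval(DateHistogramInterval.WEEK)
                .minDocCount(0L)
                .extendedBounds(bounds);
    }
}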
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval in project graylog2-server by Graylog2.
In class ESTimeHandler, method doCreateAggregation:
@Nonnull
@Override
public Optional<AggregationBuilder> doCreateAggregation(String name, Pivot pivot, Time timeSpec, ESPivot searchTypeHandler, ESGeneratedQueryContext esGeneratedQueryContext, Query query) {
    final DateHistogramInterval dateHistogramInterval = new DateHistogramInterval(timeSpec.interval().toDateInterval(query.effectiveTimeRange(pivot)).toString());
    final Optional<BucketOrder> ordering = orderForPivot(pivot, timeSpec, esGeneratedQueryContext);
    final DateHistogramAggregationBuilder builder = AggregationBuilders.dateHistogram(name)
        .dateHistogramInterval(dateHistogramInterval)
        .field(timeSpec.field())
        .order(ordering.orElse(BucketOrder.key(true)))
        .format("date_time");
    record(esGeneratedQueryContext, pivot, timeSpec, name, DateHistogramAggregation.class);
    return Optional.of(builder);
}
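The handler above derives the interval expression from the pivot's time spec and defaults the bucket ordering to ascending keys. A stripped-down sketch with a hard-coded interval string in place of timeSpec.interval().toDateInterval(...), again assuming the unshaded Elasticsearch client packages and hypothetical names:

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public class TimePivotSketch {
    static DateHistogramAggregationBuilder timeBuckets(String name, String field, String intervalExpression) {
        return AggregationBuilders.dateHistogram(name)
                .dateHistogramInterval(new DateHistogramInterval(intervalExpression)) // e.g. "1h"
                .field(field)
                .order(BucketOrder.key(true))  // ascending by bucket key (time)
                .format("date_time");
    }

    public static void main(String[] args) {
        // Hypothetical usage: hourly buckets over a "timestamp" field.
        DateHistogramAggregationBuilder builder = timeBuckets("pivot-time", "timestamp", "1h");
        System.out.println(builder.getName());
    }
}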
Use of org.graylog.shaded.elasticsearch6.org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval in project graylog2-server by Graylog2.
In class IndexToolsAdapterES7, method fieldHistogram:
@Override
public Map<DateTime, Map<String, Long>> fieldHistogram(String fieldName, Set<String> indices, Optional<Set<String>> includedStreams, long interval) {
    final BoolQueryBuilder queryBuilder = buildStreamIdFilter(includedStreams);
    final FilterAggregationBuilder the_filter = AggregationBuilders.filter(AGG_FILTER, queryBuilder)
        .subAggregation(AggregationBuilders.dateHistogram(AGG_DATE_HISTOGRAM)
            .field("timestamp")
            .subAggregation(AggregationBuilders.terms(AGG_MESSAGE_FIELD).field(fieldName))
            .fixedInterval(new DateHistogramInterval(interval + "ms"))
            .minDocCount(1L));
    final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
        .query(QueryBuilders.matchAllQuery())
        .aggregation(the_filter);
    final SearchRequest searchRequest = new SearchRequest()
        .source(searchSourceBuilder)
        .indices(indices.toArray(new String[0]));
    final SearchResponse searchResult = client.search(searchRequest, "Unable to retrieve field histogram.");
    final Filter filterAggregation = searchResult.getAggregations().get(AGG_FILTER);
    final ParsedDateHistogram dateHistogram = filterAggregation.getAggregations().get(AGG_DATE_HISTOGRAM);
    final List<ParsedDateHistogram.ParsedBucket> histogramBuckets = (List<ParsedDateHistogram.ParsedBucket>) dateHistogram.getBuckets();
    final Map<DateTime, Map<String, Long>> result = Maps.newHashMapWithExpectedSize(histogramBuckets.size());
    for (ParsedDateHistogram.ParsedBucket bucket : histogramBuckets) {
        final ZonedDateTime zonedDateTime = (ZonedDateTime) bucket.getKey();
        final DateTime date = new DateTime(zonedDateTime.toInstant().toEpochMilli()).toDateTime(DateTimeZone.UTC);
        final Terms sourceFieldAgg = bucket.getAggregations().get(AGG_MESSAGE_FIELD);
        final List<? extends Terms.Bucket> termBuckets = sourceFieldAgg.getBuckets();
        final HashMap<String, Long> termCounts = Maps.newHashMapWithExpectedSize(termBuckets.size());
        for (Terms.Bucket termBucket : termBuckets) {
            termCounts.put(termBucket.getKeyAsString(), termBucket.getDocCount());
        }
        result.put(date, termCounts);
    }
    return ImmutableMap.copyOf(result);
}
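The nesting above (a filter wrapping a date_histogram wrapping a terms aggregation) is easier to read when each builder is assigned separately. A minimal sketch of the same shape, using the unshaded ES 7 high-level client packages and hypothetical aggregation names; the match_all filter stands in for buildStreamIdFilter(...):

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;

public class FieldHistogramSketch {
    static FilterAggregationBuilder fieldHistogram(String fieldName, long intervalMs) {
        // Innermost: per-bucket counts of the values of fieldName.
        TermsAggregationBuilder perFieldValue = AggregationBuilders.terms("field_values").field(fieldName);
        // Middle: fixed-size time buckets over "timestamp"; the interval is given in milliseconds.
        DateHistogramAggregationBuilder perTimeBucket = AggregationBuilders.dateHistogram("histogram")
                .field("timestamp")
                .fixedInterval(new DateHistogramInterval(intervalMs + "ms"))
                .minDocCount(1L)
                .subAggregation(perFieldValue);
        // Outermost: a filter restricting which documents feed the histogram (match_all as a placeholder).
        return AggregationBuilders.filter("filtered", QueryBuilders.matchAllQuery())
                .subAggregation(perTimeBucket);
    }
}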