Use of org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram in project apiman (by apiman).
From class EsMetricsAccessor, method getUsage:
/**
 * @see io.apiman.manager.api.core.IMetricsAccessor#getUsage(java.lang.String, java.lang.String, java.lang.String, io.apiman.manager.api.beans.metrics.HistogramIntervalType, org.joda.time.DateTime, org.joda.time.DateTime)
 */
@SuppressWarnings("nls")
@Override
public UsageHistogramBean getUsage(String organizationId, String apiId, String version, HistogramIntervalType interval, DateTime from, DateTime to) {
    UsageHistogramBean rval = new UsageHistogramBean();
    Map<String, UsageDataPoint> index = MetricsAccessorHelper.generateHistogramSkeleton(rval, from, to, interval, UsageDataPoint.class);
    try {
        // Search template: filter by API coordinates and time range, bucket hits by requestStart.
        String query = "{"
                + " \"query\": {" + " \"bool\": {" + " \"filter\": [{"
                + " \"term\": {" + " \"apiOrgId\": \"{{apiOrgId}}\"" + " }" + " }, {"
                + " \"term\": {" + " \"apiId\": \"{{apiId}}\"" + " }" + " }, {"
                + " \"term\": {" + " \"apiVersion\": \"{{apiVersion}}\"" + " }" + " },"
                + " {" + " \"range\": {" + " \"requestStart\": {"
                + " \"gte\": \"{{from}}\"," + " \"lte\": \"{{to}}\""
                + " }" + " }" + " }" + " ]" + " }" + " },"
                + " \"size\": 0,"
                + " \"aggs\": {" + " \"histogram\": {" + " \"date_histogram\": {"
                + " \"field\": \"requestStart\"," + " \"calendar_interval\": \"{{interval}}\""
                + " }" + " }" + " }" + "}";
        Map<String, Object> params = new HashMap<>();
        params.put("from", MetricsAccessorHelper.formatDate(from));
        params.put("to", MetricsAccessorHelper.formatDate(to));
        params.put("apiOrgId", organizationId.replace('"', '_'));
        params.put("apiId", apiId.replace('"', '_'));
        params.put("apiVersion", version.replace('"', '_'));
        params.put("interval", interval.name());
        SearchResponse response = this.doSearchTemplateRequest(query, params);
        ParsedDateHistogram aggregation = (ParsedDateHistogram) response.getAggregations().asMap().get("histogram");
        if (aggregation != null) {
            // Copy each bucket's doc count into the pre-generated histogram skeleton.
            List<ParsedDateHistogram.ParsedBucket> buckets = (List<ParsedDateHistogram.ParsedBucket>) aggregation.getBuckets();
            for (ParsedDateHistogram.ParsedBucket entry : buckets) {
                String keyAsString = entry.getKeyAsString();
                if (index.containsKey(keyAsString)) {
                    index.get(keyAsString).setCount(entry.getDocCount());
                }
            }
        }
    } catch (IOException e) {
        LOGGER.error(e);
    }
    return rval;
}
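The helper doSearchTemplateRequest is not shown in this snippet. A minimal sketch of how such a helper could be built on the high-level REST client's search-template API follows; the RestHighLevelClient field "client" and the index name "apiman_metrics" are assumptions, not apiman's actual code.

// Hedged sketch: render the mustache template server-side and run the search.
// Uses org.elasticsearch.script.mustache.SearchTemplateRequest/SearchTemplateResponse
// and org.elasticsearch.script.ScriptType from the high-level REST client.
private SearchResponse doSearchTemplateRequest(String query, Map<String, Object> params) throws IOException {
    SearchTemplateRequest request = new SearchTemplateRequest();
    request.setRequest(new SearchRequest("apiman_metrics")); // hypothetical index name
    request.setScriptType(ScriptType.INLINE);
    request.setScript(query);                                // the mustache template above
    request.setScriptParams(params);                         // fills {{apiOrgId}}, {{from}}, ...
    SearchTemplateResponse templateResponse = client.searchTemplate(request, RequestOptions.DEFAULT);
    return templateResponse.getResponse();
}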
Use of org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram in project vind (by RBMHTechnology).
From class ResultUtils, method getRangeFacetResults:
private static Pair<String, RangeFacetResult<?>> getRangeFacetResults(Aggregation aggregation, Facet rangeFacet) {
    switch (rangeFacet.getType()) {
        case "NumericRangeFacet":
            final Facet.NumericRangeFacet numericRangeFacet = (Facet.NumericRangeFacet) rangeFacet;
            final List<FacetValue> numericValues = new ArrayList<>();
            Optional.ofNullable(aggregation).ifPresent(agg ->
                    ((ParsedHistogram) ((ParsedRange) agg).getBuckets().get(0)
                            .getAggregations().getAsMap().get(rangeFacet.getFacetName()))
                            .getBuckets().stream()
                            .map(rangeBucket -> new FacetValue(
                                    DocumentUtil.castForDescriptor(rangeBucket.getKey(),
                                            numericRangeFacet.getFieldDescriptor(),
                                            FieldDescriptor.UseCase.Facet),
                                    rangeBucket.getDocCount()))
                            .forEach(numericValues::add));
            return Pair.of(rangeFacet.getFacetName(),
                    new RangeFacetResult(numericValues, numericRangeFacet.getStart(),
                            numericRangeFacet.getEnd(), numericRangeFacet.getGap().longValue()));
        default:
            final Facet.DateRangeFacet dateRangeFacet = (Facet.DateRangeFacet) rangeFacet;
            final List<FacetValue> dateValues = new ArrayList<>();
            Optional.ofNullable(aggregation).ifPresent(agg ->
                    ((ParsedDateHistogram) ((ParsedDateRange) agg).getBuckets().get(0)
                            .getAggregations().getAsMap().get(dateRangeFacet.getFacetName()))
                            .getBuckets().stream()
                            .map(rangeBucket -> new FacetValue(
                                    DocumentUtil.castForDescriptor(rangeBucket.getKey(),
                                            dateRangeFacet.getFieldDescriptor(),
                                            FieldDescriptor.UseCase.Facet),
                                    rangeBucket.getDocCount()))
                            .forEach(dateValues::add));
            return Pair.of(rangeFacet.getFacetName(),
                    new RangeFacetResult(dateValues, dateRangeFacet.getStart(),
                            dateRangeFacet.getEnd(), dateRangeFacet.getGap().longValue()));
    }
}
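For orientation: this parser expects a range (or date_range) aggregation whose first bucket carries a histogram (or date_histogram) sub-aggregation named after the facet. A hedged sketch of a request that produces that shape; the facet names "price" and "created", the field names, and the range bounds are made up for illustration:

// Hypothetical numeric facet: one range bucket wrapping a histogram named like the facet.
RangeAggregationBuilder numericFacet = AggregationBuilders.range("price")
        .field("price")
        .addRange(0, 1000)                                // the facet's overall range
        .subAggregation(AggregationBuilders.histogram("price")
                .field("price")
                .interval(100));                          // the facet "gap"

// Hypothetical date facet: one date_range bucket wrapping a date_histogram.
DateRangeAggregationBuilder dateFacet = AggregationBuilders.dateRange("created")
        .field("created")
        .addRange("2020-01-01", "2021-01-01")
        .subAggregation(AggregationBuilders.dateHistogram("created")
                .field("created")
                .calendarInterval(DateHistogramInterval.MONTH));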
Use of org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram in project apiman (by apiman).
From class EsMetricsAccessor, method getResponseStats:
/**
 * @see io.apiman.manager.api.core.IMetricsAccessor#getResponseStats(java.lang.String, java.lang.String, java.lang.String, io.apiman.manager.api.beans.metrics.HistogramIntervalType, org.joda.time.DateTime, org.joda.time.DateTime)
 */
@SuppressWarnings("nls")
@Override
public ResponseStatsHistogramBean getResponseStats(String organizationId, String apiId, String version, HistogramIntervalType interval, DateTime from, DateTime to) {
    ResponseStatsHistogramBean rval = new ResponseStatsHistogramBean();
    Map<String, ResponseStatsDataPoint> index = MetricsAccessorHelper.generateHistogramSkeleton(rval, from, to, interval, ResponseStatsDataPoint.class);
    try {
        // Same API/time-range filter as getUsage, but each date_histogram bucket also carries
        // two filter sub-aggregations counting failed and errored requests.
        String query = "{"
                + " \"query\": {" + " \"bool\": {" + " \"filter\": [{"
                + " \"term\": {" + " \"apiOrgId\": \"{{apiOrgId}}\"" + " }" + " }, {"
                + " \"term\": {" + " \"apiId\": \"{{apiId}}\"" + " }" + " }, {"
                + " \"term\": {" + " \"apiVersion\": \"{{apiVersion}}\"" + " }" + " }, {"
                + " \"range\": {" + " \"requestStart\": {"
                + " \"gte\": \"{{from}}\"," + " \"lte\": \"{{to}}\""
                + " }" + " }" + " }]" + " }" + " },"
                + " \"size\": 0,"
                + " \"aggs\": {" + " \"histogram\": {" + " \"date_histogram\": {"
                + " \"field\": \"requestStart\"," + " \"calendar_interval\": \"{{interval}}\""
                + " },"
                + " \"aggs\": {"
                + " \"total_failures\": {" + " \"filter\": {" + " \"term\": {" + " \"failure\": true" + " }" + " }" + " },"
                + " \"total_errors\": {" + " \"filter\": {" + " \"term\": {" + " \"error\": true" + " }" + " }" + " }"
                + " }" + " }" + " }" + "}";
        Map<String, Object> params = new HashMap<>();
        params.put("from", MetricsAccessorHelper.formatDate(from));
        params.put("to", MetricsAccessorHelper.formatDate(to));
        params.put("apiOrgId", organizationId.replace('"', '_'));
        params.put("apiId", apiId.replace('"', '_'));
        params.put("apiVersion", version.replace('"', '_'));
        params.put("interval", interval.name());
        SearchResponse searchResponse = this.doSearchTemplateRequest(query, params);
        ParsedDateHistogram aggregation = (ParsedDateHistogram) searchResponse.getAggregations().asMap().get("histogram");
        if (aggregation != null) {
            List<ParsedDateHistogram.ParsedBucket> buckets = (List<ParsedDateHistogram.ParsedBucket>) aggregation.getBuckets();
            for (ParsedDateHistogram.ParsedBucket entry : buckets) {
                String keyAsString = entry.getKeyAsString();
                if (index.containsKey(keyAsString)) {
                    // Per-bucket totals: all requests, plus the failure/error filter counts.
                    ParsedFilter totalFailuresAgg = entry.getAggregations().get("total_failures");
                    ParsedFilter totalErrorsAgg = entry.getAggregations().get("total_errors");
                    long failures = totalFailuresAgg.getDocCount();
                    long errors = totalErrorsAgg.getDocCount();
                    ResponseStatsDataPoint point = index.get(keyAsString);
                    point.setTotal(entry.getDocCount());
                    point.setFailures(failures);
                    point.setErrors(errors);
                }
            }
        }
    } catch (IOException e) {
        LOGGER.error(e);
    }
    return rval;
}
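The aggregation tree defined by the mustache template can be easier to read when expressed with AggregationBuilders. This is only an illustrative equivalent, with the interval hard-coded instead of templated:

// Equivalent structure to the template: a date_histogram on requestStart with two
// filter sub-aggregations counting failures and errors per time bucket.
DateHistogramAggregationBuilder histogram = AggregationBuilders.dateHistogram("histogram")
        .field("requestStart")
        .calendarInterval(DateHistogramInterval.DAY)      // stands in for "{{interval}}"
        .subAggregation(AggregationBuilders.filter("total_failures",
                QueryBuilders.termQuery("failure", true)))
        .subAggregation(AggregationBuilders.filter("total_errors",
                QueryBuilders.termQuery("error", true)));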
Use of org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram in project resource-catalogue (by madgeek-arc).
From class StatisticsManager, method ratings:
@Override
public Map<String, Float> ratings(String id, Interval by) {
    String dateFormat;
    String aggregationName;
    DateHistogramInterval dateHistogramInterval;
    switch (StatisticsService.Interval.fromString(by.getKey())) {
        case DAY:
            dateFormat = "yyyy-MM-dd";
            aggregationName = "day";
            dateHistogramInterval = DateHistogramInterval.DAY;
            break;
        case WEEK:
            dateFormat = "yyyy-MM-dd";
            aggregationName = "week";
            dateHistogramInterval = DateHistogramInterval.WEEK;
            break;
        case YEAR:
            dateFormat = "yyyy";
            aggregationName = "year";
            dateHistogramInterval = DateHistogramInterval.YEAR;
            break;
        default:
            dateFormat = "yyyy-MM";
            aggregationName = "month";
            dateHistogramInterval = DateHistogramInterval.MONTH;
    }
    // Per-interval buckets carrying a running sum of rating values and a running count of ratings,
    // so each bucket can report the cumulative average rating.
    DateHistogramAggregationBuilder dateHistogramAggregationBuilder = AggregationBuilders.dateHistogram(aggregationName)
            .field("instant")
            .calendarInterval(dateHistogramInterval)
            .format(dateFormat)
            .subAggregation(AggregationBuilders.sum("rating").field("value"))
            .subAggregation(AggregationBuilders.count("rating_count").field("value"))
            .subAggregation(PipelineAggregatorBuilders.cumulativeSum("cum_sum", "rating"))
            .subAggregation(PipelineAggregatorBuilders.cumulativeSum("ratings_num", "rating_count"));
    SearchRequest search = new SearchRequest("event");
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
    search.searchType(SearchType.DFS_QUERY_THEN_FETCH);
    searchSourceBuilder.query(getEventQueryBuilder(id, Event.UserActionType.RATING.getKey()));
    searchSourceBuilder.aggregation(dateHistogramAggregationBuilder);
    search.source(searchSourceBuilder);
    SearchResponse response = null;
    try {
        response = client.search(search, RequestOptions.DEFAULT);
    } catch (IOException e) {
        throw new ServiceException(e.getMessage());
    }
    List<? extends Histogram.Bucket> bucketsDay = ((ParsedDateHistogram) response.getAggregations().get(aggregationName)).getBuckets();
    // Per bucket: cumulative sum of ratings / cumulative number of ratings = average rating so far.
    Map<String, Float> bucketMap = bucketsDay.stream()
            .collect(Collectors.toMap(MultiBucketsAggregation.Bucket::getKeyAsString,
                    e -> Float.parseFloat(((SimpleValue) e.getAggregations().get("cum_sum")).getValueAsString())
                            / Float.parseFloat(((SimpleValue) e.getAggregations().get("ratings_num")).getValueAsString())));
    return new TreeMap<>(bucketMap);
}
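The helper getEventQueryBuilder is not shown here. A plausible sketch, assuming the "event" index stores the resource id and user-action type as keyword fields named "resource" and "type" (both field names are assumptions):

// Hedged sketch: restrict the histogram to RATING events for one resource.
// Field names "resource" and "type" are assumptions about the event mapping.
private QueryBuilder getEventQueryBuilder(String resourceId, String eventType) {
    return QueryBuilders.boolQuery()
            .filter(QueryBuilders.termQuery("resource", resourceId))
            .filter(QueryBuilders.termQuery("type", eventType));
}

Whatever its exact form, the returned query only selects the rating events; the averaging itself happens in the cum_sum / ratings_num division above.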
Use of org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram in project graylog2-server (by Graylog2).
From class IndexToolsAdapterES7, method fieldHistogram:
@Override
public Map<DateTime, Map<String, Long>> fieldHistogram(String fieldName, Set<String> indices, Optional<Set<String>> includedStreams, long interval) {
    final BoolQueryBuilder queryBuilder = buildStreamIdFilter(includedStreams);
    // A filter aggregation (stream filter) wrapping a fixed-interval date_histogram on "timestamp",
    // which in turn breaks each time bucket down by the values of the requested field.
    final FilterAggregationBuilder the_filter = AggregationBuilders.filter(AGG_FILTER, queryBuilder)
            .subAggregation(AggregationBuilders.dateHistogram(AGG_DATE_HISTOGRAM)
                    .field("timestamp")
                    .subAggregation(AggregationBuilders.terms(AGG_MESSAGE_FIELD).field(fieldName))
                    .fixedInterval(new DateHistogramInterval(interval + "ms"))
                    .minDocCount(1L));
    final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
            .query(QueryBuilders.matchAllQuery())
            .aggregation(the_filter);
    final SearchRequest searchRequest = new SearchRequest()
            .source(searchSourceBuilder)
            .indices(indices.toArray(new String[0]));
    final SearchResponse searchResult = client.search(searchRequest, "Unable to retrieve field histogram.");
    final Filter filterAggregation = searchResult.getAggregations().get(AGG_FILTER);
    final ParsedDateHistogram dateHistogram = filterAggregation.getAggregations().get(AGG_DATE_HISTOGRAM);
    final List<ParsedDateHistogram.ParsedBucket> histogramBuckets = (List<ParsedDateHistogram.ParsedBucket>) dateHistogram.getBuckets();
    final Map<DateTime, Map<String, Long>> result = Maps.newHashMapWithExpectedSize(histogramBuckets.size());
    for (ParsedDateHistogram.ParsedBucket bucket : histogramBuckets) {
        // Bucket keys come back as ZonedDateTime; convert to a UTC Joda DateTime for the result map.
        final ZonedDateTime zonedDateTime = (ZonedDateTime) bucket.getKey();
        final DateTime date = new DateTime(zonedDateTime.toInstant().toEpochMilli()).toDateTime(DateTimeZone.UTC);
        final Terms sourceFieldAgg = bucket.getAggregations().get(AGG_MESSAGE_FIELD);
        final List<? extends Terms.Bucket> termBuckets = sourceFieldAgg.getBuckets();
        final HashMap<String, Long> termCounts = Maps.newHashMapWithExpectedSize(termBuckets.size());
        for (Terms.Bucket termBucket : termBuckets) {
            termCounts.put(termBucket.getKeyAsString(), termBucket.getDocCount());
        }
        result.put(date, termCounts);
    }
    return ImmutableMap.copyOf(result);
}
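A hypothetical call site, counting values of a "source" field in one-minute buckets across two indices; the adapter variable, index names, and field name are made up for illustration:

// interval is given in milliseconds and becomes the fixed_interval "60000ms" above.
Map<DateTime, Map<String, Long>> counts = indexToolsAdapter.fieldHistogram(
        "source",
        ImmutableSet.of("graylog_0", "graylog_1"),
        Optional.empty(),   // assumption: no stream restriction when the Optional is empty
        60_000L);
counts.forEach((minute, perSource) ->
        perSource.forEach((source, count) ->
                System.out.printf("%s %s=%d%n", minute, source, count)));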