Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram in project elasticsearch by elastic.
The class PercentilesBucketIT, method testDocCountAsSubAgg.
public void testDocCountAsSubAgg() throws Exception {
SearchResponse response = client().prepareSearch("idx").addAggregation(terms("terms").field("tag").order(Terms.Order.term(true)).subAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue)).subAggregation(percentilesBucket("percentiles_bucket", "histo>_count").percents(PERCENTS))).execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo("terms"));
List<Terms.Bucket> termsBuckets = terms.getBuckets();
assertThat(termsBuckets.size(), equalTo(interval));
for (int i = 0; i < interval; ++i) {
Terms.Bucket termsBucket = termsBuckets.get(i);
assertThat(termsBucket, notNullValue());
assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
Histogram histo = termsBucket.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
double[] values = new double[numValueBuckets];
for (int j = 0; j < numValueBuckets; ++j) {
Histogram.Bucket bucket = buckets.get(j);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
values[j] = bucket.getDocCount();
}
Arrays.sort(values);
PercentilesBucket percentilesBucketValue = termsBucket.getAggregations().get("percentiles_bucket");
assertThat(percentilesBucketValue, notNullValue());
assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket"));
assertPercentileBucket(PERCENTS, values, percentilesBucketValue);
}
}
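The body of the assertPercentileBucket helper is not shown in this snippet. Below is a minimal sketch of the kind of check it performs, assuming the nearest-rank selection over the sorted per-bucket doc counts and the static imports (assertThat, closeTo) already used in the test; the helper name and the rounding rule are assumptions, not quoted from the source.

// Hypothetical stand-in for assertPercentileBucket: for every requested percent,
// expect the pipeline value to be the nearest-rank element of the sorted inputs.
private static void assertPercentilesMatchSortedValues(double[] percents, double[] sortedValues,
        PercentilesBucket percentilesBucket) {
    for (double percent : percents) {
        // nearest-rank index into the sorted doc counts (assumed interpolation rule)
        int index = (int) Math.round((percent / 100.0) * (sortedValues.length - 1));
        assertThat(percentilesBucket.percentile(percent), closeTo(sortedValues[index], 0.00001));
    }
}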
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram in project elasticsearch by elastic.
The class PercentilesBucketIT, method testDocCountopLevel.
public void testDocCountopLevel() throws Exception {
SearchResponse response = client().prepareSearch("idx").addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue)).addAggregation(percentilesBucket("percentiles_bucket", "histo>_count").percents(PERCENTS)).execute().actionGet();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
double[] values = new double[numValueBuckets];
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
values[i] = bucket.getDocCount();
}
Arrays.sort(values);
PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket");
assertThat(percentilesBucketValue, notNullValue());
assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket"));
assertPercentileBucket(PERCENTS, values, percentilesBucketValue);
}
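Outside of a test, the computed percentiles can be read back directly from the pipeline aggregation by the percent that was requested. A minimal usage sketch, assuming 50.0 and 99.0 are among the PERCENTS passed to percents(...) above (hypothetical application code, not part of the test):

// Read individual percentiles of the histo doc counts from the pipeline result.
PercentilesBucket pb = response.getAggregations().get("percentiles_bucket");
double median = pb.percentile(50.0); // only percents passed to percents(...) are available
double p99 = pb.percentile(99.0);    // same caveat for 99.0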
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram in project elasticsearch by elastic.
The class DateDerivativeIT, method testSingleValuedFieldNormalised_timeZone_CET_DstEnd.
/**
* Do a derivative on a date histogram with time zone CET at DST end
*/
public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Exception {
    createIndex(IDX_DST_END);
    DateTimeZone timezone = DateTimeZone.forID("CET");
    List<IndexRequestBuilder> builders = new ArrayList<>();
    addNTimes(1, IDX_DST_END, new DateTime("2012-10-27T01:00:00", timezone), builders);
    // day with dst shift -1h, 25h long
    addNTimes(2, IDX_DST_END, new DateTime("2012-10-28T01:00:00", timezone), builders);
    addNTimes(3, IDX_DST_END, new DateTime("2012-10-29T01:00:00", timezone), builders);
    addNTimes(4, IDX_DST_END, new DateTime("2012-10-30T01:00:00", timezone), builders);
    indexRandom(true, builders);
    ensureSearchable();

    SearchResponse response = client().prepareSearch(IDX_DST_END)
            .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY)
                    .timeZone(timezone).minDocCount(0)
                    .subAggregation(derivative("deriv", "_count").unit(DateHistogramInterval.HOUR)))
            .execute().actionGet();
    assertSearchResponse(response);

    Histogram deriv = response.getAggregations().get("histo");
    assertThat(deriv, notNullValue());
    assertThat(deriv.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = deriv.getBuckets();
    assertThat(buckets.size(), equalTo(4));

    assertBucket(buckets.get(0), new DateTime("2012-10-27", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null);
    assertBucket(buckets.get(1), new DateTime("2012-10-28", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d);
    // the following is normalized using a 25h bucket width
    assertBucket(buckets.get(2), new DateTime("2012-10-29", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 25d);
    assertBucket(buckets.get(3), new DateTime("2012-10-30", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d);
}
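The expected normalized values above can be reproduced by hand: the derivative's unit(...) option divides each derivative by the distance between the current and previous bucket keys, expressed in that unit, which is why the bucket following the 25-hour DST day is normalized by 25 rather than 24. A short sketch of that arithmetic (the formula is inferred from the asserted values, not quoted from the aggregator):

// Reproducing the 1d / 25d expectation for the 2012-10-29 bucket.
// Assumption: normalizedValue = derivative / ((thisBucketKey - previousBucketKey) / unitMillis).
long unitMillis = 60L * 60L * 1000L;      // unit = HOUR
long gapMillis = 25L * 60L * 60L * 1000L; // 2012-10-28 is 25h long at DST end in CET
double derivative = 3d - 2d;              // doc count 3 minus previous doc count 2
double normalized = derivative / (gapMillis / (double) unitMillis); // == 1d / 25d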
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram in project elasticsearch by elastic.
The class DerivativeIT, method testSingleValuedField_normalised.
/**
 * test first and second derivative on the single-valued field
 */
public void testSingleValuedField_normalised() {
SearchResponse response = client().prepareSearch("idx").addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).minDocCount(0).subAggregation(derivative("deriv", "_count").unit("1ms")).subAggregation(derivative("2nd_deriv", "deriv").unit("10ms"))).execute().actionGet();
assertSearchResponse(response);
Histogram deriv = response.getAggregations().get("histo");
assertThat(deriv, notNullValue());
assertThat(deriv.getName(), equalTo("histo"));
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i * interval, valueCounts[i]);
Derivative docCountDeriv = bucket.getAggregations().get("deriv");
if (i > 0) {
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), closeTo((firstDerivValueCounts[i - 1]), 0.00001));
assertThat(docCountDeriv.normalizedValue(), closeTo((double) (firstDerivValueCounts[i - 1]) / 5, 0.00001));
} else {
assertThat(docCountDeriv, nullValue());
}
Derivative docCount2ndDeriv = bucket.getAggregations().get("2nd_deriv");
if (i > 1) {
assertThat(docCount2ndDeriv, notNullValue());
assertThat(docCount2ndDeriv.value(), closeTo((secondDerivValueCounts[i - 2]), 0.00001));
assertThat(docCount2ndDeriv.normalizedValue(), closeTo((double) (secondDerivValueCounts[i - 2]) * 2, 0.00001));
} else {
assertThat(docCount2ndDeriv, nullValue());
}
}
}
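The / 5 and * 2 factors in the normalizedValue assertions follow the same rule as in the date-histogram test above: each derivative is divided by the bucket width expressed in the requested unit. Assuming the histogram interval is 5 (an assumption consistent with those factors, since the interval itself is defined elsewhere in the test class), a quick check of that arithmetic:

// Assumed: histogram interval is 5, so each bucket spans 5 "ms" on the x-axis.
double bucketWidth = 5d;
double firstDeriv = 7d;                                       // any example derivative value
double normalisedAt1ms = firstDeriv / (bucketWidth / 1d);     // unit "1ms"  -> value / 5
double normalisedAt10ms = firstDeriv / (bucketWidth / 10d);   // unit "10ms" -> value * 2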
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram in project elasticsearch by elastic.
The class DerivativeIT, method testSingleValueAggDerivativeWithGaps.
public void testSingleValueAggDerivativeWithGaps() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()).addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1).subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)).subAggregation(derivative("deriv", "sum"))).execute().actionGet();
assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocsEmptyIdx));
Histogram deriv = searchResponse.getAggregations().get("histo");
assertThat(deriv, Matchers.notNullValue());
assertThat(deriv.getName(), equalTo("histo"));
List<Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(valueCounts_empty.length));
double lastSumValue = Double.NaN;
for (int i = 0; i < valueCounts_empty.length; i++) {
Histogram.Bucket bucket = buckets.get(i);
checkBucketKeyAndDocCount("InternalBucket " + i, bucket, i, valueCounts_empty[i]);
Sum sum = bucket.getAggregations().get("sum");
double thisSumValue = sum.value();
if (bucket.getDocCount() == 0) {
thisSumValue = Double.NaN;
}
SimpleValue sumDeriv = bucket.getAggregations().get("deriv");
if (i == 0) {
assertThat(sumDeriv, nullValue());
} else {
double expectedDerivative = thisSumValue - lastSumValue;
if (Double.isNaN(expectedDerivative)) {
assertThat(sumDeriv.value(), equalTo(expectedDerivative));
} else {
assertThat(sumDeriv.value(), closeTo(expectedDerivative, 0.00001));
}
}
lastSumValue = thisSumValue;
}
}
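The branch on Double.isNaN(expectedDerivative) is needed because Hamcrest's closeTo never matches NaN, while equalTo does: equalTo falls back to Double.equals, which treats NaN as equal to NaN even though NaN != NaN under the primitive == operator. A minimal illustration of those semantics:

// NaN comparison behaviour that motivates the equalTo/closeTo split above.
double nan = Double.NaN;
assert !(nan == Double.NaN);                       // primitive comparison: NaN is never == NaN
assert Double.valueOf(nan).equals(Double.NaN);     // boxed equals: NaN equals NaN
assertThat(nan, equalTo(Double.NaN));              // passes
// assertThat(nan, closeTo(Double.NaN, 0.00001));  // would fail: closeTo cannot match NaN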