Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket in project elasticsearch by elastic.
In class DateHistogramIT, method testSingleValuedFieldOrderedByCountDesc.
public void testSingleValuedFieldOrderedByCountDesc() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(dateHistogram("histo")
                    .field("date")
                    .dateHistogramInterval(DateHistogramInterval.MONTH)
                    .order(Histogram.Order.COUNT_DESC))
            .execute().actionGet();

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    assertThat(histo.getBuckets().size(), equalTo(3));

    int i = 2;
    for (Histogram.Bucket bucket : histo.getBuckets()) {
        assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
        i--;
    }
}
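The hard-coded keys rely on the "idx" fixture holding one document dated January 2012, two in February and three in March (the same counts asserted by the sub-aggregation test further down), so descending count order yields March, February, January. A more generic way to check Histogram.Order.COUNT_DESC, without assuming the fixture, is a sketch like the following, reusing the histo variable from the test (lessThanOrEqualTo is Hamcrest's matcher, statically imported like the others):

    // Minimal sketch (not part of the original test): verify COUNT_DESC ordering
    // by checking that doc counts never increase from one bucket to the next.
    long previousCount = Long.MAX_VALUE;
    for (Histogram.Bucket bucket : histo.getBuckets()) {
        assertThat(bucket.getDocCount(), lessThanOrEqualTo(previousCount));
        previousCount = bucket.getDocCount();
    }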
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket in project elasticsearch by elastic.
In class DateHistogramIT, method testMultiValuedField.
/*
    [ Jan 2,  Feb 3]
    [ Feb 2,  Mar 3]
    [ Feb 15, Mar 16]
    [ Mar 2,  Apr 3]
    [ Mar 15, Apr 16]
    [ Mar 23, Apr 24]
 */
public void testMultiValuedField() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(dateHistogram("histo")
                    .field("dates")
                    .dateHistogramInterval(DateHistogramInterval.MONTH))
            .execute().actionGet();

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));

    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(4));

    DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
    Histogram.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(1L));

    key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(3L));

    key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(5L));

    key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
    bucket = buckets.get(3);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(3L));
}
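Each of the six documents in the leading comment carries two dates, and a document is counted in every monthly bucket one of its values falls into, which is why the four buckets add up to 12 hits (1 + 3 + 5 + 3) from only six documents. As an illustration, a document like the first row could be indexed along these lines; the index name, type name and date strings here are assumptions, not the class's actual fixture code:

    // Hypothetical sketch of indexing the first row of the comment ([ Jan 2, Feb 3 ])
    // into the multi-valued "dates" field.
    client().prepareIndex("idx", "type")
            .setSource(jsonBuilder().startObject()
                    .startArray("dates")
                        .value("2012-01-02")
                        .value("2012-02-03")
                    .endArray()
                    .endObject())
            .get();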
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket in project elasticsearch by elastic.
In class DateHistogramIT, method testSingleValueFieldWithExtendedBounds.
public void testSingleValueFieldWithExtendedBounds() throws Exception {
    String pattern = "yyyy-MM-dd";
    // we're testing on days, so the base must be rounded to a day
    int interval = randomIntBetween(1, 2); // in days
    long intervalMillis = interval * 24 * 60 * 60 * 1000;
    DateTime base = new DateTime(DateTimeZone.UTC).dayOfMonth().roundFloorCopy();
    DateTime baseKey = new DateTime(intervalMillis * (base.getMillis() / intervalMillis), DateTimeZone.UTC);

    prepareCreate("idx2")
            .setSettings(Settings.builder()
                    .put(indexSettings())
                    .put("index.number_of_shards", 1)
                    .put("index.number_of_replicas", 0))
            .execute().actionGet();

    int numOfBuckets = randomIntBetween(3, 6);
    // the empty bucket should be in the middle
    int emptyBucketIndex = randomIntBetween(1, numOfBuckets - 2);

    long[] docCounts = new long[numOfBuckets];
    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (int i = 0; i < numOfBuckets; i++) {
        if (i == emptyBucketIndex) {
            docCounts[i] = 0;
        } else {
            int docCount = randomIntBetween(1, 3);
            for (int j = 0; j < docCount; j++) {
                DateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1));
                builders.add(indexDoc("idx2", date, j));
            }
            docCounts[i] = docCount;
        }
    }
    indexRandom(true, builders);
    ensureSearchable("idx2");

    DateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval);

    // randomizing the number of buckets on the min bound
    // (can sometimes fall within the data range, but more frequently will fall before the data range)
    int addedBucketsLeft = randomIntBetween(0, numOfBuckets);
    DateTime boundsMinKey;
    if (frequently()) {
        boundsMinKey = baseKey.minusDays(addedBucketsLeft * interval);
    } else {
        boundsMinKey = baseKey.plusDays(addedBucketsLeft * interval);
        addedBucketsLeft = 0;
    }
    DateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1));

    // randomizing the number of buckets on the max bound
    // (can sometimes fall within the data range, but more frequently will fall after the data range)
    int addedBucketsRight = randomIntBetween(0, numOfBuckets);
    int boundsMaxKeyDelta = addedBucketsRight * interval;
    if (rarely()) {
        addedBucketsRight = 0;
        boundsMaxKeyDelta = -boundsMaxKeyDelta;
    }
    DateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta);
    DateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1));

    // it could be that the random bounds.min we chose ended up greater than bounds.max,
    // which should trigger an error
    boolean invalidBoundsError = boundsMin.isAfter(boundsMax);

    // constructing the newly expected bucket list
    int bucketsCount = numOfBuckets + addedBucketsLeft + addedBucketsRight;
    long[] extendedValueCounts = new long[bucketsCount];
    System.arraycopy(docCounts, 0, extendedValueCounts, addedBucketsLeft, docCounts.length);

    SearchResponse response = null;
    try {
        response = client().prepareSearch("idx2")
                .addAggregation(dateHistogram("histo")
                        .field("date")
                        .dateHistogramInterval(DateHistogramInterval.days(interval))
                        .minDocCount(0)
                        .extendedBounds(new ExtendedBounds(format(boundsMin, pattern), format(boundsMax, pattern)))
                        .format(pattern))
                .execute().actionGet();

        if (invalidBoundsError) {
            fail("Expected an exception to be thrown when bounds.min is greater than bounds.max");
            return;
        }
    } catch (Exception e) {
        if (invalidBoundsError) {
            // expected
            return;
        } else {
            throw e;
        }
    }

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));

    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(bucketsCount));

    DateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey;
    for (int i = 0; i < bucketsCount; i++) {
        Histogram.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat(((DateTime) bucket.getKey()), equalTo(key));
        assertThat(bucket.getKeyAsString(), equalTo(format(key, pattern)));
        assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i]));
        key = key.plusDays(interval);
    }
}
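The format(DateTime, String) helper used for the extended-bounds strings and the key assertions is not shown in this excerpt; given the Joda-Time types used throughout the class, a plausible sketch of it would be:

    // Assumed shape of the format(DateTime, String) helper referenced above;
    // the actual helper in DateHistogramIT may differ.
    private static String format(DateTime date, String pattern) {
        return DateTimeFormat.forPattern(pattern).print(date);
    }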
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket in project elasticsearch by elastic.
In class DateHistogramIT, method testSingleValueWithMultipleDateFormatsFromMapping.
public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception {
    String mappingJson = jsonBuilder().startObject().startObject("type").startObject("properties")
            .startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy")
            .endObject().endObject().endObject().endObject().string();
    prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).execute().actionGet();

    IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
    for (int i = 0; i < reqs.length; i++) {
        reqs[i] = client().prepareIndex("idx2", "type", "" + i)
                .setSource(jsonBuilder().startObject().field("date", "10-03-2014").endObject());
    }
    indexRandom(true, reqs);

    SearchResponse response = client().prepareSearch("idx2")
            .setQuery(matchAllQuery())
            .addAggregation(dateHistogram("date_histo")
                    .field("date")
                    .dateHistogramInterval(DateHistogramInterval.DAY))
            .execute().actionGet();

    assertSearchHits(response, "0", "1", "2", "3", "4");

    Histogram histo = response.getAggregations().get("date_histo");
    List<? extends Histogram.Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(1));

    DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
    Histogram.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(5L));
}
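The bucket key of 2014-03-10 follows from the secondary mapping format "dd-MM-yyyy": the source value "10-03-2014" is parsed as day 10, month 3. A quick Joda-Time illustration of that parse, not part of the test:

    // Illustration only: parsing "10-03-2014" with the dd-MM-yyyy pattern yields March 10th.
    DateTime parsed = DateTimeFormat.forPattern("dd-MM-yyyy").withZoneUTC().parseDateTime("10-03-2014");
    assertThat(parsed, equalTo(new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC)));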
Use of org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket in project elasticsearch by elastic.
In class DateHistogramIT, method testSingleValuedFieldWithSubAggregation.
public void testSingleValuedFieldWithSubAggregation() throws Exception {
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(dateHistogram("histo")
                    .field("date")
                    .dateHistogramInterval(DateHistogramInterval.MONTH)
                    .subAggregation(sum("sum").field("value")))
            .execute().actionGet();

    assertSearchResponse(response);

    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));

    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(3));
    assertThat(histo.getProperty("_bucket_count"), equalTo(3));
    Object[] propertiesKeys = (Object[]) histo.getProperty("_key");
    Object[] propertiesDocCounts = (Object[]) histo.getProperty("_count");
    Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value");

    DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
    Histogram.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(1L));
    Sum sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    assertThat(sum.getValue(), equalTo(1.0));
    assertThat((DateTime) propertiesKeys[0], equalTo(key));
    assertThat((long) propertiesDocCounts[0], equalTo(1L));
    assertThat((double) propertiesCounts[0], equalTo(1.0));

    key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(2L));
    sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    assertThat(sum.getValue(), equalTo(5.0));
    assertThat((DateTime) propertiesKeys[1], equalTo(key));
    assertThat((long) propertiesDocCounts[1], equalTo(2L));
    assertThat((double) propertiesCounts[1], equalTo(5.0));

    key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
    bucket = buckets.get(2);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
    assertThat(((DateTime) bucket.getKey()), equalTo(key));
    assertThat(bucket.getDocCount(), equalTo(3L));
    sum = bucket.getAggregations().get("sum");
    assertThat(sum, notNullValue());
    assertThat(sum.getValue(), equalTo(15.0));
    assertThat((DateTime) propertiesKeys[2], equalTo(key));
    assertThat((long) propertiesDocCounts[2], equalTo(3L));
    assertThat((double) propertiesCounts[2], equalTo(15.0));
}
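The property paths read via histo.getProperty("_key"), histo.getProperty("_count") and histo.getProperty("sum.value") expose the same per-bucket data as walking the buckets directly, so the two access styles asserted above can also be compared generically. A minimal sketch, reusing the buckets and propertiesCounts variables from the test:

    // Sketch (not part of the original test): each bucket's sub-aggregation value
    // should match the corresponding entry from the "sum.value" property path.
    for (int i = 0; i < buckets.size(); i++) {
        Sum bucketSum = buckets.get(i).getAggregations().get("sum");
        assertThat(bucketSum.getValue(), equalTo((double) propertiesCounts[i]));
    }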