Use of org.elasticsearch.action.index.IndexRequestBuilder in project elasticsearch by elastic.
In class GeoHashGridIT, method setupSuiteScopeCluster:
@Override
public void setupSuiteScopeCluster() throws Exception {
    createIndex("idx_unmapped");
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
    assertAcked(prepareCreate("idx").setSettings(settings)
            .addMapping("type", "location", "type=geo_point", "city", "type=keyword"));
    List<IndexRequestBuilder> cities = new ArrayList<>();
    Random random = random();
    expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
    for (int i = 0; i < numDocs; i++) {
        // generate random point
        double lat = (180d * random.nextDouble()) - 90d;
        double lng = (360d * random.nextDouble()) - 180d;
        String randomGeoHash = stringEncode(lng, lat, PRECISION);
        // index at the highest resolution
        cities.add(indexCity("idx", randomGeoHash, lat + ", " + lng));
        expectedDocCountsForGeoHash.put(randomGeoHash, expectedDocCountsForGeoHash.getOrDefault(randomGeoHash, 0) + 1);
        // update expected doc counts for all resolutions
        for (int precision = PRECISION - 1; precision > 0; precision--) {
            String hash = stringEncode(lng, lat, precision);
            if ((smallestGeoHash == null) || (hash.length() < smallestGeoHash.length())) {
                smallestGeoHash = hash;
            }
            expectedDocCountsForGeoHash.put(hash, expectedDocCountsForGeoHash.getOrDefault(hash, 0) + 1);
        }
    }
    indexRandom(true, cities);
    assertAcked(prepareCreate("multi_valued_idx").setSettings(settings)
            .addMapping("type", "location", "type=geo_point", "city", "type=keyword"));
    cities = new ArrayList<>();
    multiValuedExpectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
    for (int i = 0; i < numDocs; i++) {
        final int numPoints = random.nextInt(4);
        List<String> points = new ArrayList<>();
        Set<String> geoHashes = new HashSet<>();
        for (int j = 0; j < numPoints; ++j) {
            double lat = (180d * random.nextDouble()) - 90d;
            double lng = (360d * random.nextDouble()) - 180d;
            points.add(lat + "," + lng);
            // update expected doc counts for all resolutions
            for (int precision = PRECISION; precision > 0; precision--) {
                final String geoHash = stringEncode(lng, lat, precision);
                geoHashes.add(geoHash);
            }
        }
        cities.add(indexCity("multi_valued_idx", Integer.toString(i), points));
        for (String hash : geoHashes) {
            multiValuedExpectedDocCountsForGeoHash.put(hash, multiValuedExpectedDocCountsForGeoHash.getOrDefault(hash, 0) + 1);
        }
    }
    indexRandom(true, cities);
    ensureSearchable();
}
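The indexCity helper referenced above is not included in this excerpt. A minimal sketch of what it might look like, assuming it writes the city name into the "city" keyword field and the points into the "location" geo_point field from the mapping above (the method shape and overloads are assumptions, not taken from this listing):

// Hypothetical sketch of the indexCity helper used above.
// Assumes the usual test imports: XContentFactory.jsonBuilder, java.util.Arrays, java.util.List.
private IndexRequestBuilder indexCity(String index, String name, List<String> latLon) throws Exception {
    XContentBuilder source = jsonBuilder().startObject().field("city", name);
    if (latLon != null) {
        // geo_point fields accept "lat, lon" strings, so the points can be written as a plain array
        source = source.field("location", latLon);
    }
    source = source.endObject();
    return client().prepareIndex(index, "type").setSource(source);
}

// Single-point overload used by the first loop above.
private IndexRequestBuilder indexCity(String index, String name, String latLon) throws Exception {
    return indexCity(index, name, Arrays.asList(latLon));
}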
Use of org.elasticsearch.action.index.IndexRequestBuilder in project elasticsearch by elastic.
In class ChildrenIT, method testPostCollectAllLeafReaders:
public void testPostCollectAllLeafReaders() throws Exception {
    // The 'towns' and 'parent_names' aggs operate on parent docs; if child docs live in different segments, we need
    // to make sure the segments containing those child docs are also evaluated in the post-collect phase.
    // Previously we only evaluated segments that yielded matches in the 'towns' and 'parent_names' aggs, which caused
    // us to skip child docs in segments for which there were no parent matches.
    assertAcked(prepareCreate("index")
            .addMapping("parentType", "name", "type=keyword", "town", "type=keyword")
            .addMapping("childType", "_parent", "type=parentType", "name", "type=keyword", "age", "type=integer"));
    List<IndexRequestBuilder> requests = new ArrayList<>();
    requests.add(client().prepareIndex("index", "parentType", "1").setSource("name", "Bob", "town", "Memphis"));
    requests.add(client().prepareIndex("index", "parentType", "2").setSource("name", "Alice", "town", "Chicago"));
    requests.add(client().prepareIndex("index", "parentType", "3").setSource("name", "Bill", "town", "Chicago"));
    requests.add(client().prepareIndex("index", "childType", "1").setSource("name", "Jill", "age", 5).setParent("1"));
    requests.add(client().prepareIndex("index", "childType", "2").setSource("name", "Joey", "age", 3).setParent("1"));
    requests.add(client().prepareIndex("index", "childType", "3").setSource("name", "John", "age", 2).setParent("2"));
    requests.add(client().prepareIndex("index", "childType", "4").setSource("name", "Betty", "age", 6).setParent("3"));
    requests.add(client().prepareIndex("index", "childType", "5").setSource("name", "Dan", "age", 1).setParent("3"));
    indexRandom(true, requests);
    SearchResponse response = client().prepareSearch("index")
            .setSize(0)
            .addAggregation(AggregationBuilders.terms("towns").field("town")
                    .subAggregation(AggregationBuilders.terms("parent_names").field("name")
                            .subAggregation(AggregationBuilders.children("child_docs", "childType"))))
            .get();
    Terms towns = response.getAggregations().get("towns");
    assertThat(towns.getBuckets().size(), equalTo(2));
    assertThat(towns.getBuckets().get(0).getKeyAsString(), equalTo("Chicago"));
    assertThat(towns.getBuckets().get(0).getDocCount(), equalTo(2L));
    Terms parents = towns.getBuckets().get(0).getAggregations().get("parent_names");
    assertThat(parents.getBuckets().size(), equalTo(2));
    assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Alice"));
    assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L));
    Children children = parents.getBuckets().get(0).getAggregations().get("child_docs");
    assertThat(children.getDocCount(), equalTo(1L));
    assertThat(parents.getBuckets().get(1).getKeyAsString(), equalTo("Bill"));
    assertThat(parents.getBuckets().get(1).getDocCount(), equalTo(1L));
    children = parents.getBuckets().get(1).getAggregations().get("child_docs");
    assertThat(children.getDocCount(), equalTo(2L));
    assertThat(towns.getBuckets().get(1).getKeyAsString(), equalTo("Memphis"));
    assertThat(towns.getBuckets().get(1).getDocCount(), equalTo(1L));
    parents = towns.getBuckets().get(1).getAggregations().get("parent_names");
    assertThat(parents.getBuckets().size(), equalTo(1));
    assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Bob"));
    assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L));
    children = parents.getBuckets().get(0).getAggregations().get("child_docs");
    assertThat(children.getDocCount(), equalTo(2L));
}
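The search in this test builds a deeply nested aggregation in one chained call. As a purely illustrative sketch (assuming the 5.x builder classes TermsAggregationBuilder and ChildrenAggregationBuilder; this decomposition is not part of the original test), the same request can be assembled step by step:

// Equivalent, step-by-step construction of the aggregation used in the search above.
ChildrenAggregationBuilder childDocs = AggregationBuilders.children("child_docs", "childType");

TermsAggregationBuilder parentNames = AggregationBuilders.terms("parent_names")
        .field("name")
        .subAggregation(childDocs);

TermsAggregationBuilder towns = AggregationBuilders.terms("towns")
        .field("town")
        .subAggregation(parentNames);

SearchResponse response = client().prepareSearch("index")
        .setSize(0)   // only aggregation results are needed, no hits
        .addAggregation(towns)
        .get();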
Use of org.elasticsearch.action.index.IndexRequestBuilder in project elasticsearch by elastic.
In class DateHistogramIT, method testSingleValueWithTimeZone:
public void testSingleValueWithTimeZone() throws Exception {
    prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
    IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
    DateTime date = date("2014-03-11T00:00:00+00:00");
    for (int i = 0; i < reqs.length; i++) {
        reqs[i] = client().prepareIndex("idx2", "type", "" + i)
                .setSource(jsonBuilder().startObject().field("date", date).endObject());
        date = date.plusHours(1);
    }
    indexRandom(true, reqs);
    SearchResponse response = client().prepareSearch("idx2")
            .setQuery(matchAllQuery())
            .addAggregation(dateHistogram("date_histo")
                    .field("date")
                    .timeZone(DateTimeZone.forID("-02:00"))
                    .dateHistogramInterval(DateHistogramInterval.DAY)
                    .format("yyyy-MM-dd:HH-mm-ssZZ"))
            .execute().actionGet();
    assertThat(response.getHits().getTotalHits(), equalTo(5L));
    Histogram histo = response.getAggregations().get("date_histo");
    List<? extends Histogram.Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(2));
    Histogram.Bucket bucket = buckets.get(0);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo("2014-03-10:00-00-00-02:00"));
    assertThat(bucket.getDocCount(), equalTo(2L));
    bucket = buckets.get(1);
    assertThat(bucket, notNullValue());
    assertThat(bucket.getKeyAsString(), equalTo("2014-03-11:00-00-00-02:00"));
    assertThat(bucket.getDocCount(), equalTo(3L));
}
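The 2/3 split across the two daily buckets follows from shifting the five hourly UTC timestamps into the -02:00 zone. A small standalone Joda-Time sketch (not part of the test) makes the bucketing explicit:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class TimeZoneBucketingDemo {
    public static void main(String[] args) {
        DateTimeZone tz = DateTimeZone.forID("-02:00");
        DateTime date = new DateTime(2014, 3, 11, 0, 0, DateTimeZone.UTC);
        for (int i = 0; i < 5; i++) {
            // 00:00 and 01:00 UTC map to 22:00/23:00 on 2014-03-10 local time (2 docs);
            // 02:00 through 04:00 UTC map to 00:00-02:00 on 2014-03-11 local time (3 docs).
            System.out.println(date + " UTC -> " + date.withZone(tz).toLocalDate());
            date = date.plusHours(1);
        }
    }
}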
Use of org.elasticsearch.action.index.IndexRequestBuilder in project elasticsearch by elastic.
In class AdjacencyMatrixIT, method setupSuiteScopeCluster:
@Override
public void setupSuiteScopeCluster() throws Exception {
    createIndex("idx");
    createIndex("idx2");
    assertAcked(client().admin().indices().prepareUpdateSettings("idx")
            .setSettings(Settings.builder().put(IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING.getKey(), MAX_NUM_FILTERS))
            .get());
    numDocs = randomIntBetween(5, 20);
    numTag1Docs = randomIntBetween(1, numDocs - 1);
    numTag2Docs = randomIntBetween(1, numDocs - numTag1Docs);
    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (int i = 0; i < numTag1Docs; i++) {
        numSingleTag1Docs++;
        XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject();
        builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source));
        if (randomBoolean()) {
            // randomly index the document twice so that we have deleted
            // docs that match the filter
            builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source));
        }
    }
    for (int i = numTag1Docs; i < (numTag1Docs + numTag2Docs); i++) {
        numSingleTag2Docs++;
        XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).field("tag", "tag2").endObject();
        builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source));
        if (randomBoolean()) {
            builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source));
        }
    }
    for (int i = numTag1Docs + numTag2Docs; i < numDocs; i++) {
        numMultiTagDocs++;
        numTag1Docs++;
        numTag2Docs++;
        XContentBuilder source = jsonBuilder().startObject().field("value", i + 1).array("tag", "tag1", "tag2").endObject();
        builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source));
        if (randomBoolean()) {
            builders.add(client().prepareIndex("idx", "type", "" + i).setSource(source));
        }
    }
    prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").execute().actionGet();
    for (int i = 0; i < 2; i++) {
        builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i)
                .setSource(jsonBuilder().startObject().field("value", i * 2).endObject()));
    }
    indexRandom(true, builders);
    ensureSearchable();
}
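The tag1/tag2 documents indexed above are what the adjacency_matrix tests query against. A minimal sketch of the kind of request they exercise, assuming the map-based AggregationBuilders.adjacencyMatrix factory and the AdjacencyMatrix result interface from the 5.x Java API (this query is not part of the setup method itself):

// Sketch only: an adjacency_matrix aggregation over the "tag" filters set up above.
// Assumes java.util.HashMap/Map, QueryBuilders, AggregationBuilders and the AdjacencyMatrix agg interface are imported.
Map<String, QueryBuilder> filters = new HashMap<>();
filters.put("t1", QueryBuilders.termQuery("tag", "tag1"));
filters.put("t2", QueryBuilders.termQuery("tag", "tag2"));

SearchResponse response = client().prepareSearch("idx")
        .addAggregation(AggregationBuilders.adjacencyMatrix("tags", filters))
        .get();

// Buckets "t1" and "t2" count docs matching each filter; bucket "t1&t2" counts docs carrying both tags.
AdjacencyMatrix matrix = response.getAggregations().get("tags");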
Use of org.elasticsearch.action.index.IndexRequestBuilder in project elasticsearch by elastic.
In class DateHistogramIT, method testSingleValueFieldWithExtendedBoundsTimezone:
/**
 * Test date histogram aggregation with hour interval, timezone shift and
 * extended bounds (see https://github.com/elastic/elasticsearch/issues/12278)
 */
public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
    String index = "test12278";
    prepareCreate(index)
            .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
            .execute().actionGet();
    DateMathParser parser = new DateMathParser(Joda.getStrictStandardDateFormatter());
    // we pick a random timezone offset of +12/-12 hours and insert two documents
    // one at 00:00 in that time zone and one at 12:00
    List<IndexRequestBuilder> builders = new ArrayList<>();
    int timeZoneHourOffset = randomIntBetween(-12, 12);
    DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
    DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
    DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
    builders.add(indexDoc(index, timeZoneStartToday, 1));
    builders.add(indexDoc(index, timeZoneNoonToday, 2));
    indexRandom(true, builders);
    ensureSearchable(index);
    SearchResponse response = null;
    // retrieve those docs with the same time zone and extended bounds
    response = client().prepareSearch(index)
            .setQuery(QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID()))
            .addAggregation(dateHistogram("histo")
                    .field("date")
                    .dateHistogramInterval(DateHistogramInterval.hours(1))
                    .timeZone(timezone)
                    .minDocCount(0)
                    .extendedBounds(new ExtendedBounds("now/d", "now/d+23h")))
            .execute().actionGet();
    assertSearchResponse(response);
    assertThat("Expected 24 buckets for one day aggregation with hourly interval", response.getHits().getTotalHits(), equalTo(2L));
    Histogram histo = response.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    assertThat(histo.getName(), equalTo("histo"));
    List<? extends Bucket> buckets = histo.getBuckets();
    assertThat(buckets.size(), equalTo(24));
    for (int i = 0; i < buckets.size(); i++) {
        Histogram.Bucket bucket = buckets.get(i);
        assertThat(bucket, notNullValue());
        assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(),
                equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
        if (i == 0 || i == 12) {
            assertThat(bucket.getDocCount(), equalTo(1L));
        } else {
            assertThat(bucket.getDocCount(), equalTo(0L));
        }
    }
    internalCluster().wipeIndices("test12278");
}
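The indexDoc helper referenced above is also not included in this excerpt. A minimal sketch, assuming it simply indexes the given DateTime into a "date" field together with an integer "value" field (the real helper in the test class may set additional fields):

// Hypothetical sketch of the indexDoc helper used above.
// Assumes XContentFactory.jsonBuilder is statically imported, as in the tests.
private IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
    return client().prepareIndex(idx, "type")
            .setSource(jsonBuilder()
                    .startObject()
                    .field("date", date)
                    .field("value", value)
                    .endObject());
}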