Use of org.elasticsearch.search.aggregations.bucket.filters.InternalFilters in project elasticsearch by elastic: class FiltersAggsRewriteIT, method testWrapperQueryIsRewritten.
public void testWrapperQueryIsRewritten() throws IOException {
    createIndex("test", Settings.EMPTY, "test", "title", "type=text");
    client().prepareIndex("test", "test", "1").setSource("title", "foo bar baz").get();
    client().prepareIndex("test", "test", "2").setSource("title", "foo foo foo").get();
    client().prepareIndex("test", "test", "3").setSource("title", "bar baz bax").get();
    client().admin().indices().prepareRefresh("test").get();
    XContentType xContentType = randomFrom(XContentType.values());
    BytesReference bytesReference;
    try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) {
        builder.startObject();
        {
            builder.startObject("terms");
            {
                builder.array("title", "foo");
            }
            builder.endObject();
        }
        builder.endObject();
        bytesReference = builder.bytes();
    }
    FiltersAggregationBuilder builder = new FiltersAggregationBuilder("titles",
            new FiltersAggregator.KeyedFilter("titleterms", new WrapperQueryBuilder(bytesReference)));
    SearchResponse searchResponse = client().prepareSearch("test").setSize(0).addAggregation(builder).get();
    assertEquals(3, searchResponse.getHits().getTotalHits());
    InternalFilters filters = searchResponse.getAggregations().get("titles");
    assertEquals(1, filters.getBuckets().size());
    assertEquals(2, filters.getBuckets().get(0).getDocCount());
}
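As a side note, the XContentBuilder block above only exists to produce the JSON {"terms": {"title": ["foo"]}} that the wrapper query carries. Below is a minimal sketch of the same request with that JSON passed as an inline string via QueryBuilders.wrapperQuery; the variable names (wrappedQuery, agg, response, titles) are illustrative, and the test's client() and "test" index are assumed.

// Sketch only: same aggregation as the test above, with the wrapped query given as a JSON string.
String wrappedQuery = "{ \"terms\" : { \"title\" : [\"foo\"] } }";
FiltersAggregationBuilder agg = new FiltersAggregationBuilder("titles",
        new FiltersAggregator.KeyedFilter("titleterms", QueryBuilders.wrapperQuery(wrappedQuery)));
SearchResponse response = client().prepareSearch("test").setSize(0).addAggregation(agg).get();
// The wrapper query is rewritten into the terms query it wraps, so the "titleterms"
// bucket again counts the two documents whose title contains "foo".
Filters titles = response.getAggregations().get("titles");
long fooDocs = titles.getBucketByKey("titleterms").getDocCount();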
Use of org.elasticsearch.search.aggregations.bucket.filters.InternalFilters in project elasticsearch by elastic: class FiltersAggregatorTests, method testKeyedFilter.
public void testKeyedFilter() throws Exception {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    Document document = new Document();
    document.add(new Field("field", "foo", fieldType));
    indexWriter.addDocument(document);
    document.clear();
    document.add(new Field("field", "else", fieldType));
    indexWriter.addDocument(document);
    // make sure we have more than one segment to test the merge
    indexWriter.commit();
    document.add(new Field("field", "foo", fieldType));
    indexWriter.addDocument(document);
    document.clear();
    document.add(new Field("field", "bar", fieldType));
    indexWriter.addDocument(document);
    document.clear();
    document.add(new Field("field", "foobar", fieldType));
    indexWriter.addDocument(document);
    indexWriter.commit();
    document.clear();
    document.add(new Field("field", "something", fieldType));
    indexWriter.addDocument(document);
    indexWriter.commit();
    document.clear();
    document.add(new Field("field", "foobar", fieldType));
    indexWriter.addDocument(document);
    indexWriter.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
    FiltersAggregator.KeyedFilter[] keys = new FiltersAggregator.KeyedFilter[6];
    keys[0] = new FiltersAggregator.KeyedFilter("foobar", QueryBuilders.termQuery("field", "foobar"));
    keys[1] = new FiltersAggregator.KeyedFilter("bar", QueryBuilders.termQuery("field", "bar"));
    keys[2] = new FiltersAggregator.KeyedFilter("foo", QueryBuilders.termQuery("field", "foo"));
    keys[3] = new FiltersAggregator.KeyedFilter("foo2", QueryBuilders.termQuery("field", "foo"));
    keys[4] = new FiltersAggregator.KeyedFilter("same", QueryBuilders.termQuery("field", "foo"));
    // filter name already present, so should it be merged with the previous one?
    keys[5] = new FiltersAggregator.KeyedFilter("same", QueryBuilders.termQuery("field", "bar"));
    FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", keys);
    builder.otherBucket(true);
    builder.otherBucketKey("other");
    for (boolean doReduce : new boolean[] { true, false }) {
        final InternalFilters filters;
        if (doReduce) {
            filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
        } else {
            filters = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
        }
        assertEquals(filters.getBuckets().size(), 7);
        assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2);
        assertEquals(filters.getBucketByKey("foo").getDocCount(), 2);
        assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2);
        assertEquals(filters.getBucketByKey("bar").getDocCount(), 1);
        assertEquals(filters.getBucketByKey("same").getDocCount(), 1);
        assertEquals(filters.getBucketByKey("other").getDocCount(), 2);
    }
    indexReader.close();
    directory.close();
}
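For comparison, application code usually reads these keyed buckets through the Filters interface rather than the concrete InternalFilters class used by the test harness above. A minimal sketch, assuming an index named "test" with a "field" value per document and reusing client() from the integration-test context; agg, response and filters are illustrative names.

// Sketch only: keyed filters plus an "other" bucket, read back through the Filters interface.
FiltersAggregationBuilder agg = new FiltersAggregationBuilder("test",
        new FiltersAggregator.KeyedFilter("foo", QueryBuilders.termQuery("field", "foo")),
        new FiltersAggregator.KeyedFilter("bar", QueryBuilders.termQuery("field", "bar")));
agg.otherBucket(true);
agg.otherBucketKey("other");
SearchResponse response = client().prepareSearch("test").setSize(0).addAggregation(agg).get();
Filters filters = response.getAggregations().get("test");
for (Filters.Bucket bucket : filters.getBuckets()) {
    // bucket keys are the filter names plus the configured other-bucket key
    System.out.println(bucket.getKeyAsString() + " -> " + bucket.getDocCount());
}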
Use of org.elasticsearch.search.aggregations.bucket.filters.InternalFilters in project elasticsearch by elastic: class FiltersAggregatorTests, method testRandom.
public void testRandom() throws Exception {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    int numDocs = randomIntBetween(100, 200);
    int maxTerm = randomIntBetween(10, 50);
    int[] expectedBucketCount = new int[maxTerm];
    Document document = new Document();
    for (int i = 0; i < numDocs; i++) {
        if (frequently()) {
            // make sure we have more than one segment to test the merge
            indexWriter.commit();
        }
        int value = randomInt(maxTerm - 1);
        expectedBucketCount[value] += 1;
        document.add(new Field("field", Integer.toString(value), fieldType));
        indexWriter.addDocument(document);
        document.clear();
    }
    indexWriter.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
    int numFilters = randomIntBetween(1, 10);
    QueryBuilder[] filters = new QueryBuilder[numFilters];
    int[] filterTerms = new int[numFilters];
    int expectedOtherCount = numDocs;
    Set<Integer> filterSet = new HashSet<>();
    for (int i = 0; i < filters.length; i++) {
        int value = randomInt(maxTerm - 1);
        filters[i] = QueryBuilders.termQuery("field", Integer.toString(value));
        filterTerms[i] = value;
        if (filterSet.contains(value) == false) {
            expectedOtherCount -= expectedBucketCount[value];
            filterSet.add(value);
        }
    }
    FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters);
    builder.otherBucket(true);
    builder.otherBucketKey("other");
    for (boolean doReduce : new boolean[] { true, false }) {
        final InternalFilters response;
        if (doReduce) {
            response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
        } else {
            response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
        }
        List<InternalFilters.InternalBucket> buckets = response.getBuckets();
        assertEquals(buckets.size(), filters.length + 1);
        for (InternalFilters.InternalBucket bucket : buckets) {
            if ("other".equals(bucket.getKey())) {
                assertEquals(bucket.getDocCount(), expectedOtherCount);
            } else {
                // anonymous (non-keyed) filters get positional bucket keys ("0", "1", ...),
                // so the key can be parsed back into an index into filterTerms
                int index = Integer.parseInt(bucket.getKey());
                assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]);
            }
        }
    }
    indexReader.close();
    directory.close();
}
Use of org.elasticsearch.search.aggregations.bucket.filters.InternalFilters in project elasticsearch by elastic: class FiltersAggregatorTests, method testEmpty.
public void testEmpty() throws Exception {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    indexWriter.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
    int numFilters = randomIntBetween(1, 10);
    QueryBuilder[] filters = new QueryBuilder[numFilters];
    for (int i = 0; i < filters.length; i++) {
        filters[i] = QueryBuilders.termQuery("field", randomAsciiOfLength(5));
    }
    FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters);
    builder.otherBucketKey("other");
    InternalFilters response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
    assertEquals(response.getBuckets().size(), numFilters);
    for (InternalFilters.InternalBucket filter : response.getBuckets()) {
        assertEquals(filter.getDocCount(), 0);
    }
    indexReader.close();
    directory.close();
}