Use of org.apache.lucene.index.IndexReader in project elasticsearch by elastic.
The class ParentToChildrenAggregatorTests, method testParentChild.
public void testParentChild() throws IOException {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    final Map<String, Tuple<Integer, Integer>> expectedParentChildRelations = setupIndex(indexWriter);
    indexWriter.close();

    IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory),
            new ShardId(new Index("foo", "_na_"), 1));
    // TODO set "maybeWrap" to true for IndexSearcher once #23338 is resolved
    IndexSearcher indexSearcher = newSearcher(indexReader, false, true);

    testCase(new MatchAllDocsQuery(), indexSearcher, child -> {
        int expectedTotalChildren = 0;
        int expectedMinValue = Integer.MAX_VALUE;
        for (Tuple<Integer, Integer> expectedValues : expectedParentChildRelations.values()) {
            expectedTotalChildren += expectedValues.v1();
            expectedMinValue = Math.min(expectedMinValue, expectedValues.v2());
        }
        assertEquals(expectedTotalChildren, child.getDocCount());
        assertEquals(expectedMinValue,
                ((InternalMin) child.getAggregations().get("in_child")).getValue(), Double.MIN_VALUE);
    });

    for (String parent : expectedParentChildRelations.keySet()) {
        testCase(new TermInSetQuery(UidFieldMapper.NAME, new BytesRef(Uid.createUid(PARENT_TYPE, parent))),
                indexSearcher, child -> {
            assertEquals((long) expectedParentChildRelations.get(parent).v1(), child.getDocCount());
            assertEquals(expectedParentChildRelations.get(parent).v2(),
                    ((InternalMin) child.getAggregations().get("in_child")).getValue(), Double.MIN_VALUE);
        });
    }

    indexReader.close();
    directory.close();
}
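The setupIndex and testCase helpers used above are defined elsewhere in ParentToChildrenAggregatorTests and are not shown on this page. As a rough sketch only (the aggregation names match the assertions above, but CHILD_TYPE is assumed to exist alongside PARENT_TYPE, the parent-join field type setup is omitted, and createParentJoinFieldType is a hypothetical placeholder), such a testCase helper might look like this:

// Sketch, not the verbatim helper: builds a children aggregation with a min
// sub-aggregation named "in_child", runs it against the searcher, and hands the
// single-bucket result to the verifier.
private void testCase(Query query, IndexSearcher indexSearcher,
        Consumer<InternalChildren> verify) throws IOException {
    ChildrenAggregationBuilder aggregationBuilder = new ChildrenAggregationBuilder("_name", CHILD_TYPE);
    aggregationBuilder.subAggregation(new MinAggregationBuilder("in_child").field("number"));

    MappedFieldType parentJoinFieldType = createParentJoinFieldType(); // hypothetical helper
    try (Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, parentJoinFieldType)) {
        aggregator.preCollection();
        indexSearcher.search(query, aggregator);
        aggregator.postCollection();
        verify.accept((InternalChildren) aggregator.buildAggregation(0L));
    }
}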
Use of org.apache.lucene.index.IndexReader in project elasticsearch by elastic.
The class TermsAggregatorTests, method testMixLongAndDouble.
public void testMixLongAndDouble() throws Exception {
    for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) {
        TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG)
                .executionHint(executionMode.toString())
                .field("number")
                .order(Terms.Order.term(true));
        List<InternalAggregation> aggs = new ArrayList<>();

        int numLongs = randomIntBetween(1, 3);
        for (int i = 0; i < numLongs; i++) {
            final Directory dir;
            try (IndexReader reader = createIndexWithLongs()) {
                dir = ((DirectoryReader) reader).directory();
                IndexSearcher searcher = new IndexSearcher(reader);
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
                fieldType.setName("number");
                fieldType.setHasDocValues(true);
                aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher));
            }
            dir.close();
        }

        int numDoubles = randomIntBetween(1, 3);
        for (int i = 0; i < numDoubles; i++) {
            final Directory dir;
            try (IndexReader reader = createIndexWithDoubles()) {
                dir = ((DirectoryReader) reader).directory();
                IndexSearcher searcher = new IndexSearcher(reader);
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
                fieldType.setName("number");
                fieldType.setHasDocValues(true);
                aggs.add(buildInternalAggregation(aggregationBuilder, fieldType, searcher));
            }
            dir.close();
        }

        InternalAggregation.ReduceContext ctx = new InternalAggregation.ReduceContext(
                new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()), null, true);
        for (InternalAggregation internalAgg : aggs) {
            InternalAggregation mergedAggs = internalAgg.doReduce(aggs, ctx);
            assertTrue(mergedAggs instanceof DoubleTerms);
            long expected = numLongs + numDoubles;
            List<Terms.Bucket> buckets = ((DoubleTerms) mergedAggs).getBuckets();
            assertEquals(4, buckets.size());
            assertEquals("1.0", buckets.get(0).getKeyAsString());
            assertEquals(expected, buckets.get(0).getDocCount());
            assertEquals("10.0", buckets.get(1).getKeyAsString());
            assertEquals(expected * 2, buckets.get(1).getDocCount());
            assertEquals("100.0", buckets.get(2).getKeyAsString());
            assertEquals(expected * 2, buckets.get(2).getDocCount());
            assertEquals("1000.0", buckets.get(3).getKeyAsString());
            assertEquals(expected, buckets.get(3).getDocCount());
        }
    }
}
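createIndexWithLongs() and createIndexWithDoubles() are helpers of TermsAggregatorTests that are not shown on this page. For the per-bucket doc counts asserted above to hold, each generated index needs one document containing 1, two containing 10, two containing 100, and one containing 1000. A minimal sketch of the long variant under that assumption (the exact field classes are also an assumption):

// Sketch only (assumed document layout): three documents whose multi-valued
// "number" doc values yield doc counts of 1, 2, 2 and 1 for the terms 1, 10, 100 and 1000.
private IndexReader createIndexWithLongs() throws IOException {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);

    Document document = new Document();
    document.add(new SortedNumericDocValuesField("number", 10));
    document.add(new SortedNumericDocValuesField("number", 1));
    indexWriter.addDocument(document);

    document = new Document();
    document.add(new SortedNumericDocValuesField("number", 100));
    document.add(new SortedNumericDocValuesField("number", 10));
    indexWriter.addDocument(document);

    document = new Document();
    document.add(new SortedNumericDocValuesField("number", 1000));
    document.add(new SortedNumericDocValuesField("number", 100));
    indexWriter.addDocument(document);

    indexWriter.close();
    return DirectoryReader.open(directory);
}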
Use of org.apache.lucene.index.IndexReader in project elasticsearch by elastic.
The class MaxAggregatorTests, method testCase.
private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
        Consumer<InternalMax> verify) throws IOException {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    buildIndex.accept(indexWriter);
    indexWriter.close();

    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

    MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
    MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
    fieldType.setName("number");

    try (MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
        aggregator.preCollection();
        indexSearcher.search(query, aggregator);
        aggregator.postCollection();
        verify.accept((InternalMax) aggregator.buildAggregation(0L));
    }

    indexReader.close();
    directory.close();
}
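An illustrative caller of this helper (not part of the original test class; the document values are made up) could index two long doc values on the "number" field and check that the larger one comes back as the max:

// Hypothetical caller of the testCase helper above; the values are illustrative.
public void testMaxOfTwoLongs() throws IOException {
    testCase(new MatchAllDocsQuery(), iw -> {
        iw.addDocument(Collections.singleton(new SortedNumericDocValuesField("number", 7)));
        iw.addDocument(Collections.singleton(new SortedNumericDocValuesField("number", 3)));
    }, max -> assertEquals(7, max.getValue(), 0d));
}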
Use of org.apache.lucene.index.IndexReader in project elasticsearch by elastic.
The class StatsAggregatorTests, method testCase.
public void testCase(MappedFieldType ft, CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
        Consumer<InternalStats> verify) throws IOException {
    try (Directory directory = newDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
        buildIndex.accept(indexWriter);
        try (IndexReader reader = indexWriter.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);
            StatsAggregationBuilder aggBuilder = new StatsAggregationBuilder("my_agg").field("field");
            InternalStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
            verify.accept(stats);
        }
    }
}
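An illustrative caller (the field type, field values, and expected statistics are made-up examples, not taken from the original class):

// Hypothetical caller of the testCase helper above; the numbers are illustrative.
public void testStatsOverTwoLongs() throws IOException {
    MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
    ft.setName("field");
    testCase(ft, iw -> {
        iw.addDocument(Collections.singleton(new SortedNumericDocValuesField("field", 2)));
        iw.addDocument(Collections.singleton(new SortedNumericDocValuesField("field", 8)));
    }, stats -> {
        assertEquals(2, stats.getCount());
        assertEquals(2, stats.getMin(), 0d);
        assertEquals(8, stats.getMax(), 0d);
        assertEquals(10, stats.getSum(), 0d);
        assertEquals(5, stats.getAvg(), 0d);
    });
}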
Use of org.apache.lucene.index.IndexReader in project elasticsearch by elastic.
The class ValueCountAggregatorTests, method testCase.
private void testCase(Query query, ValueType valueType, CheckedConsumer<RandomIndexWriter, IOException> indexer,
        Consumer<ValueCount> verify) throws IOException {
    try (Directory directory = newDirectory()) {
        try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            indexer.accept(indexWriter);
        }
        try (IndexReader indexReader = DirectoryReader.open(directory)) {
            IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

            MappedFieldType fieldType = createMappedFieldType(valueType);
            fieldType.setName(FIELD_NAME);
            fieldType.setHasDocValues(true);

            ValueCountAggregationBuilder aggregationBuilder = new ValueCountAggregationBuilder("_name", valueType);
            aggregationBuilder.field(FIELD_NAME);

            try (ValueCountAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType)) {
                aggregator.preCollection();
                indexSearcher.search(query, aggregator);
                aggregator.postCollection();
                verify.accept((ValueCount) aggregator.buildAggregation(0L));
            }
        }
    }
}
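An illustrative caller (the value type and documents are assumptions, not from the original class; it assumes createMappedFieldType maps ValueType.NUMERIC to a numeric field type) that counts two numeric doc values under a match-all query:

// Hypothetical caller of the testCase helper above; the documents are illustrative.
public void testCountsNumericValues() throws IOException {
    testCase(new MatchAllDocsQuery(), ValueType.NUMERIC, iw -> {
        iw.addDocument(Collections.singleton(new SortedNumericDocValuesField(FIELD_NAME, 7)));
        iw.addDocument(Collections.singleton(new SortedNumericDocValuesField(FIELD_NAME, 1)));
    }, count -> assertEquals(2L, count.getValue()));
}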