Example usage of org.opensearch.common.CheckedBiConsumer in the OpenSearch project (opensearch-project):
class RequestConvertersTests, method testMultiSearch.
/**
 * Converts a randomized {@link MultiSearchRequest} into an HTTP request via
 * {@link RequestConverters#multiSearch}, asserts the endpoint, method and
 * parameters, then parses the request entity back and checks the parsed
 * search requests match the originals.
 */
public void testMultiSearch() throws IOException {
    int searchCount = randomIntBetween(0, 32);
    MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
    for (int i = 0; i < searchCount; i++) {
        SearchRequest searchRequest = randomSearchRequest(() -> {
            // A simple source suffices here; complex SearchSourceBuilder
            // serialization is covered elsewhere.
            SearchSourceBuilder source = new SearchSourceBuilder();
            source.from(randomInt(10));
            source.size(randomIntBetween(20, 100));
            return source;
        });
        // The msearch API does not support scroll, so clear it.
        searchRequest.scroll((Scroll) null);
        // msearch only honors expand_wildcards, ignore_unavailable and
        // allow_no_indices; reset every other option to the msearch default.
        IndicesOptions generated = searchRequest.indicesOptions();
        IndicesOptions msearchDefault = new MultiSearchRequest().indicesOptions();
        searchRequest.indicesOptions(
            IndicesOptions.fromOptions(
                generated.ignoreUnavailable(),
                generated.allowNoIndices(),
                generated.expandWildcardsOpen(),
                generated.expandWildcardsClosed(),
                msearchDefault.expandWildcardsHidden(),
                msearchDefault.allowAliasesToMultipleIndices(),
                msearchDefault.forbidClosedIndices(),
                msearchDefault.ignoreAliases(),
                msearchDefault.ignoreThrottled()
            )
        );
        multiSearchRequest.add(searchRequest);
    }

    Map<String, String> expectedParams = new HashMap<>();
    expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
    if (randomBoolean()) {
        multiSearchRequest.maxConcurrentSearchRequests(randomIntBetween(1, 8));
        expectedParams.put("max_concurrent_searches", Integer.toString(multiSearchRequest.maxConcurrentSearchRequests()));
    }

    Request request = RequestConverters.multiSearch(multiSearchRequest);
    assertEquals("/_msearch", request.getEndpoint());
    assertEquals(HttpPost.METHOD_NAME, request.getMethod());
    assertEquals(expectedParams, request.getParameters());

    List<SearchRequest> parsed = new ArrayList<>();
    CheckedBiConsumer<SearchRequest, XContentParser, IOException> collector = (req, parser) -> {
        SearchSourceBuilder source = SearchSourceBuilder.fromXContent(parser, false);
        if (source.equals(new SearchSourceBuilder()) == false) {
            req.source(source);
        }
        parsed.add(req);
    };
    MultiSearchRequest.readMultiLineFormat(
        new BytesArray(EntityUtils.toByteArray(request.getEntity())),
        REQUEST_BODY_CONTENT_TYPE.xContent(),
        collector,
        null,
        multiSearchRequest.indicesOptions(),
        null,
        null,
        null,
        xContentRegistry(),
        true,
        deprecationLogger
    );
    assertEquals(parsed, multiSearchRequest.requests());
}
Example usage of org.opensearch.common.CheckedBiConsumer in the OpenSearch project (opensearch-project):
class AutoDateHistogramAggregatorTests, method testAsSubAggInManyBuckets.
/**
 * Runs auto_date_histogram as a sub-aggregation under a numeric histogram
 * that produces 100 parent buckets, then checks each parent bucket's
 * quarterly doc counts and per-quarter max values.
 */
public void testAsSubAggInManyBuckets() throws IOException {
    CheckedBiConsumer<RandomIndexWriter, DateFieldMapper.DateFieldType, IOException> buildIndex = (writer, fieldType) -> {
        long start = fieldType.parse("2020-01-01T00:00:00Z");
        long end = fieldType.parse("2021-01-01T00:00:00Z");
        long oneHour = fieldType.resolution().convert(Instant.ofEpochSecond(TimeUnit.HOURS.toSeconds(1)));
        List<List<IndexableField>> docs = new ArrayList<>();
        int counter = 0;
        for (long timestamp = start; timestamp < end; timestamp += oneHour) {
            docs.add(
                org.opensearch.common.collect.List.of(
                    new SortedNumericDocValuesField(AGGREGABLE_DATE, timestamp),
                    new SortedNumericDocValuesField("n", counter % 100)
                )
            );
            counter++;
        }
        /*
         * Add every document in one call so they all land on the same
         * shard, keeping the reduce behavior consistent.
         */
        writer.addDocuments(docs);
    };
    AggregationBuilder builder = new HistogramAggregationBuilder("n").field("n")
        .interval(1)
        .subAggregation(
            new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE)
                .setNumBuckets(4)
                .subAggregation(new MaxAggregationBuilder("max").field("n"))
        );
    asSubAggTestCase(builder, buildIndex, (InternalHistogram histo) -> {
        assertThat(histo.getBuckets(), hasSize(100));
        for (int n = 0; n < 100; n++) {
            InternalHistogram.Bucket bucket = histo.getBuckets().get(n);
            InternalAutoDateHistogram dh = bucket.getAggregations().get("dh");
            // Quarterly buckets; counts vary by one depending on which hours
            // of the quarter fall into this parent bucket.
            assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-01-01T00:00:00.000Z"), either(equalTo(21)).or(equalTo(22))));
            assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-04-01T00:00:00.000Z"), either(equalTo(21)).or(equalTo(22))));
            assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-07-01T00:00:00.000Z"), either(equalTo(22)).or(equalTo(23))));
            assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-10-01T00:00:00.000Z"), either(equalTo(22)).or(equalTo(23))));
            // Every doc in parent bucket n has field value n, so the max in
            // each quarter is n itself.
            Map<String, Double> expectedMax = new TreeMap<>();
            for (String quarter : new String[] {
                "2020-01-01T00:00:00.000Z",
                "2020-04-01T00:00:00.000Z",
                "2020-07-01T00:00:00.000Z",
                "2020-10-01T00:00:00.000Z" }) {
                expectedMax.put(quarter, (double) n);
            }
            assertThat(maxAsMap(dh), equalTo(expectedMax));
        }
    });
}
Example usage of org.opensearch.common.CheckedBiConsumer in the OpenSearch project (opensearch-project):
class AutoDateHistogramAggregatorTests, method testAsSubAggWithIncreasedRounding.
/**
 * Runs auto_date_histogram under a terms aggregation where buckets cover
 * different spans of the year, so the sub-histogram must round up to
 * quarters; verifies per-term doc counts and max values.
 */
public void testAsSubAggWithIncreasedRounding() throws IOException {
    CheckedBiConsumer<RandomIndexWriter, DateFieldMapper.DateFieldType, IOException> buildIndex = (writer, fieldType) -> {
        long start = fieldType.parse("2020-01-01T00:00:00Z");
        long end = fieldType.parse("2021-01-01T00:00:00Z");
        long switchToC = fieldType.parse("2020-07-01T00:00Z");
        long oneHour = fieldType.resolution().convert(Instant.ofEpochSecond(TimeUnit.HOURS.toSeconds(1)));
        List<List<IndexableField>> docs = new ArrayList<>();
        BytesRef termA = new BytesRef("a");
        BytesRef termB = new BytesRef("b");
        BytesRef termC = new BytesRef("c");
        int counter = 0;
        for (long timestamp = start; timestamp < end; timestamp += oneHour) {
            docs.add(
                org.opensearch.common.collect.List.of(
                    new SortedNumericDocValuesField(AGGREGABLE_DATE, timestamp),
                    // every doc carries "a"; the second tag flips from "b"
                    // to "c" halfway through the year
                    new SortedSetDocValuesField("k1", termA),
                    new SortedSetDocValuesField("k1", timestamp < switchToC ? termB : termC),
                    new SortedNumericDocValuesField("n", counter++)
                )
            );
        }
        /*
         * Add every document in one call so they all land on the same
         * shard, keeping the reduce behavior consistent.
         */
        writer.addDocuments(docs);
    };
    AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1")
        .subAggregation(
            new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE)
                .setNumBuckets(4)
                .subAggregation(new MaxAggregationBuilder("max").field("n"))
        );
    asSubAggTestCase(builder, buildIndex, (StringTerms terms) -> {
        // "a" spans the whole year: four quarter buckets.
        StringTerms.Bucket bucketA = terms.getBucketByKey("a");
        InternalAutoDateHistogram histoA = bucketA.getAggregations().get("dh");
        Map<String, Integer> countsA = new TreeMap<>();
        countsA.put("2020-01-01T00:00:00.000Z", 2184);
        countsA.put("2020-04-01T00:00:00.000Z", 2184);
        countsA.put("2020-07-01T00:00:00.000Z", 2208);
        countsA.put("2020-10-01T00:00:00.000Z", 2208);
        assertThat(bucketCountsAsMap(histoA), equalTo(countsA));
        Map<String, Double> maxA = new TreeMap<>();
        maxA.put("2020-01-01T00:00:00.000Z", 2183.0);
        maxA.put("2020-04-01T00:00:00.000Z", 4367.0);
        maxA.put("2020-07-01T00:00:00.000Z", 6575.0);
        maxA.put("2020-10-01T00:00:00.000Z", 8783.0);
        assertThat(maxAsMap(histoA), equalTo(maxA));

        // "b" only covers the first half of the year.
        StringTerms.Bucket bucketB = terms.getBucketByKey("b");
        InternalAutoDateHistogram histoB = bucketB.getAggregations().get("dh");
        Map<String, Integer> countsB = new TreeMap<>();
        countsB.put("2020-01-01T00:00:00.000Z", 2184);
        countsB.put("2020-04-01T00:00:00.000Z", 2184);
        assertThat(bucketCountsAsMap(histoB), equalTo(countsB));
        Map<String, Double> maxB = new TreeMap<>();
        maxB.put("2020-01-01T00:00:00.000Z", 2183.0);
        maxB.put("2020-04-01T00:00:00.000Z", 4367.0);
        assertThat(maxAsMap(histoB), equalTo(maxB));

        // "c" only covers the second half of the year.
        StringTerms.Bucket bucketC = terms.getBucketByKey("c");
        InternalAutoDateHistogram histoC = bucketC.getAggregations().get("dh");
        Map<String, Integer> countsC = new TreeMap<>();
        countsC.put("2020-07-01T00:00:00.000Z", 2208);
        countsC.put("2020-10-01T00:00:00.000Z", 2208);
        assertThat(bucketCountsAsMap(histoC), equalTo(countsC));
        Map<String, Double> maxC = new TreeMap<>();
        maxC.put("2020-07-01T00:00:00.000Z", 6575.0);
        maxC.put("2020-10-01T00:00:00.000Z", 8783.0);
        assertThat(maxAsMap(histoC), equalTo(maxC));
    });
}
Example usage of org.opensearch.common.CheckedBiConsumer in the OpenSearch project (opensearch-project):
class DateHistogramAggregatorTestCase, method asSubAggTestCase.
/**
 * Indexes a small fixed set of dated documents carrying keyword fields
 * {@code k1}/{@code k2} and numeric field {@code n}, then runs
 * {@code builder} over them and hands the reduced result to {@code verify}.
 */
protected final <R extends InternalAggregation> void asSubAggTestCase(AggregationBuilder builder, Consumer<R> verify) throws IOException {
    CheckedBiConsumer<RandomIndexWriter, DateFieldMapper.DateFieldType, IOException> buildIndex = (iw, dft) -> {
        iw.addDocument(testDoc(dft, "2020-02-01T00:00:00Z", "a", "a", 1));
        iw.addDocument(testDoc(dft, "2020-03-01T00:00:00Z", "a", "a", 2));
        iw.addDocument(testDoc(dft, "2021-02-01T00:00:00Z", "a", "a", 3));
        iw.addDocument(testDoc(dft, "2021-03-01T00:00:00Z", "a", "b", 4));
        iw.addDocument(testDoc(dft, "2020-02-01T00:00:00Z", "b", "b", 5));
    };
    asSubAggTestCase(builder, buildIndex, verify);
}

// Builds one indexable document: a parsed date plus keyword tags k1/k2 and
// the numeric field n.
private static List<IndexableField> testDoc(DateFieldMapper.DateFieldType dft, String date, String k1, String k2, long n) {
    return org.opensearch.common.collect.List.of(
        new SortedNumericDocValuesField(AGGREGABLE_DATE, dft.parse(date)),
        new SortedSetDocValuesField("k1", new BytesRef(k1)),
        new SortedSetDocValuesField("k2", new BytesRef(k2)),
        new SortedNumericDocValuesField("n", n)
    );
}
Example usage of org.opensearch.common.CheckedBiConsumer in the OpenSearch project (opensearch-project):
class MultiSearchRequestTests, method testMultiLineSerialization.
/**
 * Round-trips random {@link MultiSearchRequest}s through the multi-line
 * (ndjson-style) wire format and asserts the parsed request equals the
 * original, for each content type that supports stream separators.
 */
public void testMultiLineSerialization() throws IOException {
    final int iterations = 16;
    for (int iter = 0; iter < iterations; iter++) {
        // Only JSON and SMILE support the stream separator byte.
        XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
        MultiSearchRequest original = createMultiSearchRequest();
        byte[] serialized = MultiSearchRequest.writeMultiLineFormat(original, xContentType.xContent());
        MultiSearchRequest parsed = new MultiSearchRequest();
        CheckedBiConsumer<SearchRequest, XContentParser, IOException> collector = (req, parser) -> {
            SearchSourceBuilder source = SearchSourceBuilder.fromXContent(parser, false);
            if (source.equals(new SearchSourceBuilder()) == false) {
                req.source(source);
            }
            parsed.add(req);
        };
        MultiSearchRequest.readMultiLineFormat(
            new BytesArray(serialized),
            xContentType.xContent(),
            collector,
            null,
            null,
            null,
            null,
            null,
            xContentRegistry(),
            true,
            deprecationLogger
        );
        assertEquals(original, parsed);
    }
}
Aggregations