use of org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHits in project metron by apache.
the class ElasticsearchRequestSubmitterTest method searchShouldHandleShardFailure.
@Test
public void searchShouldHandleShardFailure() throws InvalidSearchException, IOException {
    // mocks
    SearchResponse response = mock(SearchResponse.class);
    SearchRequest request = new SearchRequest();
    ShardSearchFailure fail = mock(ShardSearchFailure.class);
    SearchShardTarget target = new SearchShardTarget("node1", mock(Index.class), 1, "metron");
    // response will have status of OK
    when(response.status()).thenReturn(RestStatus.OK);
    // response will indicate 1 search hit
    SearchHits hits = mock(SearchHits.class);
    when(hits.getTotalHits()).thenReturn(1L);
    // the response will report shard failures
    when(response.getFailedShards()).thenReturn(1);
    when(response.getTotalShards()).thenReturn(2);
    when(response.getHits()).thenReturn(hits);
    // the response will return the failures
    ShardSearchFailure[] failures = { fail };
    when(response.getShardFailures()).thenReturn(failures);
    // shard failure needs to report the node
    when(fail.shard()).thenReturn(target);
    // shard failure needs to report details of the failure
    when(fail.index()).thenReturn("bro_index_2017-10-11");
    when(fail.shardId()).thenReturn(1);
    // search should succeed, even with failed shards
    ElasticsearchRequestSubmitter submitter = setup(response);
    SearchResponse actual = submitter.submitSearch(request);
    assertNotNull(actual);
}
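The setup(response) and submitSearch(request) helpers are not shown above. As a hedged illustration of the behaviour the test asserts, here is a minimal sketch of treating partial shard failures as non-fatal; it is not Metron's actual ElasticsearchRequestSubmitter, and the method name and LOG field are assumptions.

// Hypothetical sketch only; not Metron's implementation. LOG is an assumed SLF4J logger.
private SearchResponse checkShardFailures(SearchResponse response) throws InvalidSearchException {
    if (response.status() != RestStatus.OK) {
        // a non-OK status is treated as a hard failure
        throw new InvalidSearchException("Search failed with status " + response.status());
    }
    if (response.getFailedShards() > 0) {
        // partial shard failures are logged, but the response is still returned to the caller
        for (ShardSearchFailure failure : response.getShardFailures()) {
            LOG.warn("Shard failure: index={}, shardId={}, reason={}",
                    failure.index(), failure.shardId(), failure.reason());
        }
    }
    return response;
}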
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHits in project xmall by Exrick.
the class SearchServiceImpl method search.
/**
 * Using QueryBuilder:
 * termQuery("key", obj)                          exact match on a single value
 * termsQuery("key", obj1, obj2..)                match any one of several values
 * matchQuery("key", Obj)                         single-field match; the field name does not support wildcards, prefix queries have extra behaviour
 * multiMatchQuery("text", "field1", "field2"..)  match against multiple fields; wildcards are allowed in the field names
 */
@Override
public SearchResult search(String key, int page, int size, String sort, int priceGt, int priceLte) {
    try {
        Settings settings = Settings.builder().put("cluster.name", ES_CLUSTER_NAME).build();
        TransportClient client = new PreBuiltTransportClient(settings)
                .addTransportAddress(new TransportAddress(InetAddress.getByName(ES_CONNECT_IP), 9300));
        SearchResult searchResult = new SearchResult();
        // query condition: single-field search on the product name
        QueryBuilder qb = matchQuery("productName", key);
        // pagination
        if (page <= 0) {
            page = 1;
        }
        int start = (page - 1) * size;
        // highlighting
        HighlightBuilder hiBuilder = new HighlightBuilder();
        hiBuilder.preTags("<a style=\"color: #e4393c\">");
        hiBuilder.postTags("</a>");
        hiBuilder.field("productName");
        // build the search request
        SearchRequestBuilder requestBuilder = client.prepareSearch(ITEM_INDEX)
                .setTypes(ITEM_TYPE)
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                // query
                .setQuery(qb)
                // start offset and number of results per page
                .setFrom(start)
                .setSize(size)
                .setExplain(true)
                // highlighting
                .highlighter(hiBuilder);
        // price range filter, applied only when both bounds are non-negative
        if (priceGt >= 0 && priceLte >= 0) {
            requestBuilder.setPostFilter(QueryBuilders.rangeQuery("salePrice").gt(priceGt).lt(priceLte));
        }
        // optional price sort: "1" ascending, "-1" descending
        if ("1".equals(sort)) {
            requestBuilder.addSort("salePrice", SortOrder.ASC);
        } else if ("-1".equals(sort)) {
            requestBuilder.addSort("salePrice", SortOrder.DESC);
        }
        // execute the search
        SearchResponse searchResponse = requestBuilder.get();
        SearchHits hits = searchResponse.getHits();
        // total number of matching records
        long totalHits = hits.getTotalHits();
        searchResult.setRecordCount(totalHits);
        // total number of pages
        int totalPage = (int) (totalHits / size);
        if (totalHits % size != 0) {
            totalPage++;
        }
        searchResult.setTotalPages(totalPage);
        List<SearchItem> list = new ArrayList<>();
        if (totalHits > 0) {
            for (SearchHit hit : hits) {
                // replace the product name with its highlighted fragment
                SearchItem searchItem = new Gson().fromJson(hit.getSourceAsString(), SearchItem.class);
                String productName = hit.getHighlightFields().get("productName").getFragments()[0].toString();
                searchItem.setProductName(productName);
                list.add(searchItem);
            }
        }
        searchResult.setItemList(list);
        return searchResult;
    } catch (Exception e) {
        e.printStackTrace();
        throw new XmallException("Failed to query the Elasticsearch index");
    }
}
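Per the Javadoc above, other QueryBuilders factories can be swapped in for matchQuery. A minimal sketch follows; the field names ("productId", "subTitle") and sizes are illustrative assumptions, not xmall's schema.

// Illustrative only; field names are assumptions, not taken from xmall.
QueryBuilder byIds = QueryBuilders.termsQuery("productId", 101L, 102L, 103L);          // match any of several exact values
QueryBuilder byText = QueryBuilders.multiMatchQuery(key, "productName", "subTitle");   // match the keyword across several fields
QueryBuilder combined = QueryBuilders.boolQuery()
        .must(byText)                                                                   // must match the text query
        .filter(QueryBuilders.rangeQuery("salePrice").gte(0));                          // and pass the price filter
SearchResponse resp = client.prepareSearch(ITEM_INDEX)
        .setTypes(ITEM_TYPE)
        .setQuery(combined)
        .setFrom(0)
        .setSize(20)
        .get();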
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHits in project topcom-cloud by 545314690.
the class ElasticSearchService method search.
/**
 * Description: run a search query.
 * @param index       index name
 * @param type        type name
 * @param constructor query constructor
 */
public Page search(String index, String type, ESQueryBuilderConstructor constructor) {
    List<Map<String, Object>> result = new ArrayList<>();
    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(index).setTypes(type);
    // sorting
    if (StringUtils.isNotEmpty(constructor.getAsc())) {
        searchRequestBuilder.addSort(constructor.getAsc(), SortOrder.ASC);
    }
    if (StringUtils.isNotEmpty(constructor.getDesc())) {
        searchRequestBuilder.addSort(constructor.getDesc(), SortOrder.DESC);
    }
    // set the query body
    searchRequestBuilder.setQuery(constructor.listBuilders());
    // number of items to return, clamped to [0, MAX]
    int size = constructor.getSize();
    if (size < 0) {
        size = 0;
    }
    if (size > MAX) {
        size = MAX;
    }
    searchRequestBuilder.setSize(size);
    searchRequestBuilder.setFrom(constructor.getFrom() < 0 ? 0 : constructor.getFrom());
    SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
    SearchHits hits = searchResponse.getHits();
    SearchHit[] searchHits = hits.getHits();
    for (SearchHit hit : searchHits) {
        // attach the document id to its source map
        Map<String, Object> source = hit.getSource();
        source.put("_id", hit.getId());
        result.add(source);
    }
    return new Page(result, hits.getTotalHits());
}
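A hedged usage sketch of this method follows. The ESQueryBuilderConstructor setters shown here are assumed to mirror the getters used above (setSize, setFrom, setAsc); they, the index/type names, and the sort field are assumptions about the topcom-cloud API, not verified against the project.

// Usage sketch only; setter names and field names are assumptions, not confirmed in topcom-cloud.
ESQueryBuilderConstructor constructor = new ESQueryBuilderConstructor();
constructor.setSize(20);            // at most 20 hits (search() also caps this at MAX)
constructor.setFrom(0);             // offset of the first hit
constructor.setAsc("publishTime");  // ascending sort on an assumed field name

Page page = elasticSearchService.search("news", "article", constructor);
// each entry in the returned Page is the document source map plus its "_id", as populated in search()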
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHits in project elasticsearch by elastic.
the class SearchPhaseController method merge.
/**
* Enriches search hits and completion suggestion hits from <code>sortedDocs</code> using <code>fetchResultsArr</code>,
* merges suggestions, aggregations and profile results
*
* Expects sortedDocs to have top search docs across all shards, optionally followed by top suggest docs for each named
* completion suggestion ordered by suggestion name
*/
public InternalSearchResponse merge(boolean ignoreFrom, ScoreDoc[] sortedDocs, ReducedQueryPhase reducedQueryPhase, AtomicArray<? extends QuerySearchResultProvider> fetchResultsArr) {
    if (reducedQueryPhase.isEmpty()) {
        return InternalSearchResponse.empty();
    }
    List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> fetchResults = fetchResultsArr.asList();
    SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, sortedDocs, fetchResultsArr);
    if (reducedQueryPhase.suggest != null) {
        if (!fetchResults.isEmpty()) {
            int currentOffset = hits.getHits().length;
            for (CompletionSuggestion suggestion : reducedQueryPhase.suggest.filter(CompletionSuggestion.class)) {
                final List<CompletionSuggestion.Entry.Option> suggestionOptions = suggestion.getOptions();
                for (int scoreDocIndex = currentOffset; scoreDocIndex < currentOffset + suggestionOptions.size(); scoreDocIndex++) {
                    ScoreDoc shardDoc = sortedDocs[scoreDocIndex];
                    QuerySearchResultProvider searchResultProvider = fetchResultsArr.get(shardDoc.shardIndex);
                    if (searchResultProvider == null) {
                        continue;
                    }
                    FetchSearchResult fetchResult = searchResultProvider.fetchResult();
                    int fetchResultIndex = fetchResult.counterGetAndIncrement();
                    if (fetchResultIndex < fetchResult.hits().internalHits().length) {
                        SearchHit hit = fetchResult.hits().internalHits()[fetchResultIndex];
                        CompletionSuggestion.Entry.Option suggestOption = suggestionOptions.get(scoreDocIndex - currentOffset);
                        hit.score(shardDoc.score);
                        hit.shard(fetchResult.shardTarget());
                        suggestOption.setHit(hit);
                    }
                }
                currentOffset += suggestionOptions.size();
            }
            assert currentOffset == sortedDocs.length : "expected no more score doc slices";
        }
    }
    return reducedQueryPhase.buildResponse(hits);
}
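The loops above rely on the documented layout of sortedDocs: the top search hits occupy the first hits.getHits().length slots, followed by each named completion suggestion's options in suggestion-name order. A small standalone sketch of that index arithmetic, with made-up numbers:

// Standalone illustration of the offset bookkeeping in merge(); the counts are made up.
int topHits = 10;                        // corresponds to hits.getHits().length
int[] optionsPerSuggestion = {3, 5};     // option counts of each named completion suggestion, in order
int currentOffset = topHits;             // suggestion options start right after the top hits
for (int options : optionsPerSuggestion) {
    for (int scoreDocIndex = currentOffset; scoreDocIndex < currentOffset + options; scoreDocIndex++) {
        // sortedDocs[scoreDocIndex] maps to option (scoreDocIndex - currentOffset) of this suggestion
    }
    currentOffset += options;            // same invariant as the assert: ends at sortedDocs.length
}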
use of org.graylog.shaded.elasticsearch7.org.elasticsearch.search.SearchHits in project elasticsearch by elastic.
the class ExpandSearchPhase method run.
@Override
public void run() throws IOException {
    if (isCollapseRequest()) {
        SearchRequest searchRequest = context.getRequest();
        CollapseBuilder collapseBuilder = searchRequest.source().collapse();
        MultiSearchRequest multiRequest = new MultiSearchRequest();
        if (collapseBuilder.getMaxConcurrentGroupRequests() > 0) {
            multiRequest.maxConcurrentSearchRequests(collapseBuilder.getMaxConcurrentGroupRequests());
        }
        for (SearchHit hit : searchResponse.getHits()) {
            BoolQueryBuilder groupQuery = new BoolQueryBuilder();
            Object collapseValue = hit.field(collapseBuilder.getField()).getValue();
            if (collapseValue != null) {
                groupQuery.filter(QueryBuilders.matchQuery(collapseBuilder.getField(), collapseValue));
            } else {
                groupQuery.mustNot(QueryBuilders.existsQuery(collapseBuilder.getField()));
            }
            QueryBuilder origQuery = searchRequest.source().query();
            if (origQuery != null) {
                groupQuery.must(origQuery);
            }
            SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(collapseBuilder.getInnerHit()).query(groupQuery);
            SearchRequest groupRequest = new SearchRequest(searchRequest.indices()).types(searchRequest.types()).source(sourceBuilder);
            multiRequest.add(groupRequest);
        }
        context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(), ActionListener.wrap(response -> {
            Iterator<MultiSearchResponse.Item> it = response.iterator();
            for (SearchHit hit : searchResponse.getHits()) {
                MultiSearchResponse.Item item = it.next();
                if (item.isFailure()) {
                    context.onPhaseFailure(this, "failed to expand hits", item.getFailure());
                    return;
                }
                SearchHits innerHits = item.getResponse().getHits();
                if (hit.getInnerHits() == null) {
                    hit.setInnerHits(new HashMap<>(1));
                }
                hit.getInnerHits().put(collapseBuilder.getInnerHit().getName(), innerHits);
            }
            context.executeNextPhase(this, nextPhaseFactory.apply(searchResponse));
        }, context::onFailure));
    } else {
        context.executeNextPhase(this, nextPhaseFactory.apply(searchResponse));
    }
}
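For each collapsed top hit, the phase issues one inner search whose query is the original request query constrained to that hit's collapse value. A hedged sketch of the group query built for a single hit; the collapse field "user_id", its value, and the sizes are illustrative assumptions.

// Illustrative only: the kind of group query ExpandSearchPhase builds for one collapsed hit,
// here for a hypothetical collapse field "user_id" with value 42.
QueryBuilder origQuery = QueryBuilders.matchQuery("message", "error");    // stands in for the original request's query
BoolQueryBuilder groupQuery = new BoolQueryBuilder()
        .filter(QueryBuilders.matchQuery("user_id", 42))                   // restrict to this hit's group
        .must(origQuery);                                                  // keep the original constraints
// hits whose collapse field is missing are instead matched with:
//     groupQuery.mustNot(QueryBuilders.existsQuery("user_id"));
SearchSourceBuilder expandSource = new SearchSourceBuilder()
        .query(groupQuery)
        .from(0)
        .size(3);                                                          // e.g. return 3 inner hits per group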