Example use of org.elasticsearch.search.aggregations.Aggregations in the FINRAOS herd project: the getAggregation method of the ElasticsearchHelper class.
/**
 * Looks up the terms aggregation with the given name in a search response, guaranteeing a non-null result.
 *
 * @param searchResponse the response of the search request
 * @param aggregationName the name of the aggregation to look up
 *
 * @return the aggregation associated with the specified name, never null
 *
 * @throws IllegalStateException if the search response does not contain an aggregation with that name
 */
public Terms getAggregation(SearchResponse searchResponse, String aggregationName) {
    // Pull the named aggregation straight out of the response's aggregations.
    final Terms result = getAggregationsFromSearchResponse(searchResponse).get(aggregationName);

    // Happy path: the aggregation is present.
    if (result != null) {
        return result;
    }

    // Log the full search response contents so the failure is diagnosable, then fail fast.
    LOGGER.error("Failed to retrieve \"{}\" aggregation from the search response. searchResponse={}", aggregationName, jsonHelper.objectToJson(searchResponse));
    throw new IllegalStateException("Invalid search result.");
}
Example use of org.elasticsearch.search.aggregations.Aggregations in the Palantir stash-codesearch-plugin project: the doGet method of the SearchServlet class.
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    // Make sure user is logged in; anonymous users are redirected to the login page
    // with a return URI pointing back at this exact search request (query string included).
    try {
        validationService.validateAuthenticated();
    } catch (AuthorisationException notLoggedInException) {
        try {
            resp.sendRedirect(propertiesService.getLoginUri(URI.create(
                req.getRequestURL() + (req.getQueryString() == null ? "" : "?" + req.getQueryString())))
                .toASCIIString());
        } catch (Exception e) {
            log.error("Unable to redirect unauthenticated user to login page", e);
        }
        return;
    }

    // Query and parse settings
    SearchParams params = SearchParams.getParams(req,
        DateTimeZone.forTimeZone(propertiesService.getDefaultTimeZone()));
    GlobalSettings globalSettings = settingsManager.getGlobalSettings();

    // Normalize the comma-separated "no highlight" extension list into a trimmed, lower-cased set.
    ImmutableSet.Builder<String> noHighlightBuilder = new ImmutableSet.Builder<String>();
    for (String extension : globalSettings.getNoHighlightExtensions().split(",")) {
        extension = extension.trim().toLowerCase();
        if (!extension.isEmpty()) {
            noHighlightBuilder.add(extension);
        }
    }
    ImmutableSet<String> noHighlight = noHighlightBuilder.build();

    int maxPreviewLines = globalSettings.getMaxPreviewLines();
    int maxMatchLines = globalSettings.getMaxMatchLines();
    int maxFragments = globalSettings.getMaxFragments();
    int pageSize = globalSettings.getPageSize();
    TimeValue searchTimeout = new TimeValue(globalSettings.getSearchTimeout());
    float commitHashBoost = (float) globalSettings.getCommitHashBoost();
    // BUG FIX: this previously read getCommitBodyBoost() (copy/paste error), which made the
    // configured commit-subject boost setting silently track the body boost instead.
    float commitSubjectBoost = (float) globalSettings.getCommitSubjectBoost();
    float commitBodyBoost = (float) globalSettings.getCommitBodyBoost();
    float fileNameBoost = (float) globalSettings.getFileNameBoost();

    // Execute ES query
    int pages = 0;
    long totalHits = 0;
    long searchTime = 0;
    SearchHit[] currentHits = {};
    String error = "";
    ArrayList<ImmutableMap<String, Object>> hitArray =
        new ArrayList<ImmutableMap<String, Object>>(currentHits.length);
    ImmutableMap<String, Object> statistics = ImmutableMap.of();
    if (params.doSearch) {
        // Repo map is null iff user is a system administrator (don't need to validate permissions).
        ImmutableMap<String, Repository> repoMap;
        try {
            validationService.validateForGlobal(Permission.SYS_ADMIN);
            repoMap = null;
        } catch (AuthorisationException e) {
            repoMap = repositoryServiceManager.getRepositoryMap(validationService);
            if (repoMap.isEmpty()) {
                error = "You do not have permissions to access any repositories";
            }
        }

        int startIndex = params.page * pageSize;
        SearchRequestBuilder esReq = es.getClient().prepareSearch(ES_SEARCHALIAS)
            .setFrom(startIndex)
            .setSize(pageSize)
            .setTimeout(searchTimeout)
            .setFetchSource(true);

        if (error != null && !error.isEmpty()) {
            log.warn("Not performing search due to error {}", error);
        } else {
            // Build query source and perform query
            QueryBuilder query = matchAllQuery();
            if (params.searchString != null && !params.searchString.isEmpty()) {
                QueryStringQueryBuilder queryStringQuery = queryString(params.searchString)
                    .analyzeWildcard(true)
                    .lenient(true)
                    .defaultOperator(QueryStringQueryBuilder.Operator.AND);
                if (params.searchCommits) {
                    queryStringQuery
                        .field("commit.subject", commitSubjectBoost)
                        .field("commit.hash", commitHashBoost)
                        .field("commit.body", commitBodyBoost);
                }
                if (params.searchFilenames) {
                    queryStringQuery.field("file.path", fileNameBoost);
                }
                if (params.searchCode) {
                    queryStringQuery.field("file.contents", 1);
                }
                query = queryStringQuery;
            }

            // Combine ACL, ref, project, repository, extension and author filters (all must match)
            // with the committed-date range filter.
            FilterBuilder filter = andFilter(
                boolFilter().must(
                    repoMap == null ? matchAllFilter() : sf.aclFilter(repoMap),
                    sf.refFilter(params.refNames.split(",")),
                    sf.projectFilter(params.projectKeys.split(",")),
                    sf.repositoryFilter(params.repoNames.split(",")),
                    sf.extensionFilter(params.extensions.split(",")),
                    sf.authorFilter(params.authorNames.split(","))),
                sf.dateRangeFilter(params.committedAfter, params.committedBefore));
            FilteredQueryBuilder finalQuery = filteredQuery(query, filter);
            // \u0001 markers delimit highlighted fragments; they are post-processed when rendering.
            esReq.setQuery(finalQuery)
                .setHighlighterPreTags("\u0001")
                .setHighlighterPostTags("\u0001")
                .addHighlightedField("contents", 1, maxFragments);

            // Restrict the document types searched to what the user asked for.
            String[] typeArray = {};
            if (params.searchCommits) {
                if (params.searchFilenames || params.searchCode) {
                    typeArray = new String[] { "commit", "file" };
                } else {
                    typeArray = new String[] { "commit" };
                }
            } else if (params.searchFilenames || params.searchCode) {
                typeArray = new String[] { "file" };
            }
            esReq.setTypes(typeArray);

            // Build aggregations if statistics were requested
            if (params.showStatistics) {
                esReq.addAggregation(cardinality("authorCardinality").field("authoremail.untouched").precisionThreshold(1000))
                    .addAggregation(terms("authorRanking").field("authoremail.untouched").size(25))
                    .addAggregation(percentiles("charcountPercentiles").field("charcount").percentiles(PERCENTILES))
                    .addAggregation(extendedStats("charcountStats").field("charcount"))
                    .addAggregation(filter("commitCount").filter(typeFilter("commit")))
                    .addAggregation(cardinality("extensionCardinality").field("extension").precisionThreshold(1000))
                    .addAggregation(terms("extensionRanking").field("extension").size(25))
                    .addAggregation(percentiles("linecountPercentiles").field("linecount").percentiles(PERCENTILES))
                    .addAggregation(extendedStats("linecountStats").field("linecount"));
            }

            SearchResponse esResp = null;
            try {
                esResp = esReq.get();
            } catch (SearchPhaseExecutionException e) {
                // Typically a malformed user query; surface a friendly hint rather than a stack trace.
                log.warn("Query failure", e);
                error = "Make sure your query conforms to the Lucene/Elasticsearch query string syntax.";
            }

            if (esResp != null) {
                SearchHits esHits = esResp.getHits();
                totalHits = esHits.getTotalHits();
                // Ceiling division, clamped so the page count can't overflow an int.
                pages = (int) Math.min(Integer.MAX_VALUE, (totalHits + pageSize - 1) / pageSize);
                currentHits = esHits.getHits();
                searchTime = esResp.getTookInMillis();
                for (ShardSearchFailure failure : esResp.getShardFailures()) {
                    log.warn("Shard failure {}", failure.reason());
                    if (error == null || error.isEmpty()) {
                        error = "Shard failure: " + failure.reason();
                    }
                }

                // Assemble the statistics map from the requested aggregations, if present.
                Aggregations aggs = esResp.getAggregations();
                if (params.showStatistics && aggs != null && !aggs.asList().isEmpty()) {
                    Cardinality authorCardinality = aggs.get("authorCardinality");
                    Terms authorRanking = aggs.get("authorRanking");
                    Percentiles charcountPercentiles = aggs.get("charcountPercentiles");
                    Filter commitCount = aggs.get("commitCount");
                    ExtendedStats charcountStats = aggs.get("charcountStats");
                    Cardinality extensionCardinality = aggs.get("extensionCardinality");
                    Terms extensionRanking = aggs.get("extensionRanking");
                    Percentiles linecountPercentiles = aggs.get("linecountPercentiles");
                    ExtendedStats linecountStats = aggs.get("linecountStats");
                    statistics = new ImmutableMap.Builder<String, Object>()
                        .put("authorCardinality", authorCardinality.getValue())
                        .put("authorRanking", getSoyRankingList(authorRanking, commitCount.getDocCount()))
                        .put("charcount", new ImmutableMap.Builder<String, Object>()
                            .put("average", charcountStats.getAvg())
                            .put("max", Math.round(charcountStats.getMax()))
                            .put("min", Math.round(charcountStats.getMin()))
                            .put("percentiles", getSoyPercentileList(charcountPercentiles, PERCENTILES))
                            .put("sum", Math.round(charcountStats.getSum()))
                            .build())
                        .put("commitcount", commitCount.getDocCount())
                        .put("extensionCardinality", extensionCardinality.getValue())
                        .put("extensionRanking", getSoyRankingList(extensionRanking, charcountStats.getCount()))
                        .put("filecount", charcountStats.getCount())
                        .put("linecount", new ImmutableMap.Builder<String, Object>()
                            .put("average", linecountStats.getAvg())
                            .put("max", Math.round(linecountStats.getMax()))
                            .put("min", Math.round(linecountStats.getMin()))
                            .put("percentiles", getSoyPercentileList(linecountPercentiles, PERCENTILES))
                            .put("sum", Math.round(linecountStats.getSum()))
                            .build())
                        .build();
                }
            }
        }

        // Iterate through current page of search hits
        for (SearchHit hit : currentHits) {
            ImmutableMap<String, Object> hitData =
                searchHitToDataMap(hit, repoMap, maxPreviewLines, maxMatchLines, noHighlight);
            if (hitData != null) {
                hitArray.add(hitData);
            }
        }
    }

    // Render page
    pbs.assembler().resources().requireContext("com.atlassian.auiplugin:aui-date-picker");
    pbs.assembler().resources().requireContext("com.atlassian.auiplugin:aui-experimental-tooltips");
    pbs.assembler().resources().requireWebResource("com.palantir.stash.stash-code-search:scs-resources");
    resp.setContentType("text/html");
    try {
        // Strip any existing page parameter so pagination links can append their own.
        String queryString = req.getQueryString();
        String fullUri = req.getRequestURI() + "?"
            + (queryString == null ? "" : queryString.replaceAll("&?page=\\d*", ""));
        ImmutableMap<String, Object> data = new ImmutableMap.Builder<String, Object>()
            .put("pages", pages)
            .put("currentPage", params.page)
            .put("prevParams", params.soyParams)
            .put("doSearch", params.doSearch)
            .put("totalHits", totalHits)
            .put("hitArray", hitArray)
            .put("statistics", statistics)
            .put("error", error)
            .put("fullUri", fullUri)
            .put("baseUrl", propertiesService.getBaseUrl().toASCIIString())
            .put("resultFrom", Math.min(totalHits, params.page * pageSize + 1))
            .put("resultTo", Math.min(totalHits, (params.page + 1) * pageSize))
            .put("searchTime", searchTime)
            .build();
        soyTemplateRenderer.render(resp.getWriter(),
            "com.palantir.stash.stash-code-search:codesearch-soy",
            "plugin.page.codesearch.searchPage", data);
    } catch (Exception e) {
        log.error("Error rendering Soy template", e);
    }
}
Example use of org.elasticsearch.search.aggregations.Aggregations in the Graylog graylog2-server project: the doExtractResult method of the ESMessageList class.
@Override
public SearchType.Result doExtractResult(SearchJob job, Query query, MessageList searchType,
        org.graylog.shaded.elasticsearch7.org.elasticsearch.action.search.SearchResponse result,
        Aggregations aggregations, ESGeneratedQueryContext queryContext) {
    // Bounds of the time range that was effectively searched for this search type.
    final DateTime from = query.effectiveTimeRange(searchType).getFrom();
    final DateTime to = query.effectiveTimeRange(searchType).getTo();

    // Summarize every ES hit: hit -> result message -> summary.
    final List<ResultMessageSummary> summaries = StreamSupport
            .stream(result.getHits().spliterator(), false)
            .map(ESMessageList::resultMessageFromSearchHit)
            .map(message -> ResultMessageSummary.create(
                    message.highlightRanges, message.getMessage().getFields(), message.getIndex()))
            .collect(Collectors.toList());

    // The query string both as the user wrote it and after decorator expansion.
    final String undecoratedQueryString = query.query().queryString();
    final String decoratedQueryString = this.esQueryDecorators.decorate(undecoratedQueryString, job, query);

    final SearchResponse response = SearchResponse.create(undecoratedQueryString, decoratedQueryString,
            Collections.emptySet(), summaries, Collections.emptySet(), 0,
            result.getHits().getTotalHits().value, from, to);
    // Run the configured message decorators before assembling the final result.
    final SearchResponse decorated = decoratorProcessor.decorateSearchResponse(response, searchType.decorators());

    final MessageList.Result.Builder builder = MessageList.Result.result(searchType.id())
            .messages(decorated.messages())
            .effectiveTimerange(AbsoluteRange.create(from, to))
            .totalResults(decorated.totalResults());
    // Carry the optional search type name through to the result, when one was set.
    return searchType.name().map(builder::name).orElse(builder).build();
}
Example use of org.elasticsearch.search.aggregations.Aggregations in the Graylog graylog2-server project: the testSortingOfStreamsInDoExtractResult method of the ESEventListTest class.
@Test
public void testSortingOfStreamsInDoExtractResult() {
    // Only the event list implementation under test is real; every collaborator is mocked.
    final ESEventList sut = new TestESEventList();
    final SearchJob job = mock(SearchJob.class);
    final Query query = mock(Query.class);
    final SearchResponse response = mock(SearchResponse.class);
    final Aggregations aggregations = mock(Aggregations.class);
    final ESGeneratedQueryContext context = mock(ESGeneratedQueryContext.class);

    // An event list search type scoped to two streams.
    final EventList eventList = EventList.builder()
            .id("search-type-id")
            .streams(ImmutableSet.of("stream-id-1", "stream-id-2"))
            .build();

    final EventList.Result result =
            (EventList.Result) sut.doExtractResult(job, query, eventList, response, aggregations, context);

    // The extracted events must come back in this exact order.
    assertThat(result.events()).containsExactly(
            eventSummary("find-1", ImmutableSet.of("stream-id-1")),
            eventSummary("find-2", ImmutableSet.of("stream-id-2")),
            eventSummary("find-3", ImmutableSet.of("stream-id-1", "stream-id-2")));
}
Example use of org.elasticsearch.search.aggregations.Aggregations in the Graylog graylog2-server project: the searchResultIncludesDocumentCount method of the ESPivotTest class.
@Test
public void searchResultIncludesDocumentCount() throws InvalidRangeParametersException {
    // Arrange: the backend reports this many matching documents.
    final long expectedCount = 424242;
    returnDocumentCount(queryResult, expectedCount);

    // Provide a timestamp-range aggregation so result extraction has something to read.
    final double rangeStart = (double) new Date().getTime();
    final double rangeEnd = (double) new Date().getTime();
    final Aggregations timestampAggregations = createTimestampRangeAggregations(rangeStart, rangeEnd);
    when(queryResult.getAggregations()).thenReturn(timestampAggregations);
    when(query.effectiveTimeRange(pivot)).thenReturn(RelativeRange.create(300));

    // Act: extract the pivot result from the mocked query result.
    final SearchType.Result result =
            this.esPivot.doExtractResult(job, query, pivot, queryResult, aggregations, queryContext);

    // Assert: the pivot result surfaces the backend document count as its total.
    assertThat(((PivotResult) result).total()).isEqualTo(expectedCount);
}
Aggregations