Usage of com.yelp.nrtsearch.server.luceneserver.search.SearchCutoffWrapper.CollectionTimeoutException in the Yelp nrtsearch project.
From class SearchCutoffWrapperTest, method testCollectionErrorTimeoutMulti:
@Test
public void testCollectionErrorTimeoutMulti() throws IOException {
  TestCollectorManager manager = new TestCollectorManager();
  // Cutoff of 5 (presumably seconds; currentTime appears to be milliseconds — see wrapper)
  // with a no-op timeout callback.
  TimeSettableWrapper wrapper = new TimeSettableWrapper(manager, 5, true, () -> {
  });
  wrapper.currentTime = 10000;
  Collector first = wrapper.newCollector();
  Collector second = wrapper.newCollector();
  // Three rounds inside the cutoff window: both collectors keep accepting leaves.
  first.getLeafCollector(null);
  second.getLeafCollector(null);
  for (int round = 0; round < 2; round++) {
    wrapper.currentTime += 2000;
    first.getLeafCollector(null);
    second.getLeafCollector(null);
  }
  // Advance past the cutoff: every collector created by this wrapper must now throw.
  wrapper.currentTime += 2000;
  try {
    first.getLeafCollector(null);
    fail();
  } catch (CollectionTimeoutException e) {
    assertEquals(EXPECTED_TIMEOUT_MESSAGE, e.getMessage());
  }
  try {
    second.getLeafCollector(null);
    fail();
  } catch (CollectionTimeoutException e) {
    assertEquals(EXPECTED_TIMEOUT_MESSAGE, e.getMessage());
  }
}
Usage of com.yelp.nrtsearch.server.luceneserver.search.SearchCutoffWrapper.CollectionTimeoutException in the Yelp nrtsearch project.
From class SearchCutoffWrapperTest, method testCollectionErrorTimeout:
@Test
public void testCollectionErrorTimeout() throws IOException {
  TestCollectorManager manager = new TestCollectorManager();
  // Cutoff of 5 (presumably seconds; currentTime appears to be milliseconds — see wrapper)
  // with a no-op timeout callback.
  TimeSettableWrapper wrapper = new TimeSettableWrapper(manager, 5, true, () -> {
  });
  wrapper.currentTime = 10000;
  Collector leafSource = wrapper.newCollector();
  // Three collections inside the cutoff window succeed.
  leafSource.getLeafCollector(null);
  for (int round = 0; round < 2; round++) {
    wrapper.currentTime += 2000;
    leafSource.getLeafCollector(null);
  }
  // Once the elapsed time passes the cutoff, the next collection must throw.
  wrapper.currentTime += 2000;
  try {
    leafSource.getLeafCollector(null);
    fail();
  } catch (CollectionTimeoutException e) {
    assertEquals(EXPECTED_TIMEOUT_MESSAGE, e.getMessage());
  }
}
Usage of com.yelp.nrtsearch.server.luceneserver.search.SearchCutoffWrapper.CollectionTimeoutException in the Yelp nrtsearch project.
From class SearchHandler, method handle:
/**
 * Executes a search request against shard 0 of the given index: acquires a searcher,
 * builds the search context, runs the query (with DrillSideways when facets are
 * requested), applies rescorers, fills hits/fields/search-state into the response,
 * and records timing diagnostics along the way. Deadline checks are performed at
 * several stages so an over-deadline request fails fast.
 *
 * @throws SearchHandlerException on I/O, interruption, or execution failure, or if
 *     releasing the searcher reference fails
 */
@Override
public SearchResponse handle(IndexState indexState, SearchRequest searchRequest) throws SearchHandlerException {
  // this request may have been waiting in the grpc queue too long
  DeadlineUtils.checkDeadline("SearchHandler: start", "SEARCH");
  ShardState shardState = indexState.getShard(0);
  // Index won't be started if we are currently warming
  if (!warming) {
    indexState.verifyStarted();
  }
  var diagnostics = SearchResponse.Diagnostics.newBuilder();
  SearcherTaxonomyManager.SearcherAndTaxonomy s = null;
  SearchContext searchContext;
  try {
    // Acquire the searcher/taxonomy pair; released in the finally block below.
    s = getSearcherAndTaxonomy(searchRequest, shardState, diagnostics, threadPoolExecutor);
    ProfileResult.Builder profileResultBuilder = null;
    if (searchRequest.getProfile()) {
      profileResultBuilder = ProfileResult.newBuilder();
    }
    searchContext = SearchRequestProcessor.buildContextForRequest(searchRequest, indexState, shardState, s, profileResultBuilder);
    long searchStartTime = System.nanoTime();
    SearcherResult searcherResult;
    TopDocs hits;
    if (!searchRequest.getFacetsList().isEmpty()) {
      // Faceted path: the request processor must have produced a DrillDownQuery.
      if (!(searchContext.getQuery() instanceof DrillDownQuery)) {
        throw new IllegalArgumentException("Can only use DrillSideways on DrillDownQuery");
      }
      DrillDownQuery ddq = (DrillDownQuery) searchContext.getQuery();
      List<FacetResult> grpcFacetResults = new ArrayList<>();
      DrillSideways drillS = new DrillSidewaysImpl(s.searcher, indexState.facetsConfig, s.taxonomyReader, searchRequest.getFacetsList(), s, shardState, searchContext.getQueryFields(), grpcFacetResults, threadPoolExecutor, diagnostics);
      DrillSideways.ConcurrentDrillSidewaysResult<SearcherResult> concurrentDrillSidewaysResult;
      try {
        concurrentDrillSidewaysResult = drillS.search(ddq, searchContext.getCollector().getWrappedManager());
      } catch (RuntimeException e) {
        // Searching with DrillSideways wraps exceptions in a few layers.
        // Try to find if this was caused by a timeout, if so, re-wrap
        // so that the top level exception is the same as when not using facets.
        CollectionTimeoutException timeoutException = findTimeoutException(e);
        if (timeoutException != null) {
          throw new CollectionTimeoutException(timeoutException.getMessage(), e);
        }
        throw e;
      }
      searcherResult = concurrentDrillSidewaysResult.collectorResult;
      hits = searcherResult.getTopDocs();
      // Facet results come both from DrillSideways and from sampling the top docs.
      searchContext.getResponseBuilder().addAllFacetResult(grpcFacetResults);
      searchContext.getResponseBuilder().addAllFacetResult(FacetTopDocs.facetTopDocsSample(hits, searchRequest.getFacetsList(), indexState, s.searcher, diagnostics));
    } else {
      // Non-faceted path: plain search through the wrapped collector manager.
      searcherResult = s.searcher.search(searchContext.getQuery(), searchContext.getCollector().getWrappedManager());
      hits = searcherResult.getTopDocs();
    }
    // add results from any extra collectors
    searchContext.getResponseBuilder().putAllCollectorResults(searcherResult.getCollectorResults());
    searchContext.getResponseBuilder().setHitTimeout(searchContext.getCollector().hadTimeout());
    searchContext.getResponseBuilder().setTerminatedEarly(searchContext.getCollector().terminatedEarly());
    diagnostics.setFirstPassSearchTimeMs(((System.nanoTime() - searchStartTime) / 1000000.0));
    DeadlineUtils.checkDeadline("SearchHandler: post recall", "SEARCH");
    // add detailed timing metrics for query execution
    if (profileResultBuilder != null) {
      searchContext.getCollector().maybeAddProfiling(profileResultBuilder);
    }
    long rescoreStartTime = System.nanoTime();
    if (!searchContext.getRescorers().isEmpty()) {
      // Rescorers run sequentially; each gets its own per-rescorer timing entry
      // and its own deadline check.
      for (RescoreTask rescorer : searchContext.getRescorers()) {
        long startNS = System.nanoTime();
        hits = rescorer.rescore(hits, searchContext);
        long endNS = System.nanoTime();
        diagnostics.putRescorersTimeMs(rescorer.getName(), (endNS - startNS) / 1000000.0);
        DeadlineUtils.checkDeadline("SearchHandler: post " + rescorer.getName(), "SEARCH");
      }
      diagnostics.setRescoreTimeMs(((System.nanoTime() - rescoreStartTime) / 1000000.0));
    }
    long t0 = System.nanoTime();
    // Trim hits to the requested [startHit, topHits) window.
    hits = getHitsFromOffset(hits, searchContext.getStartHit(), searchContext.getTopHits());
    // create Hit.Builder for each hit, and populate with lucene doc id and ranking info
    setResponseHits(searchContext, hits);
    // fill Hit.Builder with requested fields
    fetchFields(searchContext);
    SearchState.Builder searchState = SearchState.newBuilder();
    searchContext.getResponseBuilder().setSearchState(searchState);
    searchState.setTimestamp(searchContext.getTimestampSec());
    // Record searcher version that handled this request:
    searchState.setSearcherVersion(((DirectoryReader) s.searcher.getIndexReader()).getVersion());
    // Fill in lastDoc for searchAfter:
    if (hits.scoreDocs.length != 0) {
      ScoreDoc lastHit = hits.scoreDocs[hits.scoreDocs.length - 1];
      searchState.setLastDocId(lastHit.doc);
      searchContext.getCollector().fillLastHit(searchState, lastHit);
    }
    diagnostics.setGetFieldsTimeMs(((System.nanoTime() - t0) / 1000000.0));
    searchContext.getResponseBuilder().setDiagnostics(diagnostics);
    if (profileResultBuilder != null) {
      searchContext.getResponseBuilder().setProfileResult(profileResultBuilder);
    }
  } catch (IOException | InterruptedException | ExecutionException e) {
    logger.warn(e.getMessage(), e);
    throw new SearchHandlerException(e);
  } finally {
    // Always release the searcher reference acquired above.
    // NOTE(review): throwing from this finally block replaces any exception
    // already in flight from the try/catch — confirm this masking is intended.
    try {
      if (s != null) {
        shardState.release(s);
      }
    } catch (IOException e) {
      logger.warn("Failed to release searcher reference previously acquired by acquire()", e);
      throw new SearchHandlerException(e);
    }
  }
  // Add searchRequest to warmer if needed
  try {
    if (!warming && indexState.getWarmer() != null) {
      indexState.getWarmer().addSearchRequest(searchRequest);
    }
  } catch (Exception e) {
    // Best-effort: warming failures must not fail the search itself.
    logger.error("Unable to add warming query", e);
  }
  // if we are out of time, don't bother with serialization
  DeadlineUtils.checkDeadline("SearchHandler: end", "SEARCH");
  return searchContext.getResponseBuilder().build();
}
Aggregations