use of org.nzbhydra.searching.searchrequests.SearchRequest in project nzbhydra2 by theotherp.
The class NewznabXmlTransformerTest, method shouldUseCorrectApplicationType.
// Verifies that the enclosure MIME type of the generated RSS item follows the
// download type requested in the search request (NZB vs. torrent).
@Test
public void shouldUseCorrectApplicationType() {
    SearchResultItem resultItem = new SearchResultItem();
    resultItem.setIndexer(indexerMock);
    resultItem.setCategory(new Category());
    SearchRequest request = new SearchRequest(SearchSource.INTERNAL, SearchType.SEARCH, 0, 100);

    request.setDownloadType(DownloadType.NZB);
    NewznabXmlItem rssItem = testee.buildRssItem(resultItem, request);
    assertThat(rssItem.getEnclosure().getType()).isEqualTo("application/x-nzb");

    request.setDownloadType(DownloadType.TORRENT);
    rssItem = testee.buildRssItem(resultItem, request);
    assertThat(rssItem.getEnclosure().getType()).isEqualTo("application/x-bittorrent");
}
use of org.nzbhydra.searching.searchrequests.SearchRequest in project nzbhydra2 by theotherp.
The class IndexerForSearchSelector, method pickIndexers.
/**
 * Selects the indexers to be used for the given search request.
 * <p>
 * Every indexer not disabled by the user is run through an ordered chain of checks
 * (visibility, user selection, config completeness, search source, status, torznab
 * restrictions, category, schedule, load limiting, ID support, hit limit). The first
 * failing check rejects the indexer; the checks themselves record the rejection
 * reason (collected in {@code notSelectedIndersWithReason}).
 *
 * @param searchRequest the request for which indexers are to be picked
 * @return the selection result holding the chosen indexers and, per rejected indexer, the reason
 */
public IndexerForSearchSelection pickIndexers(SearchRequest searchRequest) {
    this.searchRequest = searchRequest;
    // Check any indexer that's not disabled by the user. If it's disabled by the system it will be deselected with a proper message later
    List<Indexer> eligibleIndexers = searchModuleProvider.getIndexers().stream()
            .filter(x -> x.getConfig().getState() != IndexerConfig.State.DISABLED_USER)
            .collect(Collectors.toList());
    if (eligibleIndexers.isEmpty()) {
        logger.warn("You don't have any enabled indexers");
        return new IndexerForSearchSelection();
    }
    List<Indexer> selectedIndexers = new ArrayList<>();
    // Parameterized logging so the message is only built when DEBUG is enabled
    logger.debug("Picking indexers out of {}", eligibleIndexers.size());
    Stopwatch stopwatch = Stopwatch.createStarted();
    for (Indexer indexer : eligibleIndexers) {
        // Short-circuit: checks run in the original order; the first failing one
        // skips the indexer (and records the reason as a side effect).
        boolean selected = checkInternalAndNotEvenShown(indexer)
                && checkIndexerSelectedByUser(indexer)
                && checkIndexerConfigComplete(indexer)
                && checkSearchSource(indexer)
                && checkIndexerStatus(indexer)
                && checkTorznabOnlyUsedForTorrentOrInternalSearches(indexer)
                && checkDisabledForCategory(indexer)
                && checkSchedule(indexer)
                && checkLoadLimiting(indexer)
                && checkSearchId(indexer)
                && checkIndexerHitLimit(indexer);
        if (selected) {
            selectedIndexers.add(indexer);
        }
    }
    logger.debug(LoggingMarkers.PERFORMANCE, "Selection of indexers took {}ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    if (selectedIndexers.isEmpty()) {
        logger.warn("No indexers were selected for this search. You probably don't have any indexers configured which support the provided ID type or all of your indexers which do are currently disabled. You can enable query generation to work around this.");
    } else {
        // Collectors.joining replaces Guava's Joiner; Collectors is already in scope
        logger.info("Selected {} out of {} indexers: {}", selectedIndexers.size(), eligibleIndexers.size(),
                selectedIndexers.stream().map(Indexer::getName).collect(Collectors.joining(", ")));
    }
    eventPublisher.publishEvent(new IndexerSelectionEvent(searchRequest, selectedIndexers.size()));
    return new IndexerForSearchSelection(notSelectedIndersWithReason, selectedIndexers);
}
use of org.nzbhydra.searching.searchrequests.SearchRequest in project nzbhydra2 by theotherp.
The class SearchWeb, method search.
/**
 * Handles an internal (web UI) search request: runs the search, builds the response
 * and marks the tracked search state as finished.
 *
 * @param parameters the search parameters posted by the UI
 * @return the processed search response
 */
@Secured({ "ROLE_USER" })
@RequestMapping(value = "/internalapi/search", method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE)
public SearchResponse search(@RequestBody SearchRequestParameters parameters) {
    SearchRequest searchRequest = createSearchRequest(parameters);
    Stopwatch stopwatch = Stopwatch.createStarted();
    logger.info("New search request: {}", searchRequest);
    org.nzbhydra.searching.SearchResult searchResult = searcher.search(searchRequest);
    SearchResponse searchResponse = searchResultProcessor.createSearchResponse(searchResult);
    lock.lock();
    try {
        // Fix: unlock in finally — previously an exception here (e.g. an evicted
        // search state causing an NPE) would leave the lock held forever.
        SearchState searchState = searchStates.get(searchRequest.getSearchRequestId());
        // NOTE(review): presumably the state is always present while a search runs —
        // guard anyway so a missing entry can't poison the lock or fail the response.
        if (searchState != null) {
            searchState.setSearchFinished(true);
        }
    } finally {
        lock.unlock();
    }
    logger.info("Search took {}ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    return searchResponse;
}
use of org.nzbhydra.searching.searchrequests.SearchRequest in project nzbhydra2 by theotherp.
The class Searcher, method search.
/**
 * Runs a (possibly cached) search across the selected indexers until enough results
 * are gathered to satisfy the request's offset + limit, or all indexers are exhausted.
 * Results are accumulated in a {@code SearchCacheEntry} keyed by the request's hash so
 * follow-up pages reuse earlier results and duplicate detection spans all pages.
 *
 * @param searchRequest the request (source, type, query, offset/limit, indexers)
 * @return the aggregated search result including rejection reasons and duplicate counts
 */
public SearchResult search(SearchRequest searchRequest) {
    Stopwatch stopwatch = Stopwatch.createStarted();
    eventPublisher.publishEvent(new SearchEvent(searchRequest));
    // Returns a fresh entry for a new request or the cached one for a follow-up page
    SearchCacheEntry searchCacheEntry = getSearchCacheEntry(searchRequest);
    SearchResult searchResult = new SearchResult();
    // LATER default for limit
    // Number of results needed to serve this page: everything up to offset + limit
    int numberOfWantedResults = searchRequest.getOffset().orElse(0) + searchRequest.getLimit().orElse(100);
    searchResult.setPickingResult(searchCacheEntry.getPickingResult());
    // Only indexers that were successful so far and still have more results to offer
    Map<Indexer, List<IndexerSearchResult>> indexersToSearchAndTheirResults = getIndexerSearchResultsToSearch(searchCacheEntry.getIndexerSearchResultsByIndexer());
    List<SearchResultItem> searchResultItems = searchCacheEntry.getSearchResultItems();
    // Keep calling indexers while any can still contribute and we either need more
    // results for this page or the caller asked for everything (loadAll)
    while (indexersToSearchAndTheirResults.size() > 0 && (searchResultItems.size() < numberOfWantedResults || searchRequest.isLoadAll())) {
        if (shutdownRequested) {
            break;
        }
        if (searchRequest.isLoadAll()) {
            logger.debug("Going to call {} indexers because {} results were loaded yet but more results are available and all were requested", indexersToSearchAndTheirResults.size(), searchCacheEntry.getNumberOfFoundResults());
            // Safety valve for loadAll: cap at 1000 results per requested indexer
            // (orElse(Sets.newHashSet("")) yields size 1 when no indexers were named)
            int maxResultsToLoad = searchRequest.getIndexers().orElse(Sets.newHashSet("")).size() * 1000;
            if (searchResultItems.size() > maxResultsToLoad) {
                logger.info("Aborting loading all results because more than {} results were already loaded and we don't want to hammer the indexers too much", maxResultsToLoad);
                break;
            }
        } else {
            logger.debug("Going to call {} indexers because {} of {} wanted results were loaded yet", indexersToSearchAndTheirResults.size(), searchCacheEntry.getNumberOfFoundResults(), numberOfWantedResults);
        }
        // Do the actual search
        indexersToSearchAndTheirResults = callSearchModules(searchRequest, indexersToSearchAndTheirResults);
        // Update cache
        searchCacheEntry.getIndexerSearchResultsByIndexer().putAll(indexersToSearchAndTheirResults);
        searchRequestCache.put(searchRequest.hashCode(), searchCacheEntry);
        // Use search result items from the cache which contains *all* search searchResults, not just the latest. That allows finding duplicates over multiple searches
        searchResultItems = searchCacheEntry.getIndexerSearchResultsByIndexer().values().stream().flatMap(Collection::stream).filter(IndexerSearchResult::isWasSuccessful).flatMap(x -> x.getSearchResultItems().stream()).distinct().collect(Collectors.toList());
        DuplicateDetectionResult duplicateDetectionResult = duplicateDetector.detectDuplicates(searchResultItems);
        // Save to database
        createOrUpdateIndexerSearchEntity(searchCacheEntry, indexersToSearchAndTheirResults, duplicateDetectionResult);
        // Remove duplicates for external searches
        if (searchRequest.getSource() == SearchSource.API) {
            int beforeDuplicateRemoval = searchResultItems.size();
            searchResultItems = getNewestSearchResultItemFromEachDuplicateGroup(duplicateDetectionResult.getDuplicateGroups());
            searchResult.setNumberOfRemovedDuplicates(beforeDuplicateRemoval - searchResultItems.size());
        }
        // Set the rejection counts from all searches, this and previous
        searchCacheEntry.getReasonsForRejection().clear();
        indexersToSearchAndTheirResults.values().forEach(x -> x.forEach(y -> y.getReasonsForRejection().entrySet().forEach(z -> searchCacheEntry.getReasonsForRejection().add(z.getElement(), z.getCount()))));
        // Update indexersToSearchAndTheirResults to remove indexers which threw an error or don't have any more results
        indexersToSearchAndTheirResults = getIndexerSearchResultsToSearch(indexersToSearchAndTheirResults);
        searchCacheEntry.setSearchResultItems(searchResultItems);
    }
    searchResult.setNumberOfTotalAvailableResults(searchCacheEntry.getNumberOfTotalAvailableResults());
    // Report only the latest IndexerSearchResult per indexer to the caller
    searchResult.setIndexerSearchResults(searchCacheEntry.getIndexerSearchResultsByIndexer().entrySet().stream().map(x -> Iterables.getLast(x.getValue())).collect(Collectors.toList()));
    searchResult.setReasonsForRejection(searchCacheEntry.getReasonsForRejection());
    // Sort ascending by best date, then reverse -> newest results first
    searchResultItems.sort(Comparator.comparingLong(x -> x.getBestDate().getEpochSecond()));
    Collections.reverse(searchResultItems);
    spliceSearchResultItemsAccordingToOffsetAndLimit(searchRequest, searchResult, searchResultItems);
    logger.debug(LoggingMarkers.PERFORMANCE, "Internal search took {}ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    return searchResult;
}
use of org.nzbhydra.searching.searchrequests.SearchRequest in project nzbhydra2 by theotherp.
The class SearchingIntegrationTest, method shouldSearch.
// Scenario: indexer 1 holds two results, indexer 2 holds one. The first request
// (limit 2) yields one result from each indexer; the follow-up request (offset 2)
// fetches the remaining result from indexer 1.
@Test
public void shouldSearch() throws Exception {
    String resultPage1a = Resources.toString(Resources.getResource(SearchingIntegrationTest.class, "simplesearchresult1a.xml"), Charsets.UTF_8);
    String resultPage1b = Resources.toString(Resources.getResource(SearchingIntegrationTest.class, "simplesearchresult1b.xml"), Charsets.UTF_8);
    String resultPage2 = Resources.toString(Resources.getResource(SearchingIntegrationTest.class, "simplesearchresult2.xml"), Charsets.UTF_8);
    // Responses are served in enqueue order: 1a and 2 for the first search, 1b for the second
    for (String body : new String[]{resultPage1a, resultPage2, resultPage1b}) {
        mockWebServer.enqueue(new MockResponse().setBody(body).setHeader("Content-Type", "application/xml; charset=utf-8"));
    }

    SearchRequest searchRequest = new SearchRequest(SearchSource.INTERNAL, SearchType.SEARCH, 0, 2);
    SearchResult firstPage = searcher.search(searchRequest);
    assertThat(firstPage.getSearchResultItems().size(), is(2));

    searchRequest.setLimit(100);
    searchRequest.setOffset(2);
    SearchResult secondPage = searcher.search(searchRequest);
    assertThat(secondPage.getSearchResultItems().size(), is(1));
    assertThat(secondPage.getSearchResultItems().get(0).getTitle(), is("itemTitle1b"));
}
Aggregations