use of org.apache.solr.client.solrj.response.SpellCheckResponse in project jackrabbit-oak by apache.
the class SolrQueryIndex method getIterator.
/**
 * Builds a lazy iterator over {@link SolrResultRow}s for the given filter.
 * Documents are fetched from Solr in batches ({@code loadDocs}) and buffered
 * in an internal queue; the iterator also handles highlighting, facets,
 * spellcheck and suggest responses embedded in the Solr query response.
 *
 * @param filter        the Oak query filter being executed
 * @param plan          the index plan selected for this query
 * @param parent        parent path used for ancestor de-duplication ("" when not restricted)
 * @param parentDepth   depth used when mapping a result path back to its ancestor
 * @param configuration Solr index configuration (field names, page size, ...)
 * @param solrServer    the Solr client to query
 * @param estimator     estimator updated with observed result counts
 * @return an iterator that terminates once Solr has no further results
 */
private AbstractIterator<SolrResultRow> getIterator(final Filter filter, final IndexPlan plan, final String parent, final int parentDepth, final OakSolrConfiguration configuration, final SolrServer solrServer, final LMSEstimator estimator) {
    return new AbstractIterator<SolrResultRow>() {

        // facets accumulated from Solr responses; the same (mutable) collection
        // is shared with every row handed out by convertToRow
        public Collection<FacetField> facetFields = new LinkedList<FacetField>();

        // ancestor paths already emitted, used to avoid duplicate rows
        private final Set<String> seenPaths = Sets.newHashSet();

        // rows converted from the last Solr response, not yet consumed
        private final Deque<SolrResultRow> queue = Queues.newArrayDeque();

        // number of follow-up (paged) requests already issued
        private int offset = 0;

        // set once spellcheck/suggest results replace regular documents
        private boolean noDocs = false;

        // total hit count reported by the last Solr response
        private long numFound = 0;

        @Override
        protected SolrResultRow computeNext() {
            // serve from the buffer, refilling it from Solr when empty
            if (!queue.isEmpty() || loadDocs()) {
                return queue.remove();
            }
            return endOfData();
        }

        /**
         * Converts a Solr document into a result row, or returns {@code null}
         * when the document maps to an ancestor path that was already emitted.
         */
        private SolrResultRow convertToRow(SolrDocument doc) {
            String path = String.valueOf(doc.getFieldValue(configuration.getPathField()));
            if ("".equals(path)) {
                path = "/";
            }
            if (!parent.isEmpty()) {
                path = getAncestorPath(path, parentDepth);
                // avoid duplicate entries
                if (seenPaths.contains(path)) {
                    return null;
                }
                seenPaths.add(path);
            }
            float score = 0f;
            Object scoreObj = doc.get("score");
            if (scoreObj != null) {
                score = (Float) scoreObj;
            }
            return new SolrResultRow(path, score, doc, facetFields);
        }

        /**
         * Loads the Solr documents in batches
         * @return true if any document is loaded
         */
        private boolean loadDocs() {
            // spellcheck/suggest already produced the final rows; stop paging
            if (noDocs) {
                return false;
            }
            try {
                if (log.isDebugEnabled()) {
                    log.debug("converting filter {}", filter);
                }
                SolrQuery query = FilterQueryParser.getQuery(filter, plan, configuration);
                if (numFound > 0) {
                    // a previous request already ran: compute the next page
                    long rows = configuration.getRows();
                    long maxQueries = numFound / 2;
                    if (maxQueries > configuration.getRows()) {
                        // adjust the rows to avoid making more than 3 Solr requests for
                        // this particular query (bump the page size to half the hits)
                        rows = maxQueries;
                        query.setParam("rows", String.valueOf(rows));
                    }
                    long newOffset = configuration.getRows() + offset * rows;
                    if (newOffset >= numFound) {
                        return false;
                    }
                    query.setParam("start", String.valueOf(newOffset));
                    offset++;
                }
                if (log.isDebugEnabled()) {
                    log.debug("sending query {}", query);
                }
                QueryResponse queryResponse = solrServer.query(query);
                if (log.isDebugEnabled()) {
                    log.debug("getting response {}", queryResponse.getHeader());
                }
                SolrDocumentList docs = queryResponse.getResults();
                if (docs != null) {
                    numFound = docs.getNumFound();
                    // feed the estimator with the observed cardinality
                    estimator.update(filter, docs);
                    Map<String, Map<String, List<String>>> highlighting = queryResponse.getHighlighting();
                    for (SolrDocument doc : docs) {
                        // handle highlight
                        if (highlighting != null) {
                            Object pathObject = doc.getFieldValue(configuration.getPathField());
                            if (pathObject != null && highlighting.get(String.valueOf(pathObject)) != null) {
                                Map<String, List<String>> value = highlighting.get(String.valueOf(pathObject));
                                for (Map.Entry<String, List<String>> entry : value.entrySet()) {
                                    // all highlighted values end up in 'rep:excerpt', regardless of field match
                                    for (String v : entry.getValue()) {
                                        doc.addField(QueryImpl.REP_EXCERPT, v);
                                    }
                                }
                            }
                        }
                        SolrResultRow row = convertToRow(doc);
                        if (row != null) {
                            queue.add(row);
                        }
                    }
                }
                // get facets
                List<FacetField> returnedFieldFacet = queryResponse.getFacetFields();
                if (returnedFieldFacet != null) {
                    // NOTE(review): facetFields accumulates across batches; repeated
                    // pages may add the same facet fields again — confirm intended
                    facetFields.addAll(returnedFieldFacet);
                }
                // filter facets on doc paths
                if (!facetFields.isEmpty() && docs != null) {
                    for (SolrDocument doc : docs) {
                        String path = String.valueOf(doc.getFieldValue(configuration.getPathField()));
                        // if facet path doesn't exist for the calling user, filter the facet for this doc
                        for (FacetField ff : facetFields) {
                            if (!filter.isAccessible(path + "/" + ff.getName())) {
                                filterFacet(doc, ff);
                            }
                        }
                    }
                }
                // handle spellcheck
                SpellCheckResponse spellCheckResponse = queryResponse.getSpellCheckResponse();
                if (spellCheckResponse != null && spellCheckResponse.getSuggestions() != null && spellCheckResponse.getSuggestions().size() > 0) {
                    putSpellChecks(spellCheckResponse, queue, filter, configuration, solrServer);
                    noDocs = true;
                }
                // handle suggest
                NamedList<Object> response = queryResponse.getResponse();
                // raw Map: entries presumably keyed by suggester name — structure
                // depends on the Solr suggest component configuration
                Map suggest = (Map) response.get("suggest");
                if (suggest != null) {
                    Set<Map.Entry<String, Object>> suggestEntries = suggest.entrySet();
                    if (!suggestEntries.isEmpty()) {
                        putSuggestions(suggestEntries, queue, filter, configuration, solrServer);
                        noDocs = true;
                    }
                }
            } catch (Exception e) {
                // best-effort: a failed page simply ends iteration with what we have
                if (log.isWarnEnabled()) {
                    log.warn("query via {} failed.", solrServer, e);
                }
            }
            return !queue.isEmpty();
        }
    };
}
use of org.apache.solr.client.solrj.response.SpellCheckResponse in project dataverse by IQSS.
the class SearchServiceBean method search.
/**
* Important note: "onlyDatatRelatedToMe" relies on filterQueries for providing
* access to Private Data for the correct user
*
* In other words "onlyDatatRelatedToMe", negates other filter Queries
* related to permissions
*
* @param user
* @param dataverse
* @param query
* @param filterQueries
* @param sortField
* @param sortOrder
* @param paginationStart
* @param onlyDatatRelatedToMe
* @param numResultsPerPage
* @param retrieveEntities - look up dvobject entities with .find() (potentially expensive!)
* @return
* @throws SearchException
*/
public SolrQueryResponse search(DataverseRequest dataverseRequest, Dataverse dataverse, String query, List<String> filterQueries, String sortField, String sortOrder, int paginationStart, boolean onlyDatatRelatedToMe, int numResultsPerPage, boolean retrieveEntities) throws SearchException {
    // ---- argument validation -------------------------------------------
    if (paginationStart < 0) {
        throw new IllegalArgumentException("paginationStart must be 0 or greater");
    }
    if (numResultsPerPage < 1) {
        throw new IllegalArgumentException("numResultsPerPage must be 1 or greater");
    }
    // ---- build the Solr query: text, sort, highlighting ----------------
    SolrQuery solrQuery = new SolrQuery();
    query = SearchUtil.sanitizeQuery(query);
    solrQuery.setQuery(query);
    // SortClause foo = new SortClause("name", SolrQuery.ORDER.desc);
    // if (query.equals("*") || query.equals("*:*")) {
    // solrQuery.setSort(new SortClause(SearchFields.NAME_SORT, SolrQuery.ORDER.asc));
    solrQuery.setSort(new SortClause(sortField, sortOrder));
    // } else {
    // solrQuery.setSort(sortClause);
    // }
    // solrQuery.setSort(sortClause);
    solrQuery.setHighlight(true).setHighlightSnippets(1);
    Integer fragSize = systemConfig.getSearchHighlightFragmentSize();
    if (fragSize != null) {
        solrQuery.setHighlightFragsize(fragSize);
    }
    solrQuery.setHighlightSimplePre("<span class=\"search-term-match\">");
    solrQuery.setHighlightSimplePost("</span>");
    // map of Solr field name -> display name, used both to register
    // highlight fields and later to label matched snippets
    Map<String, String> solrFieldsToHightlightOnMap = new HashMap<>();
    // TODO: Do not hard code "Name" etc as English here.
    solrFieldsToHightlightOnMap.put(SearchFields.NAME, "Name");
    solrFieldsToHightlightOnMap.put(SearchFields.AFFILIATION, "Affiliation");
    solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_FRIENDLY, "File Type");
    solrFieldsToHightlightOnMap.put(SearchFields.DESCRIPTION, "Description");
    solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NAME, "Variable Name");
    solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_LABEL, "Variable Label");
    solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_SEARCHABLE, "File Type");
    solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PUBLICATION_DATE, "Publication Date");
    solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PERSISTENT_ID, BundleUtil.getStringFromBundle("dataset.metadata.persistentId"));
    /**
     * @todo Dataverse subject and affiliation should be highlighted but
     * this is commented out right now because the "friendly" names are not
     * being shown on the dataverse cards. See also
     * https://github.com/IQSS/dataverse/issues/1431
     */
    // solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_SUBJECT, "Subject");
    // solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_AFFILIATION, "Affiliation");
    /**
     * @todo: show highlight on file card?
     * https://redmine.hmdc.harvard.edu/issues/3848
     */
    solrFieldsToHightlightOnMap.put(SearchFields.FILENAME_WITHOUT_EXTENSION, "Filename Without Extension");
    solrFieldsToHightlightOnMap.put(SearchFields.FILE_TAG_SEARCHABLE, "File Tag");
    // dataset metadata fields are highlight candidates too
    List<DatasetFieldType> datasetFields = datasetFieldService.findAllOrderedById();
    for (DatasetFieldType datasetFieldType : datasetFields) {
        String solrField = datasetFieldType.getSolrField().getNameSearchable();
        String displayName = datasetFieldType.getDisplayName();
        solrFieldsToHightlightOnMap.put(solrField, displayName);
    }
    for (Map.Entry<String, String> entry : solrFieldsToHightlightOnMap.entrySet()) {
        String solrField = entry.getKey();
        // String displayName = entry.getValue();
        solrQuery.addHighlightField(solrField);
    }
    solrQuery.setParam("fl", "*,score");
    solrQuery.setParam("qt", "/select");
    solrQuery.setParam("facet", "true");
    /**
     * @todo: do we need facet.query?
     */
    solrQuery.setParam("facet.query", "*");
    for (String filterQuery : filterQueries) {
        solrQuery.addFilterQuery(filterQuery);
    }
    // -----------------------------------
    // PERMISSION FILTER QUERY
    // -----------------------------------
    String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, dataverse, onlyDatatRelatedToMe);
    if (permissionFilterQuery != null) {
        solrQuery.addFilterQuery(permissionFilterQuery);
    }
    // -----------------------------------
    // Facets to Retrieve
    // -----------------------------------
    // solrQuery.addFacetField(SearchFields.HOST_DATAVERSE);
    // solrQuery.addFacetField(SearchFields.AUTHOR_STRING);
    solrQuery.addFacetField(SearchFields.DATAVERSE_CATEGORY);
    solrQuery.addFacetField(SearchFields.METADATA_SOURCE);
    // solrQuery.addFacetField(SearchFields.AFFILIATION);
    solrQuery.addFacetField(SearchFields.PUBLICATION_DATE);
    /**
     * @todo when a new method on datasetFieldService is available
     * (retrieveFacetsByDataverse?) only show the facets that the dataverse
     * in question wants to show (and in the right order):
     * https://redmine.hmdc.harvard.edu/issues/3490
     *
     * also, findAll only returns advancedSearchField = true... we should
     * probably introduce the "isFacetable" boolean rather than caring about
     * if advancedSearchField is true or false
     */
    if (dataverse != null) {
        for (DataverseFacet dataverseFacet : dataverse.getDataverseFacets()) {
            DatasetFieldType datasetField = dataverseFacet.getDatasetFieldType();
            solrQuery.addFacetField(datasetField.getSolrField().getNameFacetable());
        }
    }
    solrQuery.addFacetField(SearchFields.FILE_TYPE);
    /**
     * @todo: hide the extra line this shows in the GUI... at least it's
     * last...
     */
    solrQuery.addFacetField(SearchFields.TYPE);
    solrQuery.addFacetField(SearchFields.FILE_TAG);
    if (!systemConfig.isPublicInstall()) {
        solrQuery.addFacetField(SearchFields.ACCESS);
    }
    /**
     * @todo: do sanity checking... throw error if negative
     */
    solrQuery.setStart(paginationStart);
    /**
     * @todo: decide if year CITATION_YEAR is good enough or if we should
     * support CITATION_DATE
     */
    // Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.UK);
    // calendar.set(2010, 1, 1);
    // Date start = calendar.getTime();
    // calendar.set(2013, 1, 1);
    // Date end = calendar.getTime();
    // solrQuery.addDateRangeFacet(SearchFields.CITATION_DATE, start, end, "+1MONTH");
    /**
     * @todo make this configurable
     */
    int thisYear = Calendar.getInstance().get(Calendar.YEAR);
    /**
     * @todo: odd or even makes a difference. Couldn't find value of 2014
     * when this was set to 2000
     */
    final int citationYearRangeStart = 1901;
    final int citationYearRangeEnd = thisYear;
    final int citationYearRangeSpan = 2;
    /**
     * @todo: these are dates and should be "range facets" not "field
     * facets"
     *
     * right now they are lumped in with the datasetFieldService.findAll()
     * above
     */
    // solrQuery.addNumericRangeFacet(SearchFields.PRODUCTION_DATE_YEAR_ONLY, citationYearRangeStart, citationYearRangeEnd, citationYearRangeSpan);
    // solrQuery.addNumericRangeFacet(SearchFields.DISTRIBUTION_DATE_YEAR_ONLY, citationYearRangeStart, citationYearRangeEnd, citationYearRangeSpan);
    solrQuery.setRows(numResultsPerPage);
    logger.fine("Solr query:" + solrQuery);
    // -----------------------------------
    // Make the solr query
    // -----------------------------------
    QueryResponse queryResponse = null;
    try {
        queryResponse = solrServer.query(solrQuery);
    } catch (RemoteSolrException ex) {
        // a syntax error in the user's query: return an "error" response
        // object instead of throwing, so the UI can display the message
        String messageFromSolr = ex.getLocalizedMessage();
        String error = "Search Syntax Error: ";
        String stringToHide = "org.apache.solr.search.SyntaxError: ";
        if (messageFromSolr.startsWith(stringToHide)) {
            // hide "org.apache.solr..."
            error += messageFromSolr.substring(stringToHide.length());
        } else {
            error += messageFromSolr;
        }
        logger.info(error);
        SolrQueryResponse exceptionSolrQueryResponse = new SolrQueryResponse(solrQuery);
        exceptionSolrQueryResponse.setError(error);
        // we can't show anything because of the search syntax error
        long zeroNumResultsFound = 0;
        long zeroGetResultsStart = 0;
        List<SolrSearchResult> emptySolrSearchResults = new ArrayList<>();
        List<FacetCategory> exceptionFacetCategoryList = new ArrayList<>();
        Map<String, List<String>> emptySpellingSuggestion = new HashMap<>();
        exceptionSolrQueryResponse.setNumResultsFound(zeroNumResultsFound);
        exceptionSolrQueryResponse.setResultsStart(zeroGetResultsStart);
        exceptionSolrQueryResponse.setSolrSearchResults(emptySolrSearchResults);
        exceptionSolrQueryResponse.setFacetCategoryList(exceptionFacetCategoryList);
        exceptionSolrQueryResponse.setTypeFacetCategories(exceptionFacetCategoryList);
        exceptionSolrQueryResponse.setSpellingSuggestionsByToken(emptySpellingSuggestion);
        return exceptionSolrQueryResponse;
    } catch (SolrServerException | IOException ex) {
        throw new SearchException("Internal Dataverse Search Engine Error", ex);
    }
    // ---- convert Solr documents into SolrSearchResult objects ----------
    SolrDocumentList docs = queryResponse.getResults();
    List<SolrSearchResult> solrSearchResults = new ArrayList<>();
    /**
     * @todo refactor SearchFields to a hashmap (or something? put in
     * database? internationalize?) to avoid the crazy reflection and string
     * manipulation below
     */
    Object searchFieldsObject = new SearchFields();
    Field[] staticSearchFields = searchFieldsObject.getClass().getDeclaredFields();
    String titleSolrField = null;
    try {
        DatasetFieldType titleDatasetField = datasetFieldService.findByName(DatasetFieldConstant.title);
        titleSolrField = titleDatasetField.getSolrField().getNameSearchable();
    } catch (EJBTransactionRolledbackException ex) {
        logger.info("Couldn't find " + DatasetFieldConstant.title);
        if (ex.getCause() instanceof TransactionRolledbackLocalException) {
            if (ex.getCause().getCause() instanceof NoResultException) {
                logger.info("Caught NoResultException");
            }
        }
    }
    Map<String, String> datasetfieldFriendlyNamesBySolrField = new HashMap<>();
    Map<String, String> staticSolrFieldFriendlyNamesBySolrField = new HashMap<>();
    String baseUrl = systemConfig.getDataverseSiteUrl();
    for (SolrDocument solrDocument : docs) {
        String id = (String) solrDocument.getFieldValue(SearchFields.ID);
        Long entityid = (Long) solrDocument.getFieldValue(SearchFields.ENTITY_ID);
        String type = (String) solrDocument.getFieldValue(SearchFields.TYPE);
        float score = (Float) solrDocument.getFieldValue(SearchFields.RELEVANCE);
        logger.fine("score for " + id + ": " + score);
        String identifier = (String) solrDocument.getFieldValue(SearchFields.IDENTIFIER);
        String citation = (String) solrDocument.getFieldValue(SearchFields.DATASET_CITATION);
        String citationPlainHtml = (String) solrDocument.getFieldValue(SearchFields.DATASET_CITATION_HTML);
        String persistentUrl = (String) solrDocument.getFieldValue(SearchFields.PERSISTENT_URL);
        String name = (String) solrDocument.getFieldValue(SearchFields.NAME);
        String nameSort = (String) solrDocument.getFieldValue(SearchFields.NAME_SORT);
        // ArrayList titles = (ArrayList) solrDocument.getFieldValues(SearchFields.TITLE);
        String title = (String) solrDocument.getFieldValue(titleSolrField);
        Long datasetVersionId = (Long) solrDocument.getFieldValue(SearchFields.DATASET_VERSION_ID);
        String deaccessionReason = (String) solrDocument.getFieldValue(SearchFields.DATASET_DEACCESSION_REASON);
        // logger.info("titleSolrField: " + titleSolrField);
        // logger.info("title: " + title);
        String filetype = (String) solrDocument.getFieldValue(SearchFields.FILE_TYPE_FRIENDLY);
        String fileContentType = (String) solrDocument.getFieldValue(SearchFields.FILE_CONTENT_TYPE);
        Date release_or_create_date = (Date) solrDocument.getFieldValue(SearchFields.RELEASE_OR_CREATE_DATE);
        String dateToDisplayOnCard = (String) solrDocument.getFirstValue(SearchFields.RELEASE_OR_CREATE_DATE_SEARCHABLE_TEXT);
        String dvTree = (String) solrDocument.getFirstValue(SearchFields.SUBTREE);
        // collect highlighting info for this document
        List<String> matchedFields = new ArrayList<>();
        List<Highlight> highlights = new ArrayList<>();
        Map<SolrField, Highlight> highlightsMap = new HashMap<>();
        Map<SolrField, List<String>> highlightsMap2 = new HashMap<>();
        Map<String, Highlight> highlightsMap3 = new HashMap<>();
        if (queryResponse.getHighlighting().get(id) != null) {
            for (Map.Entry<String, String> entry : solrFieldsToHightlightOnMap.entrySet()) {
                String field = entry.getKey();
                String displayName = entry.getValue();
                List<String> highlightSnippets = queryResponse.getHighlighting().get(id).get(field);
                if (highlightSnippets != null) {
                    matchedFields.add(field);
                    /**
                     * @todo only SolrField.SolrType.STRING? that's not
                     * right... knit the SolrField object more into the
                     * highlighting stuff
                     */
                    SolrField solrField = new SolrField(field, SolrField.SolrType.STRING, true, true);
                    Highlight highlight = new Highlight(solrField, highlightSnippets, displayName);
                    highlights.add(highlight);
                    highlightsMap.put(solrField, highlight);
                    highlightsMap2.put(solrField, highlightSnippets);
                    highlightsMap3.put(field, highlight);
                }
            }
        }
        SolrSearchResult solrSearchResult = new SolrSearchResult(query, name);
        /**
         * @todo put all this in the constructor?
         */
        List<String> states = (List<String>) solrDocument.getFieldValue(SearchFields.PUBLICATION_STATUS);
        if (states != null) {
            // set list of all statuses
            // this method also sets booleans for individual statuses
            solrSearchResult.setPublicationStatuses(states);
        }
        // logger.info(id + ": " + description);
        solrSearchResult.setId(id);
        solrSearchResult.setEntityId(entityid);
        if (retrieveEntities) {
            solrSearchResult.setEntity(dvObjectService.findDvObject(entityid));
        }
        solrSearchResult.setIdentifier(identifier);
        solrSearchResult.setPersistentUrl(persistentUrl);
        solrSearchResult.setType(type);
        solrSearchResult.setScore(score);
        solrSearchResult.setNameSort(nameSort);
        solrSearchResult.setReleaseOrCreateDate(release_or_create_date);
        solrSearchResult.setDateToDisplayOnCard(dateToDisplayOnCard);
        solrSearchResult.setMatchedFields(matchedFields);
        solrSearchResult.setHighlightsAsList(highlights);
        solrSearchResult.setHighlightsMap(highlightsMap);
        solrSearchResult.setHighlightsAsMap(highlightsMap3);
        Map<String, String> parent = new HashMap<>();
        String description = (String) solrDocument.getFieldValue(SearchFields.DESCRIPTION);
        solrSearchResult.setDescriptionNoSnippet(description);
        solrSearchResult.setDeaccessionReason(deaccessionReason);
        solrSearchResult.setDvTree(dvTree);
        String originSource = (String) solrDocument.getFieldValue(SearchFields.METADATA_SOURCE);
        if (IndexServiceBean.HARVESTED.equals(originSource)) {
            solrSearchResult.setHarvested(true);
        }
        /**
         * @todo start using SearchConstants class here
         */
        if (type.equals("dataverses")) {
            solrSearchResult.setName(name);
            solrSearchResult.setHtmlUrl(baseUrl + SystemConfig.DATAVERSE_PATH + identifier);
            // Do not set the ImageUrl, let the search include fragment fill in
            // the thumbnail, similarly to how the dataset and datafile cards
            // are handled.
            // solrSearchResult.setImageUrl(baseUrl + "/api/access/dvCardImage/" + entityid);
            /**
             * @todo Expose this API URL after "dvs" is changed to
             * "dataverses". Also, is an API token required for published
             * dataverses? Michael: url changed.
             */
            // solrSearchResult.setApiUrl(baseUrl + "/api/dataverses/" + entityid);
        } else if (type.equals("datasets")) {
            solrSearchResult.setHtmlUrl(baseUrl + "/dataset.xhtml?globalId=" + identifier);
            solrSearchResult.setApiUrl(baseUrl + "/api/datasets/" + entityid);
            // Image url now set via thumbnail api
            // solrSearchResult.setImageUrl(baseUrl + "/api/access/dsCardImage/" + datasetVersionId);
            // No, we don't want to set the base64 thumbnails here.
            // We want to do it inside SearchIncludeFragment, AND ONLY once the rest of the
            // page has already loaded.
            // DatasetVersion datasetVersion = datasetVersionService.find(datasetVersionId);
            // if (datasetVersion != null){
            // solrSearchResult.setDatasetThumbnail(datasetVersion.getDataset().getDatasetThumbnail(datasetVersion));
            // }
            /**
             * @todo Could use getFieldValues (plural) here.
             */
            List<String> datasetDescriptions = (List<String>) solrDocument.getFieldValue(SearchFields.DATASET_DESCRIPTION);
            if (datasetDescriptions != null) {
                String firstDatasetDescription = datasetDescriptions.get(0);
                if (firstDatasetDescription != null) {
                    solrSearchResult.setDescriptionNoSnippet(firstDatasetDescription);
                }
            }
            solrSearchResult.setDatasetVersionId(datasetVersionId);
            solrSearchResult.setCitation(citation);
            solrSearchResult.setCitationHtml(citationPlainHtml);
            if (title != null) {
                // solrSearchResult.setTitle((String) titles.get(0));
                solrSearchResult.setTitle(title);
            } else {
                logger.fine("No title indexed. Setting to empty string to prevent NPE. Dataset id " + entityid + " and version id " + datasetVersionId);
                solrSearchResult.setTitle("");
            }
            List<String> authors = (List) solrDocument.getFieldValues(DatasetFieldConstant.authorName);
            if (authors != null) {
                solrSearchResult.setDatasetAuthors(authors);
            }
        } else if (type.equals("files")) {
            String parentGlobalId = null;
            Object parentGlobalIdObject = solrDocument.getFieldValue(SearchFields.PARENT_IDENTIFIER);
            if (parentGlobalIdObject != null) {
                parentGlobalId = (String) parentGlobalIdObject;
                parent.put(SolrSearchResult.PARENT_IDENTIFIER, parentGlobalId);
            }
            solrSearchResult.setHtmlUrl(baseUrl + "/dataset.xhtml?persistentId=" + parentGlobalId);
            solrSearchResult.setDownloadUrl(baseUrl + "/api/access/datafile/" + entityid);
            /**
             * @todo We are not yet setting the API URL for files because
             * not all files have metadata. Only subsettable files (those
             * with a datatable) seem to have metadata. Furthermore, the
             * response is in XML whereas the rest of the Search API returns
             * JSON.
             */
            // solrSearchResult.setApiUrl(baseUrl + "/api/meta/datafile/" + entityid);
            // solrSearchResult.setImageUrl(baseUrl + "/api/access/fileCardImage/" + entityid);
            solrSearchResult.setName(name);
            solrSearchResult.setFiletype(filetype);
            solrSearchResult.setFileContentType(fileContentType);
            Object fileSizeInBytesObject = solrDocument.getFieldValue(SearchFields.FILE_SIZE_IN_BYTES);
            if (fileSizeInBytesObject != null) {
                try {
                    long fileSizeInBytesLong = (long) fileSizeInBytesObject;
                    solrSearchResult.setFileSizeInBytes(fileSizeInBytesLong);
                } catch (ClassCastException ex) {
                    logger.info("Could not cast file " + entityid + " to long for " + SearchFields.FILE_SIZE_IN_BYTES + ": " + ex.getLocalizedMessage());
                }
            }
            solrSearchResult.setFileMd5((String) solrDocument.getFieldValue(SearchFields.FILE_MD5));
            try {
                solrSearchResult.setFileChecksumType(DataFile.ChecksumType.fromString((String) solrDocument.getFieldValue(SearchFields.FILE_CHECKSUM_TYPE)));
            } catch (IllegalArgumentException ex) {
                logger.info("Exception setting setFileChecksumType: " + ex);
            }
            solrSearchResult.setFileChecksumValue((String) solrDocument.getFieldValue(SearchFields.FILE_CHECKSUM_VALUE));
            solrSearchResult.setUnf((String) solrDocument.getFieldValue(SearchFields.UNF));
            solrSearchResult.setDatasetVersionId(datasetVersionId);
            List<String> fileCategories = (List) solrDocument.getFieldValues(SearchFields.FILE_TAG);
            if (fileCategories != null) {
                solrSearchResult.setFileCategories(fileCategories);
            }
            List<String> tabularDataTags = (List) solrDocument.getFieldValues(SearchFields.TABDATA_TAG);
            if (tabularDataTags != null) {
                Collections.sort(tabularDataTags);
                solrSearchResult.setTabularDataTags(tabularDataTags);
            }
        }
        /**
         * @todo store PARENT_ID as a long instead and cast as such
         */
        parent.put("id", (String) solrDocument.getFieldValue(SearchFields.PARENT_ID));
        parent.put("name", (String) solrDocument.getFieldValue(SearchFields.PARENT_NAME));
        parent.put("citation", (String) solrDocument.getFieldValue(SearchFields.PARENT_CITATION));
        solrSearchResult.setParent(parent);
        solrSearchResults.add(solrSearchResult);
    }
    // ---- spelling suggestions ------------------------------------------
    Map<String, List<String>> spellingSuggestionsByToken = new HashMap<>();
    SpellCheckResponse spellCheckResponse = queryResponse.getSpellCheckResponse();
    if (spellCheckResponse != null) {
        List<SpellCheckResponse.Suggestion> suggestions = spellCheckResponse.getSuggestions();
        for (SpellCheckResponse.Suggestion suggestion : suggestions) {
            spellingSuggestionsByToken.put(suggestion.getToken(), suggestion.getAlternatives());
        }
    }
    // ---- field facets --------------------------------------------------
    List<FacetCategory> facetCategoryList = new ArrayList<>();
    List<FacetCategory> typeFacetCategories = new ArrayList<>();
    boolean hidePublicationStatusFacet = true;
    boolean draftsAvailable = false;
    boolean unpublishedAvailable = false;
    boolean deaccessionedAvailable = false;
    boolean hideMetadataSourceFacet = true;
    for (FacetField facetField : queryResponse.getFacetFields()) {
        FacetCategory facetCategory = new FacetCategory();
        List<FacetLabel> facetLabelList = new ArrayList<>();
        int numMetadataSources = 0;
        for (FacetField.Count facetFieldCount : facetField.getValues()) {
            // logger.info("field: " + facetField.getName() + " " + facetFieldCount.getName() + " (" + facetFieldCount.getCount() + ")");
            if (facetFieldCount.getCount() > 0) {
                FacetLabel facetLabel = new FacetLabel(facetFieldCount.getName(), facetFieldCount.getCount());
                // quote field facets
                facetLabel.setFilterQuery(facetField.getName() + ":\"" + facetFieldCount.getName() + "\"");
                facetLabelList.add(facetLabel);
                if (facetField.getName().equals(SearchFields.PUBLICATION_STATUS)) {
                    if (facetLabel.getName().equals(IndexServiceBean.getUNPUBLISHED_STRING())) {
                        unpublishedAvailable = true;
                    } else if (facetLabel.getName().equals(IndexServiceBean.getDRAFT_STRING())) {
                        draftsAvailable = true;
                    } else if (facetLabel.getName().equals(IndexServiceBean.getDEACCESSIONED_STRING())) {
                        deaccessionedAvailable = true;
                    }
                }
                if (facetField.getName().equals(SearchFields.METADATA_SOURCE)) {
                    numMetadataSources++;
                }
            }
        }
        // only show the metadata-source facet when there is more than one source
        if (numMetadataSources > 1) {
            hideMetadataSourceFacet = false;
        }
        facetCategory.setName(facetField.getName());
        // hopefully people will never see the raw facetField.getName() because it may well have an _s at the end
        facetCategory.setFriendlyName(facetField.getName());
        /**
         * @todo hmm, we thought we wanted the datasetFields array to go
         * away once we have more granularity than findAll() available per
         * the todo above but we need a way to lookup by Solr field, so
         * we'll build a hashmap
         */
        for (DatasetFieldType datasetField : datasetFields) {
            String solrFieldNameForDataset = datasetField.getSolrField().getNameFacetable();
            String friendlyName = datasetField.getDisplayName();
            if (solrFieldNameForDataset != null && facetField.getName().endsWith(datasetField.getTmpNullFieldTypeIdentifier())) {
                // give it the non-friendly name so we remember to update the reference data script for datasets
                facetCategory.setName(facetField.getName());
            } else if (solrFieldNameForDataset != null && facetField.getName().equals(solrFieldNameForDataset)) {
                if (friendlyName != null && !friendlyName.isEmpty()) {
                    facetCategory.setFriendlyName(friendlyName);
                    // stop examining available dataset fields. we found a match
                    break;
                }
            }
            datasetfieldFriendlyNamesBySolrField.put(datasetField.getSolrField().getNameFacetable(), friendlyName);
        }
        /**
         * @todo get rid of this crazy reflection, per todo above... or
         * should we... let's put into a hash the friendly names of facet
         * categories, indexed by Solr field
         */
        for (Field fieldObject : staticSearchFields) {
            String name = fieldObject.getName();
            String staticSearchField = null;
            try {
                staticSearchField = (String) fieldObject.get(searchFieldsObject);
            } catch (IllegalArgumentException | IllegalAccessException ex) {
                Logger.getLogger(SearchServiceBean.class.getName()).log(Level.SEVERE, null, ex);
            }
            if (staticSearchField != null && facetField.getName().equals(staticSearchField)) {
                // derive a friendly name from the constant's name, e.g. FILE_TYPE -> "File Type"
                String[] parts = name.split("_");
                StringBuilder stringBuilder = new StringBuilder();
                for (String part : parts) {
                    stringBuilder.append(getCapitalizedName(part.toLowerCase()) + " ");
                }
                String friendlyNameWithTrailingSpace = stringBuilder.toString();
                String friendlyName = friendlyNameWithTrailingSpace.replaceAll(" $", "");
                facetCategory.setFriendlyName(friendlyName);
                // logger.info("adding <<<" + staticSearchField + ":" + friendlyName + ">>>");
                staticSolrFieldFriendlyNamesBySolrField.put(staticSearchField, friendlyName);
                // stop examining the declared/static fields in the SearchFields object. we found a match
                break;
            }
        }
        facetCategory.setFacetLabel(facetLabelList);
        if (!facetLabelList.isEmpty()) {
            if (facetCategory.getName().equals(SearchFields.TYPE)) {
                // the "type" facet is special, these are not
                typeFacetCategories.add(facetCategory);
            } else if (facetCategory.getName().equals(SearchFields.PUBLICATION_STATUS)) {
                if (unpublishedAvailable || draftsAvailable || deaccessionedAvailable) {
                    hidePublicationStatusFacet = false;
                }
                if (!hidePublicationStatusFacet) {
                    facetCategoryList.add(facetCategory);
                }
            } else if (facetCategory.getName().equals(SearchFields.METADATA_SOURCE)) {
                if (!hideMetadataSourceFacet) {
                    facetCategoryList.add(facetCategory);
                }
            } else {
                facetCategoryList.add(facetCategory);
            }
        }
    }
    // for now the only range facet is citation year
    for (RangeFacet<String, String> rangeFacet : queryResponse.getFacetRanges()) {
        FacetCategory facetCategory = new FacetCategory();
        List<FacetLabel> facetLabelList = new ArrayList<>();
        for (Object rfObj : rangeFacet.getCounts()) {
            RangeFacet.Count rangeFacetCount = (RangeFacet.Count) rfObj;
            String valueString = rangeFacetCount.getValue();
            Integer start = Integer.parseInt(valueString);
            Integer end = start + Integer.parseInt(rangeFacet.getGap().toString());
            // to avoid overlapping dates
            end = end - 1;
            if (rangeFacetCount.getCount() > 0) {
                // Long.valueOf instead of the deprecated new Long(...) constructor
                FacetLabel facetLabel = new FacetLabel(start + "-" + end, Long.valueOf(rangeFacetCount.getCount()));
                // special [12 TO 34] syntax for range facets
                facetLabel.setFilterQuery(rangeFacet.getName() + ":" + "[" + start + " TO " + end + "]");
                facetLabelList.add(facetLabel);
            }
        }
        facetCategory.setName(rangeFacet.getName());
        facetCategory.setFacetLabel(facetLabelList);
        // reverse to show the newest citation year range at the top
        List<FacetLabel> facetLabelListReversed = new ArrayList<>();
        ListIterator<FacetLabel> li = facetLabelList.listIterator(facetLabelList.size());
        while (li.hasPrevious()) {
            facetLabelListReversed.add(li.previous());
        }
        facetCategory.setFacetLabel(facetLabelListReversed);
        if (!facetLabelList.isEmpty()) {
            facetCategoryList.add(facetCategory);
        }
    }
    // ---- assemble the response object ----------------------------------
    SolrQueryResponse solrQueryResponse = new SolrQueryResponse(solrQuery);
    solrQueryResponse.setSolrSearchResults(solrSearchResults);
    solrQueryResponse.setSpellingSuggestionsByToken(spellingSuggestionsByToken);
    solrQueryResponse.setFacetCategoryList(facetCategoryList);
    solrQueryResponse.setTypeFacetCategories(typeFacetCategories);
    solrQueryResponse.setNumResultsFound(queryResponse.getResults().getNumFound());
    solrQueryResponse.setResultsStart(queryResponse.getResults().getStart());
    solrQueryResponse.setDatasetfieldFriendlyNamesBySolrField(datasetfieldFriendlyNamesBySolrField);
    solrQueryResponse.setStaticSolrFieldFriendlyNamesBySolrField(staticSolrFieldFriendlyNamesBySolrField);
    String[] filterQueriesArray = solrQuery.getFilterQueries();
    if (filterQueriesArray != null) {
        // null check added because these tests were failing: mvn test -Dtest=SearchIT
        List<String> actualFilterQueries = Arrays.asList(filterQueriesArray);
        logger.fine("actual filter queries: " + actualFilterQueries);
        solrQueryResponse.setFilterQueriesActual(actualFilterQueries);
    } else {
        // how often is this null?
        logger.info("solrQuery.getFilterQueries() was null");
    }
    solrQueryResponse.setDvObjectCounts(queryResponse.getFacetField("dvObjectType"));
    solrQueryResponse.setPublicationStatusCounts(queryResponse.getFacetField("publicationStatus"));
    return solrQueryResponse;
}
use of org.apache.solr.client.solrj.response.SpellCheckResponse in project jackrabbit-oak by apache.
the class SolrQueryIndex method putSpellChecks.
/**
 * Turns spell-check suggestions into result rows, keeping only the
 * alternatives for which the calling user can read at least one matching
 * document (ACL check via {@link Filter#isAccessible}).
 *
 * @throws IOException         on communication failure with Solr
 * @throws SolrServerException on a Solr-side query failure
 */
private void putSpellChecks(SpellCheckResponse spellCheckResponse, final Deque<SolrResultRow> queue, Filter filter, OakSolrConfiguration configuration, SolrClient solrServer) throws IOException, SolrServerException {
    // gather every alternative term proposed by the spellchecker
    Collection<String> candidateTerms = new ArrayList<String>();
    for (SpellCheckResponse.Suggestion suggestion : spellCheckResponse.getSuggestions()) {
        candidateTerms.addAll(suggestion.getAlternatives());
    }
    // ACL filter spellcheck results: probe each candidate with a sample query
    for (String term : candidateTerms) {
        SolrQuery probeQuery = new SolrQuery();
        probeQuery.setParam("q", term);
        probeQuery.setParam("df", configuration.getCatchAllField());
        probeQuery.setParam("q.op", "AND");
        probeQuery.setParam("rows", "100");
        SolrDocumentList matches = solrServer.query(probeQuery).getResults();
        if (matches == null || matches.getNumFound() <= 0) {
            continue;
        }
        for (SolrDocument match : matches) {
            String path = String.valueOf(match.getFieldValue(configuration.getPathField()));
            if (filter.isAccessible(path)) {
                // one readable hit is enough to surface this alternative
                queue.add(new SolrResultRow(term));
                break;
            }
        }
    }
}
use of org.apache.solr.client.solrj.response.SpellCheckResponse in project ddf by codice.
the class SolrMetacardClientImplTest method testQuerySpellCheckOn.
@Test
public void testQuerySpellCheckOn() throws Exception {
    // build a query with the "spellcheck" property enabled;
    // Boolean.TRUE replaces the deprecated new Boolean("true") constructor
    QueryRequest request = createQuery(builder.attribute("anyText").is().like().text("normal"));
    request.getProperties().put("spellcheck", Boolean.TRUE);
    List<String> names = Collections.singletonList("title");
    List<String> values = Collections.singletonList("normal");
    // stub a spellcheck response carrying a single collation ("real", 2 hits)
    SpellCheckResponse spellCheckResponse = mock(SpellCheckResponse.class);
    List<Collation> collations = new ArrayList<>();
    Collation collation = new Collation();
    collation.setCollationQueryString("real");
    collation.setNumberOfHits(2);
    collations.add(collation);
    Map<String, String> attributes = createAttributes(names, values);
    mockDynamicSchemsolverCalls(createAttributeDescriptor(names), attributes);
    when(queryResponse.getSpellCheckResponse()).thenReturn(spellCheckResponse);
    when(queryResponse.getSpellCheckResponse().getCollatedResults()).thenReturn(collations);
    when(queryResponse.getResults()).thenReturn(createSolrDocumentList(attributes));
    // run the query and verify the spellcheck path re-reads the results
    List<Result> results = clientImpl.query(request).getResults();
    assertThat(results.size(), is(1));
    verify(queryResponse, times(2)).getResults();
}
Aggregations