Use of org.apache.solr.request.LocalSolrQueryRequest in project lucene-solr by apache.
From the class SpellCheckCollatorTest, method testCollateWithMultipleRequestHandlers.
@Test
public void testCollateWithMultipleRequestHandlers() throws Exception {
  SolrCore core = h.getCore();
  SearchComponent speller = core.getSearchComponent("spellcheck");
  assertTrue("speller is null and it shouldn't be", speller != null);
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.add(SpellCheckComponent.COMPONENT_NAME, "true");
  params.add(SpellingParams.SPELLCHECK_DICT, "multipleFields");
  params.add(SpellingParams.SPELLCHECK_BUILD, "true");
  params.add(SpellingParams.SPELLCHECK_COUNT, "10");
  params.add(SpellingParams.SPELLCHECK_COLLATE, "true");
  params.add(SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "1");
  params.add(SpellingParams.SPELLCHECK_MAX_COLLATIONS, "1");
  params.add(CommonParams.Q, "peac");
  // SpellCheckCompRH has no "qf" defined. It will not find "peace" from "peac" despite it being in the dictionary,
  // because requerying against this request handler results in 0 hits.
  SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH");
  SolrQueryResponse rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap());
  SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
  handler.handleRequest(req, rsp);
  req.close();
  NamedList values = rsp.getValues();
  NamedList spellCheck = (NamedList) values.get("spellcheck");
  NamedList collationHolder = (NamedList) spellCheck.get("collations");
  String singleCollation = (String) collationHolder.get("collation");
  assertNull(singleCollation);
  // SpellCheckCompRH1 has "lowerfilt1" defined in the "qf" param. It will find "peace" from "peac" because
  // requerying against field "lowerfilt1" returns the hit.
  params.remove(SpellingParams.SPELLCHECK_BUILD);
  handler = core.getRequestHandler("spellCheckCompRH1");
  rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap());
  req = new LocalSolrQueryRequest(core, params);
  handler.handleRequest(req, rsp);
  req.close();
  values = rsp.getValues();
  spellCheck = (NamedList) values.get("spellcheck");
  collationHolder = (NamedList) spellCheck.get("collations");
  singleCollation = (String) collationHolder.get("collation");
  assertEquals(singleCollation, "peace");
}
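Every test in this listing follows the same request lifecycle: build a ModifiableSolrParams, wrap it in a LocalSolrQueryRequest against the core, hand it to a named request handler, then close the request. The sketch below is not part of SpellCheckCollatorTest; it only restates that pattern under the same test-harness assumptions (h.getCore(), the spellCheckCompRH handler from the test solrconfig), with a try/finally added so the request is released even if the handler throws.

@Test
public void requestLifecycleSketch() throws Exception {
  // Hedged sketch of the pattern used by the tests in this listing.
  SolrCore core = h.getCore();
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.add(SpellCheckComponent.COMPONENT_NAME, "true");
  params.add(CommonParams.Q, "peac");
  SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH");
  SolrQueryResponse rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap());
  SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
  try {
    handler.handleRequest(req, rsp);
  } finally {
    // Always release the local request, even if the handler throws.
    req.close();
  }
  // If the spellcheck component ran, its output lives under the "spellcheck" key.
  NamedList spellCheck = (NamedList) rsp.getValues().get("spellcheck");
}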
Use of org.apache.solr.request.LocalSolrQueryRequest in project lucene-solr by apache.
From the class SpellCheckCollatorTest, method testExtendedCollate.
@Test
public void testExtendedCollate() throws Exception {
  SolrCore core = h.getCore();
  SearchComponent speller = core.getSearchComponent("spellcheck");
  assertTrue("speller is null and it shouldn't be", speller != null);
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.add(CommonParams.QT, "spellCheckCompRH");
  params.add(CommonParams.Q, "lowerfilt:(+fauth +home +loane)");
  params.add(SpellingParams.SPELLCHECK_EXTENDED_RESULTS, "true");
  params.add(SpellCheckComponent.COMPONENT_NAME, "true");
  params.add(SpellingParams.SPELLCHECK_BUILD, "true");
  params.add(SpellingParams.SPELLCHECK_COUNT, "10");
  params.add(SpellingParams.SPELLCHECK_COLLATE, "true");
  // Testing backwards-compatible behavior.
  // Returns 1 collation as a single string.
  // All words are "correct" per the dictionary, but this collation would
  // return no results if tried.
  SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH");
  SolrQueryResponse rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap());
  SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
  handler.handleRequest(req, rsp);
  req.close();
  NamedList values = rsp.getValues();
  NamedList spellCheck = (NamedList) values.get("spellcheck");
  NamedList collationHolder = (NamedList) spellCheck.get("collations");
  String singleCollation = (String) collationHolder.get("collation");
  assertEquals("lowerfilt:(+faith +homer +loaves)", singleCollation);
  // Testing the backwards-compatible response format, but only a
  // collation that would return results is returned.
  params.remove(SpellingParams.SPELLCHECK_BUILD);
  params.add(SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "5");
  params.add(SpellingParams.SPELLCHECK_MAX_COLLATIONS, "1");
  handler = core.getRequestHandler("spellCheckCompRH");
  rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap());
  req = new LocalSolrQueryRequest(core, params);
  handler.handleRequest(req, rsp);
  req.close();
  values = rsp.getValues();
  spellCheck = (NamedList) values.get("spellcheck");
  collationHolder = (NamedList) spellCheck.get("collations");
  singleCollation = (String) collationHolder.get("collation");
  assertEquals("lowerfilt:(+faith +hope +loaves)", singleCollation);
  // Testing returning multiple collations if more than one valid
  // combination exists.
  params.remove(SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES);
  params.remove(SpellingParams.SPELLCHECK_MAX_COLLATIONS);
  params.add(SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "10");
  params.add(SpellingParams.SPELLCHECK_MAX_COLLATIONS, "2");
  handler = core.getRequestHandler("spellCheckCompRH");
  rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap());
  req = new LocalSolrQueryRequest(core, params);
  handler.handleRequest(req, rsp);
  req.close();
  values = rsp.getValues();
  spellCheck = (NamedList) values.get("spellcheck");
  collationHolder = (NamedList) spellCheck.get("collations");
  List<String> collations = collationHolder.getAll("collation");
  assertTrue(collations.size() == 2);
  for (String multipleCollation : collations) {
    assertTrue(multipleCollation.equals("lowerfilt:(+faith +hope +love)") || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)"));
  }
  // Testing returning multiple collations with the expanded collation
  // response format.
  params.add(SpellingParams.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true");
  handler = core.getRequestHandler("spellCheckCompRH");
  rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap());
  req = new LocalSolrQueryRequest(core, params);
  handler.handleRequest(req, rsp);
  req.close();
  values = rsp.getValues();
  spellCheck = (NamedList) values.get("spellcheck");
  collationHolder = (NamedList) spellCheck.get("collations");
  List<NamedList> expandedCollationList = collationHolder.getAll("collation");
  Set<String> usedcollations = new HashSet<>();
  assertTrue(expandedCollationList.size() == 2);
  for (NamedList expandedCollation : expandedCollationList) {
    String multipleCollation = (String) expandedCollation.get("collationQuery");
    assertTrue(multipleCollation.equals("lowerfilt:(+faith +hope +love)") || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)"));
    assertTrue(!usedcollations.contains(multipleCollation));
    usedcollations.add(multipleCollation);
    int hits = (Integer) expandedCollation.get("hits");
    assertTrue(hits == 1);
    NamedList misspellingsAndCorrections = (NamedList) expandedCollation.get("misspellingsAndCorrections");
    assertTrue(misspellingsAndCorrections.size() == 3);
    String correctionForFauth = (String) misspellingsAndCorrections.get("fauth");
    String correctionForHome = (String) misspellingsAndCorrections.get("home");
    String correctionForLoane = (String) misspellingsAndCorrections.get("loane");
    assertTrue(correctionForFauth.equals("faith"));
    assertTrue(correctionForHome.equals("hope"));
    assertTrue(correctionForLoane.equals("love") || correctionForLoane.equals("loaves"));
  }
}
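The walk from "spellcheck" to "collations" to "collation" is repeated before every assertion above. A small helper along these lines (my own sketch, not a method of SpellCheckCollatorTest) would keep that navigation in one place; it assumes the non-extended collation format, where each "collation" entry is a plain String.

// Hypothetical helper for the tests above; not part of the original class.
private static List<String> getCollations(SolrQueryResponse rsp) {
  NamedList values = rsp.getValues();
  NamedList spellCheck = (NamedList) values.get("spellcheck");
  NamedList collationHolder = (NamedList) spellCheck.get("collations");
  // With SPELLCHECK_COLLATE_EXTENDED_RESULTS enabled, each entry is a NamedList instead of a String.
  return collationHolder.getAll("collation");
}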
Use of org.apache.solr.request.LocalSolrQueryRequest in project lucene-solr by apache.
From the class SpellCheckCollatorTest, method testCollationWithHypens.
@Test
public void testCollationWithHypens() throws Exception {
  SolrCore core = h.getCore();
  SearchComponent speller = core.getSearchComponent("spellcheck");
  assertTrue("speller is null and it shouldn't be", speller != null);
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.add(SpellCheckComponent.COMPONENT_NAME, "true");
  params.add(SpellingParams.SPELLCHECK_BUILD, "true");
  params.add(SpellingParams.SPELLCHECK_COUNT, "10");
  params.add(SpellingParams.SPELLCHECK_COLLATE, "true");
  params.add(CommonParams.Q, "lowerfilt:(hypenated-wotd)");
  {
    SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH");
    SolrQueryResponse rsp = new SolrQueryResponse();
    rsp.addResponseHeader(new SimpleOrderedMap());
    SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
    handler.handleRequest(req, rsp);
    req.close();
    NamedList values = rsp.getValues();
    NamedList spellCheck = (NamedList) values.get("spellcheck");
    NamedList collationHolder = (NamedList) spellCheck.get("collations");
    List<String> collations = collationHolder.getAll("collation");
    assertTrue(collations.size() == 1);
    String collation = collations.iterator().next();
    assertTrue("Incorrect collation: " + collation, "lowerfilt:(hyphenated-word)".equals(collation));
  }
  params.remove(CommonParams.Q);
  params.add("defType", "dismax");
  params.add("qf", "lowerfilt");
  params.add(CommonParams.Q, "hypenated-wotd");
  {
    SolrRequestHandler handler = core.getRequestHandler("spellCheckCompRH");
    SolrQueryResponse rsp = new SolrQueryResponse();
    rsp.add("responseHeader", new SimpleOrderedMap());
    SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
    handler.handleRequest(req, rsp);
    req.close();
    NamedList values = rsp.getValues();
    NamedList spellCheck = (NamedList) values.get("spellcheck");
    NamedList collationHolder = (NamedList) spellCheck.get("collations");
    List<String> collations = collationHolder.getAll("collation");
    assertTrue(collations.size() == 1);
    String collation = collations.iterator().next();
    assertTrue("Incorrect collation: " + collation, "hyphenated-word".equals(collation));
  }
}
Use of org.apache.solr.request.LocalSolrQueryRequest in project lucene-solr by apache.
From the class IgnoreCommitOptimizeUpdateProcessorFactoryTest, method processCommit.
SolrQueryResponse processCommit(final String chain, boolean optimize, Boolean commitEndPoint) throws IOException {
  SolrCore core = h.getCore();
  UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain);
  assertNotNull("No Chain named: " + chain, pc);
  SolrQueryResponse rsp = new SolrQueryResponse();
  SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams());
  if (commitEndPoint != null) {
    ((ModifiableSolrParams) req.getParams()).set(DistributedUpdateProcessor.COMMIT_END_POINT, commitEndPoint.booleanValue());
  }
  try {
    SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
    CommitUpdateCommand cmd = new CommitUpdateCommand(req, false);
    cmd.optimize = optimize;
    UpdateRequestProcessor processor = pc.createProcessor(req, rsp);
    processor.processCommit(cmd);
  } finally {
    SolrRequestInfo.clearRequestInfo();
    req.close();
  }
  return rsp;
}
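A caller of this helper only supplies the chain name and the commit flavor; a hedged usage sketch follows. The chain name is a placeholder and must match an update processor chain defined in the test solrconfig.

@Test
public void processCommitUsageSketch() throws Exception {
  // Hypothetical chain name; substitute one configured for the test core.
  SolrQueryResponse rsp = processCommit("ignore-commit-from-client", true, Boolean.TRUE);
  // The factory under test decides whether the commit/optimize is ignored;
  // a real assertion would inspect side effects, not just the absence of errors.
  assertNull(rsp.getException());
}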
Use of org.apache.solr.request.LocalSolrQueryRequest in project lucene-solr by apache.
From the class CarrotClusteringEngine, method getDocuments.
/**
 * Prepares Carrot2 documents for clustering.
 */
private List<Document> getDocuments(SolrDocumentList solrDocList, Map<SolrDocument, Integer> docIds, Query query, final SolrQueryRequest sreq) throws IOException {
  SolrHighlighter highlighter = null;
  SolrParams solrParams = sreq.getParams();
  SolrCore core = sreq.getCore();
  String urlField = solrParams.get(CarrotParams.URL_FIELD_NAME, "url");
  String titleFieldSpec = solrParams.get(CarrotParams.TITLE_FIELD_NAME, "title");
  String snippetFieldSpec = solrParams.get(CarrotParams.SNIPPET_FIELD_NAME, titleFieldSpec);
  String languageField = solrParams.get(CarrotParams.LANGUAGE_FIELD_NAME, null);
  // Maps Solr field names to Carrot2 custom field names
  Map<String, String> customFields = getCustomFieldsMap(solrParams);
  // Parse the language code map string into a map
  Map<String, String> languageCodeMap = new HashMap<>();
  if (StringUtils.isNotBlank(languageField)) {
    for (String pair : solrParams.get(CarrotParams.LANGUAGE_CODE_MAP, "").split("[, ]")) {
      final String[] split = pair.split(":");
      if (split.length == 2 && StringUtils.isNotBlank(split[0]) && StringUtils.isNotBlank(split[1])) {
        languageCodeMap.put(split[0], split[1]);
      } else {
        log.warn("Unsupported format for " + CarrotParams.LANGUAGE_CODE_MAP + ": '" + pair + "'. Skipping this mapping.");
      }
    }
  }
  // Get the documents
  boolean produceSummary = solrParams.getBool(CarrotParams.PRODUCE_SUMMARY, false);
  SolrQueryRequest req = null;
  String[] snippetFieldAry = null;
  if (produceSummary) {
    highlighter = HighlightComponent.getHighlighter(core);
    if (highlighter != null) {
      Map<String, Object> args = new HashMap<>();
      snippetFieldAry = snippetFieldSpec.split("[, ]");
      args.put(HighlightParams.FIELDS, snippetFieldAry);
      args.put(HighlightParams.HIGHLIGHT, "true");
      // We don't care about actually highlighting the area
      args.put(HighlightParams.SIMPLE_PRE, "");
      args.put(HighlightParams.SIMPLE_POST, "");
      args.put(HighlightParams.FRAGSIZE, solrParams.getInt(CarrotParams.SUMMARY_FRAGSIZE, solrParams.getInt(HighlightParams.FRAGSIZE, 100)));
      args.put(HighlightParams.SNIPPETS, solrParams.getInt(CarrotParams.SUMMARY_SNIPPETS, solrParams.getInt(HighlightParams.SNIPPETS, 1)));
      req = new LocalSolrQueryRequest(core, query.toString(), "", 0, 1, args) {
        @Override
        public SolrIndexSearcher getSearcher() {
          return sreq.getSearcher();
        }
      };
    } else {
      log.warn("No highlighter configured, cannot produce summary");
      produceSummary = false;
    }
  }
  Iterator<SolrDocument> docsIter = solrDocList.iterator();
  List<Document> result = new ArrayList<>(solrDocList.size());
  float[] scores = { 1.0f };
  int[] docsHolder = new int[1];
  Query theQuery = query;
  while (docsIter.hasNext()) {
    SolrDocument sdoc = docsIter.next();
    String snippet = null;
    // See comment in ClusteringComponent#finishStage().
    if (produceSummary && docIds != null) {
      docsHolder[0] = docIds.get(sdoc).intValue();
      DocList docAsList = new DocSlice(0, 1, docsHolder, scores, 1, 1.0f);
      NamedList<Object> highlights = highlighter.doHighlighting(docAsList, theQuery, req, snippetFieldAry);
      if (highlights != null && highlights.size() == 1) {
        // Should only be one value given our setup.
        // Should only be one document.
        @SuppressWarnings("unchecked") NamedList<String[]> tmp = (NamedList<String[]>) highlights.getVal(0);
        final StringBuilder sb = new StringBuilder();
        for (int j = 0; j < snippetFieldAry.length; j++) {
          // Join fragments with a period, so that Carrot2 does not create
          // cross-fragment phrases; such phrases rarely make sense.
          String[] highlt = tmp.get(snippetFieldAry[j]);
          if (highlt != null && highlt.length > 0) {
            for (int i = 0; i < highlt.length; i++) {
              sb.append(highlt[i]);
              sb.append(" . ");
            }
          }
        }
        snippet = sb.toString();
      }
    }
    // If summaries are not enabled or summary generation failed, use the full content.
    if (snippet == null) {
      snippet = getConcatenated(sdoc, snippetFieldSpec);
    }
    // Create a Carrot2 document
    Document carrotDocument = new Document(getConcatenated(sdoc, titleFieldSpec), snippet, ObjectUtils.toString(sdoc.getFieldValue(urlField), ""));
    // Store the Solr id of the document; we need it to map document instances
    // found in clusters back to identifiers.
    carrotDocument.setField(SOLR_DOCUMENT_ID, sdoc.getFieldValue(idFieldName));
    // Set language
    if (StringUtils.isNotBlank(languageField)) {
      Collection<Object> languages = sdoc.getFieldValues(languageField);
      if (languages != null) {
        // Use the first Carrot2-supported language
        for (Object l : languages) {
          String lang = ObjectUtils.toString(l, "");
          if (languageCodeMap.containsKey(lang)) {
            lang = languageCodeMap.get(lang);
          }
          // Detected language codes may use dashes to separate language
          // variants, such as 'zh-cn', but Carrot2 uses underscores.
          if (lang.indexOf('-') > 0) {
            lang = lang.replace('-', '_');
          }
          // If the language is supported by Carrot2, we'll get a non-null value
          final LanguageCode carrot2Language = LanguageCode.forISOCode(lang);
          if (carrot2Language != null) {
            carrotDocument.setLanguage(carrot2Language);
            break;
          }
        }
      }
    }
    // Add custom fields
    if (customFields != null) {
      for (Entry<String, String> entry : customFields.entrySet()) {
        carrotDocument.setField(entry.getValue(), sdoc.getFieldValue(entry.getKey()));
      }
    }
    result.add(carrotDocument);
  }
  return result;
}
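CarrotParams.LANGUAGE_CODE_MAP is parsed above as whitespace- or comma-separated "from:to" pairs. The standalone snippet below replays just that parsing with an invented sample value, outside of Solr, to show what ends up in languageCodeMap; it approximates the isNotBlank checks with plain isEmpty.

// Self-contained illustration of the LANGUAGE_CODE_MAP parsing; the sample value is invented.
import java.util.HashMap;
import java.util.Map;

public class LanguageCodeMapDemo {
  public static void main(String[] args) {
    String raw = "deu:de,fra:fr bad-pair";
    Map<String, String> languageCodeMap = new HashMap<>();
    for (String pair : raw.split("[, ]")) {
      String[] split = pair.split(":");
      if (split.length == 2 && !split[0].isEmpty() && !split[1].isEmpty()) {
        languageCodeMap.put(split[0], split[1]);
      } else {
        // Mirrors the log.warn branch in getDocuments().
        System.out.println("Skipping malformed mapping: '" + pair + "'");
      }
    }
    // languageCodeMap now contains deu->de and fra->fr; "bad-pair" was reported and skipped.
    System.out.println(languageCodeMap);
  }
}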