Usage of org.apache.solr.common.params.SolrParams in the lucene-solr project (Apache): class TermVectorReusingLeafReader, method doHighlightingByHighlighter.
/**
 * Highlights and returns the highlight object for this field -- a String[] by default. Null if none.
 *
 * @param doc         the stored document supplying the raw field values to highlight
 * @param docId       the Lucene internal doc id, used to fetch term vectors from the reader
 * @param schemaField the schema field being highlighted (controls multiValued/termOffsets behavior)
 * @param query       the query whose terms are highlighted
 * @param reader      the index reader used as the term-vector source
 * @param req         the current Solr request; its params drive all highlighting options
 * @return the per-field response fragment object, or null if nothing was highlighted
 * @throws IOException on index access failure
 */
@SuppressWarnings("unchecked")
protected Object doHighlightingByHighlighter(Document doc, int docId, SchemaField schemaField, Query query, IndexReader reader, SolrQueryRequest req) throws IOException {
final SolrParams params = req.getParams();
final String fieldName = schemaField.getName();
// Cap on how many values of a multi-valued field we examine; single-valued fields default to 1.
final int mvToExamine = params.getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, (schemaField.multiValued()) ? Integer.MAX_VALUE : 1);
// Technically this is the max *fragments* (snippets), not max values:
int mvToMatch = params.getFieldInt(fieldName, HighlightParams.MAX_MULTIVALUED_TO_MATCH, Integer.MAX_VALUE);
if (mvToExamine <= 0 || mvToMatch <= 0) {
return null;
}
int maxCharsToAnalyze = params.getFieldInt(fieldName, HighlightParams.MAX_CHARS, DEFAULT_MAX_CHARS);
if (maxCharsToAnalyze < 0) {
//e.g. -1 means "no limit" -- analyze everything
maxCharsToAnalyze = Integer.MAX_VALUE;
}
List<String> fieldValues = getFieldValues(doc, fieldName, mvToExamine, maxCharsToAnalyze, req);
if (fieldValues.isEmpty()) {
return null;
}
// preserve order of values in a multiValued list
boolean preserveMulti = params.getFieldBool(fieldName, HighlightParams.PRESERVE_MULTI, false);
int numFragments = getMaxSnippets(fieldName, params);
boolean mergeContiguousFragments = isMergeContiguousFragments(fieldName, params);
List<TextFragment> frags = new ArrayList<>();
//Try term vectors, which is faster
// note: offsets are minimally sufficient for this HL.
final Fields tvFields = schemaField.storeTermOffsets() ? reader.getTermVectors(docId) : null;
final TokenStream tvStream = TokenSources.getTermVectorTokenStreamOrNull(fieldName, tvFields, maxCharsToAnalyze - 1);
// We need to wrap in OffsetWindowTokenFilter if multi-valued
final OffsetWindowTokenFilter tvWindowStream;
if (tvStream != null && fieldValues.size() > 1) {
tvWindowStream = new OffsetWindowTokenFilter(tvStream);
} else {
tvWindowStream = null;
}
// Process each stored value of the field; budgets (mvToMatch, maxCharsToAnalyze) shrink as we go.
for (String thisText : fieldValues) {
if (mvToMatch <= 0 || maxCharsToAnalyze <= 0) {
// either the fragment budget or the char budget is exhausted; stop early
break;
}
TokenStream tstream;
if (tvWindowStream != null) {
// if we have a multi-valued field with term vectors, then get the next offset window
tstream = tvWindowStream.advanceToNextWindowOfLength(thisText.length());
} else if (tvStream != null) {
// single-valued with term vectors
tstream = tvStream;
} else {
// fall back to analyzer
tstream = createAnalyzerTStream(schemaField, thisText);
}
Highlighter highlighter;
if (params.getFieldBool(fieldName, HighlightParams.USE_PHRASE_HIGHLIGHTER, true)) {
// We're going to call getPhraseHighlighter and it might consume the tokenStream. If it does, the tokenStream
// needs to implement reset() efficiently.
//If the tokenStream is right from the term vectors, then CachingTokenFilter is unnecessary.
// It should be okay if OffsetLimit won't get applied in this case.
final TokenStream tempTokenStream;
if (tstream != tvStream) {
if (maxCharsToAnalyze >= thisText.length()) {
// the whole value fits in the remaining char budget; no offset limit needed
tempTokenStream = new CachingTokenFilter(tstream);
} else {
// enforce the remaining char budget on the analyzed stream
tempTokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tstream, maxCharsToAnalyze));
}
} else {
tempTokenStream = tstream;
}
// get highlighter
highlighter = getPhraseHighlighter(query, fieldName, req, tempTokenStream);
// if the CachingTokenFilter was consumed then use it going forward.
if (tempTokenStream instanceof CachingTokenFilter && ((CachingTokenFilter) tempTokenStream).isCached()) {
tstream = tempTokenStream;
}
//tstream.reset(); not needed; getBestTextFragments will reset it.
} else {
// use "the old way"
highlighter = getHighlighter(query, fieldName, req);
}
highlighter.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
// consume the char budget: subsequent values analyze only whatever remains
maxCharsToAnalyze -= thisText.length();
// Highlight!
try {
TextFragment[] bestTextFragments = highlighter.getBestTextFragments(tstream, thisText, mergeContiguousFragments, numFragments);
for (TextFragment bestTextFragment : bestTextFragments) {
if (//can happen via mergeContiguousFragments
bestTextFragment == null)
continue;
// normally we want a score (must be highlighted), but if preserveMulti then we return a snippet regardless.
if (bestTextFragment.getScore() > 0 || preserveMulti) {
frags.add(bestTextFragment);
if (bestTextFragment.getScore() > 0)
// note: limits fragments (for multi-valued fields), not quite the number of values
--mvToMatch;
}
}
} catch (InvalidTokenOffsetsException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
}
}
// Put the fragments onto the Solr response (docSummaries)
if (frags.size() > 0) {
// sort such that the fragments with the highest score come first
if (!preserveMulti) {
Collections.sort(frags, (arg0, arg1) -> Float.compare(arg1.getScore(), arg0.getScore()));
}
// Truncate list to hl.snippets, but not when hl.preserveMulti
if (frags.size() > numFragments && !preserveMulti) {
frags = frags.subList(0, numFragments);
}
return getResponseForFragments(frags, req);
}
//no highlights for this field
return null;
}
Usage of org.apache.solr.common.params.SolrParams in the lucene-solr project (Apache): class PostingsSolrHighlighter, method getHighlighter.
@Override
protected UnifiedHighlighter getHighlighter(SolrQueryRequest req) {
    // Settings forced on every request so that the UnifiedHighlighter behaves
    // like the legacy PostingsHighlighter did.
    ModifiableSolrParams forced = new ModifiableSolrParams();
    forced.set(HighlightParams.OFFSET_SOURCE, "POSTINGS");
    forced.set(HighlightParams.FIELD_MATCH, true);
    forced.set(HighlightParams.USE_PHRASE_HIGHLIGHTER, false);
    forced.set(HighlightParams.FRAGSIZE, -1);

    // Fallback values, used only when the original request doesn't supply one.
    ModifiableSolrParams fallbacks = new ModifiableSolrParams();
    fallbacks.set(HighlightParams.DEFAULT_SUMMARY, true);
    fallbacks.set(HighlightParams.TAG_ELLIPSIS, "... ");

    // Precedence (highest first): forced invariants, then the original request
    // params, then our fallbacks.
    SolrParams merged =
        SolrParams.wrapDefaults(forced, SolrParams.wrapDefaults(req.getParams(), fallbacks));

    // Delegate with a synthetic request carrying the merged params.
    try (LocalSolrQueryRequest fakeReq = new LocalSolrQueryRequest(req.getCore(), merged)) {
        return super.getHighlighter(fakeReq);
    }
}
Usage of org.apache.solr.common.params.SolrParams in the lucene-solr project (Apache): class SimpleFacets, method parseParams.
/**
 * Parses the local params of a facet request parameter and resolves the effective
 * facet value, output key, tags, thread count, and base DocSet.
 *
 * @param type  the facet type constant (e.g. {@code FacetParams.FACET_QUERY})
 * @param param the raw request parameter value, possibly prefixed with local params
 * @return the fully resolved {@link ParsedParams} for this facet
 * @throws SyntaxError if the local params cannot be parsed
 * @throws IOException if computing the excluded-filter DocSet fails
 */
protected ParsedParams parseParams(String type, String param) throws SyntaxError, IOException {
    SolrParams localParams = QueryParsing.getLocalParams(param, req.getParams());
    DocSet docs = docsOrig;
    String facetValue = param;
    String key = param;
    List<String> tags = Collections.emptyList();
    int threads = -1;

    // No local params: everything comes straight from the global params.
    if (localParams == null) {
        SolrParams params = global;
        SolrParams required = new RequiredSolrParams(params);
        return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
    }

    SolrParams params = SolrParams.wrapDefaults(localParams, global);
    SolrParams required = new RequiredSolrParams(params);

    // Remove local params unless it's a query. Use equals() rather than reference
    // equality (==): comparing Strings with == only works for interned constants
    // and silently misbehaves for dynamically built strings.
    if (!FacetParams.FACET_QUERY.equals(type)) {
        // TODO Cut over to an Enum here
        facetValue = localParams.get(CommonParams.VALUE);
    }

    // Reset the default key now that local params have been removed.
    key = facetValue;
    // Allow an explicit override of the key via the local params.
    key = localParams.get(CommonParams.OUTPUT_KEY, key);

    String tagStr = localParams.get(CommonParams.TAG);
    tags = tagStr == null ? Collections.<String>emptyList() : StrUtils.splitSmart(tagStr, ',');

    String threadStr = localParams.get(CommonParams.THREADS);
    if (threadStr != null) {
        threads = Integer.parseInt(threadStr);
    }

    // Figure out if we need a new base DocSet (filters excluded by tag).
    String excludeStr = localParams.get(CommonParams.EXCLUDE);
    if (excludeStr == null) {
        return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
    }

    List<String> excludeTagList = StrUtils.splitSmart(excludeStr, ',');
    docs = computeDocSet(docs, excludeTagList);
    return new ParsedParams(localParams, params, required, facetValue, docs, key, tags, threads);
}
Usage of org.apache.solr.common.params.SolrParams in the lucene-solr project (Apache): class RequestUtil, method processParams.
/**
 * Set default-ish params on a SolrQueryRequest as well as do standard macro processing and JSON request parsing.
 *
 * <p>Side effects: may call {@code req.setParams(...)} (up to twice) and {@code req.setJSON(...)}.
 * Order matters throughout -- JSON content streams are folded into the param map before
 * defaults/appends/invariants are layered on, and macros are expanded last (except on shard requests).
 *
 * @param handler The search handler this is for (may be null if you don't want this method touching the content streams)
 * @param req The request whose params we are interested in
 * @param defaults values to be used if no values are specified in the request params
 * @param appends values to be appended to those from the request (or defaults) when dealing with multi-val params, or treated as another layer of defaults for single-val params.
 * @param invariants values which will be used instead of any request, or default values, regardless of context.
 */
public static void processParams(SolrRequestHandler handler, SolrQueryRequest req, SolrParams defaults, SolrParams appends, SolrParams invariants) {
boolean searchHandler = handler instanceof SearchHandler;
SolrParams params = req.getParams();
// Handle JSON stream for search requests
if (searchHandler && req.getContentStreams() != null) {
Map<String, String[]> map = MultiMapSolrParams.asMultiMap(params, false);
if (!(params instanceof MultiMapSolrParams || params instanceof ModifiableSolrParams)) {
// need to set params on request since we weren't able to access the original map
params = new MultiMapSolrParams(map);
req.setParams(params);
}
// params from the query string should come after (and hence override) JSON content streams
String[] jsonFromParams = map.remove(JSON);
for (ContentStream cs : req.getContentStreams()) {
String contentType = cs.getContentType();
if (contentType == null || !contentType.contains("/json")) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Bad contentType for search handler :" + contentType + " request=" + req);
}
try {
String jsonString = IOUtils.toString(cs.getReader());
if (jsonString != null) {
MultiMapSolrParams.addParam(JSON, jsonString, map);
}
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Exception reading content stream for request:" + req, e);
}
}
// append existing "json" params
if (jsonFromParams != null) {
for (String json : jsonFromParams) {
MultiMapSolrParams.addParam(JSON, json, map);
}
}
}
String[] jsonS = params.getParams(JSON);
boolean hasAdditions = defaults != null || invariants != null || appends != null || jsonS != null;
// short circuit processing
if (!hasAdditions && !params.getBool("expandMacros", true)) {
// nothing to do...
return;
}
boolean isShard = params.getBool("isShard", false);
// hasAdditions==true forces a mutable copy of the param map
Map<String, String[]> newMap = MultiMapSolrParams.asMultiMap(params, hasAdditions);
// The parameters we extract will be propagated anyway.
if (jsonS != null && !isShard) {
for (String json : jsonS) {
getParamsFromJSON(newMap, json);
}
}
// first populate defaults, etc..
// defaults: only fill keys absent from the request
if (defaults != null) {
Map<String, String[]> defaultsMap = MultiMapSolrParams.asMultiMap(defaults);
for (Map.Entry<String, String[]> entry : defaultsMap.entrySet()) {
String key = entry.getKey();
if (!newMap.containsKey(key)) {
newMap.put(key, entry.getValue());
}
}
}
// appends: concatenate onto any existing values for the key
if (appends != null) {
Map<String, String[]> appendsMap = MultiMapSolrParams.asMultiMap(appends);
for (Map.Entry<String, String[]> entry : appendsMap.entrySet()) {
String key = entry.getKey();
String[] arr = newMap.get(key);
if (arr == null) {
newMap.put(key, entry.getValue());
} else {
String[] appendArr = entry.getValue();
String[] newArr = new String[arr.length + appendArr.length];
System.arraycopy(arr, 0, newArr, 0, arr.length);
System.arraycopy(appendArr, 0, newArr, arr.length, appendArr.length);
newMap.put(key, newArr);
}
}
}
// invariants: unconditionally overwrite whatever is there
if (invariants != null) {
newMap.putAll(MultiMapSolrParams.asMultiMap(invariants));
}
if (!isShard) {
// Don't expand macros in shard requests
String[] doMacrosStr = newMap.get("expandMacros");
boolean doMacros = true;
if (doMacrosStr != null) {
doMacros = "true".equals(doMacrosStr[0]);
}
if (doMacros) {
newMap = MacroExpander.expand(newMap);
}
}
// Set these params as soon as possible so if there is an error processing later, things like
// "wt=json" will take effect from the defaults.
// newMap may still change below, but that should be OK
SolrParams newParams = new MultiMapSolrParams(newMap);
req.setParams(newParams);
// For example json.command started to be used in SOLR-6294, and that caused errors here.
if (!searchHandler)
return;
Map<String, Object> json = null;
// Handle JSON body first, so query params will always overlay on that
jsonS = newMap.get(JSON);
if (jsonS != null) {
if (json == null) {
json = new LinkedHashMap<>();
}
mergeJSON(json, JSON, jsonS, new ObjectUtil.ConflictHandler());
}
// fold any "json.*" dotted params into the merged JSON object
for (String key : newMap.keySet()) {
// json.nl, json.wrf are existing query parameters
if (key.startsWith("json.") && !("json.nl".equals(key) || "json.wrf".equals(key))) {
if (json == null) {
json = new LinkedHashMap<>();
}
mergeJSON(json, key, newMap.get(key), new ObjectUtil.ConflictHandler());
}
}
// implement compat for existing components...
// (map JSON request-API keys onto classic query params; skip on shard requests)
if (json != null && !isShard) {
for (Map.Entry<String, Object> entry : json.entrySet()) {
String key = entry.getKey();
String out = null;
boolean arr = false;
if ("query".equals(key)) {
out = "q";
} else if ("filter".equals(key)) {
out = "fq";
arr = true;
} else if ("fields".equals(key)) {
out = "fl";
arr = true;
} else if ("offset".equals(key)) {
out = "start";
} else if ("limit".equals(key)) {
out = "rows";
} else if (SORT.equals(key)) {
out = SORT;
} else if ("params".equals(key) || "facet".equals(key)) {
// handled elsewhere
continue;
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown top-level key in JSON request : " + key);
}
Object val = entry.getValue();
if (arr) {
// multi-valued target: append JSON values after any existing query-param values
String[] existing = newMap.get(out);
List lst = val instanceof List ? (List) val : null;
int existingSize = existing == null ? 0 : existing.length;
int jsonSize = lst == null ? 1 : lst.size();
String[] newval = new String[existingSize + jsonSize];
for (int i = 0; i < existingSize; i++) {
newval[i] = existing[i];
}
if (lst != null) {
for (int i = 0; i < jsonSize; i++) {
Object v = lst.get(i);
newval[existingSize + i] = v.toString();
}
} else {
newval[newval.length - 1] = val.toString();
}
newMap.put(out, newval);
} else {
newMap.put(out, new String[] { val.toString() });
}
}
}
if (json != null) {
req.setJSON(json);
}
}
Usage of org.apache.solr.common.params.SolrParams in the lucene-solr project (Apache): class XSLTResponseWriter, method init.
/**
 * Initializes the writer from its configuration, reading the XSLT cache lifetime
 * (in seconds) from {@code XSLT_CACHE_PARAM}, falling back to {@code XSLT_CACHE_DEFAULT}.
 *
 * @param n the configuration NamedList supplied by Solr
 */
@Override
public void init(NamedList n) {
    final SolrParams p = SolrParams.toSolrParams(n);
    xsltCacheLifetimeSeconds = p.getInt(XSLT_CACHE_PARAM, XSLT_CACHE_DEFAULT);
    // Use SLF4J parameterized logging instead of string concatenation:
    // no eager formatting when the log level is disabled.
    log.info("xsltCacheLifetimeSeconds={}", xsltCacheLifetimeSeconds);
}
Aggregations