Use of org.apache.solr.search.QParser in project lucene-solr by apache.
The class SimpleFacets, method computeDocSet.
protected DocSet computeDocSet(DocSet baseDocSet, List<String> excludeTagList) throws SyntaxError, IOException {
  Map<?, ?> tagMap = (Map<?, ?>) req.getContext().get("tags");
  // rb can be null if facets are being calculated from a RequestHandler e.g. MoreLikeThisHandler
  if (tagMap == null || rb == null) {
    return baseDocSet;
  }
  IdentityHashMap<Query, Boolean> excludeSet = new IdentityHashMap<>();
  for (String excludeTag : excludeTagList) {
    Object olst = tagMap.get(excludeTag);
    // tagMap is expected to map tag names to a Collection of QParsers, but this is subject to change in the future
    if (!(olst instanceof Collection))
      continue;
    for (Object o : (Collection<?>) olst) {
      if (!(o instanceof QParser))
        continue;
      QParser qp = (QParser) o;
      excludeSet.put(qp.getQuery(), Boolean.TRUE);
    }
  }
  if (excludeSet.size() == 0)
    return baseDocSet;
  List<Query> qlist = new ArrayList<>();
  // add the base query
  if (!excludeSet.containsKey(rb.getQuery())) {
    qlist.add(rb.getQuery());
  }
  // add the filters
  if (rb.getFilters() != null) {
    for (Query q : rb.getFilters()) {
      if (!excludeSet.containsKey(q)) {
        qlist.add(q);
      }
    }
  }
  // get the new base docset for this facet
  DocSet base = searcher.getDocSet(qlist);
  if (rb.grouping() && rb.getGroupingSpec().isTruncateGroups()) {
    Grouping grouping = new Grouping(searcher, null, rb.getQueryCommand(), false, 0, false);
    grouping.setWithinGroupSort(rb.getGroupingSpec().getSortWithinGroup());
    if (rb.getGroupingSpec().getFields().length > 0) {
      grouping.addFieldCommand(rb.getGroupingSpec().getFields()[0], req);
    } else if (rb.getGroupingSpec().getFunctions().length > 0) {
      grouping.addFunctionCommand(rb.getGroupingSpec().getFunctions()[0], req);
    } else {
      return base;
    }
    AllGroupHeadsCollector allGroupHeadsCollector = grouping.getCommands().get(0).createAllGroupCollector();
    searcher.search(base.getTopFilter(), allGroupHeadsCollector);
    return new BitDocSet(allGroupHeadsCollector.retrieveGroupHeads(searcher.maxDoc()));
  } else {
    return base;
  }
}
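For context, the excludeTagList handled above usually originates from facet local params: a filter query is tagged with {!tag=...} and a facet excludes it with {!ex=...}, which is what populates the "tags" map in the request context. Below is a minimal SolrJ sketch of such a request; the URL, core name, and field names are illustrative assumptions, not taken from the code above.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class TaggedFacetExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical endpoint and field names, for illustration only.
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
      SolrQuery q = new SolrQuery("*:*");
      // Tag the filter so a facet can exclude it later.
      q.addFilterQuery("{!tag=colorTag}color:red");
      q.setFacet(true);
      // Exclude the tagged filter when counting this facet; on the server side
      // the tag name is what ends up in the excludeTagList passed to computeDocSet.
      q.addFacetField("{!ex=colorTag}color");
      QueryResponse rsp = client.query(q);
      System.out.println(rsp.getFacetField("color").getValues());
    }
  }
}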
Use of org.apache.solr.search.QParser in project lucene-solr by apache.
The class MoreLikeThisHandler, method handleRequestBody.
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
  SolrParams params = req.getParams();
  long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);
  if (timeAllowed > 0) {
    SolrQueryTimeoutImpl.set(timeAllowed);
  }
  try {
    // Set field flags
    ReturnFields returnFields = new SolrReturnFields(req);
    rsp.setReturnFields(returnFields);
    int flags = 0;
    if (returnFields.wantsScore()) {
      flags |= SolrIndexSearcher.GET_SCORES;
    }
    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
    String q = params.get(CommonParams.Q);
    Query query = null;
    SortSpec sortSpec = null;
    List<Query> filters = null;
    try {
      if (q != null) {
        QParser parser = QParser.getParser(q, defType, req);
        query = parser.getQuery();
        sortSpec = parser.getSortSpec(true);
      }
      String[] fqs = req.getParams().getParams(CommonParams.FQ);
      if (fqs != null && fqs.length != 0) {
        filters = new ArrayList<>();
        for (String fq : fqs) {
          if (fq != null && fq.trim().length() != 0) {
            QParser fqp = QParser.getParser(fq, req);
            filters.add(fqp.getQuery());
          }
        }
      }
    } catch (SyntaxError e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }
    SolrIndexSearcher searcher = req.getSearcher();
    MoreLikeThisHelper mlt = new MoreLikeThisHelper(params, searcher);
    // Hold on to the interesting terms if relevant
    TermStyle termStyle = TermStyle.get(params.get(MoreLikeThisParams.INTERESTING_TERMS));
    List<InterestingTerm> interesting = (termStyle == TermStyle.NONE) ? null : new ArrayList<>(mlt.mlt.getMaxQueryTerms());
    DocListAndSet mltDocs = null;
    // Parse Required Params
    // This will either have a single Reader or valid query
    Reader reader = null;
    try {
      if (q == null || q.trim().length() < 1) {
        Iterable<ContentStream> streams = req.getContentStreams();
        if (streams != null) {
          Iterator<ContentStream> iter = streams.iterator();
          if (iter.hasNext()) {
            reader = iter.next().getReader();
          }
          if (iter.hasNext()) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "MoreLikeThis does not support multiple ContentStreams");
          }
        }
      }
      int start = params.getInt(CommonParams.START, CommonParams.START_DEFAULT);
      int rows = params.getInt(CommonParams.ROWS, CommonParams.ROWS_DEFAULT);
      // --------------------------------------------------------------------------------
      if (reader != null) {
        mltDocs = mlt.getMoreLikeThis(reader, start, rows, filters, interesting, flags);
      } else if (q != null) {
        // Matching options
        boolean includeMatch = params.getBool(MoreLikeThisParams.MATCH_INCLUDE, true);
        int matchOffset = params.getInt(MoreLikeThisParams.MATCH_OFFSET, 0);
        // Find the base match
        DocList match = searcher.getDocList(query, null, null, matchOffset, 1, // only get the first one...
            flags);
        if (includeMatch) {
          rsp.add("match", match);
        }
        // This is an iterator, but we only handle the first match
        DocIterator iterator = match.iterator();
        if (iterator.hasNext()) {
          // do a MoreLikeThis query for each document in results
          int id = iterator.nextDoc();
          mltDocs = mlt.getMoreLikeThis(id, start, rows, filters, interesting, flags);
        }
      } else {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "MoreLikeThis requires either a query (?q=) or text to find similar documents.");
      }
    } finally {
      if (reader != null) {
        reader.close();
      }
    }
    if (mltDocs == null) {
      // avoid NPE
      mltDocs = new DocListAndSet();
    }
    rsp.addResponse(mltDocs.docList);
    if (interesting != null) {
      if (termStyle == TermStyle.DETAILS) {
        NamedList<Float> it = new NamedList<>();
        for (InterestingTerm t : interesting) {
          it.add(t.term.toString(), t.boost);
        }
        rsp.add("interestingTerms", it);
      } else {
        List<String> it = new ArrayList<>(interesting.size());
        for (InterestingTerm t : interesting) {
          it.add(t.term.text());
        }
        rsp.add("interestingTerms", it);
      }
    }
    // maybe facet the results
    if (params.getBool(FacetParams.FACET, false)) {
      if (mltDocs.docSet == null) {
        rsp.add("facet_counts", null);
      } else {
        SimpleFacets f = new SimpleFacets(req, mltDocs.docSet, params);
        rsp.add("facet_counts", FacetComponent.getFacetCounts(f));
      }
    }
    boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);
    boolean dbgQuery = false, dbgResults = false;
    if (!dbg) {
      // if it's true, we are doing everything anyway.
      String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
      if (dbgParams != null) {
        for (String dbgParam : dbgParams) {
          if (dbgParam.equals(CommonParams.QUERY)) {
            dbgQuery = true;
          } else if (dbgParam.equals(CommonParams.RESULTS)) {
            dbgResults = true;
          }
        }
      }
    } else {
      dbgQuery = true;
      dbgResults = true;
    }
    // Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
    if (dbg) {
      try {
        NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, mlt.getRawMLTQuery(), mltDocs.docList, dbgQuery, dbgResults);
        if (null != dbgInfo) {
          if (null != filters) {
            dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
            List<String> fqs = new ArrayList<>(filters.size());
            for (Query fq : filters) {
              fqs.add(QueryParsing.toString(fq, req.getSchema()));
            }
            dbgInfo.add("parsed_filter_queries", fqs);
          }
          rsp.add("debug", dbgInfo);
        }
      } catch (Exception e) {
        SolrException.log(log, "Exception during debug", e);
        rsp.add("exception_during_debug", SolrException.toStr(e));
      }
    }
  } catch (ExitableDirectoryReader.ExitingReaderException ex) {
    log.warn("Query: " + req.getParamString() + "; " + ex.getMessage());
  } finally {
    SolrQueryTimeoutImpl.reset();
  }
}
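By way of illustration, this handler is normally reached through a request handler registered at a path such as /mlt and is driven entirely by request parameters (q or a posted content stream, plus the mlt.* options). A hedged SolrJ sketch follows; the core name, document id, and field list are assumptions, and the /mlt path must already be configured in solrconfig.xml.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.params.MoreLikeThisParams;

public class MltHandlerExample {
  public static void main(String[] args) throws Exception {
    // Assumes a core named "techproducts" with an /mlt handler registered in solrconfig.xml.
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
      SolrQuery q = new SolrQuery("id:SP2514N");                // the "base match" document, found via ?q=
      q.setRequestHandler("/mlt");
      q.set(MoreLikeThisParams.SIMILARITY_FIELDS, "manu,cat");  // mlt.fl: fields used to build the MLT query
      q.set(MoreLikeThisParams.INTERESTING_TERMS, "details");   // return interesting terms with boosts
      q.set(MoreLikeThisParams.MATCH_INCLUDE, true);            // echo the matched document under "match"
      q.setRows(5);
      QueryResponse rsp = client.query(q);
      System.out.println(rsp.getResponse().get("interestingTerms"));
    }
  }
}

With mlt.interestingTerms=details, the response carries a named list of terms with boosts, mirroring the TermStyle.DETAILS branch above; with "list" it would return plain term strings instead.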
Use of org.apache.solr.search.QParser in project lucene-solr by apache.
The class DateRangeField, method getRangeQuery.
@Override
public Query getRangeQuery(QParser parser, SchemaField field, String startStr, String endStr, boolean minInclusive, boolean maxInclusive) {
  if (parser == null) {
    // parser is null when invoked by SimpleFacets, but getQueryFromSpatialArgs expects localParams
    final SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo();
    parser = new QParser("", null, requestInfo.getReq().getParams(), requestInfo.getReq()) {
      @Override
      public Query parse() throws SyntaxError {
        throw new IllegalStateException();
      }
    };
  }
  Calendar startCal;
  if (startStr == null) {
    startCal = tree.newCal();
  } else {
    startCal = parseCalendar(startStr);
    if (!minInclusive) {
      startCal.add(Calendar.MILLISECOND, 1);
    }
  }
  Calendar endCal;
  if (endStr == null) {
    endCal = tree.newCal();
  } else {
    endCal = parseCalendar(endStr);
    if (!maxInclusive) {
      endCal.add(Calendar.MILLISECOND, -1);
    }
  }
  Shape shape = tree.toRangeShape(tree.toShape(startCal), tree.toShape(endCal));
  SpatialArgs spatialArgs = new SpatialArgs(SpatialOperation.Intersects, shape);
  return getQueryFromSpatialArgs(parser, field, spatialArgs);
}
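As a usage sketch, a standard range query over a field declared with solr.DateRangeField is what eventually reaches getRangeQuery above; open ends correspond to the null startStr/endStr branches. The field name and query values below are assumptions for illustration only.

import org.apache.solr.client.solrj.SolrQuery;

public class DateRangeQueryExample {
  public static void main(String[] args) {
    // Assumes the schema declares a field "dateRange" of class "solr.DateRangeField".
    SolrQuery q = new SolrQuery("*:*");
    // A truncated date such as 2014-05 is treated as the whole month by this field type.
    q.addFilterQuery("dateRange:[2014-05 TO 2014-12-01]");
    // An open end ("*") maps to the null startStr/endStr handling above.
    q.addFilterQuery("dateRange:[* TO 2015]");
    System.out.println(q);
  }
}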
Use of org.apache.solr.search.QParser in project lucene-solr by apache.
The class ScoreJoinQParserPlugin, method createParser.
@Override
public QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
  return new QParser(qstr, localParams, params, req) {

    @Override
    public Query parse() throws SyntaxError {
      final String fromField = localParams.get("from");
      final String fromIndex = localParams.get("fromIndex");
      final String toField = localParams.get("to");
      final ScoreMode scoreMode = ScoreModeParser.parse(getParam(SCORE));
      final String v = localParams.get(CommonParams.VALUE);
      final Query q = createQuery(fromField, v, fromIndex, toField, scoreMode,
          CommonParams.TRUE.equals(localParams.get("TESTenforceSameCoreAsAnotherOne")));
      return q;
    }

    private Query createQuery(final String fromField, final String fromQueryStr, String fromIndex, final String toField,
        final ScoreMode scoreMode, boolean byPassShortCircutCheck) throws SyntaxError {
      final String myCore = req.getCore().getCoreDescriptor().getName();
      if (fromIndex != null && (!fromIndex.equals(myCore) || byPassShortCircutCheck)) {
        CoreContainer container = req.getCore().getCoreContainer();
        final String coreName = getCoreName(fromIndex, container);
        final SolrCore fromCore = container.getCore(coreName);
        RefCounted<SolrIndexSearcher> fromHolder = null;
        if (fromCore == null) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cross-core join: no such core " + coreName);
        }
        long fromCoreOpenTime = 0;
        LocalSolrQueryRequest otherReq = new LocalSolrQueryRequest(fromCore, params);
        try {
          QParser fromQueryParser = QParser.getParser(fromQueryStr, otherReq);
          Query fromQuery = fromQueryParser.getQuery();
          fromHolder = fromCore.getRegisteredSearcher();
          if (fromHolder != null) {
            fromCoreOpenTime = fromHolder.get().getOpenNanoTime();
          }
          return new OtherCoreJoinQuery(fromQuery, fromField, coreName, fromCoreOpenTime, scoreMode, toField);
        } finally {
          otherReq.close();
          fromCore.close();
          if (fromHolder != null)
            fromHolder.decref();
        }
      } else {
        QParser fromQueryParser = subQuery(fromQueryStr, null);
        final Query fromQuery = fromQueryParser.getQuery();
        return new SameCoreJoinQuery(fromQuery, fromField, toField, scoreMode);
      }
    }
  };
}
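For reference, a parser built by this plugin is obtained the same way as any other QParser; when the score local param is present, the standard {!join} syntax typically routes to this scored implementation. The snippet below sketches a server-side caller; the field names (manu_id_s, id) and the child query are illustrative assumptions.

import org.apache.lucene.search.Query;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.QParser;
import org.apache.solr.search.SyntaxError;

public class ScoreJoinUsage {
  // Builds a join query whose "to"-side documents are scored by the max score of matching "from" documents.
  public static Query scoredJoin(SolrQueryRequest req) throws SyntaxError {
    QParser parser = QParser.getParser("{!join from=manu_id_s to=id score=max}name:ipod", req);
    return parser.getQuery();
  }
}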
Use of org.apache.solr.search.QParser in project lucene-solr by apache.
The class BlockJoinParentQParser, method parse.
@Override
public Query parse() throws SyntaxError {
  String filter = localParams.get(getParentFilterLocalParamName());
  String scoreMode = localParams.get("score", ScoreMode.None.name());
  QParser parentParser = subQuery(filter, null);
  Query parentQ = parentParser.getQuery();
  String queryText = localParams.get(QueryParsing.V);
  // there is no child query, return parent filter from cache
  if (queryText == null || queryText.length() == 0) {
    SolrConstantScoreQuery wrapped = new SolrConstantScoreQuery(getFilter(parentQ));
    wrapped.setCache(false);
    return wrapped;
  }
  QParser childrenParser = subQuery(queryText, null);
  Query childrenQuery = childrenParser.getQuery();
  return createQuery(parentQ, childrenQuery, scoreMode);
}
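A hedged client-side sketch of the syntax this parser handles, assuming an index of nested documents whose parent documents carry content_type:parentDocument; the parent filter arrives through the "which" local param, and the text after the local params becomes the child query.

import org.apache.solr.client.solrj.SolrQuery;

public class BlockJoinParentExample {
  public static void main(String[] args) {
    // Field name and values are assumptions for illustration.
    SolrQuery q = new SolrQuery();
    // The "which" local param is the parent filter parsed via subQuery(filter, null) above;
    // everything after the local params is parsed as the child query.
    q.setQuery("{!parent which=\"content_type:parentDocument\"}comments:SolrCloud");
    // With no child query text at all, parse() returns just the (non-cached, constant-score) parent filter:
    // q.setQuery("{!parent which=\"content_type:parentDocument\"}");
    System.out.println(q);
  }
}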