Use of org.apache.solr.common.util.ContentStream in the lucene-solr project (Apache):
the handleRequestBody method of the MoreLikeThisHandler class.
// Handles a MoreLikeThis request. The source of similarity is either the top
// document matched by ?q=... or raw text posted as a single ContentStream.
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
SolrParams params = req.getParams();
// Arm the per-request query timeout when timeAllowed > 0; the
// ExitingReaderException it may trigger is caught near the bottom.
long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);
if (timeAllowed > 0) {
SolrQueryTimeoutImpl.set(timeAllowed);
}
try {
// Set field flags
ReturnFields returnFields = new SolrReturnFields(req);
rsp.setReturnFields(returnFields);
int flags = 0;
if (returnFields.wantsScore()) {
flags |= SolrIndexSearcher.GET_SCORES;
}
String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
String q = params.get(CommonParams.Q);
Query query = null;
SortSpec sortSpec = null;
List<Query> filters = null;
// Parse the main query (if any) and all fq filters; a syntax error in
// either is surfaced to the client as a 400.
try {
if (q != null) {
QParser parser = QParser.getParser(q, defType, req);
query = parser.getQuery();
// NOTE(review): sortSpec is assigned but never read later in this
// method — confirm whether it is intentionally unused.
sortSpec = parser.getSortSpec(true);
}
String[] fqs = req.getParams().getParams(CommonParams.FQ);
if (fqs != null && fqs.length != 0) {
filters = new ArrayList<>();
for (String fq : fqs) {
if (fq != null && fq.trim().length() != 0) {
QParser fqp = QParser.getParser(fq, req);
filters.add(fqp.getQuery());
}
}
}
} catch (SyntaxError e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
}
SolrIndexSearcher searcher = req.getSearcher();
MoreLikeThisHelper mlt = new MoreLikeThisHelper(params, searcher);
// Hold on to the interesting terms if relevant
TermStyle termStyle = TermStyle.get(params.get(MoreLikeThisParams.INTERESTING_TERMS));
List<InterestingTerm> interesting = (termStyle == TermStyle.NONE) ? null : new ArrayList<>(mlt.mlt.getMaxQueryTerms());
DocListAndSet mltDocs = null;
// Parse Required Params
// This will either have a single Reader or valid query
Reader reader = null;
try {
// No usable q: fall back to posted text, which must arrive as exactly
// one ContentStream (a second stream is rejected with a 400).
if (q == null || q.trim().length() < 1) {
Iterable<ContentStream> streams = req.getContentStreams();
if (streams != null) {
Iterator<ContentStream> iter = streams.iterator();
if (iter.hasNext()) {
reader = iter.next().getReader();
}
if (iter.hasNext()) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "MoreLikeThis does not support multiple ContentStreams");
}
}
}
int start = params.getInt(CommonParams.START, CommonParams.START_DEFAULT);
int rows = params.getInt(CommonParams.ROWS, CommonParams.ROWS_DEFAULT);
// --------------------------------------------------------------------------------
if (reader != null) {
// Similarity computed directly from the posted text.
mltDocs = mlt.getMoreLikeThis(reader, start, rows, filters, interesting, flags);
} else if (q != null) {
// Matching options
boolean includeMatch = params.getBool(MoreLikeThisParams.MATCH_INCLUDE, true);
int matchOffset = params.getInt(MoreLikeThisParams.MATCH_OFFSET, 0);
// Find the base match
DocList match = searcher.getDocList(query, null, null, matchOffset, 1, // only get the first one...
flags);
if (includeMatch) {
rsp.add("match", match);
}
// This is an iterator, but we only handle the first match
DocIterator iterator = match.iterator();
if (iterator.hasNext()) {
// do a MoreLikeThis query for each document in results
int id = iterator.nextDoc();
mltDocs = mlt.getMoreLikeThis(id, start, rows, filters, interesting, flags);
}
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "MoreLikeThis requires either a query (?q=) or text to find similar documents.");
}
} finally {
// Always release the posted-text reader, even on error.
if (reader != null) {
reader.close();
}
}
if (mltDocs == null) {
// avoid NPE
mltDocs = new DocListAndSet();
}
rsp.addResponse(mltDocs.docList);
// Report the "interesting" MLT terms: with boosts (DETAILS) or as a bare
// list of term texts (any other non-NONE style).
if (interesting != null) {
if (termStyle == TermStyle.DETAILS) {
NamedList<Float> it = new NamedList<>();
for (InterestingTerm t : interesting) {
it.add(t.term.toString(), t.boost);
}
rsp.add("interestingTerms", it);
} else {
List<String> it = new ArrayList<>(interesting.size());
for (InterestingTerm t : interesting) {
it.add(t.term.text());
}
rsp.add("interestingTerms", it);
}
}
// maybe facet the results
if (params.getBool(FacetParams.FACET, false)) {
if (mltDocs.docSet == null) {
rsp.add("facet_counts", null);
} else {
SimpleFacets f = new SimpleFacets(req, mltDocs.docSet, params);
rsp.add("facet_counts", FacetComponent.getFacetCounts(f));
}
}
// Debug section: debugQuery=true enables everything; otherwise individual
// debug=query / debug=results params toggle each part.
boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);
boolean dbgQuery = false, dbgResults = false;
if (dbg == false) {
//if it's true, we are doing everything anyway.
String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
if (dbgParams != null) {
for (String dbgParam : dbgParams) {
if (dbgParam.equals(CommonParams.QUERY)) {
dbgQuery = true;
} else if (dbgParam.equals(CommonParams.RESULTS)) {
dbgResults = true;
}
}
}
} else {
dbgQuery = true;
dbgResults = true;
}
// Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
if (dbg == true) {
try {
NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, mlt.getRawMLTQuery(), mltDocs.docList, dbgQuery, dbgResults);
if (null != dbgInfo) {
if (null != filters) {
dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
List<String> fqs = new ArrayList<>(filters.size());
for (Query fq : filters) {
fqs.add(QueryParsing.toString(fq, req.getSchema()));
}
dbgInfo.add("parsed_filter_queries", fqs);
}
rsp.add("debug", dbgInfo);
}
} catch (Exception e) {
// Debug failures must not fail the whole request; report them inline.
SolrException.log(log, "Exception during debug", e);
rsp.add("exception_during_debug", SolrException.toStr(e));
}
}
} catch (ExitableDirectoryReader.ExitingReaderException ex) {
// timeAllowed expired: warn and return whatever was added so far.
log.warn("Query: " + req.getParamString() + "; " + ex.getMessage());
} finally {
// Disarm the timeout regardless of outcome.
SolrQueryTimeoutImpl.reset();
}
}
Use of org.apache.solr.common.util.ContentStream in the lucene-solr project (Apache):
the handleRequestBody method of the ContentStreamHandlerBase class.
/**
 * Routes each posted content stream through the configured update-processor
 * chain via the handler's loader. A request with no body is only legal when
 * it asks for a commit or a rollback via parameters; otherwise it is a 400.
 * The processor is always finished and closed, even on failure.
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    final SolrParams solrParams = req.getParams();
    final UpdateRequestProcessorChain chain = req.getCore().getUpdateProcessorChain(solrParams);
    final UpdateRequestProcessor chainHead = chain.createProcessor(req, rsp);
    try {
        final ContentStreamLoader loader = newLoader(req, chainHead);
        final Iterable<ContentStream> streams = req.getContentStreams();
        if (streams != null) {
            for (final ContentStream stream : streams) {
                loader.load(req, rsp, stream, chainHead);
            }
            // Perhaps commit from the parameters
            RequestHandlerUtils.handleCommit(req, chainHead, solrParams, false);
            RequestHandlerUtils.handleRollback(req, chainHead, solrParams, false);
        } else {
            // No body: a commit or (failing that) a rollback parameter must
            // have been supplied — same short-circuit order as before.
            boolean handled = RequestHandlerUtils.handleCommit(req, chainHead, solrParams, false);
            if (!handled) {
                handled = RequestHandlerUtils.handleRollback(req, chainHead, solrParams, false);
            }
            if (!handled) {
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "missing content stream");
            }
        }
    } finally {
        // finish the request
        try {
            chainHead.finish();
        } finally {
            chainHead.close();
        }
    }
}
Use of org.apache.solr.common.util.ContentStream in the SearchServices project (Alfresco):
the areq method of the AbstractAlfrescoSolrTests class.
/**
 * Builds a Solr request against the test core, defaulting the response writer
 * to XML when none is set and optionally attaching a JSON payload as a single
 * content stream.
 *
 * @param params request parameters; {@code wt=xml} is added if absent
 * @param json   optional JSON body; ignored when {@code null}
 * @return the assembled {@code SolrServletRequest}
 */
public SolrServletRequest areq(ModifiableSolrParams params, String json) {
    if (params.get("wt") == null) {
        params.add("wt", "xml");
    }
    final SolrServletRequest request = new SolrServletRequest(h.getCore(), null);
    request.setParams(params);
    if (json != null) {
        final ArrayList<ContentStream> body = new ArrayList<ContentStream>();
        body.add(new ContentStreamBase.StringStream(json));
        request.setContentStreams(body);
    }
    return request;
}
Use of org.apache.solr.common.util.ContentStream in the SearchServices project (Alfresco):
the getSearchParameters method of the AbstractQParser class.
// Builds Alfresco SearchParameters for this request. The JSON body (parsed
// once and cached in the request context under ALFRESCO_JSON) supplies the
// query, locales, templates, attribute lists and defaults; special marker
// values of the parser's query string select the authority (ACL) filter,
// the tenant filter, or the rerank-query path. Returns the parameters plus
// a flag indicating whether the produced query is a filter.
protected Pair<SearchParameters, Boolean> getSearchParameters() {
SearchParameters searchParameters = new SearchParameters();
Boolean isFilter = Boolean.FALSE;
Iterable<ContentStream> streams = req.getContentStreams();
JSONObject json = (JSONObject) req.getContext().get(ALFRESCO_JSON);
if (json == null) {
if (streams != null) {
try {
Reader reader = null;
// NOTE(review): only the LAST stream's reader is kept; earlier readers
// are neither used nor closed, and the kept reader is never closed
// either — confirm whether multiple streams can actually occur here.
for (ContentStream stream : streams) {
reader = new BufferedReader(new InputStreamReader(stream.getStream(), "UTF-8"));
}
// TODO - replace with streaming-based solution e.g. SimpleJSON ContentHandler
if (reader != null) {
json = new JSONObject(new JSONTokener(reader));
// Cache the parsed body so later parsers on this request reuse it.
req.getContext().put(ALFRESCO_JSON, json);
}
} catch (JSONException e) {
// This is expected when there is no json element to the request
} catch (IOException e) {
throw new AlfrescoRuntimeException("IO Error parsing query parameters", e);
}
}
}
if (json != null) {
try {
if (getString() != null) {
if (getString().equals(AUTHORITY_FILTER_FROM_JSON)) {
// Build the authority (ACL) filter from the "tenants" and
// "authorities" arrays in the JSON body.
isFilter = Boolean.TRUE;
ArrayList<String> tenantList = new ArrayList<String>(1);
JSONArray tenants = json.getJSONArray("tenants");
for (int i = 0; i < tenants.length(); i++) {
String tenantString = tenants.getString(i);
tenantList.add(tenantString);
}
ArrayList<String> authorityList = new ArrayList<String>(1);
JSONArray authorities = json.getJSONArray("authorities");
for (int i = 0; i < authorities.length(); i++) {
String authorityString = authorities.getString(i);
authorityList.add(authorityString);
}
// separator == 0 means no single-character separator was chosen, so
// per-authority AUTHORITY/DENIED clauses are emitted; otherwise the
// compact AUTHSET/DENYSET separated-list form is used.
char separator = getSeparator(authorityList);
StringBuilder authQuery = new StringBuilder();
StringBuilder denyQuery = new StringBuilder();
for (String tenant : tenantList) {
for (String authority : authorityList) {
if (separator == 0) {
if (authQuery.length() > 0) {
authQuery.append(" ");
denyQuery.append(" ");
}
switch(AuthorityType.getAuthorityType(authority)) {
case USER:
// Users are emitted without a tenant suffix.
authQuery.append("|AUTHORITY:\"").append(authority).append("\"");
denyQuery.append("|DENIED:\"").append(authority).append("\"");
break;
case GROUP:
case EVERYONE:
case GUEST:
if (tenant.length() == 0) {
// Default tenant matches 4.0
authQuery.append("|AUTHORITY:\"").append(authority).append("\"");
denyQuery.append("|DENIED:\"").append(authority).append("\"");
} else {
authQuery.append("|AUTHORITY:\"").append(authority).append("@").append(tenant).append("\"");
denyQuery.append("|DENIED:\"").append(authority).append("@").append(tenant).append("\"");
}
break;
default:
authQuery.append("|AUTHORITY:\"").append(authority).append("\"");
denyQuery.append("|DENIED:\"").append(authority).append("\"");
break;
}
} else {
if (authQuery.length() == 0) {
// 'authset' is presumably a field of the parser recording that
// the compact set form was used — TODO confirm where it is read.
authset = true;
authQuery.append("|AUTHSET:\"");
denyQuery.append("|DENYSET:\"");
}
switch(AuthorityType.getAuthorityType(authority)) {
case USER:
authQuery.append(separator).append(authority);
denyQuery.append(separator).append(authority);
break;
case GROUP:
case EVERYONE:
case GUEST:
if (tenant.length() == 0) {
// Default tenant matches 4.0
authQuery.append(separator).append(authority);
denyQuery.append(separator).append(authority);
} else {
authQuery.append(separator).append(authority).append("@").append(tenant);
denyQuery.append(separator).append(authority).append("@").append(tenant);
}
break;
default:
authQuery.append(separator).append(authority);
denyQuery.append(separator).append(authority);
break;
}
}
}
}
if (separator != 0) {
// Close the quoted AUTHSET/DENYSET term.
authQuery.append("\"");
denyQuery.append("\"");
}
if (authQuery.length() > 0) {
// Default to true for safety reasons.
final boolean anyDenyDenies = json.optBoolean("anyDenyDenies", true);
if (anyDenyDenies) {
// Wrap the allow clause and subtract the deny clause.
authQuery.insert(0, "(").append(") AND NOT (").append(denyQuery).append(")");
// Record that the clause has been added.
// We only ever set this to true for solr4+
req.getContext().put("processedDenies", Boolean.TRUE);
}
searchParameters.setQuery(authQuery.toString());
}
} else if (getString().equals(TENANT_FILTER_FROM_JSON)) {
// Build the tenant filter from the "tenants" array.
isFilter = Boolean.TRUE;
ArrayList<String> tenantList = new ArrayList<String>(1);
JSONArray tenants = json.getJSONArray("tenants");
for (int i = 0; i < tenants.length(); i++) {
String tenantString = tenants.getString(i);
tenantList.add(tenantString);
}
StringBuilder tenantQuery = new StringBuilder();
for (String tenant : tenantList) {
if (tenantQuery.length() > 0) {
tenantQuery.append(" ");
}
if (tenant.length() > 0) {
tenantQuery.append("|TENANT:\"").append(tenant).append("\"");
} else {
// TODO: Need to check for the default tenant or no tenant (4.0) or we force a reindex
// requirement later ...
// Better to add default tenant to the 4.0 index
tenantQuery.append("|TENANT:\"").append("_DEFAULT_").append("\"");
// tenantQuery.append(" |(+ISNODE:T -TENANT:*)");
}
}
searchParameters.setQuery(tenantQuery.toString());
} else if (getString().equals(RERANK_QUERY_FROM_CONTEXT)) {
// Rerank path: the query is the spellcheck term itself.
String searchTerm = getParam("spellcheck.q");
searchParameters.setQuery(searchTerm);
}
} else {
// No query string on the parser: take the query from the JSON body.
String query = json.getString("query");
if (query != null) {
searchParameters.setQuery(query);
}
}
// Common JSON-driven settings: locales, query templates, attribute lists,
// FTS operators, namespace and query consistency.
JSONArray locales = json.getJSONArray("locales");
for (int i = 0; i < locales.length(); i++) {
String localeString = locales.getString(i);
Locale locale = DefaultTypeConverter.INSTANCE.convert(Locale.class, localeString);
searchParameters.addLocale(locale);
}
JSONArray templates = json.getJSONArray("templates");
for (int i = 0; i < templates.length(); i++) {
JSONObject template = templates.getJSONObject(i);
String name = template.getString("name");
String queryTemplate = template.getString("template");
searchParameters.addQueryTemplate(name, queryTemplate);
}
JSONArray allAttributes = json.getJSONArray("allAttributes");
for (int i = 0; i < allAttributes.length(); i++) {
String allAttribute = allAttributes.getString(i);
searchParameters.addAllAttribute(allAttribute);
}
searchParameters.setDefaultFTSOperator(Operator.valueOf(json.getString("defaultFTSOperator")));
searchParameters.setDefaultFTSFieldConnective(Operator.valueOf(json.getString("defaultFTSFieldOperator")));
// "mlAnalaysisMode" (sic) is the wire key — do not "fix" the spelling.
if (json.has("mlAnalaysisMode")) {
searchParameters.setMlAnalaysisMode(MLAnalysisMode.valueOf(json.getString("mlAnalaysisMode")));
}
searchParameters.setNamespace(json.getString("defaultNamespace"));
JSONArray textAttributes = json.getJSONArray("textAttributes");
for (int i = 0; i < textAttributes.length(); i++) {
String textAttribute = textAttributes.getString(i);
searchParameters.addTextAttribute(textAttribute);
}
searchParameters.setQueryConsistency(QueryConsistency.valueOf(json.getString("queryConsistency")));
} catch (JSONException e) {
// This is expected when there is no json element to the request
}
}
if (json != null) {
if (log.isDebugEnabled()) {
log.debug(json.toString());
}
}
// Fallbacks when the JSON did not provide a query or locales.
if (searchParameters.getQuery() == null) {
searchParameters.setQuery(getString());
}
if (searchParameters.getLocales().size() == 0) {
searchParameters.addLocale(I18NUtil.getLocale());
}
String defaultField = getParam(CommonParams.DF);
if (defaultField != null) {
searchParameters.setDefaultFieldName(defaultField);
}
// Optionally detect the query language from the spellcheck term and add
// its language-only locale when not already present.
if (autoDetectQueryLocale) {
String searchTerm = getParam("spellcheck.q");
if (searchTerm != null) {
searchParameters.setSearchTerm(searchTerm);
List<DetectedLanguage> detetcted = detectLanguage(searchTerm);
if ((detetcted != null) && (detetcted.size() > 0)) {
Locale detectedLocale = Locale.forLanguageTag(detetcted.get(0).getLangCode());
if (localeIsNotIncluded(searchParameters, detectedLocale)) {
searchParameters.addLocale(Locale.forLanguageTag(detectedLocale.getLanguage()));
}
}
}
}
if (fixedQueryLocales.size() > 0) {
for (String locale : fixedQueryLocales) {
searchParameters.addLocale(Locale.forLanguageTag(locale));
}
}
// searchParameters.setMlAnalaysisMode(getMLAnalysisMode());
// NOTE(review): this unconditionally overrides any "defaultNamespace"
// set from the JSON above — confirm the override is intentional.
searchParameters.setNamespace(NamespaceService.CONTENT_MODEL_1_0_URI);
return new Pair<SearchParameters, Boolean>(searchParameters, isFilter);
}
Use of org.apache.solr.common.util.ContentStream in the storm project (Apache):
the createtSolrRequest method of the SolrJsonMapper class.
/**
 * Wraps the given JSON document in a content stream and attaches it to an
 * update request targeting the configured JSON update URL.
 * (Method name kept as-is — it is referenced elsewhere in this class.)
 *
 * @param json the JSON payload to send
 * @return the populated update request
 */
private SolrRequest createtSolrRequest(String json) {
    final ContentStreamUpdateRequest updateRequest = new ContentStreamUpdateRequest(jsonUpdateUrl);
    updateRequest.addContentStream(new ContentStreamBase.StringStream(json, CONTENT_TYPE));
    if (logger.isDebugEnabled()) {
        logger.debug("Request generated with JSON: " + json);
    }
    return updateRequest;
}
Aggregations