Use of org.apache.solr.client.solrj.SolrServerException in project atlas by apache.
From the class Solr5Index, method query:
@Override
public Iterable<RawQuery.Result<String>> query(RawQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException {
    List<RawQuery.Result<String>> result;
    String collection = query.getStore();
    String keyIdField = getKeyFieldId(collection);
    SolrQuery solrQuery = new SolrQuery(query.getQuery())
            .addField(keyIdField)
            .setIncludeScore(true)
            .setStart(query.getOffset())
            .setRows(query.hasLimit() ? query.getLimit() : maxResults);
    try {
        QueryResponse response = solrClient.query(collection, solrQuery);
        if (logger.isDebugEnabled()) {
            logger.debug("Executed query [{}] in {} ms", query.getQuery(), response.getElapsedTime());
        }
        int totalHits = response.getResults().size();
        if (!query.hasLimit() && totalHits >= maxResults) {
            logger.warn("Query result set truncated to first [{}] elements for query: {}", maxResults, query);
        }
        result = new ArrayList<>(totalHits);
        for (SolrDocument hit : response.getResults()) {
            double score = Double.parseDouble(hit.getFieldValue("score").toString());
            result.add(new RawQuery.Result<>(hit.getFieldValue(keyIdField).toString(), score));
        }
    } catch (IOException e) {
        logger.error("Query did not complete : ", e);
        throw new PermanentBackendException(e);
    } catch (SolrServerException e) {
        logger.error("Unable to query Solr index.", e);
        throw new PermanentBackendException(e);
    }
    return result;
}
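Both catch blocks above map a failed SolrJ round trip, whether it surfaces as an IOException or a SolrServerException, to a PermanentBackendException. Below is a rough, self-contained sketch of the underlying SolrJ call, not taken from the Atlas source: the Solr URL, collection name, and query string are placeholders, and it assumes a SolrJ 6+ HttpSolrClient.Builder (on SolrJ 5 the client would instead be constructed with new HttpSolrClient(url)).

// Hedged sketch: a plain SolrJ query with the same exception handling shape as the method above.
import java.io.IOException;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;

public class RawQuerySketch {
    public static void main(String[] args) throws Exception {
        try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
            SolrQuery solrQuery = new SolrQuery("name:hive_table")   // placeholder query string
                    .addField("id")
                    .setIncludeScore(true)
                    .setStart(0)
                    .setRows(10);
            try {
                QueryResponse response = client.query("vertex_index", solrQuery);   // placeholder collection
                for (SolrDocument hit : response.getResults()) {
                    System.out.println(hit.getFieldValue("id") + " -> " + hit.getFieldValue("score"));
                }
            } catch (SolrServerException | IOException e) {
                // both checked exceptions signal a failed round trip; the Atlas code above
                // maps either one to a PermanentBackendException
                throw new RuntimeException(e);
            }
        }
    }
}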
Use of org.apache.solr.client.solrj.SolrServerException in project atlas by apache.
From the class Solr5Index, method createCollectionIfNotExists:
private static void createCollectionIfNotExists(CloudSolrClient client, Configuration config, String collection) throws IOException, SolrServerException, KeeperException, InterruptedException {
    if (!checkIfCollectionExists(client, collection)) {
        Integer numShards = config.get(NUM_SHARDS);
        Integer maxShardsPerNode = config.get(MAX_SHARDS_PER_NODE);
        Integer replicationFactor = config.get(REPLICATION_FACTOR);
        CollectionAdminRequest.Create createRequest = new CollectionAdminRequest.Create();
        createRequest.setConfigName(collection);
        createRequest.setCollectionName(collection);
        createRequest.setNumShards(numShards);
        createRequest.setMaxShardsPerNode(maxShardsPerNode);
        createRequest.setReplicationFactor(replicationFactor);
        CollectionAdminResponse createResponse = createRequest.process(client);
        if (createResponse.isSuccess()) {
            logger.trace("Collection {} successfully created.", collection);
        } else {
            throw new SolrServerException(Joiner.on("\n").join(createResponse.getErrorMessages()));
        }
    }
    waitForRecoveriesToFinish(client, collection);
}
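Because the method declares IOException, SolrServerException, KeeperException, and InterruptedException, every caller has to handle or propagate all four. A hedged sketch of what a call site inside the same class could look like follows; the initializeCollection method name, the collection name, and the mapping to PermanentBackendException are illustrative and not taken from the Atlas source.

// Hypothetical call site within the same class; names and error mapping are assumptions.
private void initializeCollection(CloudSolrClient solrClient, Configuration configuration) throws BackendException {
    try {
        createCollectionIfNotExists(solrClient, configuration, "vertex_index");   // placeholder collection
    } catch (IOException | SolrServerException | KeeperException e) {
        // collection creation or ZooKeeper access failed; surface it as a backend error
        throw new PermanentBackendException(e);
    } catch (InterruptedException e) {
        // restore the interrupt flag before giving up
        Thread.currentThread().interrupt();
        throw new PermanentBackendException(e);
    }
}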
Use of org.apache.solr.client.solrj.SolrServerException in project SearchServices by Alfresco.
From the class AlfrescoSearchHandler, method handleRequestBody:
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    readJsonIntoContent(req);
    List<SearchComponent> components = getComponents();
    ResponseBuilder rb = new ResponseBuilder(req, rsp, components);
    if (rb.requestInfo != null) {
        rb.requestInfo.setResponseBuilder(rb);
    }
    boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);
    rb.setDebug(dbg);
    if (dbg == false) {
        // if it's true, we are doing everything anyway.
        SolrPluginUtils.getDebugInterests(req.getParams().getParams(CommonParams.DEBUG), rb);
    }
    final RTimerTree timer = rb.isDebug() ? req.getRequestTimer() : null;
    // creates a ShardHandler object only if it's needed
    final ShardHandler shardHandler1 = getAndPrepShardHandler(req, rb);
    if (timer == null) {
        for (SearchComponent c : components) {
            c.prepare(rb);
        }
    } else {
        // debugging prepare phase
        RTimerTree subt = timer.sub("prepare");
        for (SearchComponent c : components) {
            rb.setTimer(subt.sub(c.getName()));
            c.prepare(rb);
            rb.getTimer().stop();
        }
        subt.stop();
    }
    if (!rb.isDistrib) {
        // a normal non-distributed request
        long timeAllowed = req.getParams().getLong(CommonParams.TIME_ALLOWED, -1L);
        if (timeAllowed > 0L) {
            SolrQueryTimeoutImpl.set(timeAllowed);
        }
        try {
            // the semantics of debugging vs not debugging are different enough that
            // it makes sense to have two control loops
            if (!rb.isDebug()) {
                // Process
                for (SearchComponent c : components) {
                    c.process(rb);
                }
            } else {
                // Process
                RTimerTree subt = timer.sub("process");
                for (SearchComponent c : components) {
                    rb.setTimer(subt.sub(c.getName()));
                    c.process(rb);
                    rb.getTimer().stop();
                }
                subt.stop();
                // add the timing info
                if (rb.isDebugTimings()) {
                    rb.addDebugInfo("timing", timer.asNamedList());
                }
            }
        } catch (ExitableDirectoryReader.ExitingReaderException ex) {
            log.warn("Query: " + req.getParamString() + "; " + ex.getMessage());
            SolrDocumentList r = (SolrDocumentList) rb.rsp.getValues().get("response");
            if (r == null) {
                r = new SolrDocumentList();
            }
            r.setNumFound(0);
            rb.rsp.add("response", r);
            if (rb.isDebug()) {
                NamedList debug = new NamedList();
                debug.add("explain", new NamedList());
                rb.rsp.add("debug", debug);
            }
            rb.rsp.getResponseHeader().add("partialResults", Boolean.TRUE);
        } finally {
            SolrQueryTimeoutImpl.reset();
        }
        if (req.getParams().getBool("alfresco.getSolrDocumentList", false)) {
            NamedList values = rsp.getValues();
            ResultContext response = (ResultContext) values.get("response");
            SolrDocumentList newResponse = new SolrDocumentList();
            DocList docs = response.getDocList();
            for (DocIterator it = docs.iterator(); it.hasNext(); ) {
                newResponse.add(toSolrDocument(req.getSearcher().doc(it.nextDoc()), req.getSchema()));
            }
            values.add("responseSolrDocumentList", newResponse);
        }
    } else {
        if (rb.outgoing == null) {
            rb.outgoing = new LinkedList<>();
        }
        rb.finished = new ArrayList<>();
        int nextStage = 0;
        do {
            rb.stage = nextStage;
            nextStage = ResponseBuilder.STAGE_DONE;
            // call all components
            for (SearchComponent c : components) {
                // the next stage is the minimum of what all components report
                nextStage = Math.min(nextStage, c.distributedProcess(rb));
            }
            // check the outgoing queue and send requests
            while (rb.outgoing.size() > 0) {
                // submit all current request tasks at once
                while (rb.outgoing.size() > 0) {
                    ShardRequest sreq = rb.outgoing.remove(0);
                    sreq.actualShards = sreq.shards;
                    if (sreq.actualShards == ShardRequest.ALL_SHARDS) {
                        sreq.actualShards = rb.shards;
                    }
                    // presume we'll get a response from each shard we send to
                    sreq.responses = new ArrayList<>(sreq.actualShards.length);
                    // TODO: map from shard to address[]
                    for (String shard : sreq.actualShards) {
                        ModifiableSolrParams params = new ModifiableSolrParams(sreq.params);
                        // not a top-level request
                        params.remove(ShardParams.SHARDS);
                        // not a top-level request
                        params.set(CommonParams.DISTRIB, "false");
                        params.remove("indent");
                        params.remove(CommonParams.HEADER_ECHO_PARAMS);
                        // a sub (shard) request
                        params.set(ShardParams.IS_SHARD, true);
                        params.set(ShardParams.SHARDS_PURPOSE, sreq.purpose);
                        // so the shard knows what was asked
                        params.set(ShardParams.SHARD_URL, shard);
                        if (req.getContext().get(AbstractQParser.ALFRESCO_JSON) != null) {
                            // This will add the Alfresco JSON as a parameter, overwriting the parameter if it already exists.
                            params.set(AbstractQParser.ALFRESCO_JSON, req.getContext().get(AbstractQParser.ALFRESCO_JSON).toString());
                        }
                        if (rb.requestInfo != null) {
                            // we could try and detect when this is needed,
                            // but it could be tricky
                            params.set("NOW", Long.toString(rb.requestInfo.getNOW().getTime()));
                        }
                        String shardQt = params.get(ShardParams.SHARDS_QT);
                        if (shardQt != null) {
                            params.set(CommonParams.QT, shardQt);
                        } else {
                            // use the original request path as the default, but only when
                            // luceneMatchVersion is 5.1 or later; it did not work this way prior to 5.1
                            if (req.getCore().getSolrConfig().luceneMatchVersion.onOrAfter(Version.LUCENE_5_1_0)) {
                                String reqPath = (String) req.getContext().get(PATH);
                                if (!"/select".equals(reqPath)) {
                                    params.set(CommonParams.QT, reqPath);
                                }
                                // else if path is /select, then the qt gets passed thru if set
                            } else {
                                // this is the pre-5.1 behavior, which translates to
                                // sending the shard request to /select
                                params.remove(CommonParams.QT);
                            }
                        }
                        shardHandler1.submit(sreq, shard, params);
                    }
                }
                // now wait for replies, but if anyone puts more requests on
                // the outgoing queue, send them out immediately (by exiting
                // this loop)
                boolean tolerant = rb.req.getParams().getBool(ShardParams.SHARDS_TOLERANT, false);
                while (rb.outgoing.size() == 0) {
                    ShardResponse srsp = tolerant ? shardHandler1.takeCompletedIncludingErrors() : shardHandler1.takeCompletedOrError();
                    if (srsp == null) {
                        // no more requests to wait for
                        break;
                    }
                    // Was there an exception?
                    if (srsp.getException() != null) {
                        // if not tolerant, abort everything and rethrow
                        if (!tolerant) {
                            shardHandler1.cancelAll();
                            if (srsp.getException() instanceof SolrException) {
                                throw (SolrException) srsp.getException();
                            } else {
                                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, srsp.getException());
                            }
                        } else {
                            if (rsp.getResponseHeader().get("partialResults") == null) {
                                rsp.getResponseHeader().add("partialResults", Boolean.TRUE);
                            }
                        }
                    }
                    rb.finished.add(srsp.getShardRequest());
                    // let the components see the responses to the request
                    for (SearchComponent c : components) {
                        c.handleResponses(rb, srsp.getShardRequest());
                    }
                }
            }
            for (SearchComponent c : components) {
                c.finishStage(rb);
            }
            // we are done when the next stage is MAX_VALUE
        } while (nextStage != Integer.MAX_VALUE);
    }
    // still provide shards.info if requested, even for a short-circuited distrib request
    if (!rb.isDistrib && req.getParams().getBool(ShardParams.SHARDS_INFO, false) && rb.shortCircuitedURL != null) {
        NamedList<Object> shardInfo = new SimpleOrderedMap<Object>();
        SimpleOrderedMap<Object> nl = new SimpleOrderedMap<Object>();
        if (rsp.getException() != null) {
            Throwable cause = rsp.getException();
            if (cause instanceof SolrServerException) {
                cause = ((SolrServerException) cause).getRootCause();
            } else {
                if (cause.getCause() != null) {
                    cause = cause.getCause();
                }
            }
            nl.add("error", cause.toString());
            StringWriter trace = new StringWriter();
            cause.printStackTrace(new PrintWriter(trace));
            nl.add("trace", trace.toString());
        } else {
            nl.add("numFound", rb.getResults().docList.matches());
            nl.add("maxScore", rb.getResults().docList.maxScore());
        }
        nl.add("shardAddress", rb.shortCircuitedURL);
        // elapsed time of this request so far
        nl.add("time", req.getRequestTimer().getTime());
        int pos = rb.shortCircuitedURL.indexOf("://");
        String shardInfoName = pos != -1 ? rb.shortCircuitedURL.substring(pos + 3) : rb.shortCircuitedURL;
        shardInfo.add(shardInfoName, nl);
        rsp.getValues().add(ShardParams.SHARDS_INFO, shardInfo);
    }
}
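In the shards.info branch near the end, a SolrServerException is unwrapped with getRootCause() so the reported error is the underlying transport failure rather than the SolrJ wrapper. A small standalone illustration of that unwrapping follows; the ConnectException is just an example cause, not something from the Alfresco source.

// Minimal sketch of SolrServerException.getRootCause() unwrapping.
import java.net.ConnectException;
import org.apache.solr.client.solrj.SolrServerException;

public class RootCauseSketch {
    public static void main(String[] args) {
        SolrServerException wrapper = new SolrServerException("Server refused connection",
                new ConnectException("Connection refused"));
        // getRootCause() walks the cause chain to the innermost throwable,
        // which is what the handler above puts into the "error" entry
        Throwable cause = wrapper.getRootCause();
        System.out.println(cause); // prints: java.net.ConnectException: Connection refused
    }
}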
Use of org.apache.solr.client.solrj.SolrServerException in project solr-document-store by DBCDK.
From the class Worker, method makeWorker:
public JobConsumer<QueueJob> makeWorker() {
    return new JobConsumer<QueueJob>() {
        @Override
        public void accept(Connection connection, QueueJob job, JobMetaData metaData) throws FatalQueueError, NonFatalQueueError, PostponedNonFatalQueueError {
            log.info("job = {}, metadata = {}", job, metaData);
            try {
                JsonNode sourceDoc = docProducer.fetchSourceDoc(job);
                SolrInputDocument solrDocument = docProducer.createSolrDocument(sourceDoc);
                String bibliographicShardId = docProducer.bibliographicShardId(sourceDoc);
                docProducer.deleteSolrDocuments(bibliographicShardId, job.getCommitwithin());
                docProducer.deploy(solrDocument, job.getCommitwithin());
            } catch (IOException ex) {
                throw new NonFatalQueueError(ex);
            } catch (SolrServerException ex) {
                throw new FatalQueueError(ex);
            }
        }
    };
}
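The worker treats an IOException as retryable (NonFatalQueueError) but a SolrServerException as fatal (FatalQueueError), and it passes the job's commitWithin value to both the delete and the deploy step. The project's DocProducer is not shown here; as a rough, hedged sketch, a delete-then-add with commitWithin in plain SolrJ looks roughly like the helper below (the collection name, id, and field values are placeholders, not the project's real schema).

// Hedged sketch of a delete-then-add update with commitWithin, compiled against the SolrJ SolrClient API.
import java.io.IOException;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;

public class DeploySketch {
    // replaces whatever is indexed under the given id, asking Solr to commit within commitWithinMs
    static void deploy(SolrClient solrClient, String collection, String id, int commitWithinMs)
            throws SolrServerException, IOException {
        solrClient.deleteByQuery(collection, "id:" + id, commitWithinMs);
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", id);
        doc.addField("title", "example title");   // placeholder field
        solrClient.add(collection, doc, commitWithinMs);
    }
}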
Use of org.apache.solr.client.solrj.SolrServerException in project e3mall by colg-cloud.
From the class TbItemServiceImpl, method search:
@Override
public SearchResult search(SolrQuery solrQuery) {
    // Query the index with the given solrQuery
    QueryResponse query = null;
    try {
        query = solrServer.query(solrQuery);
    } catch (SolrServerException e) {
        e.printStackTrace();
        LOGGER.error("SolrServer communication/parsing error!");
    }
    // Get the query results
    // (note: if the query above failed, query is still null here and the next call throws a NullPointerException)
    SolrDocumentList results = query.getResults();
    // Get the total number of matching records
    long recourdCount = results.getNumFound();
    // Build the item list; the title field needs highlighting
    Map<String, Map<String, List<String>>> highlighting = query.getHighlighting();
    List<TbItemSerach> itemList = new ArrayList<>();
    for (SolrDocument doc : results) {
        TbItemSerach itemSerach = new TbItemSerach();
        itemSerach.setId((String) doc.get("id"));
        itemSerach.setSellPoint((String) doc.get("item_sell_point"));
        itemSerach.setPrice((Long) doc.get("item_price"));
        itemSerach.setImage((String) doc.get("item_image"));
        itemSerach.setCategoryName((String) doc.get("item_category_name"));
        // Use the highlighted title if one is available
        List<String> list = highlighting.get(doc.get("id")).get("item_title");
        String title;
        if (ListUtil.isNotEmpty(list)) {
            title = list.get(0);
        } else {
            title = (String) doc.get("item_title");
        }
        itemSerach.setTitle(title);
        // Add to the item list
        itemList.add(itemSerach);
    }
    // Package into a SearchResult and return it
    SearchResult searchResult = new SearchResult();
    searchResult.setRecourdCount(recourdCount);
    searchResult.setItemSerachs(itemList);
    return searchResult;
}
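The highlighting map read above is only populated when the incoming SolrQuery was built with highlighting enabled for item_title. Below is a hedged sketch of how such a query could be assembled with SolrJ; the field names mirror the ones read above, while the default field, paging arithmetic, and the <em> markup are illustrative and not taken from the e3mall source.

// Hedged sketch: building a SolrQuery with title highlighting enabled.
import org.apache.solr.client.solrj.SolrQuery;

public class ItemQuerySketch {
    static SolrQuery buildQuery(String keyword, int page, int rows) {
        SolrQuery solrQuery = new SolrQuery(keyword);
        solrQuery.set("df", "item_title");            // default search field (assumption)
        solrQuery.setStart((page - 1) * rows);
        solrQuery.setRows(rows);
        // enable highlighting on the title so QueryResponse.getHighlighting() has something to return
        solrQuery.setHighlight(true);
        solrQuery.addHighlightField("item_title");
        solrQuery.setHighlightSimplePre("<em style=\"color:red\">");
        solrQuery.setHighlightSimplePost("</em>");
        return solrQuery;
    }
}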