Use of org.apache.solr.request.LocalSolrQueryRequest in the lucene-solr project (Apache):
class ClusteringComponentTest, method testComponent.
@Test
public void testComponent() throws Exception {
  SolrCore core = h.getCore();

  SearchComponent sc = core.getSearchComponent("clustering");
  assertTrue("sc is null and it shouldn't be", sc != null);

  // Pass 1: cluster the search results of a match-all query via the "standard" handler.
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.add(ClusteringComponent.COMPONENT_NAME, "true");
  params.add(CommonParams.Q, "*:*");
  params.add(ClusteringParams.USE_SEARCH_RESULTS, "true");

  SolrRequestHandler handler = core.getRequestHandler("standard");
  SolrQueryResponse rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap<>());
  SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
  try {
    handler.handleRequest(req, rsp);
    NamedList<?> values = rsp.getValues();
    Object clusters = values.get("clusters");
    assertTrue("clusters is null and it shouldn't be", clusters != null);
  } finally {
    // Close even when an assertion fails, so the request's ref-counted resources are released.
    req.close();
  }

  // Pass 2: cluster the whole collection with the "mock" engine; the query component is disabled.
  params = new ModifiableSolrParams();
  params.add(ClusteringComponent.COMPONENT_NAME, "true");
  params.add(ClusteringParams.ENGINE_NAME, "mock");
  params.add(ClusteringParams.USE_COLLECTION, "true");
  params.add(QueryComponent.COMPONENT_NAME, "false");

  handler = core.getRequestHandler("docClustering");
  rsp = new SolrQueryResponse();
  rsp.addResponseHeader(new SimpleOrderedMap<>());
  req = new LocalSolrQueryRequest(core, params);
  try {
    handler.handleRequest(req, rsp);
    NamedList<?> values = rsp.getValues();
    Object clusters = values.get("clusters");
    assertTrue("clusters is null and it shouldn't be", clusters != null);
  } finally {
    req.close();
  }
}
Use of org.apache.solr.request.LocalSolrQueryRequest in the lucene-solr project (Apache):
class CommitTracker, method run.
/** Worker body executed by the {@code ScheduledFuture} when an auto-commit fires. */
@Override
public void run() {
  synchronized (this) {
    // Clear the pending handle first so a new auto-commit can be scheduled
    // while this one is in flight.
    pending = null;
  }
  SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams());
  try {
    CommitUpdateCommand cmd = new CommitUpdateCommand(req, false);
    cmd.openSearcher = openSearcher;
    cmd.waitSearcher = WAIT_SEARCHER;
    cmd.softCommit = softCommit;

    // A hard commit on a cloud leader gets a fresh version stamp.
    boolean cloudAware = core.getCoreDescriptor().getCloudDescriptor() != null;
    if (cloudAware && core.getCoreDescriptor().getCloudDescriptor().isLeader() && !softCommit) {
      cmd.version = core.getUpdateHandler().getUpdateLog().getVersionInfo().getNewClock();
    }
    // Not optimizing, so cmd.maxOptimizeSegments is left untouched.

    // Increment *before* committing: tests were racing — the new searcher
    // registered and the test read the commit count before it was bumped.
    autoCommitCount.incrementAndGet();
    core.getUpdateHandler().commit(cmd);
  } catch (Exception e) {
    SolrException.log(log, "auto commit error...", e);
  } finally {
    req.close();
  }
}
Use of org.apache.solr.request.LocalSolrQueryRequest in the lucene-solr project (Apache):
class CdcrUpdateLog, method copyBufferedUpdates.
/**
 * <p>
 * Read the entries from the given tlog file and replay them as buffered updates.
 * The buffered tlog that we are trying to copy might contain operations that duplicate
 * those in the current update log. During the tlog replication process, the replica might
 * buffer update operations that will also be present in the tlog files downloaded from the
 * leader. In order to remove these duplicates, any operation whose version is lower than
 * or equal to the latest known version is skipped.
 *
 * @param tlogSrc       source tlog file whose entries are replayed
 * @param offsetSrc     byte offset in {@code tlogSrc} at which reading starts
 * @param latestVersion entries with a version &lt;= this value are dropped as duplicates
 * @throws SolrException (SERVER_ERROR) on an unknown operation or any read/replay failure
 */
private void copyBufferedUpdates(File tlogSrc, long offsetSrc, long latestVersion) {
  recoveryInfo = new RecoveryInfo();
  recoveryInfo.positionOfStart = tlog == null ? 0 : tlog.snapshot();
  state = State.BUFFERING;
  operationFlags |= FLAG_GAP;
  // Mark replayed commands as coming from the leader so distributed processing is bypassed.
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set(DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM, DistributedUpdateProcessor.DistribPhase.FROMLEADER.toString());
  SolrQueryRequest req = new LocalSolrQueryRequest(uhandler.core, params);
  CdcrTransactionLog src = new CdcrTransactionLog(tlogSrc, null, true);
  TransactionLog.LogReader tlogReader = src.getReader(offsetSrc);
  try {
    for (; ; ) {
      Object o = tlogReader.next();
      // we reached the end of the tlog
      if (o == null)
        break;
      // should currently be a List<Oper,Ver,Doc/Id>; use a wildcard instead of a raw type
      List<?> entry = (List<?>) o;
      int operationAndFlags = (Integer) entry.get(0);
      int oper = operationAndFlags & OPERATION_MASK;
      long version = (Long) entry.get(1);
      if (version <= latestVersion) {
        // probably a buffered update that is also present in a tlog file coming from the leader,
        // skip it. (Log message matches the <= comparison above.)
        log.debug("Dropping buffered operation - version {} <= {}", version, latestVersion);
        continue;
      }
      switch (oper) {
        case UpdateLog.ADD: {
          // Document is always the last element of the entry.
          SolrInputDocument sdoc = (SolrInputDocument) entry.get(entry.size() - 1);
          AddUpdateCommand cmd = new AddUpdateCommand(req);
          cmd.solrDoc = sdoc;
          cmd.setVersion(version);
          cmd.setFlags(UpdateCommand.BUFFERING);
          this.add(cmd);
          break;
        }
        case UpdateLog.DELETE: {
          byte[] idBytes = (byte[]) entry.get(2);
          DeleteUpdateCommand cmd = new DeleteUpdateCommand(req);
          cmd.setIndexedId(new BytesRef(idBytes));
          cmd.setVersion(version);
          cmd.setFlags(UpdateCommand.BUFFERING);
          this.delete(cmd);
          break;
        }
        case UpdateLog.DELETE_BY_QUERY: {
          String query = (String) entry.get(2);
          DeleteUpdateCommand cmd = new DeleteUpdateCommand(req);
          cmd.query = query;
          cmd.setVersion(version);
          cmd.setFlags(UpdateCommand.BUFFERING);
          this.deleteByQuery(cmd);
          break;
        }
        default:
          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid Operation! " + oper);
      }
    }
  } catch (Exception e) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to copy buffered updates", e);
  } finally {
    // Close the reader first, then the source tlog, even if closing the reader throws.
    try {
      tlogReader.close();
    } finally {
      this.doClose(src);
    }
  }
}
Use of org.apache.solr.request.LocalSolrQueryRequest in the lucene-solr project (Apache):
class PostingsSolrHighlighter, method getHighlighter.
/**
 * Builds a {@link UnifiedHighlighter} whose parameters mimic the behavior of the
 * old PostingsHighlighter, layered over the incoming request's own parameters.
 */
@Override
protected UnifiedHighlighter getHighlighter(SolrQueryRequest req) {
  // Settings that must always win, regardless of what the request specifies.
  ModifiableSolrParams invariants = new ModifiableSolrParams();
  invariants.set(HighlightParams.OFFSET_SOURCE, "POSTINGS");
  invariants.set(HighlightParams.FIELD_MATCH, true);
  invariants.set(HighlightParams.USE_PHRASE_HIGHLIGHTER, false);
  invariants.set(HighlightParams.FRAGSIZE, -1);

  // Fallbacks, used only when neither the invariants nor the request provide a value.
  ModifiableSolrParams defaults = new ModifiableSolrParams();
  defaults.set(HighlightParams.DEFAULT_SUMMARY, true);
  defaults.set(HighlightParams.TAG_ELLIPSIS, "... ");

  // Precedence: invariants > original request parameters > defaults.
  SolrParams requestOverDefaults = SolrParams.wrapDefaults(req.getParams(), defaults);
  SolrParams newParams = SolrParams.wrapDefaults(invariants, requestOverDefaults);

  try (LocalSolrQueryRequest fakeReq = new LocalSolrQueryRequest(req.getCore(), newParams)) {
    return super.getHighlighter(fakeReq);
  }
}
Use of org.apache.solr.request.LocalSolrQueryRequest in the lucene-solr project (Apache):
class TestZKPropertiesWriter, method request.
/**
 * Builds a local request from the given key/value argument pairs and forces the
 * {@code distrib} flag on so the request is treated as distributed.
 */
public SolrQueryRequest request(String... q) {
  LocalSolrQueryRequest req = lrf.makeRequest(q);
  // Copy the request's parameters and overlay distrib=true before re-attaching them.
  ModifiableSolrParams distribParams = new ModifiableSolrParams();
  distribParams.add(req.getParams());
  distribParams.set("distrib", true);
  req.setParams(distribParams);
  return req;
}
Aggregations