Use of org.apache.solr.update.processor.UpdateRequestProcessorChain in project lucene-solr by apache.
In the class DefaultValueUpdateProcessorTest, the method processAdd:
/**
 * Runs a document through the specified chain, and returns the final
 * document used when the chain is completed (NOTE: some chains may
 * modify the document in place).
 */
SolrInputDocument processAdd(final String chain, final SolrInputDocument docIn) throws IOException {
  SolrCore core = h.getCore();
  UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain);
  assertNotNull("No Chain named: " + chain, pc);
  SolrQueryResponse rsp = new SolrQueryResponse();
  SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams());
  try {
    SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
    AddUpdateCommand cmd = new AddUpdateCommand(req);
    cmd.solrDoc = docIn;
    UpdateRequestProcessor processor = pc.createProcessor(req, rsp);
    processor.processAdd(cmd);
    return cmd.solrDoc;
  } finally {
    SolrRequestInfo.clearRequestInfo();
    req.close();
  }
}
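As a quick illustration of how a test might use this helper, the sketch below runs a document through a chain and checks the returned document. The chain name "field-default" and the field names are placeholders for this sketch, not values taken from the actual test configuration.

// Hypothetical usage of the helper above, inside the same test class.
// "field-default" and "defaulted_s" are illustrative names only.
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "42");
SolrInputDocument out = processAdd("field-default", doc);
// a default-value chain would be expected to have filled in the defaulted field
assertNotNull("defaulted field missing", out.getFieldValue("defaulted_s"));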
Use of org.apache.solr.update.processor.UpdateRequestProcessorChain in project lucene-solr by apache.
In the class TestXIncludeConfig, the method testXInclude:
public void testXInclude() throws Exception {
  SolrCore core = h.getCore();
  assertNotNull("includedHandler is null", core.getRequestHandler("includedHandler"));
  UpdateRequestProcessorChain chain = core.getUpdateProcessingChain("special-include");
  assertNotNull("chain is missing included processor", chain);
  assertEquals("chain with included processor is wrong size", 1, chain.getProcessors().size());
  assertEquals("chain has wrong included processor", RegexReplaceProcessorFactory.class, chain.getProcessors().get(0).getClass());
  IndexSchema schema = core.getLatestSchema();
  // xinclude
  assertNotNull("ft-included is null", schema.getFieldTypeByName("ft-included"));
  assertNotNull("field-included is null", schema.getFieldOrNull("field-included"));
  // entity include
  assertNotNull("ft-entity-include1 is null", schema.getFieldTypeByName("ft-entity-include1"));
  assertNotNull("ft-entity-include2 is null", schema.getFieldTypeByName("ft-entity-include2"));
  // sanity check: ft-entity-include3 does not exist anywhere
  assertNull("ft-entity-include3 is not null", schema.getFieldTypeByName("ft-entity-include3"));
}
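For context, a chain looked up by name this way is an ordered list of UpdateRequestProcessorFactory instances, as the assertions above imply. A minimal sketch of inspecting those factories from a SolrCore, assuming a core reference is at hand (e.g. h.getCore() in a test):

// Minimal sketch; "special-include" follows the test above, any configured
// chain name works the same way.
UpdateRequestProcessorChain chain = core.getUpdateProcessingChain("special-include");
for (UpdateRequestProcessorFactory factory : chain.getProcessors()) {
  // each factory contributes one processor when the chain is instantiated
  System.out.println(factory.getClass().getSimpleName());
}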
Use of org.apache.solr.update.processor.UpdateRequestProcessorChain in project lucene-solr by apache.
In the class ContentStreamHandlerBase, the method handleRequestBody:
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
  SolrParams params = req.getParams();
  UpdateRequestProcessorChain processorChain = req.getCore().getUpdateProcessorChain(params);
  UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
  try {
    ContentStreamLoader documentLoader = newLoader(req, processor);
    Iterable<ContentStream> streams = req.getContentStreams();
    if (streams == null) {
      if (!RequestHandlerUtils.handleCommit(req, processor, params, false) && !RequestHandlerUtils.handleRollback(req, processor, params, false)) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "missing content stream");
      }
    } else {
      for (ContentStream stream : streams) {
        documentLoader.load(req, rsp, stream, processor);
      }
      // Perhaps commit from the parameters
      RequestHandlerUtils.handleCommit(req, processor, params, false);
      RequestHandlerUtils.handleRollback(req, processor, params, false);
    }
  } finally {
    // finish the request
    try {
      processor.finish();
    } finally {
      processor.close();
    }
  }
}
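Unlike the name-based lookups in the earlier snippets, this handler resolves the chain from the request parameters (Solr selects the chain via the update.chain parameter). A minimal sketch of that same pattern outside a handler, assuming a SolrCore "core" and a hypothetical chain name "my-chain" configured in solrconfig.xml; the calling context is assumed to allow IOException:

// Sketch only; "my-chain" is a placeholder chain name.
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("update.chain", "my-chain");
SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
SolrQueryResponse rsp = new SolrQueryResponse();
UpdateRequestProcessorChain chain = core.getUpdateProcessorChain(params);
UpdateRequestProcessor processor = chain.createProcessor(req, rsp);
try {
  AddUpdateCommand cmd = new AddUpdateCommand(req);
  cmd.solrDoc = new SolrInputDocument();
  cmd.solrDoc.addField("id", "1");
  processor.processAdd(cmd);
  processor.finish();
} finally {
  processor.close();
  req.close();
}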
Use of org.apache.solr.update.processor.UpdateRequestProcessorChain in project lucene-solr by apache.
In the class PeerSync, the method handleUpdates:
private boolean handleUpdates(ShardResponse srsp) {
  // we retrieved the last N updates from the replica
  List<Object> updates = (List<Object>) srsp.getSolrResponse().getResponse().get("updates");
  SyncShardRequest sreq = (SyncShardRequest) srsp.getShardRequest();
  if (updates.size() < sreq.totalRequestedUpdates) {
    log.error(msg() + " Requested " + sreq.totalRequestedUpdates + " updates from " + sreq.shards[0] + " but retrieved " + updates.size());
    return false;
  }
  // overwrite fingerprint we saved in 'handleVersions()'
  Object fingerprint = srsp.getSolrResponse().getResponse().get("fingerprint");
  if (fingerprint != null) {
    sreq.fingerprint = IndexFingerprint.fromObject(fingerprint);
  }
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set(DISTRIB_UPDATE_PARAM, FROMLEADER.toString());
  // debugging
  params.set("peersync", true);
  SolrQueryRequest req = new LocalSolrQueryRequest(uhandler.core, params);
  SolrQueryResponse rsp = new SolrQueryResponse();
  UpdateRequestProcessorChain processorChain = req.getCore().getUpdateProcessingChain(null);
  UpdateRequestProcessor proc = processorChain.createProcessor(req, rsp);
  Collections.sort(updates, updateRecordComparator);
  Object o = null;
  long lastVersion = 0;
  try {
    // Apply oldest updates first
    for (Object obj : updates) {
      // should currently be a List<Oper,Ver,Doc/Id>
      o = obj;
      List<Object> entry = (List<Object>) o;
      if (debug) {
        log.debug(msg() + "raw update record " + o);
      }
      int oper = (Integer) entry.get(0) & UpdateLog.OPERATION_MASK;
      long version = (Long) entry.get(1);
      if (version == lastVersion && version != 0) continue;
      lastVersion = version;
      switch (oper) {
        case UpdateLog.ADD: {
          // byte[] idBytes = (byte[]) entry.get(2);
          SolrInputDocument sdoc = (SolrInputDocument) entry.get(entry.size() - 1);
          AddUpdateCommand cmd = new AddUpdateCommand(req);
          // cmd.setIndexedId(new BytesRef(idBytes));
          cmd.solrDoc = sdoc;
          cmd.setVersion(version);
          cmd.setFlags(UpdateCommand.PEER_SYNC | UpdateCommand.IGNORE_AUTOCOMMIT);
          if (debug) {
            log.debug(msg() + "add " + cmd + " id " + sdoc.getField(ID));
          }
          proc.processAdd(cmd);
          break;
        }
        case UpdateLog.DELETE: {
          byte[] idBytes = (byte[]) entry.get(2);
          DeleteUpdateCommand cmd = new DeleteUpdateCommand(req);
          cmd.setIndexedId(new BytesRef(idBytes));
          cmd.setVersion(version);
          cmd.setFlags(UpdateCommand.PEER_SYNC | UpdateCommand.IGNORE_AUTOCOMMIT);
          if (debug) {
            log.debug(msg() + "delete " + cmd + " " + new BytesRef(idBytes).utf8ToString());
          }
          proc.processDelete(cmd);
          break;
        }
        case UpdateLog.DELETE_BY_QUERY: {
          String query = (String) entry.get(2);
          DeleteUpdateCommand cmd = new DeleteUpdateCommand(req);
          cmd.query = query;
          cmd.setVersion(version);
          cmd.setFlags(UpdateCommand.PEER_SYNC | UpdateCommand.IGNORE_AUTOCOMMIT);
          if (debug) {
            log.debug(msg() + "deleteByQuery " + cmd);
          }
          proc.processDelete(cmd);
          break;
        }
        case UpdateLog.UPDATE_INPLACE: {
          AddUpdateCommand cmd = UpdateLog.convertTlogEntryToAddUpdateCommand(req, entry, oper, version);
          cmd.setFlags(UpdateCommand.PEER_SYNC | UpdateCommand.IGNORE_AUTOCOMMIT);
          if (debug) {
            log.debug(msg() + "inplace update " + cmd + " prevVersion=" + cmd.prevVersion + ", doc=" + cmd.solrDoc);
          }
          proc.processAdd(cmd);
          break;
        }
        default:
          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown Operation! " + oper);
      }
    }
  } catch (IOException e) {
    // TODO: should this be handled separately as a problem with us?
    // I guess it probably already will by causing replication to be kicked off.
    sreq.updateException = e;
    log.error(msg() + "Error applying updates from " + sreq.shards + " ,update=" + o, e);
    return false;
  } catch (Exception e) {
    sreq.updateException = e;
    log.error(msg() + "Error applying updates from " + sreq.shards + " ,update=" + o, e);
    return false;
  } finally {
    try {
      proc.finish();
    } catch (Exception e) {
      sreq.updateException = e;
      log.error(msg() + "Error applying updates from " + sreq.shards + " ,finish()", e);
      return false;
    } finally {
      IOUtils.closeQuietly(proc);
    }
  }
  return compareFingerprint(sreq);
}
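The loop above depends on each raw update record being a List whose first element packs the operation into its low bits and whose second element is the version. A self-contained sketch of just that decoding step is below; the mask value is a stand-in for UpdateLog.OPERATION_MASK (an assumption here, the real constant lives in org.apache.solr.update.UpdateLog), and the sample entry values are made up.

import java.util.Arrays;
import java.util.List;

public class TlogEntryDecodeSketch {
  // Stand-in for UpdateLog.OPERATION_MASK; value assumed for illustration.
  static final int OPERATION_MASK = 0x0f;

  public static void main(String[] args) {
    // [flags|operation, version, payload] -- the shape handled by handleUpdates above
    List<Object> entry = Arrays.asList(1, 1234567890123456789L, "payload");
    int oper = ((Integer) entry.get(0)) & OPERATION_MASK;
    long version = (Long) entry.get(1);
    System.out.println("oper=" + oper + ", version=" + version);
  }
}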