Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
The class TestDistributedSearch, method queryPartialResults.
protected void queryPartialResults(final List<String> upShards, final List<SolrClient> upClients, Object... q) throws Exception {
final ModifiableSolrParams params = new ModifiableSolrParams();
for (int i = 0; i < q.length; i += 2) {
params.add(q[i].toString(), q[i + 1].toString());
}
// TODO: look into why passing true causes failures
params.set("distrib", "false");
final QueryResponse controlRsp = controlClient.query(params);
// if timeAllowed is specified then even a control response can return a partialResults header
if (params.get(CommonParams.TIME_ALLOWED) == null) {
validateControlData(controlRsp);
}
params.remove("distrib");
setDistributedParams(params);
QueryResponse rsp = queryRandomUpServer(params, upClients);
comparePartialResponses(rsp, controlRsp, upShards);
if (stress > 0) {
log.info("starting stress...");
Thread[] threads = new Thread[nThreads];
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread() {
@Override
public void run() {
for (int j = 0; j < stress; j++) {
int which = r.nextInt(upClients.size());
SolrClient client = upClients.get(which);
try {
QueryResponse rsp = client.query(new ModifiableSolrParams(params));
if (verifyStress) {
comparePartialResponses(rsp, controlRsp, upShards);
}
} catch (SolrServerException | IOException e) {
throw new RuntimeException(e);
}
}
}
};
threads[i].start();
}
for (Thread thread : threads) {
thread.join();
}
}
}
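The helper above copies alternating key/value varargs into a ModifiableSolrParams, runs a non-distributed control query with distrib=false, and then drops that flag before querying a random live server. A minimal standalone sketch of the same parameter handling follows; the Solr URL, collection name, and query values are assumptions for illustration, not taken from the test.

// Illustration only, not part of the test: the varargs-to-params pattern plus the
// distrib toggle. The URL and collection name ("collection1") are assumptions.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.params.ModifiableSolrParams;

public class ControlQueryExample {
  public static void main(String[] args) throws Exception {
    Object[] q = {"q", "*:*", "timeAllowed", "20"};        // alternating key/value pairs
    ModifiableSolrParams params = new ModifiableSolrParams();
    for (int i = 0; i < q.length; i += 2) {
      params.add(q[i].toString(), q[i + 1].toString());
    }
    params.set("distrib", "false");                        // force a local, non-distributed control query
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      QueryResponse controlRsp = client.query(params);
      System.out.println("control numFound=" + controlRsp.getResults().getNumFound());
      params.remove("distrib");                            // a distributed query would reuse the same params
    }
  }
}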
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
The class TestJoin, method testJoin.
@Test
public void testJoin() throws Exception {
assertU(add(doc("id", "1", "name", "john", "title", "Director", "dept_s", "Engineering")));
assertU(add(doc("id", "2", "name", "mark", "title", "VP", "dept_s", "Marketing")));
assertU(add(doc("id", "3", "name", "nancy", "title", "MTS", "dept_s", "Sales")));
assertU(add(doc("id", "4", "name", "dave", "title", "MTS", "dept_s", "Support", "dept_s", "Engineering")));
assertU(add(doc("id", "5", "name", "tina", "title", "VP", "dept_s", "Engineering")));
assertU(add(doc("id", "10", "dept_id_s", "Engineering", "text", "These guys develop stuff")));
assertU(add(doc("id", "11", "dept_id_s", "Marketing", "text", "These guys make you look good")));
assertU(add(doc("id", "12", "dept_id_s", "Sales", "text", "These guys sell stuff")));
assertU(add(doc("id", "13", "dept_id_s", "Support", "text", "These guys help customers")));
assertU(commit());
ModifiableSolrParams p = params("sort", "id asc");
// test debugging
assertJQ(req(p, "q", "{!join from=dept_s to=dept_id_s}title:MTS", "fl", "id", "debugQuery", "true"), "/debug/join/{!join from=dept_s to=dept_id_s}title:MTS=={'_MATCH_':'fromSetSize,toSetSize', 'fromSetSize':2, 'toSetSize':3}");
assertJQ(req(p, "q", "{!join from=dept_s to=dept_id_s}title:MTS", "fl", "id"), "/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}");
// empty from
assertJQ(req(p, "q", "{!join from=noexist_s to=dept_id_s}*:*", "fl", "id"), "/response=={'numFound':0,'start':0,'docs':[]}");
// empty to
assertJQ(req(p, "q", "{!join from=dept_s to=noexist_s}*:*", "fl", "id"), "/response=={'numFound':0,'start':0,'docs':[]}");
// self join... return everyone with the same title as Dave
assertJQ(req(p, "q", "{!join from=title to=title}name:dave", "fl", "id"), "/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}");
// find people that develop stuff
assertJQ(req(p, "q", "{!join from=dept_id_s to=dept_s}text:develop", "fl", "id"), "/response=={'numFound':3,'start':0,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}");
// self join on multivalued text field
assertJQ(req(p, "q", "{!join from=title to=title}name:dave", "fl", "id"), "/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}");
assertJQ(req(p, "q", "{!join from=dept_s to=dept_id_s}title:MTS", "fl", "id", "debugQuery", "true"), "/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}");
// expected outcome for a sub query matching dave joined against departments
final String davesDepartments = "/response=={'numFound':2,'start':0,'docs':[{'id':'10'},{'id':'13'}]}";
// straightforward query
assertJQ(req(p, "q", "{!join from=dept_s to=dept_id_s}name:dave", "fl", "id"), davesDepartments);
// variable deref for sub-query parsing
assertJQ(req(p, "q", "{!join from=dept_s to=dept_id_s v=$qq}", "qq", "{!dismax}dave", "qf", "name", "fl", "id", "debugQuery", "true"), davesDepartments);
// variable deref for sub-query parsing w/localparams
assertJQ(req(p, "q", "{!join from=dept_s to=dept_id_s v=$qq}", "qq", "{!dismax qf=name}dave", "fl", "id", "debugQuery", "true"), davesDepartments);
// defType local param to control sub-query parsing
assertJQ(req(p, "q", "{!join from=dept_s to=dept_id_s defType=dismax}dave", "qf", "name", "fl", "id", "debugQuery", "true"), davesDepartments);
// find people that develop stuff - but limit via filter query to a name of "john"
// this tests filters being pushed down to queries (SOLR-3062)
assertJQ(req(p, "q", "{!join from=dept_id_s to=dept_s}text:develop", "fl", "id", "fq", "name:john"), "/response=={'numFound':1,'start':0,'docs':[{'id':'1'}]}");
}
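The assertions above drive the {!join} parser through the test harness (req/assertJQ). For illustration only, the first join query can also be issued from client code with ModifiableSolrParams over SolrJ; the URL and collection name below are placeholders, not part of the test.

// Rough SolrJ sketch, not from the test: the {!join} query above run against an
// assumed collection "collection1".
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.params.ModifiableSolrParams;

public class JoinQueryExample {
  public static void main(String[] args) throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "{!join from=dept_s to=dept_id_s}title:MTS"); // departments that employ an MTS
    params.set("fl", "id");
    params.set("sort", "id asc");
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      QueryResponse rsp = client.query(params);
      for (SolrDocument doc : rsp.getResults()) {
        System.out.println(doc.getFieldValue("id")); // with the test data above: 10, 12, 13
      }
    }
  }
}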
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
The class TestDistributedGrouping, method simpleQuery.
private void simpleQuery(Object... queryParams) throws SolrServerException, IOException {
ModifiableSolrParams params = new ModifiableSolrParams();
for (int i = 0; i < queryParams.length; i += 2) {
params.add(queryParams[i].toString(), queryParams[i + 1].toString());
}
params.set("shards", shards);
queryServer(params);
}
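simpleQuery copies the caller's key/value pairs into a ModifiableSolrParams and pins the request to an explicit shard list before calling queryServer. A minimal sketch of the kind of request this produces is shown below; the grouping field, shard addresses, and URL are placeholders, not values from the test.

// Illustration only, not from the test: a grouped query fanned out over an explicit
// "shards" list. Field name, shard addresses, and URL are placeholders.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.params.ModifiableSolrParams;

public class GroupedShardQueryExample {
  public static void main(String[] args) throws Exception {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*:*");
    params.set("group", "true");
    params.set("group.field", "dept_s");
    // comma-separated shard addresses, as the test's queryServer(params) would receive them
    params.set("shards", "127.0.0.1:8983/solr/collection1,127.0.0.1:7574/solr/collection1");
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      System.out.println(client.query(params).getResponse());
    }
  }
}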
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
The class TestRealTimeGet, method testStressGetRealtime.
/***
@Test
public void testGetRealtime() throws Exception {
SolrQueryRequest sr1 = req("q","foo");
IndexReader r1 = sr1.getCore().getRealtimeReader();
assertU(adoc("id","1"));
IndexReader r2 = sr1.getCore().getRealtimeReader();
assertNotSame(r1, r2);
int refcount = r2.getRefCount();
// make sure a new reader wasn't opened
IndexReader r3 = sr1.getCore().getRealtimeReader();
assertSame(r2, r3);
assertEquals(refcount+1, r3.getRefCount());
assertU(commit());
// this is not critical, but currently a commit does not refresh the reader
// if nothing has changed
IndexReader r4 = sr1.getCore().getRealtimeReader();
assertEquals(refcount+2, r4.getRefCount());
r1.decRef();
r2.decRef();
r3.decRef();
r4.decRef();
sr1.close();
}
***/
@Test
public void testStressGetRealtime() throws Exception {
clearIndex();
assertU(commit());
// req().getCore().getUpdateHandler().getIndexWriterProvider().getIndexWriter(req().getCore()).setInfoStream(System.out);
final int commitPercent = 5 + random().nextInt(20);
// what percent of the commits are soft
final int softCommitPercent = 30 + random().nextInt(75);
final int deletePercent = 4 + random().nextInt(25);
final int deleteByQueryPercent = 1 + random().nextInt(5);
// percent chance that an update uses optimistic locking
final int optimisticPercent = 1 + random().nextInt(50);
// percent chance that the version specified will be correct
final int optimisticCorrectPercent = 25 + random().nextInt(70);
// percent of the time that a get will be filtered... we normally don't want this too high
final int filteredGetPercent = random().nextInt(random().nextInt(20) + 1);
final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200));
int nWriteThreads = 5 + random().nextInt(25);
// number of committers at a time...
final int maxConcurrentCommits = nWriteThreads;
// query variables
final int percentRealtimeQuery = 60;
// number of query operations to perform in total
final AtomicLong operations = new AtomicLong(50000);
int nReadThreads = 5 + random().nextInt(25);
verbose("commitPercent=", commitPercent);
verbose("softCommitPercent=", softCommitPercent);
verbose("deletePercent=", deletePercent);
verbose("deleteByQueryPercent=", deleteByQueryPercent);
verbose("ndocs=", ndocs);
verbose("nWriteThreads=", nWriteThreads);
verbose("nReadThreads=", nReadThreads);
verbose("percentRealtimeQuery=", percentRealtimeQuery);
verbose("maxConcurrentCommits=", maxConcurrentCommits);
verbose("operations=", operations);
initModel(ndocs);
final AtomicInteger numCommitting = new AtomicInteger();
List<Thread> threads = new ArrayList<>();
for (int i = 0; i < nWriteThreads; i++) {
Thread thread = new Thread("WRITER" + i) {
Random rand = new Random(random().nextInt());
@Override
public void run() {
try {
while (operations.get() > 0) {
int oper = rand.nextInt(100);
if (oper < commitPercent) {
if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
Map<Integer, DocInfo> newCommittedModel;
long version;
synchronized (TestRealTimeGet.this) {
// take a snapshot
newCommittedModel = new HashMap<>(model);
version = snapshotCount++;
verbose("took snapshot version=", version);
}
if (rand.nextInt(100) < softCommitPercent) {
verbose("softCommit start");
assertU(TestHarness.commit("softCommit", "true"));
verbose("softCommit end");
} else {
verbose("hardCommit start");
assertU(commit());
verbose("hardCommit end");
}
synchronized (TestRealTimeGet.this) {
// install this model snapshot only if it's newer than the current one
if (version >= committedModelClock) {
if (VERBOSE) {
verbose("installing new committedModel version=" + committedModelClock);
}
committedModel = newCommittedModel;
committedModelClock = version;
}
}
}
numCommitting.decrementAndGet();
continue;
}
int id = rand.nextInt(ndocs);
Object sync = syncArr[id];
// set the lastId before we actually change it sometimes to try and
// uncover more race conditions between writing and reading
boolean before = rand.nextBoolean();
if (before) {
lastId = id;
}
// Even with versions, we can't remove the sync because increasing versions does not mean increasing vals.
synchronized (sync) {
DocInfo info = model.get(id);
long val = info.val;
long nextVal = Math.abs(val) + 1;
if (oper < commitPercent + deletePercent) {
boolean opt = rand.nextInt() < optimisticPercent;
boolean correct = opt ? rand.nextInt() < optimisticCorrectPercent : false;
long badVersion = correct ? 0 : badVersion(rand, info.version);
if (VERBOSE) {
if (!opt) {
verbose("deleting id", id, "val=", nextVal);
} else {
verbose("deleting id", id, "val=", nextVal, "existing_version=", info.version, (correct ? "" : (" bad_version=" + badVersion)));
}
}
// assertU("<delete><id>" + id + "</id></delete>");
Long version = null;
if (opt) {
if (correct) {
version = deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(info.version)));
} else {
try {
version = deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(badVersion)));
fail();
} catch (SolrException se) {
assertEquals(409, se.code());
}
}
} else {
version = deleteAndGetVersion(Integer.toString(id), null);
}
if (version != null) {
model.put(id, new DocInfo(version, -nextVal));
}
if (VERBOSE) {
verbose("deleting id", id, "val=", nextVal, "DONE");
}
} else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
if (VERBOSE) {
verbose("deleteByQuery id ", id, "val=", nextVal);
}
assertU("<delete><query>id:" + id + "</query></delete>");
model.put(id, new DocInfo(-1L, -nextVal));
if (VERBOSE) {
verbose("deleteByQuery id", id, "val=", nextVal, "DONE");
}
} else {
boolean opt = rand.nextInt() < optimisticPercent;
boolean correct = opt ? rand.nextInt() < optimisticCorrectPercent : false;
long badVersion = correct ? 0 : badVersion(rand, info.version);
if (VERBOSE) {
if (!opt) {
verbose("adding id", id, "val=", nextVal);
} else {
verbose("adding id", id, "val=", nextVal, "existing_version=", info.version, (correct ? "" : (" bad_version=" + badVersion)));
}
}
Long version = null;
SolrInputDocument sd = sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal));
if (opt) {
if (correct) {
version = addAndGetVersion(sd, params("_version_", Long.toString(info.version)));
} else {
try {
version = addAndGetVersion(sd, params("_version_", Long.toString(badVersion)));
fail();
} catch (SolrException se) {
assertEquals(409, se.code());
}
}
} else {
version = addAndGetVersion(sd, null);
}
if (version != null) {
model.put(id, new DocInfo(version, nextVal));
}
if (VERBOSE) {
verbose("adding id", id, "val=", nextVal, "DONE");
}
}
}
if (!before) {
lastId = id;
}
}
} catch (Throwable e) {
operations.set(-1L);
throw new RuntimeException(e);
}
}
};
threads.add(thread);
}
for (int i = 0; i < nReadThreads; i++) {
Thread thread = new Thread("READER" + i) {
Random rand = new Random(random().nextInt());
@Override
public void run() {
try {
while (operations.decrementAndGet() >= 0) {
// bias toward a recently changed doc
int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
// when indexing, we update the index, then the model
// so when querying, we should first check the model, and then the index
boolean realTime = rand.nextInt(100) < percentRealtimeQuery;
DocInfo info;
if (realTime) {
info = model.get(id);
} else {
synchronized (TestRealTimeGet.this) {
info = committedModel.get(id);
}
}
if (VERBOSE) {
verbose("querying id", id);
}
boolean filteredOut = false;
SolrQueryRequest sreq;
if (realTime) {
ModifiableSolrParams p = params("wt", "json", "qt", "/get", "ids", Integer.toString(id));
if (rand.nextInt(100) < filteredGetPercent) {
int idToFilter = rand.nextBoolean() ? id : rand.nextInt(ndocs);
filteredOut = idToFilter != id;
p.add("fq", "id:" + idToFilter);
}
sreq = req(p);
} else {
sreq = req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true");
}
String response = h.query(sreq);
Map rsp = (Map) ObjectBuilder.fromJSON(response);
List doclist = (List) (((Map) rsp.get("response")).get("docs"));
if (doclist.size() == 0) {
// there's no info we can get back with a delete, so not much we can check without further synchronization
// This is also correct when filteredOut==true
} else {
assertEquals(1, doclist.size());
long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD));
long foundVer = (Long) (((Map) doclist.get(0)).get("_version_"));
if (filteredOut || foundVal < Math.abs(info.val) || (foundVer == info.version && foundVal != info.val)) {
// if the version matches, the val must match as well
verbose("ERROR, id=", id, "found=", response, "model", info);
assertTrue(false);
}
}
}
} catch (Throwable e) {
operations.set(-1L);
throw new RuntimeException(e);
}
}
};
threads.add(thread);
}
for (Thread thread : threads) {
thread.start();
}
for (Thread thread : threads) {
thread.join();
}
}
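The reader threads above issue realtime-get requests (qt=/get with an ids parameter, sometimes with an added fq) through the internal test harness. A rough external equivalent using SolrJ is sketched below; the URL and collection name are assumptions, and the exact response shape of /get (a single "doc" entry versus a "response" doc list) depends on whether the id or ids parameter is used.

// Illustration only, not from the test: a realtime get against the /get handler,
// optionally filtered with fq. URL and collection name are assumptions.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;

public class RealtimeGetExample {
  public static void main(String[] args) throws Exception {
    ModifiableSolrParams p = new ModifiableSolrParams();
    p.set("ids", "1");       // visible even before a commit, unlike a normal /select query
    p.add("fq", "id:1");     // optional filter, mirroring the filteredGetPercent branch
    QueryRequest request = new QueryRequest(p);
    request.setPath("/get"); // route to the realtime-get handler instead of /select
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
      NamedList<Object> rsp = client.request(request);
      System.out.println(rsp);
    }
  }
}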
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
The class TestExtendedDismaxParser, method testEdismaxSimpleExtension.
public void testEdismaxSimpleExtension() throws SyntaxError {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("q", "foo bar");
params.set("qf", "subject title^5");
params.set("qf_fr", "subject_fr title_fr^5");
params.set("qf_en", "subject_en title_en^5");
params.set("qf_es", "subject_es title_es^5");
MultilanguageQueryParser parser = new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params));
Query query = parser.parse();
assertNotNull(query);
assertTrue(containsClause(query, "title", "foo", 5, false));
assertTrue(containsClause(query, "title", "bar", 5, false));
assertTrue(containsClause(query, "subject", "foo", 1, false));
assertTrue(containsClause(query, "subject", "bar", 1, false));
params.set("language", "es");
parser = new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params));
query = parser.parse();
assertNotNull(query);
assertTrue(containsClause(query, "title_es", "foo", 5, false));
assertTrue(containsClause(query, "title_es", "bar", 5, false));
assertTrue(containsClause(query, "subject_es", "foo", 1, false));
assertTrue(containsClause(query, "subject_es", "bar", 1, false));
FuzzyDismaxQParser parser2 = new FuzzyDismaxQParser("foo bar absence", new ModifiableSolrParams(), params, req(params));
query = parser2.parse();
assertNotNull(query);
assertTrue(containsClause(query, "title", "foo", 5, false));
assertTrue(containsClause(query, "title", "bar", 5, false));
assertTrue(containsClause(query, "title", "absence", 5, true));
}
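MultilanguageQueryParser and FuzzyDismaxQParser are test-local extensions of ExtendedDismaxQParser defined inside TestExtendedDismaxParser. As a loose illustration of that extension point only, and not the actual test classes, a subclass could rewrite qf from a language-specific parameter before delegating to the stock edismax parser; the parameter names below mirror the test's qf_es style but the class itself is hypothetical.

// Hypothetical sketch: swap "qf" for "qf_<language>" when a "language" param is present,
// then let ExtendedDismaxQParser do the actual parsing. Not the test's implementation.
import org.apache.solr.common.params.DisMaxParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.ExtendedDismaxQParser;

public class LanguageAwareDismaxQParser extends ExtendedDismaxQParser {

  public LanguageAwareDismaxQParser(String qstr, SolrParams localParams,
                                    SolrParams params, SolrQueryRequest req) {
    super(qstr, localParams, rewriteQf(params), req);
  }

  // If "language" is set and a matching "qf_<lang>" exists, use it as the effective "qf".
  private static SolrParams rewriteQf(SolrParams params) {
    String language = params.get("language");
    if (language == null) {
      return params;
    }
    String localizedQf = params.get("qf_" + language);
    if (localizedQf == null) {
      return params;
    }
    ModifiableSolrParams rewritten = new ModifiableSolrParams(params);
    rewritten.set(DisMaxParams.QF, localizedQf);
    return rewritten;
  }
}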