Example usage of org.apache.solr.client.solrj.SolrClient in the apache/lucene-solr project:
class TestJsonFacets, method testOverrequest.
/**
 * Verifies that per-shard overrequest controls which bucket wins a distributed
 * terms-facet merge: with overrequest:0 the per-shard tiebreaks mislead the merge,
 * while any overrequest (explicit or the -1 default) surfaces the true top bucket.
 */
@Test
public void testOverrequest() throws Exception {
  initServers();
  Client client = servers.getClient(random().nextInt());
  client.queryDefaults().set("shards", servers.getShards(),
      "debugQuery", Boolean.toString(random().nextBoolean()));

  List<SolrClient> shardClients = client.getClientProvider().all();
  assertTrue(shardClients.size() >= 3);

  client.deleteByQuery("*:*", null);

  ModifiableSolrParams p = params("cat_s", "cat_s");
  String catField = p.get("cat_s");

  // Spread docs so the first two shards each tie A/B locally (A wins the local
  // tiebreak) while the third shard holds only B.
  shardClients.get(0).add(sdoc("id", "1", catField, "A")); // A will win tiebreak
  shardClients.get(0).add(sdoc("id", "2", catField, "B"));
  shardClients.get(1).add(sdoc("id", "3", catField, "B"));
  shardClients.get(1).add(sdoc("id", "4", catField, "A")); // A will win tiebreak
  shardClients.get(2).add(sdoc("id", "5", catField, "B"));
  shardClients.get(2).add(sdoc("id", "6", catField, "B"));
  client.commit();

  // Shard responses should be A=1, A=1, B=2, merged should be "A=2, B=2" hence A wins tiebreak
  client.testJQ(params(p, "q", "*:*", "json.facet", "{"
          + "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}"
          + ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}"
          // -1 is default overrequest
          + ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}"
          // make sure overflows don't mess us up
          + ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}"
          + "}"),
      // with no overrequest, we incorrectly conclude that A is the top bucket
      "facets=={ count:6"
          + ", cat0:{ buckets:[ {val:A,count:2} ] }"
          + ", cat1:{ buckets:[ {val:B,count:4} ] }"
          + ", catDef:{ buckets:[ {val:B,count:4} ] }"
          + ", catBig:{ buckets:[ {val:A,count:2} ] }"
          + "}");
}
Example usage of org.apache.solr.client.solrj.SolrClient in the apache/lucene-solr project:
class TestNamedUpdateProcessors, method setupHarnesses.
/**
 * Registers one REST test harness per SolrClient. The base URL is supplied
 * lazily via a lambda so it is read at request time, not at setup time.
 */
private void setupHarnesses() {
  clients.forEach(solrClient ->
      restTestHarnesses.add(
          new RestTestHarness(() -> ((HttpSolrClient) solrClient).getBaseURL())));
}
Example usage of org.apache.solr.client.solrj.SolrClient in the apache/lucene-solr project:
class DistributedFacetPivotLargeTest, method setupDistributedPivotFacetDocuments.
/**
 * Indexes the fixture documents for the distributed pivot-facet tests, spread
 * deliberately across the first three shards; shard #3 is intentionally left
 * empty as an edge case and that is asserted at the end.
 *
 * @throws Exception if indexing or the final query fails
 */
private void setupDistributedPivotFacetDocuments() throws Exception {
  //Clear docs
  del("*:*");
  commit();
  final int maxDocs = 50;
  final SolrClient zeroShard = clients.get(0);
  final SolrClient oneShard = clients.get(1);
  final SolrClient twoShard = clients.get(2);
  // edge case: never gets any matching docs
  final SolrClient threeShard = clients.get(3);
  // NOTE: use a primitive loop counter — the original boxed Integer autoboxed
  // on every comparison/increment for no benefit.
  for (int i = 0; i < maxDocs; i++) {
    //50 entries
    addPivotDoc(zeroShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft polecat bbc", "pay_i", 2400, "hiredate_dt", "2012-07-01T12:30:00Z", "real_b", "true");
    addPivotDoc(zeroShard, "id", getDocNum(), "place_s", "medical staffing network holdings, inc.", "company_t", "microsoft polecat bbc", "pay_i", 2400, "hiredate_dt", "2012-07-01T12:30:00Z");
    addPivotDoc(oneShard, "id", getDocNum(), "place_s", "placeholder" + i, "company_t", "compHolder" + i, "pay_i", 24 * i, "hiredate_dt", "2012-08-01T12:30:00Z");
    addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "bbc honda", "pay_i", 2400, "hiredate_dt", "2012-09-01T12:30:00Z", "real_b", "true");
    addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "compHolder" + i, "pay_i", 22 * i, "hiredate_dt", "2012-09-01T12:30:00Z", "real_b", "true");
    addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "compHolder" + i, "pay_i", 21 * i, "hiredate_dt", "2012-09-01T12:30:00Z", "real_b", "true");
    addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "compHolder" + i, "pay_i", 20 * i, "hiredate_dt", "2012-09-01T12:30:00Z", "real_b", "true");
    //For the filler content
    //Fifty places with 6 results each
    addPivotDoc(oneShard, "id", getDocNum(), "place_s", i + "placeholder", "company_t", "microsoft polecat bbc", "pay_i", 2400, "hiredate_dt", "2012-10-01T12:30:00Z", "real_b", "false");
    addPivotDoc(oneShard, "id", getDocNum(), "place_s", i + "placeholder", "company_t", "microsoft polecat bbc", "pay_i", 3100, "hiredate_dt", "2012-10-01T12:30:00Z", "real_b", "false");
    addPivotDoc(oneShard, "id", getDocNum(), "place_s", i + "placeholder", "company_t", "microsoft polecat bbc", "pay_i", 3400, "hiredate_dt", "2012-10-01T12:30:00Z", "real_b", "false");
    addPivotDoc(oneShard, "id", getDocNum(), "place_s", i + "placeholder", "company_t", "microsoft polecat bbc", "pay_i", 5400, "hiredate_dt", "2012-10-01T12:30:00Z", "real_b", "false");
    addPivotDoc(oneShard, "id", getDocNum(), "place_s", i + "placeholder", "company_t", "microsoft polecat bbc", "pay_i", 6400, "hiredate_dt", "2012-10-01T12:30:00Z", "real_b", "false");
    addPivotDoc(oneShard, "id", getDocNum(), "place_s", i + "placeholder", "company_t", "microsoft polecat bbc", "pay_i", 2000, "hiredate_dt", "2012-10-01T12:30:00Z", "real_b", "false");
  }
  addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft", "pay_i", 4367, "hiredate_dt", "2012-11-01T12:30:00Z");
  addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft bbc", "pay_i", 8742, "hiredate_dt", "2012-11-01T12:30:00Z");
  addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft polecat", "pay_i", 5824, "hiredate_dt", "2012-11-01T12:30:00Z");
  addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft ", "pay_i", 6539, "hiredate_dt", "2012-11-01T12:30:00Z");
  addPivotDoc(oneShard, "id", getDocNum(), "place_s", "medical staffing network holdings, inc.", "company_t", "microsoft ", "pay_i", 6539, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", "xxx");
  addPivotDoc(oneShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "polecat", "pay_i", 4352, "hiredate_dt", "2012-01-01T12:30:00Z", "special_s", "xxx");
  addPivotDoc(oneShard, "id", getDocNum(), "place_s", "krakaw", "company_t", "polecat", "pay_i", 4352, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
  addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft", "pay_i", 12, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
  addPivotDoc(twoShard, "id", getDocNum(), "place_s", "cardiff", "company_t", "microsoft", "pay_i", 543, "hiredate_dt", "2012-11-01T12:30:00Z", "special_s", SPECIAL);
  // two really trivial documents, unrelated to the rest of the tests,
  // for the purpose of demoing the problem with mincount=0
  addPivotDoc(oneShard, "id", getDocNum(), "top_s", "aaa", "sub_s", "bbb");
  addPivotDoc(twoShard, "id", getDocNum(), "top_s", "xxx", "sub_s", "yyy");
  commit();
  assertEquals("shard #3 should never have any docs", 0, threeShard.query(params("q", "*:*")).getResults().getNumFound());
}
Example usage of org.apache.solr.client.solrj.SolrClient in the apache/lucene-solr project:
class DistributedSpellCheckComponentTest, method q.
/**
 * Runs the same query against the control client and a randomly chosen shard
 * client (with the "shards" parameter set for a distributed request).
 *
 * @param q alternating parameter name/value pairs; must have even length
 * @throws IllegalArgumentException if the number of arguments is odd
 * @throws Exception if either query fails
 */
private void q(Object... q) throws Exception {
  // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException
  // halfway through building the params.
  if (q.length % 2 != 0) {
    throw new IllegalArgumentException(
        "expected an even number of arguments (name/value pairs), got " + q.length);
  }
  final ModifiableSolrParams params = new ModifiableSolrParams();
  for (int i = 0; i < q.length; i += 2) {
    params.add(q[i].toString(), q[i + 1].toString());
  }
  controlClient.query(params);
  // query a random server
  params.set("shards", shards);
  int which = r.nextInt(clients.size());
  SolrClient client = clients.get(which);
  client.query(params);
}
Example usage of org.apache.solr.client.solrj.SolrClient in the apache/lucene-solr project:
class TestRemoteStreaming, method doBefore.
/**
 * Indexes a single document before each test, commits, and verifies it is
 * searchable so every test starts from a known non-empty index.
 */
@Before
public void doBefore() throws IOException, SolrServerException {
  final SolrClient solr = getSolrClient();
  final SolrInputDocument document = new SolrInputDocument();
  document.addField("id", "1234");
  solr.add(document);
  solr.commit();
  // sanity check: the document must be visible before the test body runs
  assertTrue(searchFindsIt());
}
Aggregations