Use of org.apache.solr.util.RestTestHarness in project lucene-solr by apache.
From class TestSolrConfigHandlerCloud, method setupHarnesses:
private void setupHarnesses() {
  for (final SolrClient client : clients) {
    RestTestHarness harness = new RestTestHarness(((HttpSolrClient) client)::getBaseURL);
    restTestHarnesses.add(harness);
  }
}
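The matching cleanup is not shown in this excerpt. A minimal sketch of releasing the harnesses in a teardown, mirroring the method-level style of these snippets and assuming the same restTestHarnesses list field (the method name closeRestTestHarnesses is hypothetical), might look like this; RestTestHarness exposes a close() method, as the finally block further below shows:

// Hypothetical teardown sketch, not part of the original test class.
private void closeRestTestHarnesses() throws IOException {
  for (RestTestHarness harness : restTestHarnesses) {
    harness.close();
  }
  restTestHarnesses.clear();
}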
Use of org.apache.solr.util.RestTestHarness in project lucene-solr by apache.
From class TestSolrConfigHandlerConcurrent, method invokeBulkCall:
private void invokeBulkCall(String cacheName, List<String> errs, Map val) throws Exception {
  String payload = "{" +
      "'set-property' : {'query.CACHENAME.size':'CACHEVAL1'," +
      " 'query.CACHENAME.initialSize':'CACHEVAL2'}," +
      "'set-property': {'query.CACHENAME.autowarmCount' : 'CACHEVAL3'}" +
      "}";
  Set<String> errmessages = new HashSet<>();
  for (int i = 1; i < 2; i++) {
    // make it a higher number
    RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    String response;
    String val1;
    String val2;
    String val3;
    try {
      payload = payload.replaceAll("CACHENAME", cacheName);
      val1 = String.valueOf(10 * i + 1);
      payload = payload.replace("CACHEVAL1", val1);
      val2 = String.valueOf(10 * i + 2);
      payload = payload.replace("CACHEVAL2", val2);
      val3 = String.valueOf(10 * i + 3);
      payload = payload.replace("CACHEVAL3", val3);
      response = publisher.post("/config?wt=json", SolrTestCaseJ4.json(payload));
    } finally {
      publisher.close();
    }
    Map map = (Map) getVal(new JSONParser(new StringReader(response)));
    Object errors = map.get("errors");
    if (errors != null) {
      errs.add(new String(Utils.toJSON(errors), StandardCharsets.UTF_8));
      return;
    }
    DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1");
    List<String> urls = new ArrayList<>();
    for (Slice slice : coll.getSlices()) {
      for (Replica replica : slice.getReplicas()) urls.add("" + replica.get(ZkStateReader.BASE_URL_PROP) + "/" + replica.get(ZkStateReader.CORE_NAME_PROP));
    }
    // get another node
    String url = urls.get(urls.size() - 1);
    long startTime = System.nanoTime();
    long maxTimeoutSeconds = 20;
    while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutSeconds) {
      Thread.sleep(100);
      errmessages.clear();
      Map respMap = getAsMap(url + "/config/overlay?wt=json", cloudClient);
      Map m = (Map) respMap.get("overlay");
      if (m != null)
        m = (Map) m.get("props");
      if (m == null) {
        errmessages.add(StrUtils.formatString("overlay does not exist for cache: {0} , iteration: {1} response {2} ", cacheName, i, respMap.toString()));
        continue;
      }
      Object o = getObjectByPath(m, true, asList("query", cacheName, "size"));
      if (!val1.equals(o))
        errmessages.add(StrUtils.formatString("'size' property not set, expected = {0}, actual {1}", val1, o));
      o = getObjectByPath(m, true, asList("query", cacheName, "initialSize"));
      if (!val2.equals(o))
        errmessages.add(StrUtils.formatString("'initialSize' property not set, expected = {0}, actual {1}", val2, o));
      o = getObjectByPath(m, true, asList("query", cacheName, "autowarmCount"));
      if (!val3.equals(o))
        errmessages.add(StrUtils.formatString("'autowarmCount' property not set, expected = {0}, actual {1}", val3, o));
      if (errmessages.isEmpty())
        break;
    }
    if (!errmessages.isEmpty()) {
      errs.addAll(errmessages);
      return;
    }
  }
}
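For illustration, here is a small standalone sketch of the placeholder substitution performed above, for iteration i = 1 and a hypothetical cache name of "filterCache" (the class name, the cache name, and the printed output are assumptions for demonstration only; the test itself posts the result to /config?wt=json and then polls /config/overlay?wt=json for the values under overlay -> props -> query -> <cacheName>):

// Hypothetical demo, not part of the original test.
public class PayloadSubstitutionDemo {
  public static void main(String[] args) {
    String payload = "{" +
        "'set-property' : {'query.CACHENAME.size':'CACHEVAL1'," +
        " 'query.CACHENAME.initialSize':'CACHEVAL2'}," +
        "'set-property': {'query.CACHENAME.autowarmCount' : 'CACHEVAL3'}" +
        "}";
    int i = 1;
    String cacheName = "filterCache";  // hypothetical cache name
    payload = payload.replaceAll("CACHENAME", cacheName)
        .replace("CACHEVAL1", String.valueOf(10 * i + 1))   // 11
        .replace("CACHEVAL2", String.valueOf(10 * i + 2))   // 12
        .replace("CACHEVAL3", String.valueOf(10 * i + 3));  // 13
    // Prints the set-property payload that the harness would post to /config?wt=json.
    System.out.println(payload);
  }
}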
Use of org.apache.solr.util.RestTestHarness in project lucene-solr by apache.
From class TestSolrConfigHandlerConcurrent, method setupHarnesses:
private void setupHarnesses() {
  for (final SolrClient client : clients) {
    RestTestHarness harness = new RestTestHarness(() -> ((HttpSolrClient) client).getBaseURL());
    restTestHarnesses.add(harness);
  }
}
Use of org.apache.solr.util.RestTestHarness in project lucene-solr by apache.
From class TestCloudSchemaless, method setupHarnesses:
private void setupHarnesses() {
  for (final SolrClient client : clients) {
    RestTestHarness harness = new RestTestHarness(() -> ((HttpSolrClient) client).getBaseURL());
    restTestHarnesses.add(harness);
  }
}
Use of org.apache.solr.util.RestTestHarness in project lucene-solr by apache.
From class TestCloudSchemaless, method test:
@Test
@ShardsFixed(num = 8)
public void test() throws Exception {
  setupHarnesses();
  // First, add a bunch of documents in a single update with the same new field.
  // This tests that the replicas properly handle schema additions.
  int slices = getCommonCloudSolrClient().getZkStateReader().getClusterState().getActiveSlices("collection1").size();
  int trials = 50;
  // generate enough docs so that we can expect at least a doc per slice
  int numDocsPerTrial = (int) (slices * (Math.log(slices) + 1));
  SolrClient randomClient = clients.get(random().nextInt(clients.size()));
  int docNumber = 0;
  for (int i = 0; i < trials; ++i) {
    List<SolrInputDocument> docs = new ArrayList<>();
    for (int j = 0; j < numDocsPerTrial; ++j) {
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", Long.toHexString(Double.doubleToLongBits(random().nextDouble())));
      doc.addField("newTestFieldInt" + docNumber++, "123");
      doc.addField("constantField", "3.14159");
      docs.add(doc);
    }
    randomClient.add(docs);
  }
  randomClient.commit();
  String[] expectedFields = getExpectedFieldResponses(docNumber);
  // Check that all the fields were added
  for (RestTestHarness client : restTestHarnesses) {
    String request = "/schema/fields?wt=xml";
    String response = client.query(request);
    String result = BaseTestHarness.validateXPath(response, expectedFields);
    if (result != null) {
      String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response;
      log.error(msg);
      fail(msg);
    }
  }
  // Now, let's ensure that writing the same field with two different types fails
  int failTrials = 50;
  for (int i = 0; i < failTrials; ++i) {
    List<SolrInputDocument> docs = null;
    SolrInputDocument intDoc = new SolrInputDocument();
    intDoc.addField("id", Long.toHexString(Double.doubleToLongBits(random().nextDouble())));
    intDoc.addField("longOrDateField" + i, "123");
    SolrInputDocument dateDoc = new SolrInputDocument();
    dateDoc.addField("id", Long.toHexString(Double.doubleToLongBits(random().nextDouble())));
    dateDoc.addField("longOrDateField" + i, "1995-12-31T23:59:59Z");
    // randomize the order of the docs
    if (random().nextBoolean()) {
      docs = Arrays.asList(intDoc, dateDoc);
    } else {
      docs = Arrays.asList(dateDoc, intDoc);
    }
    try {
      randomClient.add(docs);
      randomClient.commit();
      fail("Expected Bad Request Exception");
    } catch (SolrException se) {
      assertEquals(ErrorCode.BAD_REQUEST, ErrorCode.getErrorCode(se.code()));
    }
    try {
      CloudSolrClient cloudSolrClient = getCommonCloudSolrClient();
      cloudSolrClient.add(docs);
      cloudSolrClient.commit();
      fail("Expected Bad Request Exception");
    } catch (SolrException ex) {
      assertEquals(ErrorCode.BAD_REQUEST, ErrorCode.getErrorCode(ex.code()));
    }
  }
}
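The helper getExpectedFieldResponses is not included in this excerpt. As a rough, purely hypothetical sketch (an assumption about its shape, not the project's actual implementation), it would need to return one XPath per field expected in the /schema/fields?wt=xml response, along these lines:

// Hypothetical sketch only; the real getExpectedFieldResponses is not shown above,
// and the XPath layout of the /schema/fields?wt=xml response is assumed here.
private String[] expectedFieldXPaths(int numberOfDocs) {
  String[] expected = new String[numberOfDocs];
  for (int i = 0; i < numberOfDocs; ++i) {
    // One expectation per auto-added field newTestFieldInt0 .. newTestFieldInt(N-1).
    expected[i] = "/response/arr[@name='fields']/lst/str[@name='name'][.='newTestFieldInt" + i + "']";
  }
  return expected;
}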