Use of org.noggit.JSONParser in the lucene-solr project (Apache).
Class TestSolrConfigHandlerConcurrent, method invokeBulkCall:
/**
 * Posts a bulk "set-property" config update for the given cache via a randomly chosen
 * REST harness, then polls a randomly chosen replica's /config/overlay until the three
 * written property values (size, initialSize, autowarmCount) are visible, or a 20-second
 * timeout elapses. Any failures are appended to {@code errs}.
 *
 * @param cacheName name of the cache whose properties are set (substituted into the payload)
 * @param errs      collector for error messages; non-empty on failure
 * @param val       unused here; kept for signature compatibility with callers
 */
private void invokeBulkCall(String cacheName, List<String> errs, Map val) throws Exception {
    // Template payload; CACHENAME/CACHEVALn placeholders are substituted per iteration.
    String payload = "{" + "'set-property' : {'query.CACHENAME.size':'CACHEVAL1'," + " 'query.CACHENAME.initialSize':'CACHEVAL2'}," + "'set-property': {'query.CACHENAME.autowarmCount' : 'CACHEVAL3'}" + "}";
    Set<String> errmessages = new HashSet<>();
    for (int i = 1; i < 2; i++) {
        //make it a higher number
        RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
        String response;
        String val1;
        String val2;
        String val3;
        try {
            // Distinct values per iteration (11/12/13, 21/22/23, ...), so stale reads are detectable.
            payload = payload.replaceAll("CACHENAME", cacheName);
            val1 = String.valueOf(10 * i + 1);
            payload = payload.replace("CACHEVAL1", val1);
            val2 = String.valueOf(10 * i + 2);
            payload = payload.replace("CACHEVAL2", val2);
            val3 = String.valueOf(10 * i + 3);
            payload = payload.replace("CACHEVAL3", val3);
            response = publisher.post("/config?wt=json", SolrTestCaseJ4.json(payload));
        } finally {
            publisher.close();
        }
        Map map = (Map) getVal(new JSONParser(new StringReader(response)));
        Object errors = map.get("errors");
        if (errors != null) {
            errs.add(new String(Utils.toJSON(errors), StandardCharsets.UTF_8));
            return;
        }
        // Build base_url/core endpoints for every replica of collection1.
        DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1");
        List<String> urls = new ArrayList<>();
        for (Slice slice : coll.getSlices()) {
            for (Replica replica : slice.getReplicas())
                urls.add("" + replica.get(ZkStateReader.BASE_URL_PROP) + "/" + replica.get(ZkStateReader.CORE_NAME_PROP));
        }
        //get another node
        // BUGFIX: urls.get(urls.size()) always threw IndexOutOfBoundsException (valid indexes
        // are 0..size-1). Pick a random node, as the comment above intends.
        String url = urls.get(r.nextInt(urls.size()));
        long startTime = System.nanoTime();
        long maxTimeoutSeconds = 20;
        while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutSeconds) {
            Thread.sleep(100);
            errmessages.clear();
            Map respMap = getAsMap(url + "/config/overlay?wt=json", cloudClient);
            Map m = (Map) respMap.get("overlay");
            if (m != null)
                m = (Map) m.get("props");
            if (m == null) {
                errmessages.add(StrUtils.formatString("overlay does not exist for cache: {0} , iteration: {1} response {2} ", cacheName, i, respMap.toString()));
                continue;
            }
            // Verify each of the three properties made it into the overlay.
            Object o = getObjectByPath(m, true, asList("query", cacheName, "size"));
            if (!val1.equals(o))
                errmessages.add(StrUtils.formatString("'size' property not set, expected = {0}, actual {1}", val1, o));
            o = getObjectByPath(m, true, asList("query", cacheName, "initialSize"));
            if (!val2.equals(o))
                errmessages.add(StrUtils.formatString("'initialSize' property not set, expected = {0}, actual {1}", val2, o));
            o = getObjectByPath(m, true, asList("query", cacheName, "autowarmCount"));
            if (!val3.equals(o))
                errmessages.add(StrUtils.formatString("'autowarmCount' property not set, expected = {0}, actual {1}", val3, o));
            if (errmessages.isEmpty())
                break;
        }
        if (!errmessages.isEmpty()) {
            errs.addAll(errmessages);
            return;
        }
    }
}
Use of org.noggit.JSONParser in the lucene-solr project (Apache).
Class TestBulkSchemaConcurrent, method invokeBulkDeleteCall:
/**
 * Issues a bulk schema delete (copy-field, field, dynamic field, and field type — all
 * seeded names) against a random harness, then polls another random node until the
 * deletions are visible or roughly 100 seconds have elapsed. Failures accumulate in
 * {@code errs}.
 */
private void invokeBulkDeleteCall(int seed, ArrayList<String> errs) throws Exception {
    String payload = "{\n" + " 'delete-copy-field' : {\n" + " 'source' :'replaceFieldA',\n" + " 'dest':['replaceDynamicCopyFieldDest']\n" + " },\n" + " 'delete-field' : {'name':'replaceFieldA'},\n" + " 'delete-dynamic-field' : {'name' :'replaceDynamicField'},\n" + " 'delete-field-type' : {'name' :'myNewFieldTypeName'}\n" + " }";
    // Seed-derived names keep concurrent invocations from colliding.
    String fieldName = "a" + seed;
    String dynField = "*_lol" + seed;
    String copyDest = "hello_lol" + seed;
    String typeName = "mystr" + seed;
    payload = payload.replace("replaceFieldA", fieldName);
    payload = payload.replace("replaceDynamicField", dynField);
    payload = payload.replace("replaceDynamicCopyFieldDest", copyDest);
    payload = payload.replace("myNewFieldTypeName", typeName);
    RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    String response = publisher.post("/schema?wt=json", SolrTestCaseJ4.json(payload));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    Object errors = map.get("errors");
    if (errors != null) {
        errs.add(new String(Utils.toJSON(errors), StandardCharsets.UTF_8));
        return;
    }
    //get another node
    Set<String> errmessages = new HashSet<>();
    RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    try {
        final long startTime = System.nanoTime();
        final long maxTimeoutMillis = 100000;
        // Poll until every deleted element has disappeared from this node's schema.
        while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
            errmessages.clear();
            Map found = getObj(harness, fieldName, "fields");
            if (found != null)
                errmessages.add(StrUtils.formatString("field {0} still exists", fieldName));
            found = getObj(harness, dynField, "dynamicFields");
            if (found != null)
                errmessages.add(StrUtils.formatString("dynamic field {0} still exists", dynField));
            List copies = getSourceCopyFields(harness, fieldName);
            if (checkCopyField(copies, fieldName, copyDest))
                errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} still exists", fieldName, copyDest));
            found = getObj(harness, typeName, "fieldTypes");
            if (found != null)
                errmessages.add(StrUtils.formatString("new type {0} still exists", typeName));
            if (errmessages.isEmpty())
                break;
            Thread.sleep(10);
        }
    } finally {
        harness.close();
    }
    if (!errmessages.isEmpty()) {
        errs.addAll(errmessages);
    }
}
Use of org.noggit.JSONParser in the lucene-solr project (Apache).
Class TestBulkSchemaAPI, method testAnalyzerClass:
/**
 * Verifies that an analyzer declared with an explicit {@code class} property rejects
 * char filters, tokenizers, and filters (each must produce exactly one error message),
 * and that the bare class-only analyzer is accepted and persisted with its
 * luceneMatchVersion.
 */
public void testAnalyzerClass() throws Exception {
    String addFieldTypeAnalyzerWithClass = "{\n" + "'add-field-type' : {" + " 'name' : 'myNewTextFieldWithAnalyzerClass',\n" + " 'class':'solr.TextField',\n" + " 'analyzer' : {\n" + " 'luceneMatchVersion':'5.0.0',\n" + " 'class':'org.apache.lucene.analysis.core.WhitespaceAnalyzer'\n";
    String charFilters = " 'charFilters' : [{\n" + " 'class':'solr.PatternReplaceCharFilterFactory',\n" + " 'replacement':'$1$1',\n" + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + " }],\n";
    String tokenizer = " 'tokenizer' : { 'class':'solr.WhitespaceTokenizerFactory' },\n";
    String filters = " 'filters' : [{ 'class':'solr.ASCIIFoldingFilterFactory' }]\n";
    String suffix = " }\n" + "}}";
    // Each illegal combination must be rejected with exactly one matching error message.
    assertSingleAnalyzerError(addFieldTypeAnalyzerWithClass + ',' + charFilters + tokenizer + filters + suffix,
        "An analyzer with a class property may not define any char filters!");
    assertSingleAnalyzerError(addFieldTypeAnalyzerWithClass + ',' + tokenizer + filters + suffix,
        "An analyzer with a class property may not define a tokenizer!");
    assertSingleAnalyzerError(addFieldTypeAnalyzerWithClass + ',' + filters + suffix,
        "An analyzer with a class property may not define any filters!");
    // The class-only analyzer is legal and must be accepted.
    String response = restTestHarness.post("/schema?wt=json", json(addFieldTypeAnalyzerWithClass + suffix));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(response, map.get("errors"));
    // And it must round-trip: the stored field type carries the class and match version.
    map = getObj(restTestHarness, "myNewTextFieldWithAnalyzerClass", "fieldTypes");
    assertNotNull(map);
    Map analyzer = (Map) map.get("analyzer");
    assertEquals("org.apache.lucene.analysis.core.WhitespaceAnalyzer", String.valueOf(analyzer.get("class")));
    assertEquals("5.0.0", String.valueOf(analyzer.get("luceneMatchVersion")));
}

/**
 * Posts the given schema payload and asserts the response contains exactly one
 * error whose message includes {@code expectedFragment}.
 */
private void assertSingleAnalyzerError(String payload, String expectedFragment) throws Exception {
    String response = restTestHarness.post("/schema?wt=json", json(payload));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    List list = (List) map.get("errors");
    List errorList = (List) ((Map) list.get(0)).get("errorMessages");
    assertEquals(1, errorList.size());
    assertTrue(((String) errorList.get(0)).contains(expectedFragment));
}
Use of org.noggit.JSONParser in the lucene-solr project (Apache).
Class TestBulkSchemaAPI, method testMultipleCommands:
// End-to-end bulk Schema API test: captures the pre-existing schema state, applies one
// large payload mixing add/replace/delete commands for fields, dynamic fields, copy
// fields, and field types, then verifies every command took effect.
// NOTE(review): the payload literal below appears split across two physical lines by
// extraction; tokens are preserved byte-identical here.
public void testMultipleCommands() throws Exception {
RestTestHarness harness = restTestHarness;
// --- Preconditions: these schema elements must exist before the bulk update. ---
Map m = getObj(harness, "wdf_nocase", "fields");
assertNotNull("'wdf_nocase' field does not exist in the schema", m);
m = getObj(harness, "wdf_nocase", "fieldTypes");
assertNotNull("'wdf_nocase' field type does not exist in the schema", m);
m = getObj(harness, "boolean", "fieldTypes");
assertNotNull("'boolean' field type does not exist in the schema", m);
assertNull(m.get("sortMissingFirst"));
assertTrue((Boolean) m.get("sortMissingLast"));
m = getObj(harness, "name", "fields");
assertNotNull("'name' field does not exist in the schema", m);
assertEquals("nametext", m.get("type"));
m = getObj(harness, "bind", "fields");
assertNotNull("'bind' field does not exist in the schema", m);
assertEquals("boolean", m.get("type"));
m = getObj(harness, "attr_*", "dynamicFields");
assertNotNull("'attr_*' dynamic field does not exist in the schema", m);
assertEquals("text", m.get("type"));
// Before the update, '*_i' has four copy-field destinations, including 'title' and '*_s'.
List l = getSourceCopyFields(harness, "*_i");
Set s = new HashSet();
assertEquals(4, l.size());
s.add(((Map) l.get(0)).get("dest"));
s.add(((Map) l.get(1)).get("dest"));
s.add(((Map) l.get(2)).get("dest"));
s.add(((Map) l.get(3)).get("dest"));
assertTrue(s.contains("title"));
assertTrue(s.contains("*_s"));
// --- The bulk payload: adds fields a1..a5, a dynamic field, copy fields, several
// field types (including analyzer chains and a custom similarity), deletes
// 'wdf_nocase'/'*_tt' and two copy fields, and replaces 'boolean', 'name', 'attr_*'. ---
String payload = "{\n" + " 'add-field' : {\n" + " 'name':'a1',\n" + " 'type': 'string',\n" + " 'stored':true,\n" + " 'indexed':false\n" + " },\n" + " 'add-field' : {\n" + " 'name':'a2',\n" + " 'type': 'string',\n" + " 'stored':true,\n" + " 'indexed':true\n" + " },\n" + " 'add-dynamic-field' : {\n" + " 'name' :'*_lol',\n" + " 'type':'string',\n" + " 'stored':true,\n" + " 'indexed':true\n" + " },\n" + " 'add-copy-field' : {\n" + " 'source' :'a1',\n" + " 'dest':['a2','hello_lol']\n" + " },\n" + " 'add-field-type' : {\n" + " 'name' :'mystr',\n" + " 'class' : 'solr.StrField',\n" + " 'sortMissingLast':'true'\n" + " },\n" + " 'add-field-type' : {" + " 'name' : 'myNewTxtField',\n" + " 'class':'solr.TextField',\n" + " 'positionIncrementGap':'100',\n" + " 'indexAnalyzer' : {\n" + " 'charFilters':[\n" + " {\n" + " 'class':'solr.PatternReplaceCharFilterFactory',\n" + " 'replacement':'$1$1',\n" + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + " }\n" + " ],\n" + " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},\n" + " 'filters':[\n" + " {\n" + " 'class':'solr.WordDelimiterGraphFilterFactory',\n" + " 'preserveOriginal':'0'\n" + " },\n" + " {\n" + " 'class':'solr.StopFilterFactory',\n" + " 'words':'stopwords.txt',\n" + " 'ignoreCase':'true'\n" + " },\n" + " {'class':'solr.LowerCaseFilterFactory'},\n" + " {'class':'solr.ASCIIFoldingFilterFactory'},\n" + " {'class':'solr.KStemFilterFactory'},\n" + " {'class':'solr.FlattenGraphFilterFactory'}\n" + " ]\n" + " },\n" + " 'queryAnalyzer' : {\n" + " 'charFilters':[\n" + " {\n" + " 'class':'solr.PatternReplaceCharFilterFactory',\n" + " 'replacement':'$1$1',\n" + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + " }\n" + " ],\n" + " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},\n" + " 'filters':[\n" + " {\n" + " 'class':'solr.WordDelimiterGraphFilterFactory',\n" + " 'preserveOriginal':'0'\n" + " },\n" + " {\n" + " 'class':'solr.StopFilterFactory',\n" + " 'words':'stopwords.txt',\n" + " 'ignoreCase':'true'\n" + " },\n" + " 
{'class':'solr.LowerCaseFilterFactory'},\n" + " {'class':'solr.ASCIIFoldingFilterFactory'},\n" + " {'class':'solr.KStemFilterFactory'}\n" + " ]\n" + " }\n" + " },\n" + " 'add-field' : {\n" + " 'name':'a3',\n" + " 'type': 'myNewTxtField',\n" + " 'stored':true,\n" + " 'indexed':true\n" + " },\n" + " 'add-field-type' : {" + " 'name' : 'myWhitespaceTxtField',\n" + " 'class':'solr.TextField',\n" + " 'analyzer' : {'class' : 'org.apache.lucene.analysis.core.WhitespaceAnalyzer'}\n" + " },\n" + " 'add-field' : {\n" + " 'name':'a5',\n" + " 'type': 'myWhitespaceTxtField',\n" + " 'stored':true\n" + " },\n" + " 'add-field-type' : {" + " 'name' : 'mySimField',\n" + " 'class':'solr.TextField',\n" + " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + " 'similarity' : {'class':'org.apache.lucene.misc.SweetSpotSimilarity'}\n" + " },\n" + " 'add-field' : {\n" + " 'name':'a4',\n" + " 'type': 'mySimField',\n" + " 'stored':true,\n" + " 'indexed':true\n" + " },\n" + " 'delete-field' : {'name':'wdf_nocase'},\n" + " 'delete-field-type' : {'name':'wdf_nocase'},\n" + " 'delete-dynamic-field' : {'name':'*_tt'},\n" + " 'delete-copy-field' : {'source':'a1', 'dest':'a2'},\n" + " 'delete-copy-field' : {'source':'*_i', 'dest':['title', '*_s']},\n" + " 'replace-field-type' : {\n" + " 'name':'boolean',\n" + " 'class':'solr.BoolField',\n" + " 'sortMissingFirst':true\n" + " },\n" + " 'replace-field' : {\n" + " 'name':'name',\n" + " 'type':'string',\n" + " 'indexed':true,\n" + " 'stored':true\n" + " },\n" + " 'replace-dynamic-field' : {\n" + " 'name':'attr_*',\n" + " 'type':'string',\n" + " 'indexed':true,\n" + " 'stored':true,\n" + " 'multiValued':true\n" + " }\n" + " }\n";
String response = harness.post("/schema?wt=json", json(payload));
Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
// The whole bulk update must succeed with no errors.
assertNull(response, map.get("errors"));
// --- Post-conditions: verify each command took effect. ---
m = getObj(harness, "a1", "fields");
assertNotNull("field a1 not created", m);
assertEquals("string", m.get("type"));
assertEquals(Boolean.TRUE, m.get("stored"));
assertEquals(Boolean.FALSE, m.get("indexed"));
m = getObj(harness, "a2", "fields");
assertNotNull("field a2 not created", m);
assertEquals("string", m.get("type"));
assertEquals(Boolean.TRUE, m.get("stored"));
assertEquals(Boolean.TRUE, m.get("indexed"));
m = getObj(harness, "*_lol", "dynamicFields");
assertNotNull("field *_lol not created", m);
assertEquals("string", m.get("type"));
assertEquals(Boolean.TRUE, m.get("stored"));
assertEquals(Boolean.TRUE, m.get("indexed"));
// 'a1' copies only to 'hello_lol' now: the a1->a2 copy field was deleted.
l = getSourceCopyFields(harness, "a1");
s = new HashSet();
assertEquals(1, l.size());
s.add(((Map) l.get(0)).get("dest"));
assertTrue(s.contains("hello_lol"));
// '*_i' dropped from 4 to 2 destinations; 'title' and '*_s' were removed.
l = getSourceCopyFields(harness, "*_i");
s = new HashSet();
assertEquals(2, l.size());
s.add(((Map) l.get(0)).get("dest"));
s.add(((Map) l.get(1)).get("dest"));
assertFalse(s.contains("title"));
assertFalse(s.contains("*_s"));
m = getObj(harness, "mystr", "fieldTypes");
assertNotNull(m);
assertEquals("solr.StrField", m.get("class"));
assertEquals("true", String.valueOf(m.get("sortMissingLast")));
m = getObj(harness, "myNewTxtField", "fieldTypes");
assertNotNull(m);
m = getObj(harness, "a3", "fields");
assertNotNull("field a3 not created", m);
assertEquals("myNewTxtField", m.get("type"));
// 'mySimField' carries the explicitly configured SweetSpotSimilarity.
m = getObj(harness, "mySimField", "fieldTypes");
assertNotNull(m);
m = (Map) m.get("similarity");
assertNotNull(m);
assertEquals(SweetSpotSimilarity.class.getName(), m.get("class"));
m = getObj(harness, "a4", "fields");
assertNotNull("field a4 not created", m);
assertEquals("mySimField", m.get("type"));
assertFieldSimilarity("a4", SweetSpotSimilarity.class);
m = getObj(harness, "myWhitespaceTxtField", "fieldTypes");
assertNotNull(m);
// unspecified, expect default
assertNull(m.get("similarity"));
m = getObj(harness, "a5", "fields");
assertNotNull("field a5 not created", m);
assertEquals("myWhitespaceTxtField", m.get("type"));
// unspecified, expect default
assertFieldSimilarity("a5", BM25Similarity.class);
// Deletions: field, field type, and dynamic field must be gone.
m = getObj(harness, "wdf_nocase", "fields");
assertNull("field 'wdf_nocase' not deleted", m);
m = getObj(harness, "wdf_nocase", "fieldTypes");
assertNull("field type 'wdf_nocase' not deleted", m);
m = getObj(harness, "*_tt", "dynamicFields");
assertNull("dynamic field '*_tt' not deleted", m);
// Replacement flipped 'boolean' from sortMissingLast to sortMissingFirst.
m = getObj(harness, "boolean", "fieldTypes");
assertNotNull("'boolean' field type does not exist in the schema", m);
assertNull(m.get("sortMissingLast"));
assertTrue((Boolean) m.get("sortMissingFirst"));
// this field will be rebuilt when "boolean" field type is replaced
m = getObj(harness, "bind", "fields");
assertNotNull("'bind' field does not exist in the schema", m);
m = getObj(harness, "name", "fields");
assertNotNull("'name' field does not exist in the schema", m);
assertEquals("string", m.get("type"));
m = getObj(harness, "attr_*", "dynamicFields");
assertNotNull("'attr_*' dynamic field does not exist in the schema", m);
assertEquals("string", m.get("type"));
}
Use of org.noggit.JSONParser in the lucene-solr project (Apache).
Class TestBulkSchemaAPI, method testAddFieldMatchingExistingDynamicField:
/**
 * Adds an explicit field whose name matches the existing 'attr_*' dynamic-field
 * pattern and verifies the Schema API accepts it and the field becomes visible.
 */
public void testAddFieldMatchingExistingDynamicField() throws Exception {
    RestTestHarness harness = restTestHarness;
    String newFieldName = "attr_non_dynamic";
    // Preconditions: the explicit field is absent, while the dynamic field pattern
    // and the 'boolean' type it will use are both present.
    Map m = getObj(harness, newFieldName, "fields");
    assertNull("Field '" + newFieldName + "' already exists in the schema", m);
    m = getObj(harness, "attr_*", "dynamicFields");
    assertNotNull("'attr_*' dynamic field does not exist in the schema", m);
    m = getObj(harness, "boolean", "fieldTypes");
    assertNotNull("'boolean' field type does not exist in the schema", m);
    // Add the explicit field and require an error-free response.
    String payload = "{\n" + " 'add-field' : {\n" + " 'name':'" + newFieldName + "',\n" + " 'type':'boolean',\n" + " 'stored':true,\n" + " 'indexed':true\n" + " }\n" + " }";
    String response = harness.post("/schema?wt=json", json(payload));
    Map parsed = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(response, parsed.get("errors"));
    // Postcondition: the explicit field is now part of the schema.
    m = getObj(harness, newFieldName, "fields");
    assertNotNull("Field '" + newFieldName + "' is not in the schema", m);
}
Aggregations