Use of org.noggit.JSONParser in project lucene-solr by apache.
The class RequestParams, method getMapAndVersion.
private static Object[] getMapAndVersion(SolrResourceLoader loader, String name) {
  try (InputStream in = loader.openResource(name)) {
    // will always be 0 for a file-based resource loader
    int version = 0;
    if (in instanceof ZkSolrResourceLoader.ZkByteArrayInputStream) {
      version = ((ZkSolrResourceLoader.ZkByteArrayInputStream) in).getStat().getVersion();
      log.info("conf resource {} loaded . version : {} ", name, version);
    }
    try {
      Map m = (Map) ObjectBuilder.getVal(new JSONParser(new InputStreamReader(in, StandardCharsets.UTF_8)));
      return new Object[] { m, version };
    } catch (IOException e) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing conf resource " + name, e);
    }
  } catch (IOException e) {
    // no problem, no overlay.json file
    return new Object[] { Collections.EMPTY_MAP, -1 };
  }
}
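For orientation, here is a minimal standalone sketch of the same noggit call chain (the class name NoggitMapDemo and the inline JSON are illustrative, not from the Solr source): ObjectBuilder.getVal consumes a JSONParser built over any Reader and returns plain Java Maps, Lists, Strings, and numbers.

import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;

public class NoggitMapDemo {
  public static void main(String[] args) throws IOException {
    // Illustrative JSON standing in for a params.json-style conf resource.
    String json = "{\"params\":{\"myQueries\":{\"defType\":\"edismax\",\"rows\":\"5\"}}}";
    // Same parsing step as getMapAndVersion above: Reader -> JSONParser -> ObjectBuilder.getVal.
    Map m = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(json)));
    System.out.println(m.get("params")); // {myQueries={defType=edismax, rows=5}}
  }
}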
Use of org.noggit.JSONParser in project lucene-solr by apache.
The class SolrConfig, method getConfigOverlay.
public static ConfigOverlay getConfigOverlay(SolrResourceLoader loader) {
  InputStream in = null;
  InputStreamReader isr = null;
  try {
    try {
      in = loader.openResource(ConfigOverlay.RESOURCE_NAME);
    } catch (IOException e) {
      // hopefully no problem, assume no overlay.json file
      return new ConfigOverlay(Collections.EMPTY_MAP, -1);
    }
    // will always be 0 for a file-based resourceLoader
    int version = 0;
    if (in instanceof ZkSolrResourceLoader.ZkByteArrayInputStream) {
      version = ((ZkSolrResourceLoader.ZkByteArrayInputStream) in).getStat().getVersion();
      log.debug("Config overlay loaded. version : {} ", version);
    }
    isr = new InputStreamReader(in, StandardCharsets.UTF_8);
    Map m = (Map) ObjectBuilder.getVal(new JSONParser(isr));
    return new ConfigOverlay(m, version);
  } catch (Exception e) {
    throw new SolrException(ErrorCode.SERVER_ERROR, "Error reading config overlay", e);
  } finally {
    IOUtils.closeQuietly(isr);
    IOUtils.closeQuietly(in);
  }
}
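As a side note, the explicit IOUtils.closeQuietly calls in the finally block can also be written with try-with-resources; below is a sketch under that assumption (the configoverlay.json file name and the plain file reader are illustrative and bypass Solr's resource loader).

import java.io.IOException;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Map;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;

public class OverlayFileDemo {
  public static void main(String[] args) throws IOException {
    // try-with-resources closes the reader even if parsing throws,
    // standing in for the IOUtils.closeQuietly calls above.
    try (Reader reader = Files.newBufferedReader(Paths.get("configoverlay.json"), StandardCharsets.UTF_8)) {
      Map m = (Map) ObjectBuilder.getVal(new JSONParser(reader));
      System.out.println(m);
    }
  }
}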
Use of org.noggit.JSONParser in project lucene-solr by apache.
The class ConfigSetProperties, method readFromInputStream.
public static NamedList readFromInputStream(InputStreamReader reader) {
  try {
    JSONParser jsonParser = new JSONParser(reader);
    Object object = ObjectBuilder.getVal(jsonParser);
    if (!(object instanceof Map)) {
      final String objectClass = object == null ? "null" : object.getClass().getName();
      throw new SolrException(ErrorCode.SERVER_ERROR, "Invalid JSON type " + objectClass + ", expected Map");
    }
    return new NamedList((Map) object);
  } catch (Exception ex) {
    throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to load ConfigSet properties", ex);
  } finally {
    IOUtils.closeQuietly(reader);
  }
}
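The instanceof guard above matters because noggit will parse any top-level JSON value, not just objects; here is a short sketch (the inline strings are illustrative) of what getVal returns for different inputs.

import java.io.IOException;
import java.util.Map;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;

public class TopLevelTypeDemo {
  public static void main(String[] args) throws IOException {
    // A JSON object parses into a Map, which is what readFromInputStream expects...
    Object obj = ObjectBuilder.getVal(new JSONParser("{\"immutable\":\"true\"}"));
    System.out.println(obj instanceof Map); // true

    // ...while a top-level array does not, which is what the guard rejects.
    Object arr = ObjectBuilder.getVal(new JSONParser("[1, 2, 3]"));
    System.out.println(arr instanceof Map); // false
  }
}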
Use of org.noggit.JSONParser in project lucene-solr by apache.
The class SmileWriterTest, method testJSON.
@Test
public void testJSON() throws IOException {
  SolrQueryRequest req = req("wt", "json", "json.nl", "arrarr");
  SolrQueryResponse rsp = new SolrQueryResponse();
  SmileResponseWriter w = new SmileResponseWriter();
  ByteArrayOutputStream buf = new ByteArrayOutputStream();
  NamedList nl = new NamedList();
  // make sure that 2028 and 2029 are both escaped (they are illegal in javascript)
  nl.add("data1", "he\u2028llo\u2029!");
  nl.add(null, 42);
  rsp.add("nl", nl);
  rsp.add("byte", Byte.valueOf((byte) -3));
  rsp.add("short", Short.valueOf((short) -4));
  String expected = "{\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]],byte:-3,short:-4}";
  w.write(buf, req, rsp);
  Map m = (Map) decodeSmile(new ByteArrayInputStream(buf.toByteArray()));
  Map o2 = (Map) new ObjectBuilder(new JSONParser(new StringReader(expected))).getObject();
  assertEquals(Utils.toJSONString(m), Utils.toJSONString(o2));
  req.close();
}
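Note that the expected string above leans on noggit's lenient parsing (the byte and short keys are unquoted) and uses the instance form of ObjectBuilder rather than the static getVal; below is a small sketch of that form, reusing the test's expected string as input.

import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;

public class LenientParseDemo {
  public static void main(String[] args) throws IOException {
    // Same expected string as in testJSON: quoted and unquoted keys mixed.
    String expected = "{\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]],byte:-3,short:-4}";
    // Instance form used in the test: new ObjectBuilder(parser).getObject().
    Map o2 = (Map) new ObjectBuilder(new JSONParser(new StringReader(expected))).getObject();
    System.out.println(o2.get("byte")); // -3
  }
}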
Use of org.noggit.JSONParser in project lucene-solr by apache.
The class TestBulkSchemaAPI, method testDeleteAndReplace.
public void testDeleteAndReplace() throws Exception {
  RestTestHarness harness = restTestHarness;

  Map map = getObj(harness, "NewField1", "fields");
  assertNull("Field 'NewField1' already exists in the schema", map);
  map = getObj(harness, "NewField2", "fields");
  assertNull("Field 'NewField2' already exists in the schema", map);
  map = getObj(harness, "NewFieldType", "fieldTypes");
  assertNull("'NewFieldType' field type already exists in the schema", map);
  List list = getSourceCopyFields(harness, "NewField1");
  assertEquals("There is already a copy field with source 'NewField1' in the schema", 0, list.size());
  map = getObj(harness, "NewDynamicField1*", "dynamicFields");
  assertNull("Dynamic field 'NewDynamicField1*' already exists in the schema", map);
  map = getObj(harness, "NewDynamicField2*", "dynamicFields");
  assertNull("Dynamic field 'NewDynamicField2*' already exists in the schema", map);

  String cmds = "{\n" +
      " 'add-field-type': { 'name':'NewFieldType', 'class':'solr.StrField' },\n" +
      " 'add-field': [{ 'name':'NewField1', 'type':'NewFieldType' },\n" +
      " { 'name':'NewField2', 'type':'NewFieldType' },\n" +
      " { 'name':'NewField3', 'type':'NewFieldType' },\n" +
      " { 'name':'NewField4', 'type':'NewFieldType' }],\n" +
      " 'add-dynamic-field': [{ 'name':'NewDynamicField1*', 'type':'NewFieldType' },\n" +
      " { 'name':'NewDynamicField2*', 'type':'NewFieldType' },\n" +
      " { 'name':'NewDynamicField3*', 'type':'NewFieldType' }],\n" +
      " 'add-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A']},\n" +
      " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" +
      " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" +
      " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" +
      " {'source':'NewField4', 'dest':'NewField3' },\n" +
      " {'source':'NewField4', 'dest':'NewField2', maxChars: 100 },\n" +
      " {'source':'NewField4', 'dest':['NewField1'], maxChars: 3333 }]\n" +
      "}\n";
  String response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  assertNull(response, map.get("errors"));

  map = getObj(harness, "NewFieldType", "fieldTypes");
  assertNotNull("'NewFieldType' is not in the schema", map);
  map = getObj(harness, "NewField1", "fields");
  assertNotNull("Field 'NewField1' is not in the schema", map);
  map = getObj(harness, "NewField2", "fields");
  assertNotNull("Field 'NewField2' is not in the schema", map);
  map = getObj(harness, "NewField3", "fields");
  assertNotNull("Field 'NewField3' is not in the schema", map);
  map = getObj(harness, "NewField4", "fields");
  assertNotNull("Field 'NewField4' is not in the schema", map);

  list = getSourceCopyFields(harness, "NewField1");
  Set set = new HashSet();
  for (Object obj : list) {
    set.add(((Map) obj).get("dest"));
  }
  assertEquals(2, list.size());
  assertTrue(set.contains("NewField2"));
  assertTrue(set.contains("NewDynamicField1A"));

  list = getSourceCopyFields(harness, "NewDynamicField1*");
  assertEquals(1, list.size());
  assertEquals("NewField2", ((Map) list.get(0)).get("dest"));
  list = getSourceCopyFields(harness, "NewDynamicField2*");
  assertEquals(1, list.size());
  assertEquals("NewField2", ((Map) list.get(0)).get("dest"));
  list = getSourceCopyFields(harness, "NewDynamicField3*");
  assertEquals(1, list.size());
  assertEquals("NewField3", ((Map) list.get(0)).get("dest"));

  list = getSourceCopyFields(harness, "NewField4");
  assertEquals(3, list.size());
  map.clear();
  for (Object obj : list) {
    map.put(((Map) obj).get("dest"), ((Map) obj).get("maxChars"));
  }
  assertTrue(map.containsKey("NewField1"));
  assertEquals(3333L, map.get("NewField1"));
  assertTrue(map.containsKey("NewField2"));
  assertEquals(100L, map.get("NewField2"));
  assertTrue(map.containsKey("NewField3"));
  assertNull(map.get("NewField3"));

  cmds = "{'delete-field-type' : {'name':'NewFieldType'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  Object errors = map.get("errors");
  assertNotNull(errors);
  assertTrue(errors.toString().contains("Can't delete 'NewFieldType' because it's the field type of "));

  cmds = "{'delete-field' : {'name':'NewField1'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  errors = map.get("errors");
  assertNotNull(errors);
  assertTrue(errors.toString().contains("Can't delete field 'NewField1' because it's referred to by at least one copy field directive"));

  cmds = "{'delete-field' : {'name':'NewField2'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  errors = map.get("errors");
  assertNotNull(errors);
  assertTrue(errors.toString().contains("Can't delete field 'NewField2' because it's referred to by at least one copy field directive"));

  cmds = "{'replace-field' : {'name':'NewField1', 'type':'string'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  assertNull(map.get("errors"));
  // Make sure the copy field directives with source NewField1 are preserved
  list = getSourceCopyFields(harness, "NewField1");
  set = new HashSet();
  for (Object obj : list) {
    set.add(((Map) obj).get("dest"));
  }
  assertEquals(2, list.size());
  assertTrue(set.contains("NewField2"));
  assertTrue(set.contains("NewDynamicField1A"));

  cmds = "{'delete-dynamic-field' : {'name':'NewDynamicField1*'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  errors = map.get("errors");
  assertNotNull(errors);
  assertTrue(errors.toString().contains("copyField dest :'NewDynamicField1A' is not an explicit field and doesn't match a dynamicField."));

  cmds = "{'replace-field' : {'name':'NewField2', 'type':'string'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  errors = map.get("errors");
  assertNull(errors);
  // Make sure the copy field directives with destination NewField2 are preserved
  list = getDestCopyFields(harness, "NewField2");
  set = new HashSet();
  for (Object obj : list) {
    set.add(((Map) obj).get("source"));
  }
  assertEquals(4, list.size());
  assertTrue(set.contains("NewField1"));
  assertTrue(set.contains("NewField4"));
  assertTrue(set.contains("NewDynamicField1*"));
  assertTrue(set.contains("NewDynamicField2*"));

  cmds = "{'replace-dynamic-field' : {'name':'NewDynamicField2*', 'type':'string'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  errors = map.get("errors");
  assertNull(errors);
  // Make sure the copy field directives with source NewDynamicField2* are preserved
  list = getSourceCopyFields(harness, "NewDynamicField2*");
  assertEquals(1, list.size());
  assertEquals("NewField2", ((Map) list.get(0)).get("dest"));

  cmds = "{'replace-dynamic-field' : {'name':'NewDynamicField1*', 'type':'string'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  errors = map.get("errors");
  assertNull(errors);
  // Make sure the copy field directives with destinations matching NewDynamicField1* are preserved
  list = getDestCopyFields(harness, "NewDynamicField1A");
  assertEquals(1, list.size());
  assertEquals("NewField1", ((Map) list.get(0)).get("source"));

  cmds = "{'replace-field-type': {'name':'NewFieldType', 'class':'solr.BinaryField'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  assertNull(map.get("errors"));
  // Make sure the copy field directives with sources and destinations of type NewFieldType are preserved
  list = getDestCopyFields(harness, "NewField3");
  assertEquals(2, list.size());
  set = new HashSet();
  for (Object obj : list) {
    set.add(((Map) obj).get("source"));
  }
  assertTrue(set.contains("NewField4"));
  assertTrue(set.contains("NewDynamicField3*"));

  cmds = "{\n" +
      " 'delete-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A'] },\n" +
      " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" +
      " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" +
      " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" +
      " {'source':'NewField4', 'dest':['NewField1', 'NewField2', 'NewField3']}]\n" +
      "}\n";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  assertNull(map.get("errors"));
  list = getSourceCopyFields(harness, "NewField1");
  assertEquals(0, list.size());
  list = getSourceCopyFields(harness, "NewDynamicField1*");
  assertEquals(0, list.size());
  list = getSourceCopyFields(harness, "NewDynamicField2*");
  assertEquals(0, list.size());
  list = getSourceCopyFields(harness, "NewDynamicField3*");
  assertEquals(0, list.size());
  list = getSourceCopyFields(harness, "NewField4");
  assertEquals(0, list.size());

  cmds = "{'delete-field': [{'name':'NewField1'},{'name':'NewField2'},{'name':'NewField3'},{'name':'NewField4'}]}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  assertNull(map.get("errors"));

  cmds = "{'delete-dynamic-field': [{'name':'NewDynamicField1*'}," +
      " {'name':'NewDynamicField2*'},\n" +
      " {'name':'NewDynamicField3*'}]\n" +
      "}\n";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  assertNull(map.get("errors"));

  cmds = "{'delete-field-type':{'name':'NewFieldType'}}";
  response = harness.post("/schema?wt=json", json(cmds));
  map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  assertNull(map.get("errors"));
}
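The test repeats one idiom throughout: post a command to the Schema API, parse the JSON response with noggit, and inspect the errors key. Below is a compact sketch of that idiom against canned response strings (the helper name errorsOf and the sample responses are illustrative, not part of the test).

import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;

public class SchemaResponseDemo {
  // Hypothetical helper mirroring the test's repeated parse-then-check step.
  static Object errorsOf(String jsonResponse) throws IOException {
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(jsonResponse)));
    return map.get("errors");
  }

  public static void main(String[] args) throws IOException {
    // Canned stand-ins for Schema API responses.
    String ok = "{\"responseHeader\":{\"status\":0,\"QTime\":1}}";
    String failed = "{\"responseHeader\":{\"status\":400,\"QTime\":1},"
        + "\"errors\":[{\"errorMessages\":[\"example error message\"]}]}";
    System.out.println(errorsOf(ok));     // null -> command accepted
    System.out.println(errorsOf(failed)); // non-null -> command rejected
  }
}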