Use of org.apache.solr.common.MapSerializable in project lucene-solr by apache.
In the class SolrConfig, the method toMap:
@Override
public Map<String, Object> toMap(Map<String, Object> result) {
  if (getZnodeVersion() > -1)
    result.put(ZNODEVER, getZnodeVersion());
  result.put("luceneMatchVersion", luceneMatchVersion);
  result.put("updateHandler", getUpdateHandlerInfo());
  Map m = new LinkedHashMap();
  result.put("query", m);
  m.put("useFilterForSortedQuery", useFilterForSortedQuery);
  m.put("queryResultWindowSize", queryResultWindowSize);
  m.put("queryResultMaxDocsCached", queryResultMaxDocsCached);
  m.put("enableLazyFieldLoading", enableLazyFieldLoading);
  m.put("maxBooleanClauses", booleanQueryMaxClauseCount);
  for (SolrPluginInfo plugin : plugins) {
    List<PluginInfo> infos = getPluginInfos(plugin.clazz.getName());
    if (infos == null || infos.isEmpty())
      continue;
    String tag = plugin.getCleanTag();
    tag = tag.replace("/", "");
    if (plugin.options.contains(PluginOpts.REQUIRE_NAME)) {
      LinkedHashMap items = new LinkedHashMap();
      for (PluginInfo info : infos) items.put(info.name, info);
      for (Map.Entry e : overlay.getNamedPlugins(plugin.tag).entrySet()) items.put(e.getKey(), e.getValue());
      result.put(tag, items);
    } else {
      if (plugin.options.contains(MULTI_OK)) {
        ArrayList<MapSerializable> l = new ArrayList<>();
        for (PluginInfo info : infos) l.add(info);
        result.put(tag, l);
      } else {
        result.put(tag, infos.get(0));
      }
    }
  }
  addCacheConfig(m, filterCacheConfig, queryResultCacheConfig, documentCacheConfig, fieldValueCacheConfig);
  m = new LinkedHashMap();
  result.put("requestDispatcher", m);
  m.put("handleSelect", handleSelect);
  if (httpCachingConfig != null)
    m.put("httpCaching", httpCachingConfig);
  m.put("requestParsers", makeMap("multipartUploadLimitKB", multipartUploadLimitKB,
      "formUploadLimitKB", formUploadLimitKB,
      "addHttpRequestToContext", addHttpRequestToContext));
  if (indexConfig != null)
    result.put("indexConfig", indexConfig);
  m = new LinkedHashMap();
  result.put("peerSync", m);
  m.put("useRangeVersions", useRangeVersionsForPeerSync);
  return result;
}
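The method above shows the whole MapSerializable contract: the caller hands in an accumulator map, the object writes its properties into it, and the same map is returned. Below is a minimal sketch of a custom implementation, assuming only that org.apache.solr.common.MapSerializable declares the single toMap(Map<String, Object>) method overridden above and that solr-solrj is on the classpath; the CacheSettings class and its fields are hypothetical and not part of Solr.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.solr.common.MapSerializable;

// Hypothetical config fragment that exposes its settings as a map,
// following the toMap(Map) accumulator contract used by SolrConfig above.
public class CacheSettings implements MapSerializable {

  private final int size;
  private final boolean autowarm;

  public CacheSettings(int size, boolean autowarm) {
    this.size = size;
    this.autowarm = autowarm;
  }

  @Override
  public Map<String, Object> toMap(Map<String, Object> result) {
    // Populate the caller-supplied map and return it, as SolrConfig.toMap does.
    result.put("size", size);
    result.put("autowarm", autowarm);
    return result;
  }

  public static void main(String[] args) {
    Map<String, Object> m = new CacheSettings(512, true).toMap(new LinkedHashMap<>());
    System.out.println(m); // prints {size=512, autowarm=true}
  }
}

Passing the accumulator in, rather than allocating it inside toMap, lets the caller pick the map implementation (writeVal below supplies a fresh LinkedHashMap) and merge several sources into a single map.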
Use of org.apache.solr.common.MapSerializable in project lucene-solr by apache.
In the class TextResponseWriter, the method writeVal:
public final void writeVal(String name, Object val) throws IOException {
  // go in order of most common to least common
  if (val == null) {
    writeNull(name);
  } else if (val instanceof String) {
    writeStr(name, val.toString(), true);
    // micro-optimization... using toString() avoids a cast first
  } else if (val instanceof IndexableField) {
    IndexableField f = (IndexableField) val;
    SchemaField sf = schema.getFieldOrNull(f.name());
    if (sf != null) {
      sf.getType().write(this, name, f);
    } else {
      writeStr(name, f.stringValue(), true);
    }
  } else if (val instanceof Number) {
    writeNumber(name, (Number) val);
  } else if (val instanceof Boolean) {
    writeBool(name, (Boolean) val);
  } else if (val instanceof Date) {
    writeDate(name, (Date) val);
  } else if (val instanceof Document) {
    SolrDocument doc = DocsStreamer.convertLuceneDocToSolrDoc((Document) val, schema);
    writeSolrDocument(name, doc, returnFields, 0);
  } else if (val instanceof SolrDocument) {
    writeSolrDocument(name, (SolrDocument) val, returnFields, 0);
  } else if (val instanceof ResultContext) {
    // requires access to IndexReader
    writeDocuments(name, (ResultContext) val);
  } else if (val instanceof DocList) {
    // Should not happen normally
    ResultContext ctx = new BasicResultContext((DocList) val, returnFields, null, null, req);
    writeDocuments(name, ctx);
  // }
  // else if (val instanceof DocSet) {
  //   how do we know what fields to read?
  //   todo: have a DocList/DocSet wrapper that
  //   restricts the fields to write...?
  } else if (val instanceof SolrDocumentList) {
    writeSolrDocumentList(name, (SolrDocumentList) val, returnFields);
  } else if (val instanceof Map) {
    writeMap(name, (Map) val, false, true);
  } else if (val instanceof NamedList) {
    writeNamedList(name, (NamedList) val);
  } else if (val instanceof Path) {
    writeStr(name, ((Path) val).toAbsolutePath().toString(), true);
  } else if (val instanceof IteratorWriter) {
    writeIterator((IteratorWriter) val);
  } else if (val instanceof Iterable) {
    writeArray(name, ((Iterable) val).iterator());
  } else if (val instanceof Object[]) {
    writeArray(name, (Object[]) val);
  } else if (val instanceof Iterator) {
    writeArray(name, (Iterator) val);
  } else if (val instanceof byte[]) {
    byte[] arr = (byte[]) val;
    writeByteArr(name, arr, 0, arr.length);
  } else if (val instanceof BytesRef) {
    BytesRef arr = (BytesRef) val;
    writeByteArr(name, arr.bytes, arr.offset, arr.length);
  } else if (val instanceof EnumFieldValue) {
    writeStr(name, val.toString(), true);
  } else if (val instanceof WriteableValue) {
    ((WriteableValue) val).write(name, this);
  } else if (val instanceof MapWriter) {
    writeMap((MapWriter) val);
  } else if (val instanceof MapSerializable) {
    // todo find a better way to reuse the map more efficiently
    writeMap(name, ((MapSerializable) val).toMap(new LinkedHashMap<>()), false, true);
  } else {
    // default... for debugging only
    writeStr(name, val.getClass().getName() + ':' + val.toString(), true);
  }
}
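The instanceof chain ends with MapSerializable as a generic fallback for config-style objects: the value is flattened into a fresh LinkedHashMap via toMap and then written like any other Map. Below is a standalone sketch of that fallback step, again assuming MapSerializable declares only the single toMap method; the normalize helper and the anonymous settings object are hypothetical illustrations, not Solr code.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.solr.common.MapSerializable;

public class MapSerializableFallbackSketch {

  // Mirrors the late branches of writeVal: a MapSerializable value is
  // converted to a plain Map before serialization; anything else passes through.
  static Object normalize(Object val) {
    if (val instanceof MapSerializable) {
      return ((MapSerializable) val).toMap(new LinkedHashMap<>());
    }
    return val;
  }

  public static void main(String[] args) {
    MapSerializable settings = new MapSerializable() {
      @Override
      public Map<String, Object> toMap(Map<String, Object> result) {
        result.put("enabled", true);
        result.put("timeoutMs", 250);
        return result;
      }
    };
    System.out.println(normalize(settings)); // prints {enabled=true, timeoutMs=250}
    System.out.println(normalize("plain"));  // prints plain
  }
}

Note that the ordering of the branches matters: writeVal checks MapWriter before MapSerializable, so an object implementing both is written through the MapWriter path.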
Use of org.apache.solr.common.MapSerializable in project lucene-solr by apache.
In the class SolrIndexConfigTest, the method testToMap:
public void testToMap() throws Exception {
  final String solrConfigFileNameWarmer = solrConfigFileNameWarmerRandomMergePolicyFactory;
  final String solrConfigFileNameTMP = solrConfigFileNameTieredMergePolicyFactory;
  final String solrConfigFileName = (random().nextBoolean() ? solrConfigFileNameWarmer : solrConfigFileNameTMP);
  SolrConfig solrConfig = new SolrConfig(instanceDir, solrConfigFileName, null);
  SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null, null);
  assertNotNull(solrIndexConfig);
  assertNotNull(solrIndexConfig.mergePolicyFactoryInfo);
  if (solrConfigFileName.equals(solrConfigFileNameWarmerRandomMergePolicyFactory)) {
    assertNotNull(solrIndexConfig.mergedSegmentWarmerInfo);
  } else {
    assertNull(solrIndexConfig.mergedSegmentWarmerInfo);
  }
  assertNotNull(solrIndexConfig.mergeSchedulerInfo);
  Map<String, Object> m = solrIndexConfig.toMap(new LinkedHashMap<>());
  int mSizeExpected = 0;
  ++mSizeExpected;
  assertTrue(m.get("useCompoundFile") instanceof Boolean);
  ++mSizeExpected;
  assertTrue(m.get("maxBufferedDocs") instanceof Integer);
  ++mSizeExpected;
  assertTrue(m.get("ramBufferSizeMB") instanceof Double);
  ++mSizeExpected;
  assertTrue(m.get("writeLockTimeout") instanceof Integer);
  ++mSizeExpected;
  assertTrue(m.get("lockType") instanceof String);
  {
    final String lockType = (String) m.get("lockType");
    assertTrue(DirectoryFactory.LOCK_TYPE_SIMPLE.equals(lockType)
        || DirectoryFactory.LOCK_TYPE_NATIVE.equals(lockType)
        || DirectoryFactory.LOCK_TYPE_SINGLE.equals(lockType)
        || DirectoryFactory.LOCK_TYPE_NONE.equals(lockType)
        || DirectoryFactory.LOCK_TYPE_HDFS.equals(lockType));
  }
  ++mSizeExpected;
  assertTrue(m.get("infoStreamEnabled") instanceof Boolean);
  {
    assertFalse(Boolean.valueOf(m.get("infoStreamEnabled").toString()).booleanValue());
  }
  ++mSizeExpected;
  assertTrue(m.get("mergeScheduler") instanceof MapSerializable);
  ++mSizeExpected;
  assertTrue(m.get("mergePolicyFactory") instanceof MapSerializable);
  if (solrConfigFileName.equals(solrConfigFileNameWarmerRandomMergePolicyFactory)) {
    ++mSizeExpected;
    assertTrue(m.get("mergedSegmentWarmer") instanceof MapSerializable);
  } else {
    assertNull(m.get("mergedSegmentWarmer"));
  }
  ++mSizeExpected;
  assertNotNull(m.get("metrics"));
  assertEquals(mSizeExpected, m.size());
}
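The same size-counting pattern works for any MapSerializable implementation: verify the type of each expected entry, increment the expected count alongside each check, and finish by asserting the total map size so unexpected keys are caught. A sketch of that pattern follows, assuming JUnit 4 on the classpath and reusing the hypothetical CacheSettings class from the first sketch above (same package); it is an illustration, not part of the Solr test suite.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.LinkedHashMap;
import java.util.Map;

import org.junit.Test;

public class CacheSettingsTest {

  @Test
  public void testToMap() {
    // Call toMap with an empty accumulator, exactly as SolrIndexConfigTest does.
    Map<String, Object> m = new CacheSettings(512, true).toMap(new LinkedHashMap<>());
    int mSizeExpected = 0;
    // Check each expected entry's type, bumping the expected size as we go.
    ++mSizeExpected;
    assertTrue(m.get("size") instanceof Integer);
    ++mSizeExpected;
    assertTrue(m.get("autowarm") instanceof Boolean);
    // Finally verify that no unexpected keys were added.
    assertEquals(mSizeExpected, m.size());
  }
}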