Use of org.apache.solr.common.util.SimpleOrderedMap in project lucene-solr by apache:
class FacetBucket, method getMergedBucket.
public SimpleOrderedMap getMergedBucket() {
  SimpleOrderedMap out = new SimpleOrderedMap((subs == null ? 0 : subs.size()) + 2);
  if (bucketValue != null) {
    out.add("val", bucketValue);
  }
  out.add("count", count);
  if (subs != null) {
    for (Map.Entry<String, FacetMerger> mergerEntry : subs.entrySet()) {
      FacetMerger subMerger = mergerEntry.getValue();
      out.add(mergerEntry.getKey(), subMerger.getMergedResult());
    }
  }
  return out;
}
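The returned SimpleOrderedMap keeps entries in insertion order, so a merged bucket can be read back either by key or by index. A minimal consumption sketch; the facetBucket variable is hypothetical and not part of the code above:

  SimpleOrderedMap bucket = facetBucket.getMergedBucket();
  Object val = bucket.get("val");     // bucket value; only present when non-null
  Object count = bucket.get("count"); // merged count
  // Iterating by index yields "val", "count", then one entry per sub-facet merger.
  for (int i = 0; i < bucket.size(); i++) {
    System.out.println(bucket.getName(i) + " = " + bucket.getVal(i));
  }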
Use of org.apache.solr.common.util.SimpleOrderedMap in project lucene-solr by apache:
class FacetFieldProcessor, method refineFacets.
protected SimpleOrderedMap<Object> refineFacets() throws IOException {
  boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0;
  // We have not seen this bucket: do full faceting for this bucket, including all sub-facets.
  List leaves = asList(fcontext.facetInfo.get("_l"));
  // We have seen this bucket, so skip stats on it, and skip sub-facets except for the specified sub-facets that should calculate specified buckets.
  List<List> skip = asList(fcontext.facetInfo.get("_s"));
  // We have not seen this bucket: do full faceting for this bucket and most sub-facets... but some sub-facets are partial and should only visit specified buckets.
  List<List> partial = asList(fcontext.facetInfo.get("_p"));
  // For leaf refinements, we do full faceting for each leaf bucket. Any sub-facets of these buckets will be fully evaluated. Because of this, we should never
  // encounter leaf refinements that have sub-facets that return partial results.
  SimpleOrderedMap<Object> res = new SimpleOrderedMap<>();
  List<SimpleOrderedMap> bucketList = new ArrayList<>(leaves.size() + skip.size() + partial.size());
  res.add("buckets", bucketList);
  // TODO: an alternate implementation could fill all accs at once
  createAccs(-1, 1);
  for (Object bucketVal : leaves) {
    bucketList.add(refineBucket(bucketVal, false, null));
  }
  for (List bucketAndFacetInfo : skip) {
    assert bucketAndFacetInfo.size() == 2;
    Object bucketVal = bucketAndFacetInfo.get(0);
    Map<String, Object> facetInfo = (Map<String, Object>) bucketAndFacetInfo.get(1);
    bucketList.add(refineBucket(bucketVal, true, facetInfo));
  }
  // The only difference between the "skip" and "partial" loops is the value of the skip flag passed to refineBucket.
  for (List bucketAndFacetInfo : partial) {
    assert bucketAndFacetInfo.size() == 2;
    Object bucketVal = bucketAndFacetInfo.get(0);
    Map<String, Object> facetInfo = (Map<String, Object>) bucketAndFacetInfo.get(1);
    bucketList.add(refineBucket(bucketVal, false, facetInfo));
  }
  if (freq.missing) {
    Map<String, Object> bucketFacetInfo = (Map<String, Object>) fcontext.facetInfo.get("missing");
    if (bucketFacetInfo != null || !skipThisFacet) {
      SimpleOrderedMap<Object> missingBucket = new SimpleOrderedMap<>();
      fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null, skipThisFacet, bucketFacetInfo);
      res.add("missing", missingBucket);
    }
  }
  if (freq.numBuckets && !skipThisFacet) {
    calculateNumBuckets(res);
  }
  return res;
}
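For context, the refinement info consumed above groups bucket values under the keys "_l", "_s" and "_p": "_l" is a plain list of leaf bucket values, while "_s" and "_p" pair each bucket value with a nested facetInfo map, exactly as the casts inside the loops assume. A sketch of that shape with made-up values:

  // Illustrative only; keys and nesting mirror what refineFacets() reads from fcontext.facetInfo.
  // java.util imports (Map, HashMap, Arrays) assumed.
  Map<String, Object> facetInfo = new HashMap<>();
  facetInfo.put("_l", Arrays.asList("termA", "termB"));                                      // leaves: full refinement
  facetInfo.put("_s", Arrays.asList(Arrays.asList("termC", new HashMap<String, Object>()))); // skip: [bucketVal, sub-facet info]
  facetInfo.put("_p", Arrays.asList(Arrays.asList("termD", new HashMap<String, Object>()))); // partial: [bucketVal, sub-facet info]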
Use of org.apache.solr.common.util.SimpleOrderedMap in project lucene-solr by apache:
class SolrPluginUtils, method explanationToNamedList.
public static NamedList<Object> explanationToNamedList(Explanation e) {
  NamedList<Object> out = new SimpleOrderedMap<>();
  out.add("match", e.isMatch());
  out.add("value", e.getValue());
  out.add("description", e.getDescription());
  Explanation[] details = e.getDetails();
  // short circuit out if there are no nested details
  if (0 == details.length)
    return out;
  List<NamedList<Object>> kids = new ArrayList<>(details.length);
  for (Explanation d : details) {
    kids.add(explanationToNamedList(d));
  }
  out.add("details", kids);
  return out;
}
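A hedged usage sketch; searcher, query and docId stand in for whatever produced the Lucene Explanation and are not part of the code above:

  Explanation explanation = searcher.explain(query, docId);
  NamedList<Object> nl = SolrPluginUtils.explanationToNamedList(explanation);
  // Top-level keys are "match", "value" and "description"; when the explanation
  // has children, a "details" entry holds a list of nested NamedLists.
  System.out.println(nl.get("description"));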
Use of org.apache.solr.common.util.SimpleOrderedMap in project lucene-solr by apache:
class SolrPluginUtils, method getExplanations.
/**
 * Generates a NamedList of Explanations, one for each item in a list of docs.
 *
 * @param query The Query you want explanations in the context of
 * @param docs The Documents you want explained relative to that query
 */
public static NamedList<Explanation> getExplanations(Query query, DocList docs, SolrIndexSearcher searcher, IndexSchema schema) throws IOException {
  NamedList<Explanation> explainList = new SimpleOrderedMap<>();
  DocIterator iterator = docs.iterator();
  for (int i = 0; i < docs.size(); i++) {
    int id = iterator.nextDoc();
    Document doc = searcher.doc(id);
    String strid = schema.printableUniqueKey(doc);
    explainList.add(strid, searcher.explain(query, id));
  }
  return explainList;
}
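A hedged sketch of consuming the result; query, docs, searcher and schema are assumed to come from the surrounding request context:

  NamedList<Explanation> explanations =
      SolrPluginUtils.getExplanations(query, docs, searcher, schema);
  for (Map.Entry<String, Explanation> entry : explanations) {
    // The key is the document's printable unique key, the value its Explanation.
    System.out.println(entry.getKey() + " -> " + entry.getValue());
  }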
Use of org.apache.solr.common.util.SimpleOrderedMap in project lucene-solr by apache:
class OverseerStatusCmd, method call.
@Override
@SuppressWarnings("unchecked")
public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
  ZkStateReader zkStateReader = ocmh.zkStateReader;
  String leaderNode = OverseerTaskProcessor.getLeaderNode(zkStateReader.getZkClient());
  results.add("leader", leaderNode);
  Stat stat = new Stat();
  zkStateReader.getZkClient().getData("/overseer/queue", null, stat, true);
  results.add("overseer_queue_size", stat.getNumChildren());
  stat = new Stat();
  zkStateReader.getZkClient().getData("/overseer/queue-work", null, stat, true);
  results.add("overseer_work_queue_size", stat.getNumChildren());
  stat = new Stat();
  zkStateReader.getZkClient().getData("/overseer/collection-queue-work", null, stat, true);
  results.add("overseer_collection_queue_size", stat.getNumChildren());
  NamedList overseerStats = new NamedList();
  NamedList collectionStats = new NamedList();
  NamedList stateUpdateQueueStats = new NamedList();
  NamedList workQueueStats = new NamedList();
  NamedList collectionQueueStats = new NamedList();
  Overseer.Stats stats = ocmh.stats;
  for (Map.Entry<String, Overseer.Stat> entry : stats.getStats().entrySet()) {
    String key = entry.getKey();
    NamedList<Object> lst = new SimpleOrderedMap<>();
    if (key.startsWith("collection_")) {
      collectionStats.add(key.substring(11), lst);
      int successes = stats.getSuccessCount(entry.getKey());
      int errors = stats.getErrorCount(entry.getKey());
      lst.add("requests", successes);
      lst.add("errors", errors);
      List<Overseer.FailedOp> failureDetails = stats.getFailureDetails(key);
      if (failureDetails != null) {
        List<SimpleOrderedMap<Object>> failures = new ArrayList<>();
        for (Overseer.FailedOp failedOp : failureDetails) {
          SimpleOrderedMap<Object> fail = new SimpleOrderedMap<>();
          fail.add("request", failedOp.req.getProperties());
          fail.add("response", failedOp.resp.getResponse());
          failures.add(fail);
        }
        lst.add("recent_failures", failures);
      }
    } else if (key.startsWith("/overseer/queue_")) {
      stateUpdateQueueStats.add(key.substring(16), lst);
    } else if (key.startsWith("/overseer/queue-work_")) {
      workQueueStats.add(key.substring(21), lst);
    } else if (key.startsWith("/overseer/collection-queue-work_")) {
      collectionQueueStats.add(key.substring(32), lst);
    } else {
      // overseer stats
      overseerStats.add(key, lst);
      int successes = stats.getSuccessCount(entry.getKey());
      int errors = stats.getErrorCount(entry.getKey());
      lst.add("requests", successes);
      lst.add("errors", errors);
    }
    Timer timer = entry.getValue().requestTime;
    MetricUtils.addMetrics(lst, timer);
  }
  results.add("overseer_operations", overseerStats);
  results.add("collection_operations", collectionStats);
  results.add("overseer_queue", stateUpdateQueueStats);
  results.add("overseer_internal_queue", workQueueStats);
  results.add("collection_queue", collectionQueueStats);
}
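A hedged sketch of reading back the per-operation stats this command assembles; results is the NamedList populated by call(), and the section names mirror the adds above:

  NamedList collectionOps = (NamedList) results.get("collection_operations");
  for (int i = 0; i < collectionOps.size(); i++) {
    // Each entry is a SimpleOrderedMap with "requests", "errors" and timer metrics.
    NamedList<Object> opStats = (NamedList<Object>) collectionOps.getVal(i);
    System.out.println(collectionOps.getName(i)
        + ": requests=" + opStats.get("requests")
        + ", errors=" + opStats.get("errors"));
  }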