Use of org.apache.solr.common.SolrDocumentList in project lucene-solr by apache:
the class TestCSVResponseWriter, method testCSVOutput.
/**
 * Exercises CSVResponseWriter end-to-end: primitive field types and requested
 * field ordering, score output, multivalued fields, customization of the
 * separator / multivalue separator / newline / encapsulator / escape, null
 * handling, polyfields (currency, latlon), and direct serialization of a
 * programmatically built SolrDocumentList (field globs, function queries,
 * and doc transformers).
 */
@Test
public void testCSVOutput() throws Exception {
// test our basic types, and that fields come back in the requested order
assertEquals("id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt\n1,hi,-1,12345678987654321,false,1.414,-1.0E300,2000-01-02T03:04:05Z\n", h.query(req("q", "id:1", "wt", "csv", "fl", "id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt")));
// test retrieving score, csv.header
assertEquals("1,0.0,hi\n", h.query(req("q", "id:1^0", "wt", "csv", "csv.header", "false", "fl", "id,score,foo_s")));
// test multivalued
assertEquals("2,\"hi,there\"\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "fl", "id,v_ss")));
// test separator change
assertEquals("2|\"hi|there\"\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.separator", "|", "fl", "id,v_ss")));
// test mv separator change
assertEquals("2,hi|there\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.mv.separator", "|", "fl", "id,v_ss")));
// test mv separator change for a single field
assertEquals("2,hi|there,nice:output\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.mv.separator", "|", "f.v2_ss.csv.separator", ":", "fl", "id,v_ss,v2_ss")));
// test csv field for polyfield (currency) SOLR-3959
assertEquals("4,\"1.50\\,EUR\"\n", h.query(req("q", "id:4", "wt", "csv", "csv.header", "false", "fl", "id,amount_c")));
// test csv field for polyfield (latlon) SOLR-3959
assertEquals("5,\"12.434\\,-134.1\"\n", h.query(req("q", "id:5", "wt", "csv", "csv.header", "false", "fl", "id,store")));
// test retrieving fields from index
String result = h.query(req("q", "*:*", "wt", "csv", "csv.header", "true", "fl", "*,score"));
// only checks each field name appears somewhere in the output, not column order
for (String field : "id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss,score".split(",")) {
assertTrue(result.indexOf(field) >= 0);
}
// test null values
assertEquals("2,,hi|there\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.mv.separator", "|", "fl", "id,foo_s,v_ss")));
// test alternate null value
assertEquals("2,NULL,hi|there\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.mv.separator", "|", "csv.null", "NULL", "fl", "id,foo_s,v_ss")));
// test alternate newline
assertEquals("2,\"hi,there\"\r\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.newline", "\r\n", "fl", "id,v_ss")));
// test alternate encapsulator
assertEquals("2,'hi,there'\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.encapsulator", "'", "fl", "id,v_ss")));
// test using escape instead of encapsulator
assertEquals("2,hi\\,there\n", h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "csv.escape", "\\", "fl", "id,v_ss")));
// test multiple lines
assertEquals("1,,hi\n2,\"hi,there\",\n", h.query(req("q", "id:[1 TO 2]", "wt", "csv", "csv.header", "false", "fl", "id,v_ss,foo_s")));
// test SOLR-2970 not returning non-stored fields by default. Compare sorted list
assertEquals(sortHeader("amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l\n"), sortHeader(h.query(req("q", "id:3", "wt", "csv", "csv.header", "true", "fl", "*", "rows", "0"))));
// now test SolrDocumentList (writer fed directly, bypassing the index)
SolrDocument d = new SolrDocument();
SolrDocument d1 = d;
d.addField("id", "1");
d.addField("foo_i", -1);
d.addField("foo_s", "hi");
// note: foo_l is deliberately a String with a trailing 'L'; the writer echoes
// it verbatim (see the "12345678987654321L" expectations below)
d.addField("foo_l", "12345678987654321L");
d.addField("foo_b", false);
d.addField("foo_f", 1.414f);
d.addField("foo_d", -1.0E300);
d.addField("foo_dt", new Date(Instant.parse("2000-01-02T03:04:05Z").toEpochMilli()));
d.addField("score", "2.718");
d = new SolrDocument();
SolrDocument d2 = d;
d.addField("id", "2");
d.addField("v_ss", "hi");
d.addField("v_ss", "there");
d.addField("v2_ss", "nice");
d.addField("v2_ss", "output");
d.addField("score", "89.83");
d.addField("shouldbeunstored", "foo");
SolrDocumentList sdl = new SolrDocumentList();
sdl.add(d1);
sdl.add(d2);
SolrQueryRequest req = req("q", "*:*");
SolrQueryResponse rsp = new SolrQueryResponse();
rsp.addResponse(sdl);
QueryResponseWriter w = new CSVResponseWriter();
rsp.setReturnFields(new SolrReturnFields("id,foo_s", req));
StringWriter buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,foo_s\n1,hi\n2,\n", buf.toString());
// try scores
rsp.setReturnFields(new SolrReturnFields("id,score,foo_s", req));
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,score,foo_s\n1,2.718,hi\n2,89.83,\n", buf.toString());
// get field values from docs... should be ordered and not include score unless requested
rsp.setReturnFields(new SolrReturnFields("*", req));
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,foo_i,foo_s,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss\n" + "1,-1,hi,12345678987654321L,false,1.414,-1.0E300,2000-01-02T03:04:05Z,,\n" + "2,,,,,,,,\"hi,there\",\"nice,output\"\n", buf.toString());
// get field values and scores - just check that the scores are there... we don't guarantee where
rsp.setReturnFields(new SolrReturnFields("*,score", req));
buf = new StringWriter();
w.write(buf, req, rsp);
String s = buf.toString();
assertTrue(s.indexOf("score") >= 0 && s.indexOf("2.718") > 0 && s.indexOf("89.83") > 0);
// Test field globs
rsp.setReturnFields(new SolrReturnFields("id,foo*", req));
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,foo_i,foo_s,foo_l,foo_b,foo_f,foo_d,foo_dt\n" + "1,-1,hi,12345678987654321L,false,1.414,-1.0E300,2000-01-02T03:04:05Z\n" + "2,,,,,,,\n", buf.toString());
rsp.setReturnFields(new SolrReturnFields("id,*_d*", req));
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("id,foo_d,foo_dt\n" + "1,-1.0E300,2000-01-02T03:04:05Z\n" + "2,,\n", buf.toString());
// Test function queries: headers containing commas get encapsulated; values are empty
// because function values are not present on the hand-built documents
rsp.setReturnFields(new SolrReturnFields("sum(1,1),id,exists(foo_i),div(9,1),foo_f", req));
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("\"sum(1,1)\",id,exists(foo_i),\"div(9,1)\",foo_f\n" + "\"\",1,,,1.414\n" + "\"\",2,,,\n", buf.toString());
// Test transformers: transformer output is likewise empty for hand-built docs
rsp.setReturnFields(new SolrReturnFields("mydocid:[docid],[explain]", req));
buf = new StringWriter();
w.write(buf, req, rsp);
assertEquals("mydocid,[explain]\n" + "\"\",\n" + "\"\",\n", buf.toString());
req.close();
}
Use of org.apache.solr.common.SolrDocumentList in project jackrabbit-oak by apache:
the class SolrQueryIndex, method putSpellChecks.
/**
 * Collects every spellcheck alternative from the given response and, for each
 * alternative, enqueues a result row — but only when a verification query for
 * that alternative returns at least one document the caller is allowed to read
 * (ACL check via {@code filter}).
 *
 * @param spellCheckResponse the spellcheck section of a Solr response
 * @param queue              destination queue for accepted suggestion rows
 * @param filter             access-control filter used to test readability
 * @param configuration      supplies the catch-all and path field names
 * @param solrServer         server used for the verification queries
 * @throws SolrServerException if a verification query fails
 */
private void putSpellChecks(SpellCheckResponse spellCheckResponse, final Deque<SolrResultRow> queue, Filter filter, OakSolrConfiguration configuration, SolrServer solrServer) throws SolrServerException {
    // Flatten all suggestions into one collection of alternative terms.
    Collection<String> alternatives = new ArrayList<String>();
    for (SpellCheckResponse.Suggestion suggestion : spellCheckResponse.getSuggestions()) {
        alternatives.addAll(suggestion.getAlternatives());
    }
    // ACL filter spellcheck results: surface an alternative only if the caller
    // can read at least one document matching it.
    for (String alternative : alternatives) {
        SolrQuery verificationQuery = new SolrQuery();
        verificationQuery.setParam("q", alternative);
        verificationQuery.setParam("df", configuration.getCatchAllField());
        verificationQuery.setParam("q.op", "AND");
        verificationQuery.setParam("rows", "100");
        SolrDocumentList matches = solrServer.query(verificationQuery).getResults();
        if (matches != null && matches.getNumFound() > 0) {
            for (SolrDocument match : matches) {
                String path = String.valueOf(match.getFieldValue(configuration.getPathField()));
                if (filter.isAccessible(path)) {
                    queue.add(new SolrResultRow(alternative));
                    break; // one readable hit is enough to accept this alternative
                }
            }
        }
    }
}
Use of org.apache.solr.common.SolrDocumentList in project jackrabbit-oak by apache:
the class SolrQueryIndex, method putSuggestions.
/**
 * Extracts suggester results from the raw "suggest" response entries and, for
 * each distinct suggestion, enqueues a result row — but only when a
 * verification query for the suggested term returns at least one document the
 * caller is allowed to read (ACL check via {@code filter}).
 *
 * <p>Improvements over the previous version: the redundant {@code isEmpty()}
 * check plus manual copy loop is replaced with {@code Collection.addAll} (a
 * no-op on an empty list), and the suggestion term is looked up once instead
 * of twice per suggestion (which also makes a null "term" behave consistently
 * between the query parameter and the emitted row).
 *
 * @param suggestEntries the entry set of the "suggest" section of a Solr response
 * @param queue          destination queue for accepted suggestion rows
 * @param filter         access-control filter used to test readability
 * @param configuration  supplies the catch-all and path field names
 * @param solrServer     server used for the verification queries
 * @throws SolrServerException if a verification query fails
 */
private void putSuggestions(Set<Map.Entry<String, Object>> suggestEntries, final Deque<SolrResultRow> queue, Filter filter, OakSolrConfiguration configuration, SolrServer solrServer) throws SolrServerException {
    // Gather every suggestion from every suggester into one de-duplicated set.
    Collection<SimpleOrderedMap<Object>> retrievedSuggestions = new HashSet<SimpleOrderedMap<Object>>();
    for (Map.Entry<String, Object> suggester : suggestEntries) {
        SimpleOrderedMap<Object> suggestionResponses = ((SimpleOrderedMap) suggester.getValue());
        for (Map.Entry<String, Object> suggestionResponse : suggestionResponses) {
            SimpleOrderedMap<Object> suggestionResults = ((SimpleOrderedMap) suggestionResponse.getValue());
            for (Map.Entry<String, Object> suggestionResult : suggestionResults) {
                if ("suggestions".equals(suggestionResult.getKey())) {
                    // addAll handles the empty-list case; no explicit isEmpty() check needed
                    retrievedSuggestions.addAll((ArrayList<SimpleOrderedMap<Object>>) suggestionResult.getValue());
                }
            }
        }
    }
    // ACL filter suggestions: emit a suggestion only if the caller can read at
    // least one document matching the suggested term.
    for (SimpleOrderedMap<Object> suggestion : retrievedSuggestions) {
        // hoisted: previously looked up twice per suggestion
        String term = String.valueOf(suggestion.get("term"));
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setParam("q", term);
        solrQuery.setParam("df", configuration.getCatchAllField());
        solrQuery.setParam("q.op", "AND");
        solrQuery.setParam("rows", "100");
        QueryResponse suggestQueryResponse = solrServer.query(solrQuery);
        SolrDocumentList results = suggestQueryResponse.getResults();
        if (results != null && results.getNumFound() > 0) {
            for (SolrDocument doc : results) {
                if (filter.isAccessible(String.valueOf(doc.getFieldValue(configuration.getPathField())))) {
                    queue.add(new SolrResultRow(term, Double.parseDouble(suggestion.get("weight").toString())));
                    break; // one readable hit is enough to accept this suggestion
                }
            }
        }
    }
}
Use of org.apache.solr.common.SolrDocumentList in project jackrabbit-oak by apache:
the class SolrQueryIndex, method getIterator.
/**
 * Builds a lazy iterator over Solr results for the given filter and plan.
 * Documents are fetched in batches via {@code loadDocs()}; each batch also
 * handles highlighting, ACL-filtered facets, spellcheck, and suggestions.
 * When {@code parent} is non-empty, result paths are collapsed to the
 * ancestor at {@code parentDepth} and de-duplicated.
 */
private AbstractIterator<SolrResultRow> getIterator(final Filter filter, final IndexPlan plan, final String parent, final int parentDepth, final OakSolrConfiguration configuration, final SolrServer solrServer, final LMSEstimator estimator) {
return new AbstractIterator<SolrResultRow>() {

// facets accumulated across batches, shared with every emitted row
public Collection<FacetField> facetFields = new LinkedList<FacetField>();

// ancestor paths already emitted; used for de-duplication when parent != ""
private final Set<String> seenPaths = Sets.newHashSet();

// rows fetched but not yet returned by computeNext()
private final Deque<SolrResultRow> queue = Queues.newArrayDeque();

// number of batches fetched so far; feeds the "start" offset computation below
private int offset = 0;

// once true (after spellcheck/suggest handling), no further doc batches are loaded
private boolean noDocs = false;

// total hit count reported by the most recent Solr response
private long numFound = 0;

@Override
protected SolrResultRow computeNext() {
// serve from the queue, refilling it lazily one batch at a time
if (!queue.isEmpty() || loadDocs()) {
return queue.remove();
}
return endOfData();
}

/**
 * Converts a Solr document into a result row, normalizing an empty path to
 * "/" and, when a parent path is requested, collapsing to the ancestor at
 * parentDepth (skipping ancestors already seen). Returns null for duplicates.
 */
private SolrResultRow convertToRow(SolrDocument doc) {
String path = String.valueOf(doc.getFieldValue(configuration.getPathField()));
if ("".equals(path)) {
path = "/";
}
if (!parent.isEmpty()) {
path = getAncestorPath(path, parentDepth);
// avoid duplicate entries
if (seenPaths.contains(path)) {
return null;
}
seenPaths.add(path);
}
float score = 0f;
Object scoreObj = doc.get("score");
if (scoreObj != null) {
score = (Float) scoreObj;
}
return new SolrResultRow(path, score, doc, facetFields);
}

/**
 * Loads the Solr documents in batches
 * @return true if any document is loaded
 */
private boolean loadDocs() {
if (noDocs) {
return false;
}
try {
if (log.isDebugEnabled()) {
log.debug("converting filter {}", filter);
}
SolrQuery query = FilterQueryParser.getQuery(filter, plan, configuration);
// After the first batch (numFound > 0), grow the page size and advance
// the start offset for subsequent requests.
if (numFound > 0) {
long rows = configuration.getRows();
long maxQueries = numFound / 2;
if (maxQueries > configuration.getRows()) {
// adjust the rows to avoid making more than 3 Solr requests for this particular query
// NOTE(review): rows is set to numFound/2 here, which caps follow-up requests at
// roughly two — confirm this matches the "3 requests" intent of the comment above
rows = maxQueries;
query.setParam("rows", String.valueOf(rows));
}
// NOTE(review): next start = configured page size + (batches so far * rows);
// verify this offset math against the intended pagination scheme
long newOffset = configuration.getRows() + offset * rows;
if (newOffset >= numFound) {
return false;
}
query.setParam("start", String.valueOf(newOffset));
offset++;
}
if (log.isDebugEnabled()) {
log.debug("sending query {}", query);
}
QueryResponse queryResponse = solrServer.query(query);
if (log.isDebugEnabled()) {
log.debug("getting response {}", queryResponse.getHeader());
}
SolrDocumentList docs = queryResponse.getResults();
if (docs != null) {
numFound = docs.getNumFound();
// feed the size estimator with the observed result cardinality
estimator.update(filter, docs);
Map<String, Map<String, List<String>>> highlighting = queryResponse.getHighlighting();
for (SolrDocument doc : docs) {
// handle highlight
if (highlighting != null) {
Object pathObject = doc.getFieldValue(configuration.getPathField());
if (pathObject != null && highlighting.get(String.valueOf(pathObject)) != null) {
Map<String, List<String>> value = highlighting.get(String.valueOf(pathObject));
for (Map.Entry<String, List<String>> entry : value.entrySet()) {
// all highlighted values end up in 'rep:excerpt', regardless of field match
for (String v : entry.getValue()) {
doc.addField(QueryImpl.REP_EXCERPT, v);
}
}
}
}
SolrResultRow row = convertToRow(doc);
// row is null when this ancestor path was already emitted
if (row != null) {
queue.add(row);
}
}
}
// get facets
List<FacetField> returnedFieldFacet = queryResponse.getFacetFields();
if (returnedFieldFacet != null) {
facetFields.addAll(returnedFieldFacet);
}
// filter facets on doc paths
if (!facetFields.isEmpty() && docs != null) {
for (SolrDocument doc : docs) {
String path = String.valueOf(doc.getFieldValue(configuration.getPathField()));
// if facet path doesn't exist for the calling user, filter the facet for this doc
for (FacetField ff : facetFields) {
if (!filter.isAccessible(path + "/" + ff.getName())) {
filterFacet(doc, ff);
}
}
}
}
// handle spellcheck: suggestions replace regular docs, so stop loading batches
SpellCheckResponse spellCheckResponse = queryResponse.getSpellCheckResponse();
if (spellCheckResponse != null && spellCheckResponse.getSuggestions() != null && spellCheckResponse.getSuggestions().size() > 0) {
putSpellChecks(spellCheckResponse, queue, filter, configuration, solrServer);
noDocs = true;
}
// handle suggest: same as spellcheck — suggestions terminate batch loading
NamedList<Object> response = queryResponse.getResponse();
Map suggest = (Map) response.get("suggest");
if (suggest != null) {
Set<Map.Entry<String, Object>> suggestEntries = suggest.entrySet();
if (!suggestEntries.isEmpty()) {
putSuggestions(suggestEntries, queue, filter, configuration, solrServer);
noDocs = true;
}
}
} catch (Exception e) {
// best-effort: a failed query ends iteration for this batch rather than propagating
if (log.isWarnEnabled()) {
log.warn("query via {} failed.", solrServer, e);
}
}
return !queue.isEmpty();
}
};
}
Use of org.apache.solr.common.SolrDocumentList in project lucene-solr by apache:
the class SolrSlf4jReporterTest, method testReporter.
/**
 * Verifies that the two configured SLF4J metric reporters are registered on
 * the node registry and that, after a reporting interval, log events from
 * both the "solr.node" and "foobar" loggers show up in the log watcher's
 * history with their full (non-collapsed) logger names.
 */
@Test
public void testReporter() throws Exception {
    // Register a log watcher so the reporters' output can be inspected as documents.
    LogWatcherConfig watcherConfig = new LogWatcherConfig(true, null, null, 100);
    LogWatcher logWatcher = LogWatcher.newRegisteredLogWatcher(watcherConfig, null);
    logWatcher.setThreshold("INFO");
    // define these properties, they are used in solrconfig.xml
    System.setProperty("solr.test.sys.prop1", "propone");
    System.setProperty("solr.test.sys.prop2", "proptwo");
    Path home = Paths.get(TEST_HOME());
    String solrXml = FileUtils.readFileToString(Paths.get(home.toString(), "solr-slf4jreporter.xml").toFile(), "UTF-8");
    NodeConfig nodeConfig = SolrXmlConfig.fromString(new SolrResourceLoader(home), solrXml);
    CoreContainer container = createCoreContainer(nodeConfig, new TestHarness.TestCoresLocator(DEFAULT_TEST_CORENAME, initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml"));
    h.coreName = DEFAULT_TEST_CORENAME;
    // Both reporters defined in solr-slf4jreporter.xml must be registered on the node registry.
    Map<String, SolrMetricReporter> reporters = container.getMetricManager().getReporters("solr.node");
    assertTrue(reporters.toString(), reporters.size() >= 2);
    for (String reporterName : new String[] { "test1", "test2" }) {
        SolrMetricReporter reporter = reporters.get(reporterName);
        assertNotNull(reporter);
        assertTrue(reporter instanceof SolrSlf4jReporter);
    }
    // Give the reporters time to emit at least one report, then inspect the captured events.
    logWatcher.reset();
    Thread.sleep(5000);
    SolrDocumentList history = logWatcher.getHistory(-1, null);
    // dot-separated names are treated like class names and collapsed
    // in regular log output, but here we get the full name
    assertTrue(history.stream().filter(d -> "solr.node".equals(d.getFirstValue("logger"))).count() > 0);
    assertTrue(history.stream().filter(d -> "foobar".equals(d.getFirstValue("logger"))).count() > 0);
}
Aggregations