Use of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr by apache.
From the class StreamingTest, method testZeroReducerStream:
@Test
public void testZeroReducerStream() throws Exception {
  // Gracefully handle zero results
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
      .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
      .add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6")
      .add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7")
      .add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8")
      .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9")
      .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    // Test with spaces in the parameter lists; q=blah matches no documents, so the reducer must emit zero tuples.
    SolrParams sParamsA = mapParams("q", "blah", "fl", "id,a_s, a_i, a_f", "sort", "a_s asc , a_f asc");
    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
    ReducerStream rstream = new ReducerStream(stream,
        new FieldEqualitor("a_s"),
        new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5));
    rstream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(rstream);
    assertEquals(0, tuples.size());
  } finally {
    solrClientCache.close();
  }
}
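Note that getTuples(...) used above is a local helper in StreamingTest, not part of the SolrJ API. A minimal sketch of such a helper, assuming only the standard TupleStream open/read/close contract and the EOF marker tuple (the real implementation in the test class may differ):

  // Hypothetical helper: drains a TupleStream into a list until the EOF tuple is seen.
  private List<Tuple> getTuples(TupleStream tupleStream) throws IOException {
    List<Tuple> tuples = new ArrayList<>();
    try {
      tupleStream.open();
      for (Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) {
        tuples.add(t);
      }
    } finally {
      tupleStream.close();
    }
    return tuples;
  }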
Use of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr by apache.
From the class StreamingTest, method testDaemonTopicStream:
@Test
public void testDaemonTopicStream() throws Exception {
  Assume.assumeTrue(!useAlias);
  StreamContext context = new StreamContext();
  SolrClientCache cache = new SolrClientCache();
  context.setSolrClientCache(cache);
  try {
    SolrParams sParams = mapParams("q", "a_s:hello0", "rows", "500", "fl", "id");
    TopicStream topicStream = new TopicStream(zkHost, COLLECTIONORALIAS, COLLECTIONORALIAS, "50000000", -1, 1000000, sParams);
    DaemonStream daemonStream = new DaemonStream(topicStream, "daemon1", 1000, 500);
    daemonStream.setStreamContext(context);
    daemonStream.open();
    // Wait for the topic's checkpoint document (id 50000000) to become visible via the /get handler.
    JettySolrRunner jetty = cluster.getJettySolrRunners().get(0);
    SolrParams sParams1 = mapParams("qt", "/get", "ids", "50000000", "fl", "id");
    int count = 0;
    while (count == 0) {
      SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/" + COLLECTIONORALIAS, sParams1);
      solrStream.setStreamContext(context);
      List<Tuple> tuples = getTuples(solrStream);
      count = tuples.size();
      if (count > 0) {
        Tuple t = tuples.get(0);
        assertTrue(t.getLong("id") == 50000000);
      } else {
        System.out.println("###### Waiting for checkpoint #######:" + count);
      }
    }
    new UpdateRequest()
        .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
        .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
        .add(id, "3", "a_s", "hello0", "a_i", "3", "a_f", "3")
        .add(id, "4", "a_s", "hello0", "a_i", "4", "a_f", "4")
        .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    // The daemon should deliver the five newly committed documents.
    for (int i = 0; i < 5; i++) {
      daemonStream.read();
    }
    new UpdateRequest()
        .add(id, "5", "a_s", "hello0", "a_i", "4", "a_f", "4")
        .add(id, "6", "a_s", "hello0", "a_i", "4", "a_f", "4")
        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    // ...followed by the two documents from the second commit.
    for (int i = 0; i < 2; i++) {
      daemonStream.read();
    }
    daemonStream.shutdown();
    // After shutdown, the daemon emits a final EOF tuple.
    Tuple tuple = daemonStream.read();
    assertTrue(tuple.EOF);
    daemonStream.close();
  } finally {
    cache.close();
  }
}
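The test calls read() a fixed number of times because it knows exactly how many documents were committed. Outside a test, a consumer would typically drain the daemon's queue until the EOF marker; a hypothetical sketch, assuming daemonStream has been opened as above and shutdown() has already been called (otherwise read() blocks waiting for new tuples):

  // Hypothetical drain loop; relies only on the EOF marker demonstrated in the test above.
  Tuple t = daemonStream.read();
  while (!t.EOF) {
    // process t here ...
    t = daemonStream.read();
  }
  daemonStream.close();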
Use of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr by apache.
From the class StreamingTest, method testTuple:
@Test
public void testTuple() throws Exception {
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "5.1",
          "s_multi", "a", "s_multi", "b",
          "i_multi", "1", "i_multi", "2",
          "f_multi", "1.2", "f_multi", "1.3")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f,s_multi,i_multi,f_multi", "sort", "a_s asc");
    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
    stream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(stream);
    Tuple tuple = tuples.get(0);
    // Single-valued fields are read with the typed getters.
    String s = tuple.getString("a_s");
    assertEquals("hello0", s);
    long l = tuple.getLong("a_i");
    assertEquals(0, l);
    double d = tuple.getDouble("a_f");
    assertEquals(5.1, d, 0.001);
    // Multi-valued fields come back as typed lists.
    List<String> stringList = tuple.getStrings("s_multi");
    assertEquals("a", stringList.get(0));
    assertEquals("b", stringList.get(1));
    List<Long> longList = tuple.getLongs("i_multi");
    assertEquals(1, longList.get(0).longValue());
    assertEquals(2, longList.get(1).longValue());
    List<Double> doubleList = tuple.getDoubles("f_multi");
    assertEquals(1.2, doubleList.get(0).doubleValue(), 0.001);
    assertEquals(1.3, doubleList.get(1).doubleValue(), 0.001);
  } finally {
    solrClientCache.close();
  }
}
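Like getTuples(...), the mapParams(...) call that appears throughout these tests is a test helper rather than a SolrJ method; it builds a SolrParams object from alternating key/value strings. A plausible sketch of it (the actual helper in the test class may differ):

  // Hypothetical helper: pairs up varargs into ModifiableSolrParams entries.
  public static ModifiableSolrParams mapParams(String... vals) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    for (int i = 0; i < vals.length; i += 2) {
      params.add(vals[i], vals[i + 1]);
    }
    return params;
  }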
Use of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr by apache.
From the class StreamingTest, method testMultipleFqClauses:
@Test
public void testMultipleFqClauses() throws Exception {
  new UpdateRequest()
      .add(id, "0", "a_ss", "hello0", "a_ss", "hello1", "a_i", "0", "a_f", "0")
      .add(id, "2", "a_ss", "hello2", "a_i", "2", "a_f", "0")
      .add(id, "3", "a_ss", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_ss", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "1", "a_ss", "hello1", "a_i", "1", "a_f", "1")
      .add(id, "5", "a_ss", "hello1", "a_i", "10", "a_f", "1")
      .add(id, "6", "a_ss", "hello1", "a_i", "11", "a_f", "5")
      .add(id, "7", "a_ss", "hello1", "a_i", "12", "a_f", "5")
      .add(id, "8", "a_ss", "hello1", "a_i", "13", "a_f", "4")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  streamFactory.withCollectionZkHost(COLLECTIONORALIAS, zkHost);
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    // Two fq parameters: only document 0 carries both a_ss:hello0 and a_ss:hello1.
    ModifiableSolrParams params = new ModifiableSolrParams(mapParams("q", "*:*", "fl", "id,a_i", "sort", "a_i asc",
        "fq", "a_ss:hello0", "fq", "a_ss:hello1"));
    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, params);
    stream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(stream);
    assertEquals("Multiple fq clauses should have been honored", 1, tuples.size());
    assertEquals("should only have gotten back document 0", "0", tuples.get(0).getString("id"));
  } finally {
    solrClientCache.close();
  }
}
Use of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr by apache.
From the class GraphExpressionTest, method testShortestPathStream:
@Test
public void testShortestPathStream() throws Exception {
  new UpdateRequest()
      .add(id, "0", "from_s", "jim", "to_s", "mike", "predicate_s", "knows")
      .add(id, "1", "from_s", "jim", "to_s", "dave", "predicate_s", "knows")
      .add(id, "2", "from_s", "jim", "to_s", "stan", "predicate_s", "knows")
      .add(id, "3", "from_s", "dave", "to_s", "stan", "predicate_s", "knows")
      .add(id, "4", "from_s", "dave", "to_s", "bill", "predicate_s", "knows")
      .add(id, "5", "from_s", "dave", "to_s", "mike", "predicate_s", "knows")
      .add(id, "20", "from_s", "dave", "to_s", "alex", "predicate_s", "knows")
      .add(id, "21", "from_s", "alex", "to_s", "steve", "predicate_s", "knows")
      .add(id, "6", "from_s", "stan", "to_s", "alice", "predicate_s", "knows")
      .add(id, "7", "from_s", "stan", "to_s", "mary", "predicate_s", "knows")
      .add(id, "8", "from_s", "stan", "to_s", "dave", "predicate_s", "knows")
      .add(id, "10", "from_s", "mary", "to_s", "mike", "predicate_s", "knows")
      .add(id, "11", "from_s", "mary", "to_s", "max", "predicate_s", "knows")
      .add(id, "12", "from_s", "mary", "to_s", "jim", "predicate_s", "knows")
      .add(id, "13", "from_s", "mary", "to_s", "steve", "predicate_s", "knows")
      .commit(cluster.getSolrClient(), COLLECTION);
  List<Tuple> tuples = null;
  Set<String> paths = null;
  ShortestPathStream stream = null;
  StreamContext context = new StreamContext();
  SolrClientCache cache = new SolrClientCache();
  context.setSolrClientCache(cache);
  StreamFactory factory = new StreamFactory()
      .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
      .withFunctionName("shortestPath", ShortestPathStream.class);
  // Note: this params map is never passed to the factory; the fq is embedded in the expression string below.
  Map params = new HashMap();
  params.put("fq", "predicate_s:knows");
  stream = (ShortestPathStream) factory.constructStream("shortestPath(collection1, " + "from=\"jim\", " + "to=\"steve\"," + "edge=\"from_s=to_s\"," + "fq=\"predicate_s:knows\"," + "threads=\"3\"," + "partitionSize=\"3\"," + "maxDepth=\"6\")");
  stream.setStreamContext(context);
  paths = new HashSet();
  tuples = getTuples(stream);
  assertTrue(tuples.size() == 2);
  for (Tuple tuple : tuples) {
    paths.add(tuple.getStrings("path").toString());
  }
  assertTrue(paths.contains("[jim, dave, alex, steve]"));
  assertTrue(paths.contains("[jim, stan, mary, steve]"));
  // Test with a partition (batch) size of 1
  params.put("fq", "predicate_s:knows");
  stream = (ShortestPathStream) factory.constructStream("shortestPath(collection1, " + "from=\"jim\", " + "to=\"steve\"," + "edge=\"from_s=to_s\"," + "fq=\"predicate_s:knows\"," + "threads=\"3\"," + "partitionSize=\"1\"," + "maxDepth=\"6\")");
  stream.setStreamContext(context);
  paths = new HashSet();
  tuples = getTuples(stream);
  assertTrue(tuples.size() == 2);
  for (Tuple tuple : tuples) {
    paths.add(tuple.getStrings("path").toString());
  }
  assertTrue(paths.contains("[jim, dave, alex, steve]"));
  assertTrue(paths.contains("[jim, stan, mary, steve]"));
  // Test with a predicate that matches no edges; no paths should be found.
  stream = (ShortestPathStream) factory.constructStream("shortestPath(collection1, " + "from=\"jim\", " + "to=\"steve\"," + "edge=\"from_s=to_s\"," + "fq=\"predicate_s:crap\"," + "threads=\"3\"," + "partitionSize=\"3\"," + "maxDepth=\"6\")");
  stream.setStreamContext(context);
  paths = new HashSet();
  tuples = getTuples(stream);
  assertTrue(tuples.size() == 0);
  // Test with maxDepth of 2, which is too shallow to reach steve.
  stream = (ShortestPathStream) factory.constructStream("shortestPath(collection1, " + "from=\"jim\", " + "to=\"steve\"," + "edge=\"from_s=to_s\"," + "fq=\"predicate_s:knows\"," + "threads=\"3\"," + "partitionSize=\"3\"," + "maxDepth=\"2\")");
  stream.setStreamContext(context);
  tuples = getTuples(stream);
  assertTrue(tuples.size() == 0);
  // Exclude alex from the graph; only one shortest path remains.
  params.put("fq", "predicate_s:knows NOT to_s:alex");
  stream = (ShortestPathStream) factory.constructStream("shortestPath(collection1, " + "from=\"jim\", " + "to=\"steve\"," + "edge=\"from_s=to_s\"," + "fq=\" predicate_s:knows NOT to_s:alex\"," + "threads=\"3\"," + "partitionSize=\"3\"," + "maxDepth=\"6\")");
  stream.setStreamContext(context);
  paths = new HashSet();
  tuples = getTuples(stream);
  assertTrue(tuples.size() == 1);
  for (Tuple tuple : tuples) {
    paths.add(tuple.getStrings("path").toString());
  }
  assertTrue(paths.contains("[jim, stan, mary, steve]"));
  cache.close();
}
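For reference, the concatenated string passed to the first constructStream call above resolves to the following streaming expression (whitespace added here for readability only):

  shortestPath(collection1,
               from="jim",
               to="steve",
               edge="from_s=to_s",
               fq="predicate_s:knows",
               threads="3",
               partitionSize="3",
               maxDepth="6")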