Use of org.apache.solr.client.solrj.io.comp.FieldComparator in the lucene-solr project by Apache, from the class StreamingTest, method testParallelRankStream.
@Test
public void testParallelRankStream() throws Exception {
  // Index ten documents; note that there is no document whose a_i value is 1.
  new UpdateRequest().add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0").add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0").add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3").add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4").add(id, "5", "a_s", "hello1", "a_i", "5", "a_f", "1").add(id, "6", "a_s", "hello1", "a_i", "6", "a_f", "1").add(id, "7", "a_s", "hello1", "a_i", "7", "a_f", "1").add(id, "8", "a_s", "hello1", "a_i", "8", "a_f", "1").add(id, "9", "a_s", "hello1", "a_i", "9", "a_f", "1").add(id, "10", "a_s", "hello1", "a_i", "10", "a_f", "1").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  SolrClientCache clientCache = new SolrClientCache();
  StreamContext context = new StreamContext();
  context.setSolrClientCache(clientCache);
  try {
    // Rank the top 11 tuples by a_i descending, executed in parallel across workers.
    SolrParams queryParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i");
    CloudSolrStream searchStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, queryParams);
    RankStream ranked = new RankStream(searchStream, 11, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
    ParallelStream parallel = parallelStream(ranked, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
    attachStreamFactory(parallel);
    parallel.setStreamContext(context);
    List<Tuple> results = getTuples(parallel);
    // Only ten documents exist, so a rank window of 11 returns every one of them.
    assertEquals(10, results.size());
    assertOrder(results, 10, 9, 8, 7, 6, 5, 4, 3, 2, 0);
  } finally {
    clientCache.close();
  }
}
Use of org.apache.solr.client.solrj.io.comp.FieldComparator in the lucene-solr project by Apache, from the class StreamingTest, method testSubFacetStream.
@Test
public void testSubFacetStream() throws Exception {
  // Two-level facet data: level1_s x level2_s buckets over a_i values.
  new UpdateRequest().add(id, "0", "level1_s", "hello0", "level2_s", "a", "a_i", "0", "a_f", "1").add(id, "2", "level1_s", "hello0", "level2_s", "a", "a_i", "2", "a_f", "2").add(id, "3", "level1_s", "hello3", "level2_s", "a", "a_i", "3", "a_f", "3").add(id, "4", "level1_s", "hello4", "level2_s", "a", "a_i", "4", "a_f", "4").add(id, "1", "level1_s", "hello0", "level2_s", "b", "a_i", "1", "a_f", "5").add(id, "5", "level1_s", "hello3", "level2_s", "b", "a_i", "10", "a_f", "6").add(id, "6", "level1_s", "hello4", "level2_s", "b", "a_i", "11", "a_f", "7").add(id, "7", "level1_s", "hello3", "level2_s", "b", "a_i", "12", "a_f", "8").add(id, "8", "level1_s", "hello3", "level2_s", "b", "a_i", "13", "a_f", "9").add(id, "9", "level1_s", "hello0", "level2_s", "b", "a_i", "14", "a_f", "10").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_i,a_f");
    Bucket[] buckets = { new Bucket("level1_s"), new Bucket("level2_s") };
    Metric[] metrics = { new SumMetric("a_i"), new CountMetric() };
    // First pass: both bucket levels sorted by sum(a_i) descending.
    FieldComparator[] sorts = { new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING), new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING) };
    FacetStream facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
    facetStream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(facetStream);
    assertEquals(6, tuples.size());
    assertFacetTuple(tuples.get(0), "hello3", "b", 35, 3);
    assertFacetTuple(tuples.get(1), "hello0", "b", 15, 2);
    assertFacetTuple(tuples.get(2), "hello4", "b", 11, 1);
    assertFacetTuple(tuples.get(3), "hello4", "a", 4, 1);
    assertFacetTuple(tuples.get(4), "hello3", "a", 3, 1);
    assertFacetTuple(tuples.get(5), "hello0", "a", 2, 2);
    // Second pass: re-sort by the bucket labels themselves, descending.
    sorts[0] = new FieldComparator("level1_s", ComparatorOrder.DESCENDING);
    sorts[1] = new FieldComparator("level2_s", ComparatorOrder.DESCENDING);
    facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
    facetStream.setStreamContext(streamContext);
    tuples = getTuples(facetStream);
    assertEquals(6, tuples.size());
    assertFacetTuple(tuples.get(0), "hello4", "b", 11, 1);
    assertFacetTuple(tuples.get(1), "hello4", "a", 4, 1);
    assertFacetTuple(tuples.get(2), "hello3", "b", 35, 3);
    assertFacetTuple(tuples.get(3), "hello3", "a", 3, 1);
    assertFacetTuple(tuples.get(4), "hello0", "b", 15, 2);
    assertFacetTuple(tuples.get(5), "hello0", "a", 2, 2);
  } finally {
    solrClientCache.close();
  }
}

/**
 * Asserts the bucket labels and metric values of a single two-level facet tuple.
 *
 * @param tuple         the facet tuple under test
 * @param expectedLevel1 expected value of the "level1_s" bucket
 * @param expectedLevel2 expected value of the "level2_s" bucket
 * @param expectedSum   expected "sum(a_i)" metric, compared as a long
 * @param expectedCount expected "count(*)" metric, compared with a 0.1 delta
 */
private void assertFacetTuple(Tuple tuple, String expectedLevel1, String expectedLevel2, long expectedSum, double expectedCount) {
  assertEquals(expectedLevel1, tuple.getString("level1_s"));
  assertEquals(expectedLevel2, tuple.getString("level2_s"));
  assertEquals(expectedSum, tuple.getDouble("sum(a_i)").longValue());
  assertEquals(expectedCount, tuple.getDouble("count(*)").doubleValue(), 0.1);
}
Use of org.apache.solr.client.solrj.io.comp.FieldComparator in the lucene-solr project by Apache, from the class StreamingTest, method testParallelMergeStream.
@Test
public void testParallelMergeStream() throws Exception {
  new UpdateRequest().add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0").add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0").add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3").add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4").add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1").add(id, "5", "a_s", "hello0", "a_i", "10", "a_f", "0").add(id, "6", "a_s", "hello2", "a_i", "8", "a_f", "0").add(id, "7", "a_s", "hello3", "a_i", "7", "a_f", "3").add(id, "8", "a_s", "hello4", "a_i", "11", "a_f", "4").add(id, "9", "a_s", "hello1", "a_i", "100", "a_f", "1").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  SolrClientCache clientCache = new SolrClientCache();
  StreamContext context = new StreamContext();
  context.setSolrClientCache(clientCache);
  try {
    // Ascending merge of two disjoint slices of the index, run in parallel.
    SolrParams ascParamsA = mapParams("q", "id:(4 1 8 7 9)", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i");
    SolrParams ascParamsB = mapParams("q", "id:(0 2 3 6)", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i");
    MergeStream ascMerge = new MergeStream(new CloudSolrStream(zkHost, COLLECTIONORALIAS, ascParamsA), new CloudSolrStream(zkHost, COLLECTIONORALIAS, ascParamsB), new FieldComparator("a_i", ComparatorOrder.ASCENDING));
    ParallelStream ascParallel = parallelStream(ascMerge, new FieldComparator("a_i", ComparatorOrder.ASCENDING));
    attachStreamFactory(ascParallel);
    ascParallel.setStreamContext(context);
    List<Tuple> ascTuples = getTuples(ascParallel);
    assertEquals(9, ascTuples.size());
    assertOrder(ascTuples, 0, 1, 2, 3, 4, 7, 6, 8, 9);
    // Descending merge; id 7 is deliberately omitted from the first query this time.
    SolrParams descParamsA = mapParams("q", "id:(4 1 8 9)", "fl", "id,a_s,a_i", "sort", "a_i desc", "partitionKeys", "a_i");
    SolrParams descParamsB = mapParams("q", "id:(0 2 3 6)", "fl", "id,a_s,a_i", "sort", "a_i desc", "partitionKeys", "a_i");
    MergeStream descMerge = new MergeStream(new CloudSolrStream(zkHost, COLLECTIONORALIAS, descParamsA), new CloudSolrStream(zkHost, COLLECTIONORALIAS, descParamsB), new FieldComparator("a_i", ComparatorOrder.DESCENDING));
    ParallelStream descParallel = parallelStream(descMerge, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
    attachStreamFactory(descParallel);
    descParallel.setStreamContext(context);
    List<Tuple> descTuples = getTuples(descParallel);
    assertEquals(8, descTuples.size());
    assertOrder(descTuples, 9, 8, 6, 4, 3, 2, 1, 0);
  } finally {
    clientCache.close();
  }
}
Use of org.apache.solr.client.solrj.io.comp.FieldComparator in the lucene-solr project by Apache, from the class GraphExpressionTest, method testGatherNodesFriendsStream.
@Test
public void testGatherNodesFriendsStream() throws Exception {
  // Index a small message graph; each document is an edge from_s -> to_s.
  new UpdateRequest().add(id, "0", "from_s", "bill", "to_s", "jim", "message_t", "Hello jim").add(id, "1", "from_s", "bill", "to_s", "sam", "message_t", "Hello sam").add(id, "2", "from_s", "bill", "to_s", "max", "message_t", "Hello max").add(id, "3", "from_s", "max", "to_s", "kip", "message_t", "Hello kip").add(id, "4", "from_s", "sam", "to_s", "steve", "message_t", "Hello steve").add(id, "5", "from_s", "jim", "to_s", "ann", "message_t", "Hello steve").commit(cluster.getSolrClient(), COLLECTION);
  StreamContext context = new StreamContext();
  SolrClientCache cache = new SolrClientCache();
  context.setSolrClientCache(cache);
  // Close the client cache even when an assertion fails mid-test.
  try {
    StreamFactory factory = new StreamFactory().withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress()).withFunctionName("gatherNodes", GatherNodesStream.class).withFunctionName("search", CloudSolrStream.class).withFunctionName("count", CountMetric.class).withFunctionName("hashJoin", HashJoinStream.class).withFunctionName("avg", MeanMetric.class).withFunctionName("sum", SumMetric.class).withFunctionName("min", MinMetric.class).withFunctionName("max", MaxMetric.class);
    // One-hop walk from the literal root "bill", gathering to_s targets.
    String expr = "gatherNodes(collection1, " + "walk=\"bill->from_s\"," + "gather=\"to_s\")";
    GatherNodesStream stream = (GatherNodesStream) factory.constructStream(expr);
    stream.setStreamContext(context);
    List<Tuple> tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(3, tuples.size());
    assertEquals("jim", tuples.get(0).getString("node"));
    assertEquals("max", tuples.get(1).getString("node"));
    assertEquals("sam", tuples.get(2).getString("node"));
    // Test scatter branches, leaves and trackTraversal.
    expr = "gatherNodes(collection1, " + "walk=\"bill->from_s\"," + "gather=\"to_s\"," + "scatter=\"branches, leaves\", trackTraversal=\"true\")";
    stream = (GatherNodesStream) factory.constructStream(expr);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(4, tuples.size());
    // The root "bill" appears at level 0 with no ancestors.
    assertEquals("bill", tuples.get(0).getString("node"));
    assertEquals(0L, tuples.get(0).getLong("level").longValue());
    assertEquals(0, tuples.get(0).getStrings("ancestors").size());
    assertEquals("jim", tuples.get(1).getString("node"));
    assertEquals(1L, tuples.get(1).getLong("level").longValue());
    List<String> ancestors = tuples.get(1).getStrings("ancestors");
    assertEquals(1, ancestors.size());
    assertEquals("bill", ancestors.get(0));
    assertEquals("max", tuples.get(2).getString("node"));
    assertEquals(1L, tuples.get(2).getLong("level").longValue());
    ancestors = tuples.get(2).getStrings("ancestors");
    assertEquals(1, ancestors.size());
    assertEquals("bill", ancestors.get(0));
    assertEquals("sam", tuples.get(3).getString("node"));
    assertEquals(1L, tuples.get(3).getLong("level").longValue());
    ancestors = tuples.get(3).getStrings("ancestors");
    assertEquals(1, ancestors.size());
    assertEquals("bill", ancestors.get(0));
    // Test query root: the roots come from an inner search expression.
    expr = "gatherNodes(collection1, " + "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\")," + "walk=\"from_s->from_s\"," + "gather=\"to_s\")";
    stream = (GatherNodesStream) factory.constructStream(expr);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(3, tuples.size());
    assertEquals("jim", tuples.get(0).getString("node"));
    assertEquals("max", tuples.get(1).getString("node"));
    assertEquals("sam", tuples.get(2).getString("node"));
    // Test query root with scatter branches.
    expr = "gatherNodes(collection1, " + "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\")," + "walk=\"from_s->from_s\"," + "gather=\"to_s\", scatter=\"branches, leaves\")";
    stream = (GatherNodesStream) factory.constructStream(expr);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(4, tuples.size());
    assertEquals("bill", tuples.get(0).getString("node"));
    assertEquals(0L, tuples.get(0).getLong("level").longValue());
    assertEquals("jim", tuples.get(1).getString("node"));
    assertEquals(1L, tuples.get(1).getLong("level").longValue());
    assertEquals("max", tuples.get(2).getString("node"));
    assertEquals(1L, tuples.get(2).getLong("level").longValue());
    assertEquals("sam", tuples.get(3).getString("node"));
    assertEquals(1L, tuples.get(3).getLong("level").longValue());
    // Nested traversal: walk a second hop from the first traversal's nodes.
    expr = "gatherNodes(collection1, " + "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\")," + "walk=\"from_s->from_s\"," + "gather=\"to_s\")";
    String expr2 = "gatherNodes(collection1, " + expr + "," + "walk=\"node->from_s\"," + "gather=\"to_s\")";
    stream = (GatherNodesStream) factory.constructStream(expr2);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(3, tuples.size());
    assertEquals("ann", tuples.get(0).getString("node"));
    assertEquals("kip", tuples.get(1).getString("node"));
    assertEquals("steve", tuples.get(2).getString("node"));
    // Test two traversals in the same expression via hashJoin.
    String expr3 = "hashJoin(" + expr2 + ", hashed=" + expr2 + ", on=\"node\")";
    HashJoinStream hstream = (HashJoinStream) factory.constructStream(expr3);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    hstream.setStreamContext(context);
    tuples = getTuples(hstream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(3, tuples.size());
    assertEquals("ann", tuples.get(0).getString("node"));
    assertEquals("kip", tuples.get(1).getString("node"));
    assertEquals("steve", tuples.get(2).getString("node"));
    // Nested traversal again, this time scattering branches and leaves.
    expr = "gatherNodes(collection1, " + "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\")," + "walk=\"from_s->from_s\"," + "gather=\"to_s\")";
    expr2 = "gatherNodes(collection1, " + expr + "," + "walk=\"node->from_s\"," + "gather=\"to_s\", scatter=\"branches, leaves\")";
    stream = (GatherNodesStream) factory.constructStream(expr2);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(7, tuples.size());
    assertEquals("ann", tuples.get(0).getString("node"));
    assertEquals(2L, tuples.get(0).getLong("level").longValue());
    assertEquals("bill", tuples.get(1).getString("node"));
    assertEquals(0L, tuples.get(1).getLong("level").longValue());
    assertEquals("jim", tuples.get(2).getString("node"));
    assertEquals(1L, tuples.get(2).getLong("level").longValue());
    assertEquals("kip", tuples.get(3).getString("node"));
    assertEquals(2L, tuples.get(3).getLong("level").longValue());
    assertEquals("max", tuples.get(4).getString("node"));
    assertEquals(1L, tuples.get(4).getLong("level").longValue());
    assertEquals("sam", tuples.get(5).getString("node"));
    assertEquals(1L, tuples.get(5).getLong("level").longValue());
    assertEquals("steve", tuples.get(6).getString("node"));
    assertEquals(2L, tuples.get(6).getLong("level").longValue());
    // Add cycles back to bill (jim->bill and sam->bill).
    new UpdateRequest().add(id, "6", "from_s", "jim", "to_s", "bill", "message_t", "Hello steve").add(id, "7", "from_s", "sam", "to_s", "bill", "message_t", "Hello steve").commit(cluster.getSolrClient(), COLLECTION);
    expr = "gatherNodes(collection1, " + "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\")," + "walk=\"from_s->from_s\"," + "gather=\"to_s\", trackTraversal=\"true\")";
    expr2 = "gatherNodes(collection1, " + expr + "," + "walk=\"node->from_s\"," + "gather=\"to_s\", scatter=\"branches, leaves\", trackTraversal=\"true\")";
    stream = (GatherNodesStream) factory.constructStream(expr2);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(7, tuples.size());
    assertEquals("ann", tuples.get(0).getString("node"));
    assertEquals(2L, tuples.get(0).getLong("level").longValue());
    // Bill should now have two ancestors: jim and sam.
    assertEquals("bill", tuples.get(1).getString("node"));
    assertEquals(0L, tuples.get(1).getLong("level").longValue());
    List<String> anc = tuples.get(1).getStrings("ancestors");
    assertEquals(2, anc.size());
    Collections.sort(anc);
    assertEquals("jim", anc.get(0));
    assertEquals("sam", anc.get(1));
    assertEquals("jim", tuples.get(2).getString("node"));
    assertEquals(1L, tuples.get(2).getLong("level").longValue());
    assertEquals("kip", tuples.get(3).getString("node"));
    assertEquals(2L, tuples.get(3).getLong("level").longValue());
    assertEquals("max", tuples.get(4).getString("node"));
    assertEquals(1L, tuples.get(4).getLong("level").longValue());
    assertEquals("sam", tuples.get(5).getString("node"));
    assertEquals(1L, tuples.get(5).getLong("level").longValue());
    assertEquals("steve", tuples.get(6).getString("node"));
    assertEquals(2L, tuples.get(6).getLong("level").longValue());
  } finally {
    cache.close();
  }
}
Use of org.apache.solr.client.solrj.io.comp.FieldComparator in the lucene-solr project by Apache, from the class GraphExpressionTest, method testGatherNodesStream.
@Test
public void testGatherNodesStream() throws Exception {
  // Index a basket/product bipartite graph with a price per edge.
  new UpdateRequest().add(id, "0", "basket_s", "basket1", "product_s", "product1", "price_f", "20").add(id, "1", "basket_s", "basket1", "product_s", "product3", "price_f", "30").add(id, "2", "basket_s", "basket1", "product_s", "product5", "price_f", "1").add(id, "3", "basket_s", "basket2", "product_s", "product1", "price_f", "2").add(id, "4", "basket_s", "basket2", "product_s", "product6", "price_f", "5").add(id, "5", "basket_s", "basket2", "product_s", "product7", "price_f", "10").add(id, "6", "basket_s", "basket3", "product_s", "product4", "price_f", "20").add(id, "7", "basket_s", "basket3", "product_s", "product3", "price_f", "10").add(id, "8", "basket_s", "basket3", "product_s", "product1", "price_f", "10").add(id, "9", "basket_s", "basket4", "product_s", "product4", "price_f", "40").add(id, "10", "basket_s", "basket4", "product_s", "product3", "price_f", "10").add(id, "11", "basket_s", "basket4", "product_s", "product1", "price_f", "10").commit(cluster.getSolrClient(), COLLECTION);
  StreamContext context = new StreamContext();
  SolrClientCache cache = new SolrClientCache();
  context.setSolrClientCache(cache);
  // Close the client cache even when an assertion fails mid-test.
  try {
    StreamFactory factory = new StreamFactory().withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress()).withFunctionName("gatherNodes", GatherNodesStream.class).withFunctionName("nodes", GatherNodesStream.class).withFunctionName("search", CloudSolrStream.class).withFunctionName("count", CountMetric.class).withFunctionName("avg", MeanMetric.class).withFunctionName("sum", SumMetric.class).withFunctionName("min", MinMetric.class).withFunctionName("max", MaxMetric.class);
    // "nodes" is an alias for gatherNodes: find all baskets containing product1.
    String expr = "nodes(collection1, " + "walk=\"product1->product_s\"," + "gather=\"basket_s\")";
    GatherNodesStream stream = (GatherNodesStream) factory.constructStream(expr);
    stream.setStreamContext(context);
    List<Tuple> tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(4, tuples.size());
    assertEquals("basket1", tuples.get(0).getString("node"));
    assertEquals("basket2", tuples.get(1).getString("node"));
    assertEquals("basket3", tuples.get(2).getString("node"));
    assertEquals("basket4", tuples.get(3).getString("node"));
    // Test maxDocFreq param: only product7 is rare enough to traverse.
    String docFreqExpr = "gatherNodes(collection1, " + "walk=\"product1, product7->product_s\"," + "maxDocFreq=\"2\"," + "gather=\"basket_s\")";
    stream = (GatherNodesStream) factory.constructStream(docFreqExpr);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(1, tuples.size());
    assertEquals("basket2", tuples.get(0).getString("node"));
    // Second hop with aggregations over price_f on the gathered product nodes.
    String expr2 = "gatherNodes(collection1, " + expr + "," + "walk=\"node->basket_s\"," + "gather=\"product_s\", count(*), avg(price_f), sum(price_f), min(price_f), max(price_f))";
    stream = (GatherNodesStream) factory.constructStream(expr2);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(5, tuples.size());
    assertEquals("product3", tuples.get(0).getString("node"));
    assertEquals(3.0D, tuples.get(0).getDouble("count(*)"), 0.0);
    assertEquals("product4", tuples.get(1).getString("node"));
    assertEquals(2.0D, tuples.get(1).getDouble("count(*)"), 0.0);
    assertEquals(30.0D, tuples.get(1).getDouble("avg(price_f)"), 0.0);
    assertEquals(60.0D, tuples.get(1).getDouble("sum(price_f)"), 0.0);
    assertEquals(20.0D, tuples.get(1).getDouble("min(price_f)"), 0.0);
    assertEquals(40.0D, tuples.get(1).getDouble("max(price_f)"), 0.0);
    assertEquals("product5", tuples.get(2).getString("node"));
    assertEquals(1.0D, tuples.get(2).getDouble("count(*)"), 0.0);
    assertEquals("product6", tuples.get(3).getString("node"));
    assertEquals(1.0D, tuples.get(3).getDouble("count(*)"), 0.0);
    assertEquals("product7", tuples.get(4).getString("node"));
    assertEquals(1.0D, tuples.get(4).getDouble("count(*)"), 0.0);
    // Test list of root nodes.
    expr = "gatherNodes(collection1, " + "walk=\"product4, product7->product_s\"," + "gather=\"basket_s\")";
    stream = (GatherNodesStream) factory.constructStream(expr);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(3, tuples.size());
    assertEquals("basket2", tuples.get(0).getString("node"));
    assertEquals("basket3", tuples.get(1).getString("node"));
    assertEquals("basket4", tuples.get(2).getString("node"));
    // Test with a negative filter query that excludes basket4.
    expr = "gatherNodes(collection1, " + "walk=\"product4, product7->product_s\"," + "gather=\"basket_s\", fq=\"-basket_s:basket4\")";
    stream = (GatherNodesStream) factory.constructStream(expr);
    context = new StreamContext();
    context.setSolrClientCache(cache);
    stream.setStreamContext(context);
    tuples = getTuples(stream);
    Collections.sort(tuples, new FieldComparator("node", ComparatorOrder.ASCENDING));
    assertEquals(2, tuples.size());
    assertEquals("basket2", tuples.get(0).getString("node"));
    assertEquals("basket3", tuples.get(1).getString("node"));
  } finally {
    cache.close();
  }
}
Related aggregations of FieldComparator usages.