Usage example of org.apache.solr.client.solrj.io.SolrClientCache in the project lucene-solr by Apache.
From the class StreamingTest, method testRollupStream.
@Test
public void testRollupStream() throws Exception {
  // Index ten docs spread over three "a_s" buckets:
  //   hello0 -> ids 0,1,2,9   hello3 -> ids 3,5,7,8   hello4 -> ids 4,6
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
      .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
      .add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6")
      .add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7")
      .add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8")
      .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9")
      .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc");
    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
    Bucket[] buckets = { new Bucket("a_s") };
    Metric[] metrics = { new SumMetric("a_i"), new SumMetric("a_f"), new MinMetric("a_i"), new MinMetric("a_f"), new MaxMetric("a_i"), new MaxMetric("a_f"), new MeanMetric("a_i"), new MeanMetric("a_f"), new CountMetric() };
    RollupStream rollupStream = new RollupStream(stream, buckets, metrics);
    rollupStream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(rollupStream);
    // Use a JUnit assertion, not a bare `assert` (which is a no-op unless the JVM runs with -ea).
    assertEquals(3, tuples.size());
    //Test Long and Double Sums
    Tuple tuple = tuples.get(0);
    String bucket = tuple.getString("a_s");
    Double sumi = tuple.getDouble("sum(a_i)");
    Double sumf = tuple.getDouble("sum(a_f)");
    Double mini = tuple.getDouble("min(a_i)");
    Double minf = tuple.getDouble("min(a_f)");
    Double maxi = tuple.getDouble("max(a_i)");
    Double maxf = tuple.getDouble("max(a_f)");
    Double avgi = tuple.getDouble("avg(a_i)");
    Double avgf = tuple.getDouble("avg(a_f)");
    Double count = tuple.getDouble("count(*)");
    assertEquals("hello0", bucket);
    assertEquals(17, sumi.doubleValue(), 0.001);
    assertEquals(18, sumf.doubleValue(), 0.001);
    assertEquals(0, mini.doubleValue(), 0.001);
    assertEquals(1, minf.doubleValue(), 0.001);
    assertEquals(14, maxi.doubleValue(), 0.001);
    assertEquals(10, maxf.doubleValue(), 0.001);
    assertEquals(4.25, avgi.doubleValue(), 0.001);
    assertEquals(4.5, avgf.doubleValue(), 0.001);
    assertEquals(4, count.doubleValue(), 0.001);
    tuple = tuples.get(1);
    bucket = tuple.getString("a_s");
    sumi = tuple.getDouble("sum(a_i)");
    sumf = tuple.getDouble("sum(a_f)");
    mini = tuple.getDouble("min(a_i)");
    minf = tuple.getDouble("min(a_f)");
    maxi = tuple.getDouble("max(a_i)");
    maxf = tuple.getDouble("max(a_f)");
    avgi = tuple.getDouble("avg(a_i)");
    avgf = tuple.getDouble("avg(a_f)");
    count = tuple.getDouble("count(*)");
    assertEquals("hello3", bucket);
    assertEquals(38, sumi.doubleValue(), 0.001);
    assertEquals(26, sumf.doubleValue(), 0.001);
    assertEquals(3, mini.doubleValue(), 0.001);
    assertEquals(3, minf.doubleValue(), 0.001);
    assertEquals(13, maxi.doubleValue(), 0.001);
    assertEquals(9, maxf.doubleValue(), 0.001);
    assertEquals(9.5, avgi.doubleValue(), 0.001);
    assertEquals(6.5, avgf.doubleValue(), 0.001);
    assertEquals(4, count.doubleValue(), 0.001);
    tuple = tuples.get(2);
    bucket = tuple.getString("a_s");
    sumi = tuple.getDouble("sum(a_i)");
    sumf = tuple.getDouble("sum(a_f)");
    mini = tuple.getDouble("min(a_i)");
    minf = tuple.getDouble("min(a_f)");
    maxi = tuple.getDouble("max(a_i)");
    maxf = tuple.getDouble("max(a_f)");
    avgi = tuple.getDouble("avg(a_i)");
    avgf = tuple.getDouble("avg(a_f)");
    count = tuple.getDouble("count(*)");
    assertEquals("hello4", bucket);
    // Consistent with the rest of this section: compare as double with a delta
    // (was assertEquals(15, sumi.longValue()), the lone long-based assertion here).
    assertEquals(15, sumi.doubleValue(), 0.01);
    assertEquals(11, sumf.doubleValue(), 0.01);
    assertEquals(4, mini.doubleValue(), 0.01);
    assertEquals(4, minf.doubleValue(), 0.01);
    assertEquals(11, maxi.doubleValue(), 0.01);
    assertEquals(7, maxf.doubleValue(), 0.01);
    assertEquals(7.5, avgi.doubleValue(), 0.01);
    assertEquals(5.5, avgf.doubleValue(), 0.01);
    assertEquals(2, count.doubleValue(), 0.01);
    // Re-run the rollup over the same source stream and metrics; only the bucket
    // labels and their order are verified this time.
    // NOTE(review): original comment said "Test will null metrics", but no null
    // metrics are used here — the same metrics array is passed again.
    rollupStream = new RollupStream(stream, buckets, metrics);
    rollupStream.setStreamContext(streamContext);
    tuples = getTuples(rollupStream);
    assertEquals(3, tuples.size());
    tuple = tuples.get(0);
    bucket = tuple.getString("a_s");
    assertEquals("hello0", bucket);
    tuple = tuples.get(1);
    bucket = tuple.getString("a_s");
    assertEquals("hello3", bucket);
    tuple = tuples.get(2);
    bucket = tuple.getString("a_s");
    assertEquals("hello4", bucket);
    // Test with a null value in the grouping field: doc 12 has no a_s value,
    // so the rollup should produce an extra "NULL" bucket.
    new UpdateRequest().add(id, "12", "a_s", null, "a_i", "14", "a_f", "10").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "qt", "/export");
    stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
    Bucket[] buckets1 = { new Bucket("a_s") };
    Metric[] metrics1 = { new SumMetric("a_i"), new SumMetric("a_f"), new MinMetric("a_i"), new MinMetric("a_f"), new MaxMetric("a_i"), new MaxMetric("a_f"), new MeanMetric("a_i"), new MeanMetric("a_f"), new CountMetric() };
    rollupStream = new RollupStream(stream, buckets1, metrics1);
    rollupStream.setStreamContext(streamContext);
    tuples = getTuples(rollupStream);
    //Check that we've got the extra NULL bucket
    assertEquals(4, tuples.size());
    tuple = tuples.get(0);
    assertEquals("NULL", tuple.getString("a_s"));
    sumi = tuple.getDouble("sum(a_i)");
    sumf = tuple.getDouble("sum(a_f)");
    mini = tuple.getDouble("min(a_i)");
    minf = tuple.getDouble("min(a_f)");
    maxi = tuple.getDouble("max(a_i)");
    maxf = tuple.getDouble("max(a_f)");
    avgi = tuple.getDouble("avg(a_i)");
    avgf = tuple.getDouble("avg(a_f)");
    count = tuple.getDouble("count(*)");
    // Single-doc bucket: every metric collapses to doc 12's values (a_i=14, a_f=10).
    assertEquals(14, sumi.doubleValue(), 0.01);
    assertEquals(10, sumf.doubleValue(), 0.01);
    assertEquals(14, mini.doubleValue(), 0.01);
    assertEquals(10, minf.doubleValue(), 0.01);
    assertEquals(14, maxi.doubleValue(), 0.01);
    assertEquals(10, maxf.doubleValue(), 0.01);
    assertEquals(14, avgi.doubleValue(), 0.01);
    assertEquals(10, avgf.doubleValue(), 0.01);
    assertEquals(1, count.doubleValue(), 0.01);
  } finally {
    solrClientCache.close();
  }
}
Usage example of org.apache.solr.client.solrj.io.SolrClientCache in the project lucene-solr by Apache.
From the class StreamingTest, method testParallelRankStream.
@Test
public void testParallelRankStream() throws Exception {
  // Ten docs; a_i runs 0..10 (no a_i==1), so a rank of the top 11 by a_i desc
  // yields every doc except id 1 — which was never indexed.
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
      .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "5", "a_s", "hello1", "a_i", "5", "a_f", "1")
      .add(id, "6", "a_s", "hello1", "a_i", "6", "a_f", "1")
      .add(id, "7", "a_s", "hello1", "a_i", "7", "a_f", "1")
      .add(id, "8", "a_s", "hello1", "a_i", "8", "a_f", "1")
      .add(id, "9", "a_s", "hello1", "a_i", "9", "a_f", "1")
      .add(id, "10", "a_s", "hello1", "a_i", "10", "a_f", "1")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  StreamContext context = new StreamContext();
  SolrClientCache clientCache = new SolrClientCache();
  context.setSolrClientCache(clientCache);
  try {
    SolrParams searchParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i");
    CloudSolrStream searchStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, searchParams);
    // Rank the top 11 by a_i descending, run in parallel across workers.
    RankStream ranked = new RankStream(searchStream, 11, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
    ParallelStream parallel = parallelStream(ranked, new FieldComparator("a_i", ComparatorOrder.DESCENDING));
    attachStreamFactory(parallel);
    parallel.setStreamContext(context);
    List<Tuple> results = getTuples(parallel);
    assertEquals(10, results.size());
    assertOrder(results, 10, 9, 8, 7, 6, 5, 4, 3, 2, 0);
  } finally {
    clientCache.close();
  }
}
Usage example of org.apache.solr.client.solrj.io.SolrClientCache in the project lucene-solr by Apache.
From the class StreamingTest, method testSubFacetStream.
@Test
public void testSubFacetStream() throws Exception {
  // Ten docs faceted on two levels: level1_s (hello0/hello3/hello4) x level2_s (a/b).
  new UpdateRequest()
      .add(id, "0", "level1_s", "hello0", "level2_s", "a", "a_i", "0", "a_f", "1")
      .add(id, "2", "level1_s", "hello0", "level2_s", "a", "a_i", "2", "a_f", "2")
      .add(id, "3", "level1_s", "hello3", "level2_s", "a", "a_i", "3", "a_f", "3")
      .add(id, "4", "level1_s", "hello4", "level2_s", "a", "a_i", "4", "a_f", "4")
      .add(id, "1", "level1_s", "hello0", "level2_s", "b", "a_i", "1", "a_f", "5")
      .add(id, "5", "level1_s", "hello3", "level2_s", "b", "a_i", "10", "a_f", "6")
      .add(id, "6", "level1_s", "hello4", "level2_s", "b", "a_i", "11", "a_f", "7")
      .add(id, "7", "level1_s", "hello3", "level2_s", "b", "a_i", "12", "a_f", "8")
      .add(id, "8", "level1_s", "hello3", "level2_s", "b", "a_i", "13", "a_f", "9")
      .add(id, "9", "level1_s", "hello0", "level2_s", "b", "a_i", "14", "a_f", "10")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_i,a_f");
    Bucket[] buckets = { new Bucket("level1_s"), new Bucket("level2_s") };
    Metric[] metrics = { new SumMetric("a_i"), new CountMetric() };
    // First pass: both levels sorted by sum(a_i) descending.
    FieldComparator[] sorts = { new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING), new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING) };
    FacetStream facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
    facetStream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(facetStream);
    assertEquals(6, tuples.size());
    assertSubFacetTuple(tuples.get(0), "hello3", "b", 35, 3);
    assertSubFacetTuple(tuples.get(1), "hello0", "b", 15, 2);
    assertSubFacetTuple(tuples.get(2), "hello4", "b", 11, 1);
    assertSubFacetTuple(tuples.get(3), "hello4", "a", 4, 1);
    assertSubFacetTuple(tuples.get(4), "hello3", "a", 3, 1);
    assertSubFacetTuple(tuples.get(5), "hello0", "a", 2, 2);
    // Second pass: same buckets/metrics, but sorted by the bucket labels descending.
    sorts[0] = new FieldComparator("level1_s", ComparatorOrder.DESCENDING);
    sorts[1] = new FieldComparator("level2_s", ComparatorOrder.DESCENDING);
    facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
    facetStream.setStreamContext(streamContext);
    tuples = getTuples(facetStream);
    assertEquals(6, tuples.size());
    assertSubFacetTuple(tuples.get(0), "hello4", "b", 11, 1);
    assertSubFacetTuple(tuples.get(1), "hello4", "a", 4, 1);
    assertSubFacetTuple(tuples.get(2), "hello3", "b", 35, 3);
    assertSubFacetTuple(tuples.get(3), "hello3", "a", 3, 1);
    assertSubFacetTuple(tuples.get(4), "hello0", "b", 15, 2);
    assertSubFacetTuple(tuples.get(5), "hello0", "a", 2, 2);
  } finally {
    solrClientCache.close();
  }
}

/**
 * Verifies one two-level facet tuple: its bucket labels, sum(a_i), and count(*).
 * Extracted to replace twelve near-identical copy-pasted assertion groups that
 * had drifted in style (some unboxed count implicitly, some compared longValue()).
 */
private static void assertSubFacetTuple(Tuple tuple, String expectedLevel1, String expectedLevel2, long expectedSum, double expectedCount) {
  assertEquals(expectedLevel1, tuple.getString("level1_s"));
  assertEquals(expectedLevel2, tuple.getString("level2_s"));
  assertEquals(expectedSum, tuple.getDouble("sum(a_i)").longValue());
  assertEquals(expectedCount, tuple.getDouble("count(*)").doubleValue(), 0.1);
}
Usage example of org.apache.solr.client.solrj.io.SolrClientCache in the project lucene-solr by Apache.
From the class StreamingTest, method streamTests.
@Test
public void streamTests() throws Exception {
  // Five docs, ids 0-4, with a_i == id and varying a_f, for basic sort checks.
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
      .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
  StreamContext context = new StreamContext();
  SolrClientCache clientCache = new SolrClientCache();
  context.setSolrClientCache(clientCache);
  try {
    // Descending sort on a_i.
    List<Tuple> results = fetchTuples(mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i desc"), context);
    assertEquals(5, results.size());
    assertOrder(results, 4, 3, 2, 1, 0);
    // Ascending sort on a_i.
    results = fetchTuples(mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc"), context);
    assertEquals(5, results.size());
    assertOrder(results, 0, 1, 2, 3, 4);
    // Compound sort: a_f ascending, ties broken by a_i descending.
    results = fetchTuples(mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i desc"), context);
    assertEquals(5, results.size());
    assertOrder(results, 2, 0, 1, 3, 4);
    // Compound sort: a_f ascending, ties broken by a_i ascending.
    results = fetchTuples(mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc"), context);
    assertEquals(5, results.size());
    assertOrder(results, 0, 2, 1, 3, 4);
  } finally {
    clientCache.close();
  }
}

/** Opens a CloudSolrStream for the given params, attaches the context, and drains all tuples. */
private List<Tuple> fetchTuples(SolrParams params, StreamContext context) throws Exception {
  CloudSolrStream cloudStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, params);
  cloudStream.setStreamContext(context);
  return getTuples(cloudStream);
}
Usage example of org.apache.solr.client.solrj.io.SolrClientCache in the project lucene-solr by Apache.
From the class StreamingTest, method tryWithQt.
// We should be getting the exact same thing back with both the export and select
// handlers, so run the identical field-type checks against whichever qt is passed in.
private void tryWithQt(String which) throws IOException {
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  SolrParams sParams = StreamingTest.mapParams("q", "*:*", "qt", which, "fl", "id,i_sing,i_multi,l_sing,l_multi,f_sing,f_multi,d_sing,d_multi,dt_sing,dt_multi,s_sing,s_multi,b_sing,b_multi", "sort", "i_sing asc");
  try (CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams)) {
    stream.setStreamContext(streamContext);
    // All I really care about is that every field comes back with the right type;
    // the specific values are the fixtures indexed elsewhere in this class.
    Tuple tuple = getTuple(stream);
    assertEquals("Integers should be returned", 11, tuple.getLong("i_sing").longValue());
    assertEquals("MV should be returned for i_multi", 12, tuple.getLongs("i_multi").get(0).longValue());
    assertEquals("MV should be returned for i_multi", 13, tuple.getLongs("i_multi").get(1).longValue());
    assertEquals("longs should be returned", 14, tuple.getLong("l_sing").longValue());
    assertEquals("MV should be returned for l_multi", 15, tuple.getLongs("l_multi").get(0).longValue());
    assertEquals("MV should be returned for l_multi", 16, tuple.getLongs("l_multi").get(1).longValue());
    assertEquals("floats should be returned", 1.7, tuple.getDouble("f_sing").doubleValue(), 0.001);
    assertEquals("MV should be returned for f_multi", 1.8, tuple.getDoubles("f_multi").get(0).doubleValue(), 0.001);
    assertEquals("MV should be returned for f_multi", 1.9, tuple.getDoubles("f_multi").get(1).doubleValue(), 0.001);
    assertEquals("doubles should be returned", 1.2, tuple.getDouble("d_sing").doubleValue(), 0.001);
    assertEquals("MV should be returned for d_multi", 1.21, tuple.getDoubles("d_multi").get(0).doubleValue(), 0.001);
    assertEquals("MV should be returned for d_multi", 1.22, tuple.getDoubles("d_multi").get(1).doubleValue(), 0.001);
    // assertEquals over assertTrue(a.equals(b)): on failure it reports both values.
    assertEquals("Strings should be returned", "single", tuple.getString("s_sing"));
    assertEquals("MV should be returned for s_multi", "sm1", tuple.getStrings("s_multi").get(0));
    assertEquals("MV should be returned for s_multi", "sm2", tuple.getStrings("s_multi").get(1));
    assertEquals("Dates should be returned as Strings", "1980-01-02T11:11:33.890Z", tuple.getString("dt_sing"));
    assertEquals("MV dates should be returned as Strings for dt_multi", "1981-03-04T01:02:03.780Z", tuple.getStrings("dt_multi").get(0));
    assertEquals("MV dates should be returned as Strings for dt_multi", "1981-05-24T04:05:06.990Z", tuple.getStrings("dt_multi").get(1));
    // Also test native type conversion
    Date dt = new Date(Instant.parse("1980-01-02T11:11:33.890Z").toEpochMilli());
    assertEquals("Dates should be returned as Dates", dt, tuple.getDate("dt_sing"));
    dt = new Date(Instant.parse("1981-03-04T01:02:03.780Z").toEpochMilli());
    assertEquals("MV dates should be returned as Dates for dt_multi", dt, tuple.getDates("dt_multi").get(0));
    dt = new Date(Instant.parse("1981-05-24T04:05:06.990Z").toEpochMilli());
    assertEquals("MV dates should be returned as Dates for dt_multi", dt, tuple.getDates("dt_multi").get(1));
    assertTrue("Booleans should be returned", tuple.getBool("b_sing"));
    assertFalse("MV boolean should be returned for b_multi", tuple.getBools("b_multi").get(0));
    assertTrue("MV boolean should be returned for b_multi", tuple.getBools("b_multi").get(1));
  } finally {
    solrClientCache.close();
  }
}
Aggregations