Example usage of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr (Apache): class StreamingTest, method testParallelUniqueStream.
@Test
public void testParallelUniqueStream() throws Exception {
    // Index nine documents; several share an a_f value, so a unique stream
    // keyed on a_f must collapse them down to the five distinct values.
    new UpdateRequest()
        .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
        .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
        .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
        .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
        .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
        .add(id, "5", "a_s", "hello1", "a_i", "10", "a_f", "1")
        .add(id, "6", "a_s", "hello1", "a_i", "11", "a_f", "5")
        .add(id, "7", "a_s", "hello1", "a_i", "12", "a_f", "5")
        .add(id, "8", "a_s", "hello1", "a_i", "13", "a_f", "4")
        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamContext context = new StreamContext();
    SolrClientCache clientCache = new SolrClientCache();
    context.setSolrClientCache(clientCache);
    try {
        // partitionKeys=a_f routes equal keys to the same worker so uniqueness holds globally.
        SolrParams params = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc", "partitionKeys", "a_f");
        CloudSolrStream searchStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, params);
        UniqueStream uniqueStream = new UniqueStream(searchStream, new FieldEqualitor("a_f"));
        ParallelStream parallel = parallelStream(uniqueStream, new FieldComparator("a_f", ComparatorOrder.ASCENDING));
        attachStreamFactory(parallel);
        parallel.setStreamContext(context);
        List<Tuple> tuples = getTuples(parallel);
        // Five distinct a_f values survive deduplication.
        assertEquals(5, tuples.size());
        assertOrder(tuples, 0, 1, 3, 4, 6);
        //Test the eofTuples
        Map<String, Tuple> eofTuples = parallel.getEofTuples();
        //There should be an EOF tuple for each worker.
        assertEquals(numWorkers, eofTuples.size());
    } finally {
        clientCache.close();
    }
}
Example usage of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr (Apache): class StreamingTest, method testReducerStream.
@Test
public void testReducerStream() throws Exception {
    // Ten docs across three a_s groups: hello0 -> {0,2,1,9}, hello3 -> {3,5,7,8}, hello4 -> {4,6}.
    new UpdateRequest()
        .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
        .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
        .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
        .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
        .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
        .add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6")
        .add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7")
        .add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8")
        .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9")
        .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamContext context = new StreamContext();
    SolrClientCache clientCache = new SolrClientCache();
    context.setSolrClientCache(clientCache);
    try {
        //Test with spaces in the parameter lists.
        SolrParams queryParams = mapParams("q", "*:*", "fl", "id,a_s, a_i, a_f", "sort", "a_s asc , a_f asc");
        CloudSolrStream searchStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, queryParams);
        // Group on a_s via an equalitor; each group holds up to 5 members ordered by a_f ascending.
        ReducerStream reducer = new ReducerStream(searchStream, new FieldEqualitor("a_s"), new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5));
        reducer.setStreamContext(context);
        List<Tuple> tuples = getTuples(reducer);
        assertEquals(3, tuples.size());
        assertMaps(tuples.get(0).getMaps("group"), 0, 2, 1, 9);
        assertMaps(tuples.get(1).getMaps("group"), 3, 5, 7, 8);
        assertMaps(tuples.get(2).getMaps("group"), 4, 6);
        //Test with spaces in the parameter lists using a comparator
        queryParams = mapParams("q", "*:*", "fl", "id,a_s, a_i, a_f", "sort", "a_s asc , a_f asc");
        searchStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, queryParams);
        // Same grouping via a comparator, but group members now ordered by a_f descending.
        reducer = new ReducerStream(searchStream, new FieldComparator("a_s", ComparatorOrder.ASCENDING), new GroupOperation(new FieldComparator("a_f", ComparatorOrder.DESCENDING), 5));
        reducer.setStreamContext(context);
        tuples = getTuples(reducer);
        assertEquals(3, tuples.size());
        assertMaps(tuples.get(0).getMaps("group"), 9, 1, 2, 0);
        assertMaps(tuples.get(1).getMaps("group"), 8, 7, 5, 3);
        assertMaps(tuples.get(2).getMaps("group"), 6, 4);
    } finally {
        clientCache.close();
    }
}
Example usage of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr (Apache): class StreamingTest, method testFacetStream.
@Test
public void testFacetStream() throws Exception {
    // Ten docs across three a_s buckets:
    //   hello0: a_i {0,2,1,14}  a_f {1,2,5,10}   -> sum(a_i)=17, sum(a_f)=18, count=4
    //   hello3: a_i {3,10,12,13} a_f {3,6,8,9}   -> sum(a_i)=38, sum(a_f)=26, count=4
    //   hello4: a_i {4,11}       a_f {4,7}       -> sum(a_i)=15, sum(a_f)=11, count=2
    new UpdateRequest()
        .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
        .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
        .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
        .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
        .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
        .add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6")
        .add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7")
        .add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8")
        .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9")
        .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    try {
        SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc");
        Bucket[] buckets = { new Bucket("a_s") };
        Metric[] metrics = { new SumMetric("a_i"), new SumMetric("a_f"), new MinMetric("a_i"), new MinMetric("a_f"), new MaxMetric("a_i"), new MaxMetric("a_f"), new MeanMetric("a_i"), new MeanMetric("a_f"), new CountMetric() };
        FieldComparator[] sorts = { new FieldComparator("sum(a_i)", ComparatorOrder.ASCENDING) };

        // Ascending by sum(a_i): hello4 (15) < hello0 (17) < hello3 (38).
        FacetStream facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
        // FIX: the stream context (and with it the SolrClientCache) was only attached to
        // the last two streams of this test; attach it to every stream consistently.
        facetStream.setStreamContext(streamContext);
        List<Tuple> tuples = getTuples(facetStream);
        // FIX: was a bare Java `assert`, which silently passes when -ea is not enabled.
        assertEquals(3, tuples.size());
        assertFacetMetrics(tuples.get(0), "hello4", 15, 11, 4, 4, 11, 7, 7.5, 5.5, 2);
        assertFacetMetrics(tuples.get(1), "hello0", 17, 18, 0, 1, 14, 10, 4.25, 4.5, 4);
        assertFacetMetrics(tuples.get(2), "hello3", 38, 26, 3, 3, 13, 9, 9.5, 6.5, 4);

        //Reverse the Sort: descending by sum(a_i).
        sorts[0] = new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING);
        facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
        facetStream.setStreamContext(streamContext);
        tuples = getTuples(facetStream);
        assertEquals(3, tuples.size());
        assertFacetMetrics(tuples.get(0), "hello3", 38, 26, 3, 3, 13, 9, 9.5, 6.5, 4);
        assertFacetMetrics(tuples.get(1), "hello0", 17, 18, 0, 1, 14, 10, 4.25, 4.5, 4);
        assertFacetMetrics(tuples.get(2), "hello4", 15, 11, 4, 4, 11, 7, 7.5, 5.5, 2);

        //Test index sort, descending on the bucket field itself.
        sorts[0] = new FieldComparator("a_s", ComparatorOrder.DESCENDING);
        facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
        facetStream.setStreamContext(streamContext);
        tuples = getTuples(facetStream);
        assertEquals(3, tuples.size());
        assertFacetMetrics(tuples.get(0), "hello4", 15, 11, 4, 4, 11, 7, 7.5, 5.5, 2);
        assertFacetMetrics(tuples.get(1), "hello3", 38, 26, 3, 3, 13, 9, 9.5, 6.5, 4);
        assertFacetMetrics(tuples.get(2), "hello0", 17, 18, 0, 1, 14, 10, 4.25, 4.5, 4);

        //Test index sort, ascending on the bucket field.
        sorts[0] = new FieldComparator("a_s", ComparatorOrder.ASCENDING);
        facetStream = new FacetStream(zkHost, COLLECTIONORALIAS, sParamsA, buckets, metrics, sorts, 100);
        facetStream.setStreamContext(streamContext);
        tuples = getTuples(facetStream);
        assertEquals(3, tuples.size());
        assertFacetMetrics(tuples.get(0), "hello0", 17, 18, 0, 1, 14, 10, 4.25, 4.5, 4);
        assertFacetMetrics(tuples.get(1), "hello3", 38, 26, 3, 3, 13, 9, 9.5, 6.5, 4);
        assertFacetMetrics(tuples.get(2), "hello4", 15, 11, 4, 4, 11, 7, 7.5, 5.5, 2);
    } finally {
        solrClientCache.close();
    }
}

/**
 * Asserts the bucket label and all nine aggregate metrics of a single facet tuple.
 * Replaces the previous mix of bare {@code assert}, {@code assertTrue(x == y)} and
 * {@code assertEquals} calls with inconsistent deltas; all numeric comparisons now
 * use a uniform 0.01 tolerance.
 *
 * @param tuple  the facet tuple under test
 * @param bucket expected value of the a_s bucket field
 * @param sumI/sumF/minI/minF/maxI/maxF/avgI/avgF/count expected metric values
 */
private void assertFacetMetrics(Tuple tuple, String bucket, double sumI, double sumF, double minI, double minF, double maxI, double maxF, double avgI, double avgF, double count) {
    assertEquals(bucket, tuple.getString("a_s"));
    assertEquals(sumI, tuple.getDouble("sum(a_i)"), 0.01);
    assertEquals(sumF, tuple.getDouble("sum(a_f)"), 0.01);
    assertEquals(minI, tuple.getDouble("min(a_i)"), 0.01);
    assertEquals(minF, tuple.getDouble("min(a_f)"), 0.01);
    assertEquals(maxI, tuple.getDouble("max(a_i)"), 0.01);
    assertEquals(maxF, tuple.getDouble("max(a_f)"), 0.01);
    assertEquals(avgI, tuple.getDouble("avg(a_i)"), 0.01);
    assertEquals(avgF, tuple.getDouble("avg(a_f)"), 0.01);
    assertEquals(count, tuple.getDouble("count(*)"), 0.01);
}
Example usage of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr (Apache): class StreamingTest, method checkReturnValsForEmpty.
// Streams every document via /export and verifies that the given fields are
// absent (null) for docs listed in voidIds and present for all other docs.
private void checkReturnValsForEmpty(String[] fields) throws IOException {
    Set<String> voidIdSet = new HashSet<>(Arrays.asList(voidIds));
    // Field list is always "id" followed by the requested fields.
    StringBuilder fieldList = new StringBuilder("id");
    for (String field : fields) {
        fieldList.append(',').append(field);
    }
    SolrParams exportParams = mapParams("q", "*:*", "qt", "/export", "fl", fieldList.toString(), "sort", "id asc");
    StreamContext context = new StreamContext();
    SolrClientCache clientCache = new SolrClientCache();
    context.setSolrClientCache(clientCache);
    try (CloudSolrStream exportStream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, exportParams)) {
        exportStream.setStreamContext(context);
        List<Tuple> tuples = getTuples(exportStream);
        assertEquals("There should be exactly 32 responses returned", 32, tuples.size());
        for (Tuple tuple : tuples) {
            String id = tuple.getString("id");
            // Docs in the void set must expose no value for any requested field;
            // every other doc must expose a value for all of them.
            boolean expectVoid = voidIdSet.contains(id);
            for (String f : fields) {
                if (expectVoid) {
                    assertNull("Should have returned a void for field " + f + " doc " + id, tuple.get(f));
                } else {
                    assertNotNull("Should have returned a value for field " + f + " doc " + id, tuple.get(f));
                }
            }
        }
    } finally {
        clientCache.close();
    }
}
Example usage of org.apache.solr.client.solrj.io.SolrClientCache in project lucene-solr (Apache): class StreamingTest, method testExceptionStream.
@Test
@Ignore
// Verifies that ExceptionStream converts handler-side failures into a single
// tuple flagged EOF + EXCEPTION instead of propagating a thrown exception.
// Currently @Ignore'd; the asserted message substrings depend on server wording.
public void testExceptionStream() throws Exception {
// Seed the collection with ten documents so the queries below run against real data.
new UpdateRequest().add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1").add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2").add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3").add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4").add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5").add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6").add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7").add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8").add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9").add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
StreamContext streamContext = new StreamContext();
SolrClientCache solrClientCache = new SolrClientCache();
streamContext.setSolrClientCache(solrClientCache);
//Test an error that originates from the /select handler
try {
// "blah" is not a field in the schema, so the sort must fail server-side.
SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc");
CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
ExceptionStream estream = new ExceptionStream(stream);
estream.setStreamContext(streamContext);
Tuple t = getTuple(estream);
// The failure surfaces as one tuple that is both EOF and EXCEPTION.
assertTrue(t.EOF);
assertTrue(t.EXCEPTION);
// NOTE(review): exact message text depends on the /select handler's wording — confirm if re-enabled.
assertTrue(t.getException().contains("sort param field can't be found: blah"));
//Test an error that originates from the /export handler
sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt", "/export");
stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
estream = new ExceptionStream(stream);
estream.setStreamContext(streamContext);
t = getTuple(estream);
assertTrue(t.EOF);
assertTrue(t.EXCEPTION);
//The /export handler will pass through a real exception.
assertTrue(t.getException().contains("undefined field:"));
} finally {
// Release cached Solr clients even when an assertion above fails.
solrClientCache.close();
}
}
Aggregations