Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the lucene-solr project (Apache), taken from the class StreamExpressionTest, method testConvertEvaluator.
@Test
public void testConvertEvaluator() throws Exception {
// Index two documents whose miles_i values feed the convert() evaluator below.
UpdateRequest updateRequest = new UpdateRequest();
updateRequest.add(id, "1", "miles_i", "50");
updateRequest.add(id, "2", "miles_i", "70");
updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
//Test annotating tuple: convert() with a constant operand.
List<Tuple> tuples = sendStreamExpression(url, "select(calc(), convert(miles, kilometers, 10) as kilometers)");
assertTrue(tuples.size() == 1);
double d = (double) tuples.get(0).get("kilometers");
assertTrue(d == (double) (10 * 1.61));
// convert() applied to a field read from each search() tuple.
tuples = sendStreamExpression(url, "select(search(" + COLLECTIONORALIAS + ", q=\"*:*\", sort=\"miles_i asc\", fl=\"miles_i\"), convert(miles, kilometers, miles_i) as kilometers)");
assertTrue(tuples.size() == 2);
d = (double) tuples.get(0).get("kilometers");
assertTrue(d == (double) (50 * 1.61));
d = (double) tuples.get(1).get("kilometers");
assertTrue(d == (double) (70 * 1.61));
// convert() inside a parallel() wrapper with two workers.
tuples = sendStreamExpression(url, "parallel(" + COLLECTIONORALIAS + ", workers=2, sort=\"miles_i asc\", select(search(" + COLLECTIONORALIAS + ", q=\"*:*\", partitionKeys=miles_i, sort=\"miles_i asc\", fl=\"miles_i\"), convert(miles, kilometers, miles_i) as kilometers))");
assertTrue(tuples.size() == 2);
d = (double) tuples.get(0).get("kilometers");
assertTrue(d == (double) (50 * 1.61));
d = (double) tuples.get(1).get("kilometers");
assertTrue(d == (double) (70 * 1.61));
// convert() applied to an aggregate produced by stats().
tuples = sendStreamExpression(url, "select(stats(" + COLLECTIONORALIAS + ", q=\"*:*\", sum(miles_i)), convert(miles, kilometers, sum(miles_i)) as kilometers)");
assertTrue(tuples.size() == 1);
d = (double) tuples.get(0).get("kilometers");
assertTrue(d == (double) (120 * 1.61));
}

/**
 * Sends a streaming expression to the /stream handler at the given collection URL
 * and returns the resulting tuples.
 *
 * @param url  base URL of the collection (including the collection segment)
 * @param expr the streaming expression to evaluate
 * @return the tuples emitted by the stream
 */
private List<Tuple> sendStreamExpression(String url, String expr) throws Exception {
ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
paramsLoc.set("expr", expr);
paramsLoc.set("qt", "/stream");
TupleStream solrStream = new SolrStream(url, paramsLoc);
solrStream.setStreamContext(new StreamContext());
return getTuples(solrStream);
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the lucene-solr project (Apache), taken from the class StreamExpressionTest, method testParallelShuffleStream.
@Test
public void testParallelShuffleStream() throws Exception {
// Index the fixture documents. Ids 8-55 carry identical field values and only
// exist to give the shuffle something to stream, so they are added in a loop.
UpdateRequest update = new UpdateRequest()
    .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
    .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
    .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
    .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
    .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
    .add(id, "5", "a_s", "hello1", "a_i", "10", "a_f", "1")
    .add(id, "6", "a_s", "hello1", "a_i", "11", "a_f", "5")
    .add(id, "7", "a_s", "hello1", "a_i", "12", "a_f", "5");
for (int i = 8; i <= 55; i++) {
    update.add(id, Integer.toString(i), "a_s", "hello1", "a_i", "13", "a_f", "4");
}
update.add(id, "56", "a_s", "hello1", "a_i", "13", "a_f", "1000");
update.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
StreamContext streamContext = new StreamContext();
SolrClientCache solrClientCache = new SolrClientCache();
streamContext.setSolrClientCache(solrClientCache);
String zkHost = cluster.getZkServer().getZkAddress();
StreamFactory streamFactory = new StreamFactory()
    .withCollectionZkHost(COLLECTIONORALIAS, zkHost)
    .withFunctionName("shuffle", ShuffleStream.class)
    .withFunctionName("unique", UniqueStream.class)
    .withFunctionName("parallel", ParallelStream.class);
try {
// Was previously hard-coded to "collection1"; use COLLECTIONORALIAS like every
// other expression in this test class so the test works regardless of the alias.
ParallelStream pstream = (ParallelStream) streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", unique(shuffle(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", partitionKeys=\"a_f\"), over=\"a_f\"), workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_f asc\")");
pstream.setStreamFactory(streamFactory);
pstream.setStreamContext(streamContext);
List<Tuple> tuples = getTuples(pstream);
assert (tuples.size() == 6);
assertOrder(tuples, 0, 1, 3, 4, 6, 56);
//Test the eofTuples
Map<String, Tuple> eofTuples = pstream.getEofTuples();
//There should be an EOF tuple for each worker.
assert (eofTuples.size() == 2);
assert (pstream.toExpression(streamFactory).toString().contains("shuffle"));
} finally {
solrClientCache.close();
}
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the lucene-solr project (Apache), taken from the class StreamExpressionTest, method testParallelMergeStream.
@Test
public void testParallelMergeStream() throws Exception {
// Seed ten documents spread across two disjoint id sets that the two
// merged search() streams below will each select.
UpdateRequest seedDocs = new UpdateRequest();
seedDocs.add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0");
seedDocs.add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0");
seedDocs.add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3");
seedDocs.add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4");
seedDocs.add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1");
seedDocs.add(id, "5", "a_s", "hello0", "a_i", "10", "a_f", "0");
seedDocs.add(id, "6", "a_s", "hello2", "a_i", "8", "a_f", "0");
seedDocs.add(id, "7", "a_s", "hello3", "a_i", "7", "a_f", "3");
seedDocs.add(id, "8", "a_s", "hello4", "a_i", "11", "a_f", "4");
seedDocs.add(id, "9", "a_s", "hello1", "a_i", "100", "a_f", "1");
seedDocs.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
String zkAddress = cluster.getZkServer().getZkAddress();
StreamFactory factory = new StreamFactory()
    .withCollectionZkHost(COLLECTIONORALIAS, zkAddress)
    .withFunctionName("search", CloudSolrStream.class)
    .withFunctionName("unique", UniqueStream.class)
    .withFunctionName("top", RankStream.class)
    .withFunctionName("group", ReducerStream.class)
    .withFunctionName("merge", MergeStream.class)
    .withFunctionName("parallel", ParallelStream.class);
StreamContext context = new StreamContext();
SolrClientCache clientCache = new SolrClientCache();
context.setSolrClientCache(clientCache);
try {
//Test ascending
ParallelStream parallel = (ParallelStream) factory.constructStream("parallel(" + COLLECTIONORALIAS + ", merge(search(" + COLLECTIONORALIAS + ", q=\"id:(4 1 8 7 9)\", fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\"), search(" + COLLECTIONORALIAS + ", q=\"id:(0 2 3 6)\", fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\"), on=\"a_i asc\"), workers=\"2\", zkHost=\"" + zkAddress + "\", sort=\"a_i asc\")");
parallel.setStreamContext(context);
List<Tuple> results = getTuples(parallel);
assert (results.size() == 9);
assertOrder(results, 0, 1, 2, 3, 4, 7, 6, 8, 9);
//Test descending
parallel = (ParallelStream) factory.constructStream("parallel(" + COLLECTIONORALIAS + ", merge(search(" + COLLECTIONORALIAS + ", q=\"id:(4 1 8 9)\", fl=\"id,a_s,a_i\", sort=\"a_i desc\", partitionKeys=\"a_i\"), search(" + COLLECTIONORALIAS + ", q=\"id:(0 2 3 6)\", fl=\"id,a_s,a_i\", sort=\"a_i desc\", partitionKeys=\"a_i\"), on=\"a_i desc\"), workers=\"2\", zkHost=\"" + zkAddress + "\", sort=\"a_i desc\")");
parallel.setStreamContext(context);
results = getTuples(parallel);
assert (results.size() == 8);
assertOrder(results, 9, 8, 6, 4, 3, 2, 1, 0);
} finally {
clientCache.close();
}
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the lucene-solr project (Apache), taken from the class StreamExpressionTest, method testUniqueStream.
@Test
public void testUniqueStream() throws Exception {
// Seed five documents; two of them share a_f=0 so unique(over="a_f")
// collapses them while unique(over="a_f, a_i") keeps all five.
UpdateRequest seedDocs = new UpdateRequest();
seedDocs.add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0");
seedDocs.add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0");
seedDocs.add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3");
seedDocs.add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4");
seedDocs.add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1");
seedDocs.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
StreamExpression parsed;
TupleStream uniqueStream;
List<Tuple> results;
StreamContext context = new StreamContext();
SolrClientCache clientCache = new SolrClientCache();
context.setSolrClientCache(clientCache);
StreamFactory streamFactory = new StreamFactory()
    .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
    .withFunctionName("search", CloudSolrStream.class)
    .withFunctionName("unique", UniqueStream.class);
try {
// Basic test
parsed = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f\")");
uniqueStream = new UniqueStream(parsed, streamFactory);
uniqueStream.setStreamContext(context);
results = getTuples(uniqueStream);
assert (results.size() == 4);
assertOrder(results, 0, 1, 3, 4);
// Basic test desc
parsed = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc, a_i desc\"), over=\"a_f\")");
uniqueStream = new UniqueStream(parsed, streamFactory);
uniqueStream.setStreamContext(context);
results = getTuples(uniqueStream);
assert (results.size() == 4);
assertOrder(results, 4, 3, 1, 2);
// Basic w/multi comp
parsed = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
uniqueStream = new UniqueStream(parsed, streamFactory);
uniqueStream.setStreamContext(context);
results = getTuples(uniqueStream);
assert (results.size() == 5);
assertOrder(results, 0, 2, 1, 3, 4);
// full factory w/multi comp
uniqueStream = streamFactory.constructStream("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
uniqueStream.setStreamContext(context);
results = getTuples(uniqueStream);
assert (results.size() == 5);
assertOrder(results, 0, 2, 1, 3, 4);
} finally {
clientCache.close();
}
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the lucene-solr project (Apache), taken from the class StreamExpressionTest, method testConvolution.
@Test
public void testConvolution() throws Exception {
// Index 250 docs bucketed by year so the timeseries() yields per-year sums:
// 2016: 50 docs @400, 2015: 50 @300, 2014: 50 @500, 2013: 100 @100.
// (Replaces the original while(++i) loops with equivalent explicit for loops.)
UpdateRequest updateRequest = new UpdateRequest();
for (int i = 1; i <= 50; i++) {
updateRequest.add(id, "id_" + i, "test_dt", getDateString("2016", "5", "1"), "price_f", "400.00");
}
for (int i = 51; i <= 100; i++) {
updateRequest.add(id, "id_" + i, "test_dt", getDateString("2015", "5", "1"), "price_f", "300.0");
}
for (int i = 101; i <= 150; i++) {
updateRequest.add(id, "id_" + i, "test_dt", getDateString("2014", "5", "1"), "price_f", "500.0");
}
for (int i = 151; i <= 250; i++) {
updateRequest.add(id, "id_" + i, "test_dt", getDateString("2013", "5", "1"), "price_f", "100.00");
}
updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
// a = the raw timeseries, b = the same series with count(*) doubled;
// conv(c, d) convolves the two count columns.
String expr = "timeseries(" + COLLECTIONORALIAS + ", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " + "end=\"2016-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "count(*), sum(price_f), max(price_f), min(price_f))";
String cexpr = "let(a=" + expr + ", b=select(" + expr + ",mult(2, count(*)) as nvalue), c=col(a, count(*)), d=col(b, nvalue), tuple(colc=c, cold=d, conv=conv(c,d)))";
ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
paramsLoc.set("expr", cexpr);
paramsLoc.set("qt", "/stream");
String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
TupleStream solrStream = new SolrStream(url, paramsLoc);
StreamContext context = new StreamContext();
solrStream.setStreamContext(context);
List<Tuple> tuples = getTuples(solrStream);
assertTrue(tuples.size() == 1);
// Convolution of two length-4 series has length 4 + 4 - 1 = 7.
List<Number> convolution = (List<Number>) (tuples.get(0)).get("conv");
assertTrue(convolution.size() == 7);
assertTrue(convolution.get(0).equals(20000D));
assertTrue(convolution.get(1).equals(20000D));
assertTrue(convolution.get(2).equals(25000D));
assertTrue(convolution.get(3).equals(30000D));
assertTrue(convolution.get(4).equals(15000D));
assertTrue(convolution.get(5).equals(10000D));
assertTrue(convolution.get(6).equals(5000D));
}
Aggregations