Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by apache.
In the class StreamExpressionTest, the method testFeaturesSelectionStream:
@Test
public void testFeaturesSelectionStream() throws Exception {
  Assume.assumeTrue(!useAlias);
  CollectionAdminRequest.createCollection("destinationCollection", "ml", 2, 1).process(cluster.getSolrClient());
  AbstractDistribZkTestBase.waitForRecoveriesToFinish("destinationCollection", cluster.getSolrClient().getZkStateReader(), false, true, TIMEOUT);

  UpdateRequest updateRequest = new UpdateRequest();
  for (int i = 0; i < 5000; i += 2) {
    updateRequest.add(id, String.valueOf(i), "whitetok", "a b c d", "out_i", "1");
    updateRequest.add(id, String.valueOf(i + 1), "whitetok", "a b e f", "out_i", "0");
  }
  updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  StreamExpression expression;
  TupleStream stream;
  List<Tuple> tuples;
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);

  StreamFactory factory = new StreamFactory()
      .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
      .withCollectionZkHost("destinationCollection", cluster.getZkServer().getZkAddress())
      .withFunctionName("featuresSelection", FeaturesSelectionStream.class)
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("update", UpdateStream.class);

  try {
    String featuresExpression = "featuresSelection(collection1, q=\"*:*\", featureSet=\"first\", field=\"whitetok\", outcome=\"out_i\", numTerms=4)";

    // basic
    expression = StreamExpressionParser.parse(featuresExpression);
    stream = new FeaturesSelectionStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);

    assert (tuples.size() == 4);
    assertTrue(tuples.get(0).get("term_s").equals("c"));
    assertTrue(tuples.get(1).get("term_s").equals("d"));
    assertTrue(tuples.get(2).get("term_s").equals("e"));
    assertTrue(tuples.get(3).get("term_s").equals("f"));

    // update
    expression = StreamExpressionParser.parse("update(destinationCollection, batchSize=5, " + featuresExpression + ")");
    stream = new UpdateStream(expression, factory);
    stream.setStreamContext(streamContext);
    getTuples(stream);
    cluster.getSolrClient().commit("destinationCollection");

    expression = StreamExpressionParser.parse("search(destinationCollection, q=featureSet_s:first, fl=\"index_i, term_s\", sort=\"index_i asc\")");
    stream = new CloudSolrStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);

    assertEquals(4, tuples.size());
    assertTrue(tuples.get(0).get("term_s").equals("c"));
    assertTrue(tuples.get(1).get("term_s").equals("d"));
    assertTrue(tuples.get(2).get("term_s").equals("e"));
    assertTrue(tuples.get(3).get("term_s").equals("f"));
  } finally {
    CollectionAdminRequest.deleteCollection("destinationCollection").process(cluster.getSolrClient());
    solrClientCache.close();
  }
}
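The calls to getTuples above refer to a helper defined elsewhere in StreamExpressionTest and not included in this excerpt. A minimal sketch of such a helper, assuming only the public TupleStream API (open, read, close) and the Tuple EOF marker:

// Hypothetical helper (not shown in the excerpt above): drains a TupleStream into
// a list, stopping at the EOF marker tuple and always closing the stream.
// Assumes java.io.IOException, java.util.ArrayList and java.util.List are imported.
protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {
  List<Tuple> tuples = new ArrayList<>();
  try {
    tupleStream.open();
    for (Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) {
      tuples.add(t);
    }
  } finally {
    tupleStream.close();
  }
  return tuples;
}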
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by apache.
In the class StreamExpressionTest, the method testRankStream:
@Test
public void testRankStream() throws Exception {
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
      .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  StreamExpression expression;
  TupleStream stream;
  List<Tuple> tuples;
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);

  StreamFactory factory = new StreamFactory()
      .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress())
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("unique", UniqueStream.class)
      .withFunctionName("top", RankStream.class);

  try {
    // Basic test
    expression = StreamExpressionParser.parse("top(" + "n=3,"
        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
        + "sort=\"a_f asc, a_i asc\")");
    stream = new RankStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 3);
    assertOrder(tuples, 0, 2, 1);

    // Basic test desc
    expression = StreamExpressionParser.parse("top(" + "n=2," + "unique("
        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc\"),"
        + "over=\"a_f\")," + "sort=\"a_f desc\")");
    stream = new RankStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 2);
    assertOrder(tuples, 4, 3);

    // full factory
    stream = factory.constructStream("top(" + "n=4," + "unique("
        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
        + "over=\"a_f\")," + "sort=\"a_f asc\")");
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 4);
    assertOrder(tuples, 0, 1, 3, 4);

    // full factory, switch order
    stream = factory.constructStream("top(" + "n=4," + "unique("
        + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f desc, a_i desc\"),"
        + "over=\"a_f\")," + "sort=\"a_f asc\")");
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 4);
    assertOrder(tuples, 2, 1, 3, 4);
  } finally {
    solrClientCache.close();
  }
}
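The factory-based construction above is one way to run the expression; the same top(...) expression can also be posted to the /stream request handler, as the testRegressAndPredict example later on this page does. A hedged sketch, reusing the COLLECTIONORALIAS, cluster, and getTuples names from the surrounding test class:

// Sketch only: execute the first top(...) expression remotely via the /stream
// handler instead of building a RankStream locally. COLLECTIONORALIAS, cluster,
// and getTuples are assumed from the enclosing test class.
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("expr", "top(n=3,"
    + "search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"),"
    + "sort=\"a_f asc, a_i asc\")");
params.set("qt", "/stream");
String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
TupleStream remoteStream = new SolrStream(url, params);
remoteStream.setStreamContext(new StreamContext());
List<Tuple> remoteTuples = getTuples(remoteStream); // expected to hold the same three tuples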
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by apache.
In the class StreamExpressionTest, the method testOuterHashJoinStream:
@Test
public void testOuterHashJoinStream() throws Exception {
  new UpdateRequest()
      .add(id, "1", "side_s", "left", "join1_i", "0", "join2_s", "a", "ident_s", "left_1") // 8, 9
      .add(id, "15", "side_s", "left", "join1_i", "0", "join2_s", "a", "ident_s", "left_1") // 8, 9
      .add(id, "2", "side_s", "left", "join1_i", "0", "join2_s", "b", "ident_s", "left_2")
      .add(id, "3", "side_s", "left", "join1_i", "1", "join2_s", "a", "ident_s", "left_3") // 10
      .add(id, "4", "side_s", "left", "join1_i", "1", "join2_s", "b", "ident_s", "left_4") // 11
      .add(id, "5", "side_s", "left", "join1_i", "1", "join2_s", "c", "ident_s", "left_5") // 12
      .add(id, "6", "side_s", "left", "join1_i", "2", "join2_s", "d", "ident_s", "left_6")
      .add(id, "7", "side_s", "left", "join1_i", "3", "join2_s", "e", "ident_s", "left_7") // 14
      .add(id, "8", "side_s", "right", "join1_i", "0", "join2_s", "a", "ident_s", "right_1", "join3_i", "0") // 1,15
      .add(id, "9", "side_s", "right", "join1_i", "0", "join2_s", "a", "ident_s", "right_2", "join3_i", "0") // 1,15
      .add(id, "10", "side_s", "right", "join1_i", "1", "join2_s", "a", "ident_s", "right_3", "join3_i", "1") // 3
      .add(id, "11", "side_s", "right", "join1_i", "1", "join2_s", "b", "ident_s", "right_4", "join3_i", "1") // 4
      .add(id, "12", "side_s", "right", "join1_i", "1", "join2_s", "c", "ident_s", "right_5", "join3_i", "1") // 5
      .add(id, "13", "side_s", "right", "join1_i", "2", "join2_s", "dad", "ident_s", "right_6", "join3_i", "2")
      .add(id, "14", "side_s", "right", "join1_i", "3", "join2_s", "e", "ident_s", "right_7", "join3_i", "3") // 7
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  StreamExpression expression;
  TupleStream stream;
  List<Tuple> tuples;
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);

  StreamFactory factory = new StreamFactory()
      .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("outerHashJoin", OuterHashJoinStream.class);

  try {
    // Basic test
    expression = StreamExpressionParser.parse("outerHashJoin("
        + "search(collection1, q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc, id asc\"),"
        + "hashed=search(collection1, q=\"side_s:right\", fl=\"join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc\"),"
        + "on=\"join1_i, join2_s\")");
    stream = new OuterHashJoinStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 10);
    assertOrder(tuples, 1, 1, 15, 15, 2, 3, 4, 5, 6, 7);

    // Basic desc
    expression = StreamExpressionParser.parse("outerHashJoin("
        + "search(collection1, q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i desc, join2_s asc\"),"
        + "hashed=search(collection1, q=\"side_s:right\", fl=\"join1_i,join2_s,ident_s\", sort=\"join1_i desc, join2_s asc\"),"
        + "on=\"join1_i, join2_s\")");
    stream = new OuterHashJoinStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 10);
    assertOrder(tuples, 7, 6, 3, 4, 5, 1, 1, 15, 15, 2);

    // Results in both searches, no join matches
    expression = StreamExpressionParser.parse("outerHashJoin("
        + "search(collection1, q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"ident_s asc\"),"
        + "hashed=search(collection1, q=\"side_s:right\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"ident_s asc\"),"
        + "on=\"ident_s\")");
    stream = new OuterHashJoinStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 8);
    assertOrder(tuples, 1, 15, 2, 3, 4, 5, 6, 7);

    // Basic test
    expression = StreamExpressionParser.parse("outerHashJoin("
        + "search(collection1, q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc, id asc\"),"
        + "hashed=search(collection1, q=\"side_s:right\", fl=\"join3_i,join2_s,ident_s\", sort=\"join2_s asc\"),"
        + "on=\"join1_i=join3_i, join2_s\")");
    stream = new OuterHashJoinStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assert (tuples.size() == 10);
    assertOrder(tuples, 1, 1, 15, 15, 2, 3, 4, 5, 6, 7);
  } finally {
    solrClientCache.close();
  }
}
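Like getTuples, the assertOrder helper used above is defined elsewhere in StreamExpressionTest. A minimal sketch, assuming it simply compares the id field of each tuple against the expected sequence:

// Hypothetical version of the assertOrder helper referenced above: verifies that
// the tuples arrive with the expected id values, in the expected order.
protected void assertOrder(List<Tuple> tuples, int... ids) throws Exception {
  assertEquals(ids.length, tuples.size());
  for (int i = 0; i < ids.length; i++) {
    assertEquals(String.valueOf(ids[i]), tuples.get(i).getString("id"));
  }
}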
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by apache.
In the class StreamExpressionTest, the method testRegressAndPredict:
@Test
public void testRegressAndPredict() throws Exception {
  UpdateRequest updateRequest = new UpdateRequest();
  updateRequest.add(id, "1", "price_f", "100.0", "col_s", "a", "order_i", "1");
  updateRequest.add(id, "2", "price_f", "200.0", "col_s", "a", "order_i", "2");
  updateRequest.add(id, "3", "price_f", "300.0", "col_s", "a", "order_i", "3");
  updateRequest.add(id, "4", "price_f", "100.0", "col_s", "a", "order_i", "4");
  updateRequest.add(id, "5", "price_f", "200.0", "col_s", "a", "order_i", "5");
  updateRequest.add(id, "6", "price_f", "400.0", "col_s", "a", "order_i", "6");
  updateRequest.add(id, "7", "price_f", "600.0", "col_s", "a", "order_i", "7");
  updateRequest.add(id, "8", "price_f", "200.0", "col_s", "b", "order_i", "1");
  updateRequest.add(id, "9", "price_f", "400.0", "col_s", "b", "order_i", "2");
  updateRequest.add(id, "10", "price_f", "600.0", "col_s", "b", "order_i", "3");
  updateRequest.add(id, "11", "price_f", "200.0", "col_s", "b", "order_i", "4");
  updateRequest.add(id, "12", "price_f", "400.0", "col_s", "b", "order_i", "5");
  updateRequest.add(id, "13", "price_f", "800.0", "col_s", "b", "order_i", "6");
  updateRequest.add(id, "14", "price_f", "1200.0", "col_s", "b", "order_i", "7");
  updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  String expr1 = "search(" + COLLECTIONORALIAS + ", q=\"col_s:a\", fl=\"price_f, order_i\", sort=\"order_i asc\")";
  String expr2 = "search(" + COLLECTIONORALIAS + ", q=\"col_s:b\", fl=\"price_f, order_i\", sort=\"order_i asc\")";
  String cexpr = "let(a=" + expr1 + ", b=" + expr2 + ", c=col(a, price_f), d=col(b, price_f), e=regress(c, d), tuple(regress=e, p=predict(e, 300)))";

  ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
  paramsLoc.set("expr", cexpr);
  paramsLoc.set("qt", "/stream");

  String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
  TupleStream solrStream = new SolrStream(url, paramsLoc);
  StreamContext context = new StreamContext();
  solrStream.setStreamContext(context);
  List<Tuple> tuples = getTuples(solrStream);

  assertTrue(tuples.size() == 1);
  Tuple tuple = tuples.get(0);
  Map regression = (Map) tuple.get("regress");
  double slope = (double) regression.get("slope");
  double intercept = (double) regression.get("intercept");
  assertTrue(slope == 2.0D);
  assertTrue(intercept == 0.0D);
  double prediction = tuple.getDouble("p");
  assertTrue(prediction == 600.0D);
}
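The asserted values follow from the indexed data: in order_i order, the col_s:b prices (200, 400, 600, 200, 400, 800, 1200) are exactly twice the col_s:a prices (100, 200, 300, 100, 200, 400, 600), so a least-squares fit of d against c gives slope 2.0 and intercept 0.0, and predict(e, 300) is 600. A hedged local cross-check of that arithmetic, assuming Apache Commons Math is on the classpath (the streaming expression itself is evaluated server side):

// import org.apache.commons.math3.stat.regression.SimpleRegression; (assumed available)
// Local cross-check of the expected regression values; not part of the original test.
SimpleRegression check = new SimpleRegression();
double[] a = {100, 200, 300, 100, 200, 400, 600};   // col_s:a prices in order_i order
double[] b = {200, 400, 600, 200, 400, 800, 1200};  // col_s:b prices in order_i order
for (int k = 0; k < a.length; k++) {
  check.addData(a[k], b[k]);
}
assertEquals(2.0, check.getSlope(), 1e-9);
assertEquals(0.0, check.getIntercept(), 1e-9);
assertEquals(600.0, check.predict(300), 1e-9);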
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by apache.
In the class StreamExpressionTest, the method testReverse:
@Test
public void testReverse() throws Exception {
  UpdateRequest updateRequest = new UpdateRequest();
  int i = 0;
  while (i < 50) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2016", "5", "1"), "price_f", "400.00");
  }
  while (i < 100) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2015", "5", "1"), "price_f", "300.0");
  }
  while (i < 150) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2014", "5", "1"), "price_f", "500.0");
  }
  while (i < 250) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2013", "5", "1"), "price_f", "100.00");
  }
  updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  String expr = "timeseries(" + COLLECTIONORALIAS + ", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", "
      + "end=\"2016-12-01T01:00:00.000Z\", "
      + "gap=\"+1YEAR\", "
      + "field=\"test_dt\", "
      + "count(*), sum(price_f), max(price_f), min(price_f))";
  String cexpr = "let(a=" + expr + ", c=col(a, max(price_f)), tuple(reverse=rev(c)))";

  ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
  paramsLoc.set("expr", cexpr);
  paramsLoc.set("qt", "/stream");

  String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
  TupleStream solrStream = new SolrStream(url, paramsLoc);
  StreamContext context = new StreamContext();
  solrStream.setStreamContext(context);
  List<Tuple> tuples = getTuples(solrStream);

  assertTrue(tuples.size() == 1);
  List<Number> reverse = (List<Number>) tuples.get(0).get("reverse");
  assertTrue(reverse.size() == 4);
  assertTrue(reverse.get(0).doubleValue() == 400D);
  assertTrue(reverse.get(1).doubleValue() == 300D);
  assertTrue(reverse.get(2).doubleValue() == 500D);
  assertTrue(reverse.get(3).doubleValue() == 100D);
}