Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the Apache lucene-solr project: class StreamExpressionTest, method testCommitStream.
////////////////////////////////////////////
@Test
public void testCommitStream() throws Exception {
  // Create the collection that the commit() decorator will write to, and wait
  // for all of its replicas to become active before indexing anything.
  CollectionAdminRequest.createCollection("destinationCollection", "conf", 2, 1).process(cluster.getSolrClient());
  AbstractDistribZkTestBase.waitForRecoveriesToFinish("destinationCollection", cluster.getSolrClient().getZkStateReader(), false, true, TIMEOUT);

  // Seed the source collection with five docs, including multi-valued fields,
  // so we can verify multi-value round-tripping through update()/commit().
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0", "s_multi", "aaaa", "s_multi", "bbbb", "i_multi", "4", "i_multi", "7")
      .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0", "s_multi", "aaaa1", "s_multi", "bbbb1", "i_multi", "44", "i_multi", "77")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3", "s_multi", "aaaa2", "s_multi", "bbbb2", "i_multi", "444", "i_multi", "777")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4", "s_multi", "aaaa3", "s_multi", "bbbb3", "i_multi", "4444", "i_multi", "7777")
      .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1", "s_multi", "aaaa4", "s_multi", "bbbb4", "i_multi", "44444", "i_multi", "77777")
      .commit(cluster.getSolrClient(), "collection1");

  StreamExpression expression;
  TupleStream stream;
  Tuple t;
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  StreamFactory factory = new StreamFactory()
      .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
      .withCollectionZkHost("destinationCollection", cluster.getZkServer().getZkAddress())
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("update", UpdateStream.class)
      .withFunctionName("commit", CommitStream.class);
  try {
    // Copy all docs to destinationCollection.
    expression = StreamExpressionParser.parse("commit(destinationCollection, batchSize=2, update(destinationCollection, batchSize=5, search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f,s_multi,i_multi\", sort=\"a_f asc, a_i asc\")))");
    stream = factory.constructStream(expression);
    stream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(stream);

    // Ensure that all CommitStream tuples indicate the correct number of copied/indexed docs.
    // NOTE: the original used raw `assert` statements here, which are silently
    // skipped unless the JVM runs with -ea; JUnit assertions always execute.
    assertEquals(1, tuples.size());
    t = tuples.get(0);
    assertFalse(t.EOF);
    assertEquals(5, t.get("batchIndexed"));

    // Ensure that destinationCollection actually has the new docs, readable in a_i order.
    expression = StreamExpressionParser.parse("search(destinationCollection, q=*:*, fl=\"id,a_s,a_i,a_f,s_multi,i_multi\", sort=\"a_i asc\")");
    stream = new CloudSolrStream(expression, factory);
    stream.setStreamContext(streamContext);
    tuples = getTuples(stream);
    assertEquals(5, tuples.size());

    Tuple tuple = tuples.get(0);
    assertEquals(0L, tuple.getLong("id").longValue());
    assertEquals("hello0", tuple.get("a_s"));
    assertEquals(0L, tuple.getLong("a_i").longValue());
    assertEquals(0.0, tuple.getDouble("a_f"), 0.0);
    assertList(tuple.getStrings("s_multi"), "aaaa", "bbbb");
    assertList(tuple.getLongs("i_multi"), Long.parseLong("4"), Long.parseLong("7"));

    tuple = tuples.get(1);
    assertEquals(1L, tuple.getLong("id").longValue());
    assertEquals("hello1", tuple.get("a_s"));
    assertEquals(1L, tuple.getLong("a_i").longValue());
    assertEquals(1.0, tuple.getDouble("a_f"), 0.0);
    assertList(tuple.getStrings("s_multi"), "aaaa4", "bbbb4");
    assertList(tuple.getLongs("i_multi"), Long.parseLong("44444"), Long.parseLong("77777"));

    tuple = tuples.get(2);
    assertEquals(2L, tuple.getLong("id").longValue());
    assertEquals("hello2", tuple.get("a_s"));
    assertEquals(2L, tuple.getLong("a_i").longValue());
    assertEquals(0.0, tuple.getDouble("a_f"), 0.0);
    assertList(tuple.getStrings("s_multi"), "aaaa1", "bbbb1");
    assertList(tuple.getLongs("i_multi"), Long.parseLong("44"), Long.parseLong("77"));

    tuple = tuples.get(3);
    assertEquals(3L, tuple.getLong("id").longValue());
    assertEquals("hello3", tuple.get("a_s"));
    assertEquals(3L, tuple.getLong("a_i").longValue());
    assertEquals(3.0, tuple.getDouble("a_f"), 0.0);
    assertList(tuple.getStrings("s_multi"), "aaaa2", "bbbb2");
    assertList(tuple.getLongs("i_multi"), Long.parseLong("444"), Long.parseLong("777"));

    tuple = tuples.get(4);
    assertEquals(4L, tuple.getLong("id").longValue());
    assertEquals("hello4", tuple.get("a_s"));
    assertEquals(4L, tuple.getLong("a_i").longValue());
    assertEquals(4.0, tuple.getDouble("a_f"), 0.0);
    assertList(tuple.getStrings("s_multi"), "aaaa3", "bbbb3");
    assertList(tuple.getLongs("i_multi"), Long.parseLong("4444"), Long.parseLong("7777"));
  } finally {
    // Always clean up the extra collection and the client cache, even on failure.
    CollectionAdminRequest.deleteCollection("destinationCollection").process(cluster.getSolrClient());
    solrClientCache.close();
  }
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the Apache lucene-solr project: class StreamExpressionTest, method testParallelComplementStream.
@Test
public void testParallelComplementStream() throws Exception {
  // Index three groups keyed by a_s: docs only in "set A", only in "set B",
  // and in both ("setAB"). a_i is the join key used by complement().
  new UpdateRequest()
      .add(id, "0", "a_s", "setA", "a_i", "0")
      .add(id, "2", "a_s", "setA", "a_i", "1")
      .add(id, "3", "a_s", "setA", "a_i", "2")
      .add(id, "4", "a_s", "setA", "a_i", "3")
      .add(id, "5", "a_s", "setB", "a_i", "2")
      .add(id, "6", "a_s", "setB", "a_i", "3")
      .add(id, "9", "a_s", "setB", "a_i", "5")
      .add(id, "7", "a_s", "setAB", "a_i", "0")
      .add(id, "8", "a_s", "setAB", "a_i", "6")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  StreamFactory streamFactory = new StreamFactory()
      .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("complement", ComplementStream.class)
      .withFunctionName("parallel", ParallelStream.class);
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    final String zkHost = cluster.getZkServer().getZkAddress();
    // complement() emits A-side tuples whose a_i has no match on the B side;
    // both sides are partitioned on a_i so the work splits across 2 workers.
    final TupleStream stream = streamFactory.constructStream("parallel(" + "collection1, " + "complement(" + "search(collection1, q=a_s:(setA || setAB), fl=\"id,a_s,a_i\", sort=\"a_i asc, a_s asc\", partitionKeys=\"a_i\")," + "search(collection1, q=a_s:(setB || setAB), fl=\"id,a_s,a_i\", sort=\"a_i asc\", partitionKeys=\"a_i\")," + "on=\"a_i\")," + "workers=\"2\", zkHost=\"" + zkHost + "\", sort=\"a_i asc\")");
    stream.setStreamContext(streamContext);
    final List<Tuple> tuples = getTuples(stream);
    // Only id=2 (a_i=1) appears on the A side with no B-side counterpart.
    // NOTE: raw `assert` was a no-op without -ea; use a JUnit assertion instead.
    assertEquals(1, tuples.size());
    assertOrder(tuples, 2);
  } finally {
    solrClientCache.close();
  }
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the Apache lucene-solr project: class StreamExpressionTest, method testSqlStream.
@Test
public void testSqlStream() throws Exception {
  // Index five docs with a_i values 0..4 so the SQL ORDER BY result is known.
  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
      .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  List<Tuple> tuples;
  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  List<String> shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext);
  try {
    // (The original built a comma-joined shard-URL string here that was never
    // used; that dead code has been removed.)
    ModifiableSolrParams solrParams = new ModifiableSolrParams();
    solrParams.add("qt", "/stream");
    solrParams.add("expr", "sql(" + COLLECTIONORALIAS + ", stmt=\"select id from collection1 order by a_i asc\")");
    // Sending the expression to a single shard is enough; the sql() stream
    // performs the distributed query itself.
    SolrStream solrStream = new SolrStream(shardUrls.get(0), solrParams);
    solrStream.setStreamContext(streamContext);
    tuples = getTuples(solrStream);
    // NOTE: raw `assert` was a no-op without -ea; use a JUnit assertion instead.
    assertEquals(5, tuples.size());
    assertOrder(tuples, 0, 1, 2, 3, 4);
  } finally {
    solrClientCache.close();
  }
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the Apache lucene-solr project: class StreamExpressionTest, method testTimeSeriesStream.
@Test
public void testTimeSeriesStream() throws Exception {
  // Index 250 docs across four years. Each year carries a distinct price so
  // every bucket's count/sum/max/min below is unambiguous:
  //   2016: 50 docs @ 400, 2015: 50 @ 300, 2014: 50 @ 500, 2013: 100 @ 100.
  UpdateRequest updateRequest = new UpdateRequest();
  int i = 0;
  while (i < 50) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2016", "5", "1"), "price_f", "400.00");
  }
  while (i < 100) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2015", "5", "1"), "price_f", "300.0");
  }
  while (i < 150) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2014", "5", "1"), "price_f", "500.0");
  }
  while (i < 250) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2013", "5", "1"), "price_f", "100.00");
  }
  updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  String expr = "timeseries(" + COLLECTIONORALIAS + ", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " + "end=\"2016-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "count(*), sum(price_f), max(price_f), min(price_f))";
  ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
  paramsLoc.set("expr", expr);
  paramsLoc.set("qt", "/stream");
  String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
  TupleStream solrStream = new SolrStream(url, paramsLoc);
  StreamContext context = new StreamContext();
  solrStream.setStreamContext(context);
  List<Tuple> tuples = getTuples(solrStream);
  // Without a format= parameter the bucket label is the full ISO instant.
  assertEquals(4, tuples.size());
  assertTimeseriesBucket(tuples.get(0), "2013-01-01T01:00:00Z", 100L, 10000D, 100D, 100D);
  assertTimeseriesBucket(tuples.get(1), "2014-01-01T01:00:00Z", 50L, 25000D, 500D, 500D);
  assertTimeseriesBucket(tuples.get(2), "2015-01-01T01:00:00Z", 50L, 15000D, 300D, 300D);
  assertTimeseriesBucket(tuples.get(3), "2016-01-01T01:00:00Z", 50L, 20000D, 400D, 400D);

  // Same query with format="yyyy": bucket labels collapse to the year.
  expr = "timeseries(" + COLLECTIONORALIAS + ", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " + "end=\"2016-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "format=\"yyyy\", " + "count(*), sum(price_f), max(price_f), min(price_f))";
  paramsLoc = new ModifiableSolrParams();
  paramsLoc.set("expr", expr);
  paramsLoc.set("qt", "/stream");
  solrStream = new SolrStream(url, paramsLoc);
  solrStream.setStreamContext(context);
  tuples = getTuples(solrStream);
  assertEquals(4, tuples.size());
  assertTimeseriesBucket(tuples.get(0), "2013", 100L, 10000D, 100D, 100D);
  assertTimeseriesBucket(tuples.get(1), "2014", 50L, 25000D, 500D, 500D);
  assertTimeseriesBucket(tuples.get(2), "2015", 50L, 15000D, 300D, 300D);
  assertTimeseriesBucket(tuples.get(3), "2016", 50L, 20000D, 400D, 400D);

  // And with format="yyyy-MM": year-month labels.
  expr = "timeseries(" + COLLECTIONORALIAS + ", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " + "end=\"2016-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "format=\"yyyy-MM\", " + "count(*), sum(price_f), max(price_f), min(price_f))";
  paramsLoc = new ModifiableSolrParams();
  paramsLoc.set("expr", expr);
  paramsLoc.set("qt", "/stream");
  solrStream = new SolrStream(url, paramsLoc);
  solrStream.setStreamContext(context);
  tuples = getTuples(solrStream);
  assertEquals(4, tuples.size());
  assertTimeseriesBucket(tuples.get(0), "2013-01", 100L, 10000D, 100D, 100D);
  assertTimeseriesBucket(tuples.get(1), "2014-01", 50L, 25000D, 500D, 500D);
  assertTimeseriesBucket(tuples.get(2), "2015-01", 50L, 15000D, 300D, 300D);
  assertTimeseriesBucket(tuples.get(3), "2016-01", 50L, 20000D, 400D, 400D);
}

/**
 * Asserts one timeseries() result tuple: the bucket label ("test_dt") plus the
 * four aggregate values. Uses assertEquals (rather than the original
 * assertTrue(x.equals(y)) pattern) so failures report expected vs. actual.
 */
private void assertTimeseriesBucket(Tuple tuple, String expectedDt, long expectedCount,
    double expectedSum, double expectedMax, double expectedMin) {
  assertEquals(expectedDt, tuple.get("test_dt"));
  assertEquals(expectedCount, tuple.getLong("count(*)").longValue());
  assertEquals(expectedSum, tuple.getDouble("sum(price_f)"), 0.0);
  assertEquals(expectedMax, tuple.getDouble("max(price_f)"), 0.0);
  assertEquals(expectedMin, tuple.getDouble("min(price_f)"), 0.0);
}
Example usage of org.apache.solr.client.solrj.request.UpdateRequest in the Apache lucene-solr project: class StreamExpressionTest, method testCopyOfRange.
// NOTE(review): the original method lacked @Test; every sibling test in this
// file carries it, so it is added here for consistency with the file's runner
// configuration.
@Test
public void testCopyOfRange() throws Exception {
  // Same four-year fixture as testTimeSeriesStream: distinct price per year so
  // the max(price_f) column is [100, 500, 300, 400] in bucket order.
  UpdateRequest updateRequest = new UpdateRequest();
  int i = 0;
  while (i < 50) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2016", "5", "1"), "price_f", "400.00");
  }
  while (i < 100) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2015", "5", "1"), "price_f", "300.0");
  }
  while (i < 150) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2014", "5", "1"), "price_f", "500.0");
  }
  while (i < 250) {
    updateRequest.add(id, "id_" + (++i), "test_dt", getDateString("2013", "5", "1"), "price_f", "100.00");
  }
  updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  String expr = "timeseries(" + COLLECTIONORALIAS + ", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " + "end=\"2016-12-01T01:00:00.000Z\", " + "gap=\"+1YEAR\", " + "field=\"test_dt\", " + "count(*), sum(price_f), max(price_f), min(price_f))";
  // copyOfRange(c, 1, 3) should keep elements at indices 1 and 2 of the
  // max(price_f) column: 500 (2014) and 300 (2015).
  String cexpr = "let(a=" + expr + ", c=col(a, max(price_f)), tuple(copy=copyOfRange(c, 1, 3)))";
  ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
  paramsLoc.set("expr", cexpr);
  paramsLoc.set("qt", "/stream");
  String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
  TupleStream solrStream = new SolrStream(url, paramsLoc);
  StreamContext context = new StreamContext();
  solrStream.setStreamContext(context);
  List<Tuple> tuples = getTuples(solrStream);
  assertEquals(1, tuples.size());
  @SuppressWarnings("unchecked") // tuple values are untyped; "copy" is a numeric list here
  List<Number> copy1 = (List<Number>) tuples.get(0).get("copy");
  assertEquals(2, copy1.size());
  assertEquals(500D, copy1.get(0).doubleValue(), 0.0);
  assertEquals(300D, copy1.get(1).doubleValue(), 0.0);
}
Aggregations