Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by Apache.
The class HelloStream, method read:
@Override
public Tuple read() throws IOException {
  if (isSentHelloWorld) {
    // The greeting has already been sent; signal end of stream with the EOF tuple.
    Map m = new HashMap();
    m.put("EOF", true);
    return new Tuple(m);
  } else {
    // First read: emit the single "Hello World!" tuple and remember that it was sent.
    isSentHelloWorld = true;
    Map m = new HashMap<>();
    m.put("msg", "Hello World!");
    return new Tuple(m);
  }
}
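For context, here is a minimal consumer sketch. It is not part of the project source; it assumes HelloStream exposes a public no-arg constructor and follows the standard TupleStream open/read/close contract, with the EOF tuple shown above marking the end of the stream.

// A minimal sketch (not from the project source), assuming HelloStream has a
// no-arg constructor and follows the usual TupleStream open/read/close contract.
public static void drainHelloStream() throws IOException {
  TupleStream stream = new HelloStream();
  try {
    stream.open();
    while (true) {
      Tuple tuple = stream.read();
      if (tuple.EOF) {                      // the {"EOF": true} tuple marks end of stream
        break;
      }
      System.out.println(tuple.get("msg")); // prints "Hello World!"
    }
  } finally {
    stream.close();
  }
}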
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by Apache.
The class TestSQLHandler, method getTuples:
protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {
  tupleStream.open();
  List<Tuple> tuples = new ArrayList<>();
  for (;;) {
    Tuple t = tupleStream.read();
    if (t.EOF) {
      break;
    } else {
      tuples.add(t);
    }
  }
  tupleStream.close();
  return tuples;
}
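A hedged variant of the same helper closes the stream in a finally block so it is released even when read() throws; getTuplesSafely is a hypothetical name, not a method in TestSQLHandler.

// Hypothetical variant of getTuples (not in TestSQLHandler): the same collection
// loop, but close() runs in a finally block so the stream is released on failure.
protected List<Tuple> getTuplesSafely(TupleStream tupleStream) throws IOException {
  List<Tuple> tuples = new ArrayList<>();
  tupleStream.open();
  try {
    for (Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) {
      tuples.add(t);
    }
  } finally {
    tupleStream.close();
  }
  return tuples;
}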
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by Apache.
The class TestSQLHandler, method testMixedCaseFields:
private void testMixedCaseFields() throws Exception {
  try {
    CloudJettyRunner jetty = this.cloudJettys.get(0);

    del("*:*");
    commit();

    indexDoc(sdoc("id", "1", "Text_t", "XXXX XXXX", "Str_s", "a", "Field_i", "7"));
    indexDoc(sdoc("id", "2", "Text_t", "XXXX XXXX", "Str_s", "b", "Field_i", "8"));
    indexDoc(sdoc("id", "3", "Text_t", "XXXX XXXX", "Str_s", "a", "Field_i", "20"));
    indexDoc(sdoc("id", "4", "Text_t", "XXXX XXXX", "Str_s", "b", "Field_i", "11"));
    indexDoc(sdoc("id", "5", "Text_t", "XXXX XXXX", "Str_s", "c", "Field_i", "30"));
    indexDoc(sdoc("id", "6", "Text_t", "XXXX XXXX", "Str_s", "c", "Field_i", "40"));
    indexDoc(sdoc("id", "7", "Text_t", "XXXX XXXX", "Str_s", "c", "Field_i", "50"));
    indexDoc(sdoc("id", "8", "Text_t", "XXXX XXXX", "Str_s", "c", "Field_i", "60"));
    commit();

    SolrParams sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "map_reduce", "stmt", "select id, Field_i, Str_s from collection1 where Text_t='XXXX' order by Field_i desc");
    SolrStream solrStream = new SolrStream(jetty.url, sParams);
    List<Tuple> tuples = getTuples(solrStream);
    assert (tuples.size() == 8);

    Tuple tuple;

    tuple = tuples.get(0);
    assert (tuple.getLong("id") == 8);
    assert (tuple.getLong("Field_i") == 60);
    assert (tuple.get("Str_s").equals("c"));

    tuple = tuples.get(1);
    assert (tuple.getLong("id") == 7);
    assert (tuple.getLong("Field_i") == 50);
    assert (tuple.get("Str_s").equals("c"));

    tuple = tuples.get(2);
    assert (tuple.getLong("id") == 6);
    assert (tuple.getLong("Field_i") == 40);
    assert (tuple.get("Str_s").equals("c"));

    tuple = tuples.get(3);
    assert (tuple.getLong("id") == 5);
    assert (tuple.getLong("Field_i") == 30);
    assert (tuple.get("Str_s").equals("c"));

    tuple = tuples.get(4);
    assert (tuple.getLong("id") == 3);
    assert (tuple.getLong("Field_i") == 20);
    assert (tuple.get("Str_s").equals("a"));

    tuple = tuples.get(5);
    assert (tuple.getLong("id") == 4);
    assert (tuple.getLong("Field_i") == 11);
    assert (tuple.get("Str_s").equals("b"));

    tuple = tuples.get(6);
    assert (tuple.getLong("id") == 2);
    assert (tuple.getLong("Field_i") == 8);
    assert (tuple.get("Str_s").equals("b"));

    tuple = tuples.get(7);
    assert (tuple.getLong("id") == 1);
    assert (tuple.getLong("Field_i") == 7);
    assert (tuple.get("Str_s").equals("a"));

    // TODO get sum(Field_i) as named one
    sParams = mapParams(CommonParams.QT, "/sql", "stmt", "select Str_s, sum(Field_i) from collection1 where id='(1 8)' group by Str_s having (sum(Field_i) = 7 OR sum(Field_i) = 60) order by sum(Field_i) desc");
    solrStream = new SolrStream(jetty.url, sParams);
    tuples = getTuples(solrStream);
    assert (tuples.size() == 2);

    tuple = tuples.get(0);
    assert (tuple.get("Str_s").equals("c"));
    assert (tuple.getDouble("EXPR$1") == 60);

    tuple = tuples.get(1);
    assert (tuple.get("Str_s").equals("a"));
    assert (tuple.getDouble("EXPR$1") == 7);

    sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "map_reduce", "stmt", "select Str_s, sum(Field_i) from collection1 where id='(1 8)' group by Str_s having (sum(Field_i) = 7 OR sum(Field_i) = 60) order by sum(Field_i) desc");
    solrStream = new SolrStream(jetty.url, sParams);
    tuples = getTuples(solrStream);
    assert (tuples.size() == 2);

    tuple = tuples.get(0);
    assert (tuple.get("Str_s").equals("c"));
    assert (tuple.getDouble("EXPR$1") == 60);

    tuple = tuples.get(1);
    assert (tuple.get("Str_s").equals("a"));
    assert (tuple.getDouble("EXPR$1") == 7);
  } finally {
    delete();
  }
}
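The mapParams helper used throughout this test pairs up its varargs into SolrParams. A plausible sketch of such a helper (an assumption for illustration, not the project's exact code) looks like this:

// Plausible sketch of the mapParams test helper (an assumption, not the exact
// project code): consecutive varargs are treated as parameter name/value pairs.
public static SolrParams mapParams(String... vals) {
  ModifiableSolrParams params = new ModifiableSolrParams();
  for (int i = 0; i + 1 < vals.length; i += 2) {
    params.add(vals[i], vals[i + 1]);
  }
  return params;
}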
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by Apache.
The class TestSQLHandler, method testParallelSelectDistinct:
private void testParallelSelectDistinct() throws Exception {
  try {
    CloudJettyRunner jetty = this.cloudJettys.get(0);

    del("*:*");
    commit();

    indexr("id", "1", "text", "XXXX XXXX", "str_s", "a", "field_i", "1");
    indexr("id", "2", "text", "XXXX XXXX", "str_s", "b", "field_i", "2");
    indexr("id", "3", "text", "XXXX XXXX", "str_s", "a", "field_i", "20");
    indexr("id", "4", "text", "XXXX XXXX", "str_s", "b", "field_i", "2");
    indexr("id", "5", "text", "XXXX XXXX", "str_s", "c", "field_i", "30");
    indexr("id", "6", "text", "XXXX XXXX", "str_s", "c", "field_i", "30");
    indexr("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50");
    indexr("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60");
    commit();

    SolrParams sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", "aggregationMode", "map_reduce", "stmt", "select distinct str_s, field_i from collection1 order by str_s asc, field_i asc");
    SolrStream solrStream = new SolrStream(jetty.url, sParams);
    List<Tuple> tuples = getTuples(solrStream);
    assert (tuples.size() == 6);

    Tuple tuple;

    tuple = tuples.get(0);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 1);

    tuple = tuples.get(1);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 20);

    tuple = tuples.get(2);
    assert (tuple.get("str_s").equals("b"));
    assert (tuple.getLong("field_i") == 2);

    tuple = tuples.get(3);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 30);

    tuple = tuples.get(4);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 50);

    tuple = tuples.get(5);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 60);

    // reverse the sort
    sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", "aggregationMode", "map_reduce", "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc");
    solrStream = new SolrStream(jetty.url, sParams);
    tuples = getTuples(solrStream);
    assert (tuples.size() == 6);

    tuple = tuples.get(0);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 60);

    tuple = tuples.get(1);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 50);

    tuple = tuples.get(2);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 30);

    tuple = tuples.get(3);
    assert (tuple.get("str_s").equals("b"));
    assert (tuple.getLong("field_i") == 2);

    tuple = tuples.get(4);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 20);

    tuple = tuples.get(5);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 1);

    // reverse the sort
    sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", "aggregationMode", "map_reduce", "stmt", "select distinct str_s as myString, field_i from collection1 order by myString desc, field_i desc");
    solrStream = new SolrStream(jetty.url, sParams);
    tuples = getTuples(solrStream);
    assert (tuples.size() == 6);

    tuple = tuples.get(0);
    assert (tuple.get("myString").equals("c"));
    assert (tuple.getLong("field_i") == 60);

    tuple = tuples.get(1);
    assert (tuple.get("myString").equals("c"));
    assert (tuple.getLong("field_i") == 50);

    tuple = tuples.get(2);
    assert (tuple.get("myString").equals("c"));
    assert (tuple.getLong("field_i") == 30);

    tuple = tuples.get(3);
    assert (tuple.get("myString").equals("b"));
    assert (tuple.getLong("field_i") == 2);

    tuple = tuples.get(4);
    assert (tuple.get("myString").equals("a"));
    assert (tuple.getLong("field_i") == 20);

    tuple = tuples.get(5);
    assert (tuple.get("myString").equals("a"));
    assert (tuple.getLong("field_i") == 1);

    // test with limit
    sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", "aggregationMode", "map_reduce", "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc limit 2");
    solrStream = new SolrStream(jetty.url, sParams);
    tuples = getTuples(solrStream);
    assert (tuples.size() == 2);

    tuple = tuples.get(0);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 60);

    tuple = tuples.get(1);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 50);

    // Test without a sort. Sort should be asc by default.
    sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", "aggregationMode", "map_reduce", "stmt", "select distinct str_s, field_i from collection1");
    solrStream = new SolrStream(jetty.url, sParams);
    tuples = getTuples(solrStream);
    assert (tuples.size() == 6);

    tuple = tuples.get(0);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 1);

    tuple = tuples.get(1);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 20);

    tuple = tuples.get(2);
    assert (tuple.get("str_s").equals("b"));
    assert (tuple.getLong("field_i") == 2);

    tuple = tuples.get(3);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 30);

    tuple = tuples.get(4);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 50);

    tuple = tuples.get(5);
    assert (tuple.get("str_s").equals("c"));
    assert (tuple.getLong("field_i") == 60);

    // Test with a predicate.
    sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", "aggregationMode", "map_reduce", "stmt", "select distinct str_s, field_i from collection1 where str_s = 'a'");
    solrStream = new SolrStream(jetty.url, sParams);
    tuples = getTuples(solrStream);
    assert (tuples.size() == 2);

    tuple = tuples.get(0);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 1);

    tuple = tuples.get(1);
    assert (tuple.get("str_s").equals("a"));
    assert (tuple.getLong("field_i") == 20);
  } finally {
    delete();
  }
}
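The repeated per-tuple checks above could be collapsed with a small helper. assertTuple is hypothetical and not part of TestSQLHandler; it only uses the Tuple get and getLong accessors exercised above.

// Hypothetical helper (not in TestSQLHandler) bundling the repeated
// string/long field checks made against each tuple above.
private void assertTuple(Tuple tuple, String strField, String expectedStr,
                         String longField, long expectedLong) {
  assert tuple.get(strField).equals(expectedStr);
  assert tuple.getLong(longField) == expectedLong;
}

// Example: the first two checks of the distinct query would become
//   assertTuple(tuples.get(0), "str_s", "a", "field_i", 1);
//   assertTuple(tuples.get(1), "str_s", "a", "field_i", 20);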
Use of org.apache.solr.client.solrj.io.Tuple in project lucene-solr by Apache.
The class FacetStream, method read:
public Tuple read() throws IOException {
  if (index < tuples.size() && index < bucketSizeLimit) {
    // Return the buffered facet tuples one at a time, up to the bucket size limit.
    Tuple tuple = tuples.get(index);
    ++index;
    return tuple;
  } else {
    // Buffer exhausted (or limit reached): signal end of stream with the EOF tuple.
    Map fields = new HashMap();
    fields.put("EOF", true);
    Tuple tuple = new Tuple(fields);
    return tuple;
  }
}
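Both HelloStream.read() and FacetStream.read() build the same end-of-stream marker inline. A small helper like the following (hypothetical, not part of either class) would centralize that construction:

// Hypothetical helper (not in FacetStream or HelloStream): builds the standard
// {"EOF": true} marker tuple that solrj.io streams return once they are exhausted.
private static Tuple eofTuple() {
  Map<String, Object> fields = new HashMap<>();
  fields.put("EOF", true);
  return new Tuple(fields);
}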