Use of org.apache.solr.client.solrj.io.stream.metrics.Metric in the lucene-solr project (apache):
class TimeSeriesStream, method toExpression.
/**
 * Serializes this stream back into a streaming-expression parameter so the
 * expression can be round-tripped through a {@link StreamFactory}.
 *
 * @param factory factory used to resolve this class's registered function name
 * @return the rebuilt {@code StreamExpression} for this time-series stream
 * @throws IOException if a nested metric fails to serialize
 */
@Override
public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
    // function name
    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
    // collection
    expression.addParameter(collection);
    // parameters: re-emit every Solr param as a named parameter, joining
    // multi-valued params with commas
    ModifiableSolrParams tmpParams = new ModifiableSolrParams(params);
    for (Entry<String, String[]> param : tmpParams.getMap().entrySet()) {
        expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), String.join(",", param.getValue())));
    }
    // metrics
    for (Metric metric : metrics) {
        expression.addParameter(metric.toExpression(factory));
    }
    expression.addParameter(new StreamExpressionNamedParameter("start", start));
    expression.addParameter(new StreamExpressionNamedParameter("end", end));
    expression.addParameter(new StreamExpressionNamedParameter("gap", gap));
    // BUG FIX: "field" was previously serialized with the value of `gap`
    // (copy-paste error), which corrupted the round-tripped expression.
    expression.addParameter(new StreamExpressionNamedParameter("field", field));
    expression.addParameter(new StreamExpressionNamedParameter("format", format));
    // zkHost
    expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
    return expression;
}
Use of org.apache.solr.client.solrj.io.stream.metrics.Metric in the lucene-solr project (apache):
class TimeSeriesStream, method appendJson.
/**
 * Writes the JSON facet request body for a range ("timeseries") facet onto
 * {@code buf}, emitting one sub-facet entry per metric.
 * Metrics whose identifier starts with "count(" are skipped — presumably the
 * bucket count comes back implicitly from the JSON facet API (confirm against
 * the response-parsing side).
 */
private void appendJson(StringBuilder buf, Metric[] _metrics, String field, String start, String end, String gap) {
    buf.append("\"timeseries\":{\"type\":\"range\"");
    buf.append(",\"field\":\"").append(field).append('"');
    buf.append(",\"start\":\"").append(start).append('"');
    buf.append(",\"end\":\"").append(end).append('"');
    buf.append(",\"gap\":\"").append(gap).append('"');
    buf.append(",\"facet\":{");
    int emitted = 0;
    for (Metric m : _metrics) {
        String id = m.getIdentifier();
        if (id.startsWith("count(")) {
            continue;
        }
        if (emitted > 0) {
            buf.append(',');
        }
        buf.append("\"facet_").append(emitted).append("\":\"").append(id).append('"');
        ++emitted;
    }
    buf.append("}}");
}
Use of org.apache.solr.client.solrj.io.stream.metrics.Metric in the lucene-solr project (apache):
class StreamExpressionToExpessionTest, method testMaxMetric.
/** Round-trips a max(...) metric: parse, serialize, and verify the text is unchanged. */
@Test
public void testMaxMetric() throws Exception {
    // Basic test
    Metric metric = new MaxMetric(StreamExpressionParser.parse("max(foo)"), factory);
    String expressionString = metric.toExpression(factory).toString();
    assertEquals("max(foo)", expressionString);
}
Use of org.apache.solr.client.solrj.io.stream.metrics.Metric in the lucene-solr project (apache):
class StreamingTest, method testParallelRollupStream.
/**
 * Verifies that RollupStream aggregates survive parallelization: ten docs in
 * three a_s buckets (hello0 x4, hello3 x4, hello4 x2) are rolled up per bucket
 * and the nine metrics (sum/min/max/avg over a_i and a_f, plus count(*)) are
 * checked on each resulting tuple.
 */
@Test
public void testParallelRollupStream() throws Exception {
    new UpdateRequest().add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1").add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2").add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3").add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4").add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5").add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6").add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7").add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8").add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9").add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    try {
        // partitionKeys matches the rollup bucket so each worker receives whole buckets.
        SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "partitionKeys", "a_s");
        CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParamsA);
        Bucket[] buckets = { new Bucket("a_s") };
        Metric[] metrics = { new SumMetric("a_i"), new SumMetric("a_f"), new MinMetric("a_i"), new MinMetric("a_f"), new MaxMetric("a_i"), new MaxMetric("a_f"), new MeanMetric("a_i"), new MeanMetric("a_f"), new CountMetric() };
        RollupStream rollupStream = new RollupStream(stream, buckets, metrics);
        ParallelStream parallelStream = parallelStream(rollupStream, new FieldComparator("a_s", ComparatorOrder.ASCENDING));
        attachStreamFactory(parallelStream);
        parallelStream.setStreamContext(streamContext);
        List<Tuple> tuples = getTuples(parallelStream);
        assertEquals(3, tuples.size());
        // Tuples arrive sorted by bucket (a_s asc). Expected aggregates are
        // computed by hand from the indexed docs above.
        assertRollupTuple(tuples.get(0), "hello0", 17, 18, 0, 1, 14, 10, 4.25, 4.5, 4);
        assertRollupTuple(tuples.get(1), "hello3", 38, 26, 3, 3, 13, 9, 9.5, 6.5, 4);
        // NOTE: sum(a_i) here was previously compared via longValue() with no
        // delta, inconsistent with every other assertion; now delta-based like
        // the rest.
        assertRollupTuple(tuples.get(2), "hello4", 15, 11, 4, 4, 11, 7, 7.5, 5.5, 2);
    } finally {
        solrClientCache.close();
    }
}

/**
 * Asserts one rollup tuple: the bucket key plus sum/min/max/avg over a_i and
 * a_f and count(*), each compared as a double with a 0.001 delta.
 */
private static void assertRollupTuple(Tuple tuple, String expectedBucket, double sumi, double sumf, double mini, double minf, double maxi, double maxf, double avgi, double avgf, double count) {
    assertEquals(expectedBucket, tuple.getString("a_s"));
    assertEquals(sumi, tuple.getDouble("sum(a_i)").doubleValue(), 0.001);
    assertEquals(sumf, tuple.getDouble("sum(a_f)").doubleValue(), 0.001);
    assertEquals(mini, tuple.getDouble("min(a_i)").doubleValue(), 0.001);
    assertEquals(minf, tuple.getDouble("min(a_f)").doubleValue(), 0.001);
    assertEquals(maxi, tuple.getDouble("max(a_i)").doubleValue(), 0.001);
    assertEquals(maxf, tuple.getDouble("max(a_f)").doubleValue(), 0.001);
    assertEquals(avgi, tuple.getDouble("avg(a_i)").doubleValue(), 0.001);
    assertEquals(avgf, tuple.getDouble("avg(a_f)").doubleValue(), 0.001);
    assertEquals(count, tuple.getDouble("count(*)").doubleValue(), 0.001);
}
Use of org.apache.solr.client.solrj.io.stream.metrics.Metric in the lucene-solr project (apache):
class StreamingTest, method testStatsStream.
/**
 * Verifies StatsStream over the whole collection (q=*:*): indexes ten docs and
 * checks the single stats tuple's sum/min/max/avg over a_i and a_f plus count(*).
 */
@Test
public void testStatsStream() throws Exception {
    new UpdateRequest().add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1").add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2").add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3").add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4").add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5").add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6").add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7").add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8").add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9").add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    try {
        SolrParams sParamsA = mapParams("q", "*:*");
        Metric[] metrics = { new SumMetric("a_i"), new SumMetric("a_f"), new MinMetric("a_i"), new MinMetric("a_f"), new MaxMetric("a_i"), new MaxMetric("a_f"), new MeanMetric("a_i"), new MeanMetric("a_f"), new CountMetric() };
        StatsStream statsStream = new StatsStream(zkHost, COLLECTIONORALIAS, sParamsA, metrics);
        statsStream.setStreamContext(streamContext);
        List<Tuple> tuples = getTuples(statsStream);
        // Stats over the whole result set produce exactly one tuple.
        assertEquals(1, tuples.size());
        //Test Long and Double Sums
        Tuple tuple = tuples.get(0);
        Double sumi = tuple.getDouble("sum(a_i)");
        Double sumf = tuple.getDouble("sum(a_f)");
        Double mini = tuple.getDouble("min(a_i)");
        Double minf = tuple.getDouble("min(a_f)");
        Double maxi = tuple.getDouble("max(a_i)");
        Double maxf = tuple.getDouble("max(a_f)");
        Double avgi = tuple.getDouble("avg(a_i)");
        Double avgf = tuple.getDouble("avg(a_f)");
        Double count = tuple.getDouble("count(*)");
        // FIX: sum(a_i) was previously compared via longValue() with no delta,
        // inconsistent with every other assertion in this method; use a
        // delta-based double comparison like the rest.
        assertEquals(70.0, sumi.doubleValue(), 0.01);
        assertEquals(55.0, sumf.doubleValue(), 0.01);
        assertEquals(0.0, mini.doubleValue(), 0.01);
        assertEquals(1.0, minf.doubleValue(), 0.01);
        assertEquals(14.0, maxi.doubleValue(), 0.01);
        assertEquals(10.0, maxf.doubleValue(), 0.01);
        assertEquals(7.0, avgi.doubleValue(), .01);
        assertEquals(5.5, avgf.doubleValue(), .001);
        assertEquals(10, count.doubleValue(), .01);
    } finally {
        solrClientCache.close();
    }
}
Aggregations