Use of org.apache.solr.client.solrj.io.eval.BooleanEvaluator in project lucene-solr by apache.
In the class SolrTable, the method handleGroupByMapReduce:
private TupleStream handleGroupByMapReduce(String zk, String collection, Properties properties,
    final List<Map.Entry<String, Class>> fields, final String query,
    final List<Pair<String, String>> orders, final List<String> _buckets,
    final List<Pair<String, String>> metricPairs, final String limit,
    final String havingPredicate) throws IOException {
  Map<String, Class> fmap = new HashMap<>();
  for (Map.Entry<String, Class> entry : fields) {
    fmap.put(entry.getKey(), entry.getValue());
  }
  int numWorkers = Integer.parseInt(properties.getProperty("numWorkers", "1"));
  Bucket[] buckets = buildBuckets(_buckets, fields);
  Metric[] metrics = buildMetrics(metricPairs, false).toArray(new Metric[0]);
  if (metrics.length == 0) {
    return handleSelectDistinctMapReduce(zk, collection, properties, fields, query, orders, buckets, limit);
  } else {
    for (Metric metric : metrics) {
      Class c = fmap.get(metric.getIdentifier());
      if (Long.class.equals(c)) {
        metric.outputLong = true;
      }
    }
  }
  Set<String> fieldSet = getFieldSet(metrics, fields);
  if (metrics.length == 0) {
    throw new IOException("Group by queries must include at least one aggregate function.");
  }
  String fl = getFields(fieldSet);
  String sortDirection = getSortDirection(orders);
  String sort = bucketSort(buckets, sortDirection);
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set(CommonParams.FL, fl);
  params.set(CommonParams.Q, query);
  params.set(CommonParams.WT, CommonParams.JAVABIN);
  // Always use the /export handler for Group By Queries because it requires exporting full result sets.
  params.set(CommonParams.QT, "/export");
  if (numWorkers > 1) {
    params.set("partitionKeys", getPartitionKeys(buckets));
  }
  params.set(SORT, sort);

  TupleStream tupleStream = null;
  CloudSolrStream cstream = new CloudSolrStream(zk, collection, params);
  tupleStream = new RollupStream(cstream, buckets, metrics);

  StreamFactory factory = new StreamFactory()
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("parallel", ParallelStream.class)
      .withFunctionName("rollup", RollupStream.class)
      .withFunctionName("sum", SumMetric.class)
      .withFunctionName("min", MinMetric.class)
      .withFunctionName("max", MaxMetric.class)
      .withFunctionName("avg", MeanMetric.class)
      .withFunctionName("count", CountMetric.class)
      .withFunctionName("and", AndEvaluator.class)
      .withFunctionName("or", OrEvaluator.class)
      .withFunctionName("not", NotEvaluator.class)
      .withFunctionName("eq", EqualsEvaluator.class)
      .withFunctionName("gt", GreaterThanEvaluator.class)
      .withFunctionName("lt", LessThanEvaluator.class)
      .withFunctionName("val", RawValueEvaluator.class)
      .withFunctionName("lteq", LessThanEqualToEvaluator.class)
      .withFunctionName("having", HavingStream.class)
      .withFunctionName("gteq", GreaterThanEqualToEvaluator.class);

  if (havingPredicate != null) {
    BooleanEvaluator booleanOperation =
        (BooleanEvaluator) factory.constructEvaluator(StreamExpressionParser.parse(havingPredicate));
    tupleStream = new HavingStream(tupleStream, booleanOperation);
  }

  if (numWorkers > 1) {
    // Do the rollups in parallel.
    // Maintain the sort of the Tuples coming from the workers.
    StreamComparator comp = bucketSortComp(buckets, sortDirection);
    ParallelStream parallelStream = new ParallelStream(zk, collection, tupleStream, numWorkers, comp);
    parallelStream.setStreamFactory(factory);
    tupleStream = parallelStream;
  }

  if (orders != null && orders.size() > 0) {
    if (!sortsEqual(buckets, sortDirection, orders)) {
      int lim = (limit == null) ? 100 : Integer.parseInt(limit);
      StreamComparator comp = getComp(orders);
      // Rank the Tuples.
      // If the parallel stream is used, ALL the rolled-up tuples from the workers will be ranked,
      // providing a true Top or Bottom.
      tupleStream = new RankStream(tupleStream, lim, comp);
    } else {
      // Only need to limit the result, not rank it.
      if (limit != null) {
        tupleStream = new LimitStream(tupleStream, Integer.parseInt(limit));
      }
    }
  } else {
    // No order by; check for a limit.
    if (limit != null) {
      tupleStream = new LimitStream(tupleStream, Integer.parseInt(limit));
    }
  }
  return tupleStream;
}
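For context, the having clause above is handled by parsing the predicate string into a stream expression and letting the StreamFactory resolve the registered evaluator names ("and", "gt", "lt", ...) into a BooleanEvaluator, which then filters tuples through a HavingStream. A minimal sketch of that path in isolation, reusing only the solrj classes and signatures shown above; the predicate shape and the field names it references (e.g. "mySum", "myCount") are illustrative, not taken from the project:

import java.io.IOException;
import org.apache.solr.client.solrj.io.eval.*;
import org.apache.solr.client.solrj.io.stream.HavingStream;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class HavingPredicateSketch {
  // Wraps 'source' in a HavingStream built from a predicate such as
  // "and(gt(mySum, 100), lt(myCount, 50))" (field names are hypothetical).
  // The factory registrations mirror the ones used in handleGroupByMapReduce.
  public static TupleStream applyHaving(TupleStream source, String havingPredicate) throws IOException {
    StreamFactory factory = new StreamFactory()
        .withFunctionName("and", AndEvaluator.class)
        .withFunctionName("or", OrEvaluator.class)
        .withFunctionName("not", NotEvaluator.class)
        .withFunctionName("eq", EqualsEvaluator.class)
        .withFunctionName("gt", GreaterThanEvaluator.class)
        .withFunctionName("lt", LessThanEvaluator.class)
        .withFunctionName("lteq", LessThanEqualToEvaluator.class)
        .withFunctionName("gteq", GreaterThanEqualToEvaluator.class)
        .withFunctionName("val", RawValueEvaluator.class);

    // Parse the predicate into an expression tree, then construct the matching evaluator chain.
    BooleanEvaluator predicate =
        (BooleanEvaluator) factory.constructEvaluator(StreamExpressionParser.parse(havingPredicate));
    return new HavingStream(source, predicate);
  }
}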
Use of org.apache.solr.client.solrj.io.eval.BooleanEvaluator in project lucene-solr by apache.
In the class SolrTable, the method handleGroupByFacet:
private TupleStream handleGroupByFacet(String zkHost, String collection,
    final List<Map.Entry<String, Class>> fields, final String query,
    final List<Pair<String, String>> orders, final List<String> bucketFields,
    final List<Pair<String, String>> metricPairs, final String lim,
    final String havingPredicate) throws IOException {
  Map<String, Class> fmap = new HashMap<>();
  for (Map.Entry<String, Class> f : fields) {
    fmap.put(f.getKey(), f.getValue());
  }
  ModifiableSolrParams solrParams = new ModifiableSolrParams();
  solrParams.add(CommonParams.Q, query);
  Bucket[] buckets = buildBuckets(bucketFields, fields);
  Metric[] metrics = buildMetrics(metricPairs, true).toArray(new Metric[0]);
  if (metrics.length == 0) {
    // No aggregates were specified, so default to a count per bucket.
    metrics = new Metric[1];
    metrics[0] = new CountMetric();
  } else {
    for (Metric metric : metrics) {
      Class c = fmap.get(metric.getIdentifier());
      if (Long.class.equals(c)) {
        metric.outputLong = true;
      }
    }
  }
  int limit = lim != null ? Integer.parseInt(lim) : 1000;

  FieldComparator[] sorts = null;
  if (orders == null || orders.size() == 0) {
    // No explicit order by: sort each bucket dimension in ascending index order.
    sorts = new FieldComparator[buckets.length];
    for (int i = 0; i < sorts.length; i++) {
      sorts[i] = new FieldComparator("index", ComparatorOrder.ASCENDING);
    }
  } else {
    sorts = getComps(orders);
  }

  // Fetch 25% more facet buckets than the requested limit.
  int overfetch = (int) (limit * 1.25);
  TupleStream tupleStream = new FacetStream(zkHost, collection, solrParams, buckets, metrics, sorts, overfetch);

  StreamFactory factory = new StreamFactory()
      .withFunctionName("search", CloudSolrStream.class)
      .withFunctionName("parallel", ParallelStream.class)
      .withFunctionName("rollup", RollupStream.class)
      .withFunctionName("sum", SumMetric.class)
      .withFunctionName("min", MinMetric.class)
      .withFunctionName("max", MaxMetric.class)
      .withFunctionName("avg", MeanMetric.class)
      .withFunctionName("count", CountMetric.class)
      .withFunctionName("and", AndEvaluator.class)
      .withFunctionName("or", OrEvaluator.class)
      .withFunctionName("not", NotEvaluator.class)
      .withFunctionName("eq", EqualsEvaluator.class)
      .withFunctionName("val", RawValueEvaluator.class)
      .withFunctionName("gt", GreaterThanEvaluator.class)
      .withFunctionName("lt", LessThanEvaluator.class)
      .withFunctionName("lteq", LessThanEqualToEvaluator.class)
      .withFunctionName("gteq", GreaterThanEqualToEvaluator.class);

  if (havingPredicate != null) {
    BooleanEvaluator booleanOperation =
        (BooleanEvaluator) factory.constructEvaluator(StreamExpressionParser.parse(havingPredicate));
    tupleStream = new HavingStream(tupleStream, booleanOperation);
  }

  if (lim != null) {
    tupleStream = new LimitStream(tupleStream, limit);
  }
  return tupleStream;
}
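Both methods return the stream unopened; consuming it follows the usual solrj.io open/read/close cycle, reading until the EOF marker tuple arrives. A minimal sketch under that assumption (whether a StreamContext with a SolrClientCache must be attached to the stream before open() depends on the surrounding code and the solrj version in use):

import java.io.IOException;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.TupleStream;

public class TupleStreamDrain {
  // Reads every tuple from the stream until the EOF marker and returns how many were seen.
  public static int drain(TupleStream stream) throws IOException {
    int count = 0;
    try {
      stream.open();
      while (true) {
        Tuple tuple = stream.read();
        if (tuple.EOF) {
          break;
        }
        count++;
      }
    } finally {
      stream.close();
    }
    return count;
  }
}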