Use of org.apache.lucene.facet.FacetsCollector in project lucene-solr by apache.
The class SimpleFacetsExample, method drillDown.
/** User drills down on 'Publish Date/2010', and we
 *  return facets for 'Author' */
private FacetResult drillDown() throws IOException {
  DirectoryReader indexReader = DirectoryReader.open(indexDir);
  IndexSearcher searcher = new IndexSearcher(indexReader);
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
  // Passing no baseQuery means we drill down on all
  // documents ("browse only"):
  DrillDownQuery q = new DrillDownQuery(config);
  // Now user drills down on Publish Date/2010:
  q.add("Publish Date", "2010");
  FacetsCollector fc = new FacetsCollector();
  FacetsCollector.search(searcher, q, 10, fc);
  // Retrieve results
  Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
  FacetResult result = facets.getTopChildren(10, "Author");
  indexReader.close();
  taxoReader.close();
  return result;
}
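drillDown() relies on the SimpleFacetsExample fields indexDir, taxoDir, and config, which an earlier indexing step must have populated with "Author" and "Publish Date" facet fields. A minimal sketch of that indexing side, assuming in-memory directories and a hierarchical "Publish Date" dimension (the analyzer choice and sample document are illustrative assumptions, not the example's actual code):

IndexWriter indexWriter = new IndexWriter(indexDir,
    new IndexWriterConfig(new WhitespaceAnalyzer()));
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
// "Publish Date" paths like 2010/10/15 need a hierarchical dim:
config.setHierarchical("Publish Date", true);
Document doc = new Document();
doc.add(new FacetField("Author", "Lisa"));
doc.add(new FacetField("Publish Date", "2010", "10", "15"));
// config.build rewrites the FacetFields into drill-down terms plus the
// taxonomy ordinals that FacetsCollector aggregates at search time:
indexWriter.addDocument(config.build(taxoWriter, doc));
indexWriter.close();
taxoWriter.close();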
Use of org.apache.lucene.facet.FacetsCollector in project lucene-solr by apache.
The class TestTaxonomyFacetSumValueSource, method testRandom.
public void testRandom() throws Exception {
  String[] tokens = getRandomTokens(10);
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), indexDir);
  DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
  FacetsConfig config = new FacetsConfig();
  int numDocs = atLeast(1000);
  int numDims = TestUtil.nextInt(random(), 1, 7);
  List<TestDoc> testDocs = getRandomDocs(tokens, numDocs, numDims);
  for (TestDoc testDoc : testDocs) {
    Document doc = new Document();
    doc.add(newStringField("content", testDoc.content, Field.Store.NO));
    testDoc.value = random().nextFloat();
    doc.add(new FloatDocValuesField("value", testDoc.value));
    for (int j = 0; j < numDims; j++) {
      if (testDoc.dims[j] != null) {
        doc.add(new FacetField("dim" + j, testDoc.dims[j]));
      }
    }
    w.addDocument(config.build(tw, doc));
  }
  // NRT open
  IndexSearcher searcher = newSearcher(w.getReader());
  // NRT open
  TaxonomyReader tr = new DirectoryTaxonomyReader(tw);
  int iters = atLeast(100);
  for (int iter = 0; iter < iters; iter++) {
    String searchToken = tokens[random().nextInt(tokens.length)];
    if (VERBOSE) {
      System.out.println("\nTEST: iter content=" + searchToken);
    }
    FacetsCollector fc = new FacetsCollector();
    FacetsCollector.search(searcher, new TermQuery(new Term("content", searchToken)), 10, fc);
    Facets facets = new TaxonomyFacetSumValueSource(tr, config, fc, DoubleValuesSource.fromFloatField("value"));
    // Slow, yet hopefully bug-free, faceting:
    @SuppressWarnings({ "rawtypes", "unchecked" })
    Map<String, Float>[] expectedValues = new HashMap[numDims];
    for (int i = 0; i < numDims; i++) {
      expectedValues[i] = new HashMap<>();
    }
    for (TestDoc doc : testDocs) {
      if (doc.content.equals(searchToken)) {
        for (int j = 0; j < numDims; j++) {
          if (doc.dims[j] != null) {
            Float v = expectedValues[j].get(doc.dims[j]);
            if (v == null) {
              expectedValues[j].put(doc.dims[j], doc.value);
            } else {
              expectedValues[j].put(doc.dims[j], v + doc.value);
            }
          }
        }
      }
    }
    List<FacetResult> expected = new ArrayList<>();
    for (int i = 0; i < numDims; i++) {
      List<LabelAndValue> labelValues = new ArrayList<>();
      double totValue = 0;
      for (Map.Entry<String, Float> ent : expectedValues[i].entrySet()) {
        labelValues.add(new LabelAndValue(ent.getKey(), ent.getValue()));
        totValue += ent.getValue();
      }
      sortLabelValues(labelValues);
      if (totValue > 0) {
        expected.add(new FacetResult("dim" + i, new String[0], totValue,
            labelValues.toArray(new LabelAndValue[labelValues.size()]), labelValues.size()));
      }
    }
    // Sort by highest value, tie break by value:
    sortFacetResults(expected);
    List<FacetResult> actual = facets.getAllDims(10);
    // Messy: fixup ties
    sortTies(actual);
    if (VERBOSE) {
      System.out.println("expected=\n" + expected.toString());
      System.out.println("actual=\n" + actual.toString());
    }
    assertFloatValuesEquals(expected, actual);
  }
  w.close();
  IOUtils.close(tw, searcher.getIndexReader(), tr, indexDir, taxoDir);
}
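The helpers above (getRandomTokens, getRandomDocs, sortLabelValues, sortFacetResults, sortTies, assertFloatValuesEquals) live in the shared FacetTestCase base class and are not shown here. To make the expected lists comparable with what the facets return, a sortLabelValues-style helper has to order entries by value descending with a deterministic tie-break; a rough sketch of that idea (the real FacetTestCase implementation may differ in detail):

// Highest value first; ties broken lexicographically by label so the
// expected list has a stable, comparable order.
static void sortLabelValues(List<LabelAndValue> labelValues) {
  Collections.sort(labelValues, (a, b) -> {
    int cmp = Double.compare(b.value.doubleValue(), a.value.doubleValue());
    return cmp != 0 ? cmp : a.label.compareTo(b.label);
  });
}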
Use of org.apache.lucene.facet.FacetsCollector in project lucene-solr by apache.
The class TestTaxonomyFacetSumValueSource, method testSparseFacets.
// LUCENE-5333
public void testSparseFacets() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  // Writes facet ords to a separate directory from the
  // main index:
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  FacetsConfig config = new FacetsConfig();
  Document doc = new Document();
  doc.add(new NumericDocValuesField("num", 10));
  doc.add(new FacetField("a", "foo1"));
  writer.addDocument(config.build(taxoWriter, doc));
  if (random().nextBoolean()) {
    writer.commit();
  }
  doc = new Document();
  doc.add(new NumericDocValuesField("num", 20));
  doc.add(new FacetField("a", "foo2"));
  doc.add(new FacetField("b", "bar1"));
  writer.addDocument(config.build(taxoWriter, doc));
  if (random().nextBoolean()) {
    writer.commit();
  }
  doc = new Document();
  doc.add(new NumericDocValuesField("num", 30));
  doc.add(new FacetField("a", "foo3"));
  doc.add(new FacetField("b", "bar2"));
  doc.add(new FacetField("c", "baz1"));
  writer.addDocument(config.build(taxoWriter, doc));
  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());
  writer.close();
  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
  taxoWriter.close();
  FacetsCollector c = new FacetsCollector();
  searcher.search(new MatchAllDocsQuery(), c);
  TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, DoubleValuesSource.fromIntField("num"));
  // Ask for top 10 labels for any dims that have counts:
  List<FacetResult> results = facets.getAllDims(10);
  assertEquals(3, results.size());
  assertEquals("dim=a path=[] value=60.0 childCount=3\n  foo3 (30.0)\n  foo2 (20.0)\n  foo1 (10.0)\n", results.get(0).toString());
  assertEquals("dim=b path=[] value=50.0 childCount=2\n  bar2 (30.0)\n  bar1 (20.0)\n", results.get(1).toString());
  assertEquals("dim=c path=[] value=30.0 childCount=1\n  baz1 (30.0)\n", results.get(2).toString());
  IOUtils.close(searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
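getAllDims(10) returns one FacetResult per dim that aggregated a value, ordered by total. The same Facets object can also be queried per dim; a short usage sketch for the sparse dim "c" (variable names as in the test above):

// Top children for just the "c" dim; the value is the sum of the "num"
// doc-values field over all matching documents.
FacetResult cOnly = facets.getTopChildren(10, "c");
for (LabelAndValue lv : cOnly.labelValues) {
  System.out.println(lv.label + " -> " + lv.value);  // baz1 -> 30.0
}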
Use of org.apache.lucene.facet.FacetsCollector in project lucene-solr by apache.
The class TestTaxonomyFacetSumValueSource, method testBasic.
public void testBasic() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  // Writes facet ords to a separate directory from the
  // main index:
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  FacetsConfig config = new FacetsConfig();
  // Reused across documents, to add the necessary facet
  // fields:
  Document doc = new Document();
  doc.add(new NumericDocValuesField("num", 10));
  doc.add(new FacetField("Author", "Bob"));
  writer.addDocument(config.build(taxoWriter, doc));
  doc = new Document();
  doc.add(new NumericDocValuesField("num", 20));
  doc.add(new FacetField("Author", "Lisa"));
  writer.addDocument(config.build(taxoWriter, doc));
  doc = new Document();
  doc.add(new NumericDocValuesField("num", 30));
  doc.add(new FacetField("Author", "Lisa"));
  writer.addDocument(config.build(taxoWriter, doc));
  doc = new Document();
  doc.add(new NumericDocValuesField("num", 40));
  doc.add(new FacetField("Author", "Susan"));
  writer.addDocument(config.build(taxoWriter, doc));
  doc = new Document();
  doc.add(new NumericDocValuesField("num", 45));
  doc.add(new FacetField("Author", "Frank"));
  writer.addDocument(config.build(taxoWriter, doc));
  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());
  writer.close();
  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
  taxoWriter.close();
  // Aggregate the facet counts:
  FacetsCollector c = new FacetsCollector();
  // MatchAllDocsQuery is for "browsing" (counts facets
  // for all non-deleted docs in the index); normally
  // you'd use a "normal" query and one of the
  // Facets.search utility methods:
  searcher.search(new MatchAllDocsQuery(), c);
  TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, DoubleValuesSource.fromIntField("num"));
  // Retrieve & verify results:
  assertEquals("dim=Author path=[] value=145.0 childCount=4\n  Lisa (50.0)\n  Frank (45.0)\n  Susan (40.0)\n  Bob (10.0)\n", facets.getTopChildren(10, "Author").toString());
  taxoReader.close();
  searcher.getIndexReader().close();
  dir.close();
  taxoDir.close();
}
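The comment in the test points at the non-browsing variant: FacetsCollector.search runs an ordinary query and gathers the top hits and facet aggregates in one pass. A hedged sketch of that variant against the same searcher and taxoReader (the "content"/"tree" term is a made-up example, not a field this test indexes):

// Collect the top-10 hits and the per-match value sums together:
FacetsCollector fc = new FacetsCollector();
TopDocs hits = FacetsCollector.search(
    searcher, new TermQuery(new Term("content", "tree")), 10, fc);
Facets queryFacets = new TaxonomyFacetSumValueSource(
    taxoReader, config, fc, DoubleValuesSource.fromIntField("num"));
FacetResult topAuthors = queryFacets.getTopChildren(10, "Author");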
Use of org.apache.lucene.facet.FacetsCollector in project lucene-solr by apache.
The class TestTaxonomyFacetCounts2, method testAllCounts.
@Test
public void testAllCounts() throws Exception {
  DirectoryReader indexReader = DirectoryReader.open(indexDir);
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
  IndexSearcher searcher = newSearcher(indexReader);
  FacetsCollector sfc = new FacetsCollector();
  searcher.search(new MatchAllDocsQuery(), sfc);
  Facets facets = getTaxonomyFacetCounts(taxoReader, getConfig(), sfc);
  FacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_A, CP_A);
  assertEquals(-1, result.value.intValue());
  int prevValue = Integer.MAX_VALUE;
  for (LabelAndValue labelValue : result.labelValues) {
    assertEquals(allExpectedCounts.get(CP_A + "/" + labelValue.label), labelValue.value);
    assertTrue("wrong sort order of sub results: labelValue.value=" + labelValue.value + " prevValue=" + prevValue,
        labelValue.value.intValue() <= prevValue);
    prevValue = labelValue.value.intValue();
  }
  result = facets.getTopChildren(NUM_CHILDREN_CP_B, CP_B);
  assertEquals(allExpectedCounts.get(CP_B), result.value);
  prevValue = Integer.MAX_VALUE;
  for (LabelAndValue labelValue : result.labelValues) {
    assertEquals(allExpectedCounts.get(CP_B + "/" + labelValue.label), labelValue.value);
    assertTrue("wrong sort order of sub results: labelValue.value=" + labelValue.value + " prevValue=" + prevValue,
        labelValue.value.intValue() <= prevValue);
    prevValue = labelValue.value.intValue();
  }
  IOUtils.close(indexReader, taxoReader);
}
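The contrast between the -1 dim value asserted for CP_A and the real count for CP_B comes from getConfig() (not shown): a multi-valued dim only carries a dim-level count when one is explicitly required. A hedged reconstruction of the relevant config lines (dim names assumed to match CP_A/CP_B):

// A multi-valued dim reports value=-1 unless a dim count is required:
FacetsConfig config = new FacetsConfig();
config.setMultiValued("A", true);       // CP_A: getTopChildren value stays -1
config.setMultiValued("B", true);
config.setRequireDimCount("B", true);   // CP_B: a real dim count is kept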