Use of org.apache.lucene.facet.FacetField in project lucene-solr by apache: class TestTaxonomyFacetCounts, method testBasic.
public void testBasic() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();

  // Writes facet ords to a separate directory from the
  // main index:
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);

  FacetsConfig config = new FacetsConfig();
  config.setHierarchical("Publish Date", true);

  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

  Document doc = new Document();
  doc.add(new FacetField("Author", "Bob"));
  doc.add(new FacetField("Publish Date", "2010", "10", "15"));
  writer.addDocument(config.build(taxoWriter, doc));

  doc = new Document();
  doc.add(new FacetField("Author", "Lisa"));
  doc.add(new FacetField("Publish Date", "2010", "10", "20"));
  writer.addDocument(config.build(taxoWriter, doc));

  doc = new Document();
  doc.add(new FacetField("Author", "Lisa"));
  doc.add(new FacetField("Publish Date", "2012", "1", "1"));
  writer.addDocument(config.build(taxoWriter, doc));

  doc = new Document();
  doc.add(new FacetField("Author", "Susan"));
  doc.add(new FacetField("Publish Date", "2012", "1", "7"));
  writer.addDocument(config.build(taxoWriter, doc));

  doc = new Document();
  doc.add(new FacetField("Author", "Frank"));
  doc.add(new FacetField("Publish Date", "1999", "5", "5"));
  writer.addDocument(config.build(taxoWriter, doc));

  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());

  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

  Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);

  // Retrieve & verify results:
  assertEquals("dim=Publish Date path=[] value=5 childCount=3\n 2010 (2)\n 2012 (2)\n 1999 (1)\n", facets.getTopChildren(10, "Publish Date").toString());
  assertEquals("dim=Author path=[] value=5 childCount=4\n Lisa (2)\n Bob (1)\n Susan (1)\n Frank (1)\n", facets.getTopChildren(10, "Author").toString());

  // Now user drills down on Publish Date/2010:
  DrillDownQuery q2 = new DrillDownQuery(config);
  q2.add("Publish Date", "2010");
  FacetsCollector c = new FacetsCollector();
  searcher.search(q2, c);
  facets = new FastTaxonomyFacetCounts(taxoReader, config, c);
  assertEquals("dim=Author path=[] value=2 childCount=2\n Bob (1)\n Lisa (1)\n", facets.getTopChildren(10, "Author").toString());
  assertEquals(1, facets.getSpecificValue("Author", "Lisa"));
  assertNull(facets.getTopChildren(10, "Non exitent dim"));

  // Smoke test PrintTaxonomyStats:
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  PrintTaxonomyStats.printStats(taxoReader, new PrintStream(bos, false, IOUtils.UTF_8), true);
  String result = bos.toString(IOUtils.UTF_8);
  assertTrue(result.indexOf("/Author: 4 immediate children; 5 total categories") != -1);
  assertTrue(result.indexOf("/Publish Date: 3 immediate children; 12 total categories") != -1);

  // Make sure at least a few nodes of the tree came out:
  assertTrue(result.indexOf(" /1999") != -1);
  assertTrue(result.indexOf(" /2012") != -1);
  assertTrue(result.indexOf(" /20") != -1);

  writer.close();
  IOUtils.close(taxoWriter, searcher.getIndexReader(), taxoReader, taxoDir, dir);
}
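Outside the test framework (no newDirectory, RandomIndexWriter, or newSearcher), the same index-then-count pattern looks roughly like the sketch below. This is a minimal illustration, not code from the test: the FacetCountsSketch class name, the /tmp directory paths, the WhitespaceAnalyzer, and the MatchAllDocsQuery are all arbitrary assumptions made for the example.

import java.nio.file.Paths;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.IOUtils;

public class FacetCountsSketch {
  public static void main(String[] args) throws Exception {
    // Separate directories for the main index and the taxonomy (facet ords); paths are hypothetical.
    Directory dir = FSDirectory.open(Paths.get("/tmp/index"));
    Directory taxoDir = FSDirectory.open(Paths.get("/tmp/taxo"));

    FacetsConfig config = new FacetsConfig();
    config.setHierarchical("Publish Date", true);

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

    Document doc = new Document();
    doc.add(new FacetField("Author", "Bob"));
    doc.add(new FacetField("Publish Date", "2010", "10", "15"));
    // config.build() rewrites the FacetFields into the fields that actually get indexed.
    writer.addDocument(config.build(taxoWriter, doc));

    // NRT open of both the main index and the taxonomy:
    IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

    // Collect matching docs, then count facets from the collector:
    FacetsCollector fc = new FacetsCollector();
    FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);

    FacetResult authors = facets.getTopChildren(10, "Author");
    System.out.println(authors);

    IOUtils.close(writer, taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);
  }
}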
Use of org.apache.lucene.facet.FacetField in project lucene-solr by apache: class TestTaxonomyFacetCounts, method testReallyNoNormsForDrillDown.
public void testReallyNoNormsForDrillDown() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
  iwc.setSimilarity(new PerFieldSimilarityWrapper() {

    final Similarity sim = new ClassicSimilarity();

    @Override
    public Similarity get(String name) {
      assertEquals("field", name);
      return sim;
    }
  });
  TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
  FacetsConfig config = new FacetsConfig();

  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  doc.add(new FacetField("a", "path"));
  writer.addDocument(config.build(taxoWriter, doc));
  writer.close();
  IOUtils.close(taxoWriter, dir, taxoDir);
}
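The PerFieldSimilarityWrapper assertion only ever sees the text field "field", which is the point of the test: the drill-down terms that config.build() adds for the facet are indexed as plain, unscored terms and never pass through the Similarity. As a rough illustration (not from the test), those drill-down terms can still be matched afterwards with a DrillDownQuery; the DrillDownSketch class name is hypothetical, and the searcher and config are assumed to come from an index built as above.

import java.io.IOException;

import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.search.IndexSearcher;

public class DrillDownSketch {
  /** Counts documents whose "a" dimension was indexed with the path "path". */
  static int countDrillDown(IndexSearcher searcher, FacetsConfig config) throws IOException {
    DrillDownQuery q = new DrillDownQuery(config);
    q.add("a", "path");        // dimension and path from the document indexed above
    return searcher.count(q);  // pure term matching; no scoring or norms involved
  }
}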
Use of org.apache.lucene.facet.FacetField in project lucene-solr by apache: class TestTaxonomyFacetCounts, method testDetectMultiValuedField.
// Make sure we catch when app didn't declare field as
// multi-valued but it was:
public void testDetectMultiValuedField() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

  FacetsConfig config = new FacetsConfig();

  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  doc.add(new FacetField("a", "path"));
  doc.add(new FacetField("a", "path2"));

  expectThrows(IllegalArgumentException.class, () -> {
    config.build(taxoWriter, doc);
  });

  writer.close();
  IOUtils.close(taxoWriter, dir, taxoDir);
}
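The IllegalArgumentException is FacetsConfig's way of flagging that dimension "a" received two values without being declared multi-valued. The corresponding fix, sketched below rather than taken from this test, is to declare the dimension up front with setMultiValued; the MultiValuedSketch class and helper method are hypothetical.

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;

public class MultiValuedSketch {
  /** Builds a document carrying two values in dimension "a"; taxoWriter is assumed to be open. */
  static Document buildMultiValued(FacetsConfig config, TaxonomyWriter taxoWriter) throws IOException {
    config.setMultiValued("a", true);  // declare "a" as multi-valued so build() accepts both values
    Document doc = new Document();
    doc.add(new FacetField("a", "path"));
    doc.add(new FacetField("a", "path2"));
    return config.build(taxoWriter, doc);
  }
}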
Use of org.apache.lucene.facet.FacetField in project lucene-solr by apache: class TestTaxonomyFacetCounts, method testRandom.
public void testRandom() throws Exception {
  String[] tokens = getRandomTokens(10);
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();

  RandomIndexWriter w = new RandomIndexWriter(random(), indexDir);
  DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
  FacetsConfig config = new FacetsConfig();
  int numDocs = atLeast(1000);
  int numDims = TestUtil.nextInt(random(), 1, 7);
  List<TestDoc> testDocs = getRandomDocs(tokens, numDocs, numDims);
  for (TestDoc testDoc : testDocs) {
    Document doc = new Document();
    doc.add(newStringField("content", testDoc.content, Field.Store.NO));
    for (int j = 0; j < numDims; j++) {
      if (testDoc.dims[j] != null) {
        doc.add(new FacetField("dim" + j, testDoc.dims[j]));
      }
    }
    w.addDocument(config.build(tw, doc));
  }

  // NRT open
  IndexSearcher searcher = newSearcher(w.getReader());

  // NRT open
  TaxonomyReader tr = new DirectoryTaxonomyReader(tw);

  int iters = atLeast(100);
  for (int iter = 0; iter < iters; iter++) {
    String searchToken = tokens[random().nextInt(tokens.length)];
    if (VERBOSE) {
      System.out.println("\nTEST: iter content=" + searchToken);
    }
    FacetsCollector fc = new FacetsCollector();
    FacetsCollector.search(searcher, new TermQuery(new Term("content", searchToken)), 10, fc);
    Facets facets = getTaxonomyFacetCounts(tr, config, fc);

    // Slow, yet hopefully bug-free, faceting:
    @SuppressWarnings({ "rawtypes", "unchecked" }) Map<String, Integer>[] expectedCounts = new HashMap[numDims];
    for (int i = 0; i < numDims; i++) {
      expectedCounts[i] = new HashMap<>();
    }

    for (TestDoc doc : testDocs) {
      if (doc.content.equals(searchToken)) {
        for (int j = 0; j < numDims; j++) {
          if (doc.dims[j] != null) {
            Integer v = expectedCounts[j].get(doc.dims[j]);
            if (v == null) {
              expectedCounts[j].put(doc.dims[j], 1);
            } else {
              expectedCounts[j].put(doc.dims[j], v.intValue() + 1);
            }
          }
        }
      }
    }

    List<FacetResult> expected = new ArrayList<>();
    for (int i = 0; i < numDims; i++) {
      List<LabelAndValue> labelValues = new ArrayList<>();
      int totCount = 0;
      for (Map.Entry<String, Integer> ent : expectedCounts[i].entrySet()) {
        labelValues.add(new LabelAndValue(ent.getKey(), ent.getValue()));
        totCount += ent.getValue();
      }
      sortLabelValues(labelValues);
      if (totCount > 0) {
        expected.add(new FacetResult("dim" + i, new String[0], totCount, labelValues.toArray(new LabelAndValue[labelValues.size()]), labelValues.size()));
      }
    }

    // Sort by highest value, tie break by value:
    sortFacetResults(expected);

    List<FacetResult> actual = facets.getAllDims(10);

    // Messy: fixup ties
    sortTies(actual);

    assertEquals(expected, actual);
  }

  w.close();
  IOUtils.close(tw, searcher.getIndexReader(), tr, indexDir, taxoDir);
}
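The assertEquals at the end compares the brute-force HashMap counts against facets.getAllDims(10). Outside a test, consuming that result usually means walking the FacetResult objects directly; the sketch below shows one way to do that (the AllDimsSketch class is hypothetical; the field names come from the public FacetResult and LabelAndValue API).

import java.io.IOException;
import java.util.List;

import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.LabelAndValue;

public class AllDimsSketch {
  /** Prints the top 10 children of every dimension known to the given Facets instance. */
  static void printAllDims(Facets facets) throws IOException {
    List<FacetResult> allDims = facets.getAllDims(10);
    for (FacetResult result : allDims) {
      System.out.println(result.dim + " (total " + result.value + ", " + result.childCount + " children)");
      for (LabelAndValue lv : result.labelValues) {
        System.out.println("  " + lv.label + " -> " + lv.value);
      }
    }
  }
}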
Use of org.apache.lucene.facet.FacetField in project lucene-solr by apache: class TestTaxonomyFacetCounts, method testSeparateIndexedFields.
public void testSeparateIndexedFields() throws Exception {
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();

  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
  IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
  FacetsConfig config = new FacetsConfig();
  config.setIndexFieldName("b", "$b");

  for (int i = atLeast(30); i > 0; --i) {
    Document doc = new Document();
    doc.add(new StringField("f", "v", Field.Store.NO));
    doc.add(new FacetField("a", "1"));
    doc.add(new FacetField("b", "1"));
    iw.addDocument(config.build(taxoWriter, doc));
  }

  DirectoryReader r = DirectoryReader.open(iw);
  DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

  FacetsCollector sfc = new FacetsCollector();
  newSearcher(r).search(new MatchAllDocsQuery(), sfc);

  Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, sfc);
  Facets facets2 = getTaxonomyFacetCounts(taxoReader, config, sfc, "$b");

  assertEquals(r.maxDoc(), facets1.getTopChildren(10, "a").value.intValue());
  assertEquals(r.maxDoc(), facets2.getTopChildren(10, "b").value.intValue());

  iw.close();
  IOUtils.close(taxoWriter, taxoReader, taxoDir, r, indexDir);
}
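getTaxonomyFacetCounts here is a helper defined in the test class. Outside the test, the equivalent is FastTaxonomyFacetCounts, which has an overload that takes the index field name directly. A hedged sketch of reading both facet fields follows; the SeparateFieldsSketch class is hypothetical, and the reader/collector wiring is assumed to match the test above.

import java.io.IOException;

import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;

public class SeparateFieldsSketch {
  /** Counts dimension "a" from the default facet field and "b" from the "$b" field. */
  static void countBothFields(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
    // Dimension "a" lives in the default facet index field:
    Facets facetsA = new FastTaxonomyFacetCounts(taxoReader, config, fc);
    // Dimension "b" was redirected to "$b" via config.setIndexFieldName("b", "$b"):
    Facets facetsB = new FastTaxonomyFacetCounts("$b", taxoReader, config, fc);
    System.out.println(facetsA.getTopChildren(10, "a"));
    System.out.println(facetsB.getTopChildren(10, "b"));
  }
}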