Example usage of org.apache.lucene.facet.FacetField in the Apache lucene-solr project, from class TestTaxonomyFacetCounts, method testGetFacetResultsTwice:
public void testGetFacetResultsTwice() throws Exception {
    // LUCENE-4893: counts were multiplied as many times as getFacetResults was called.
    Directory searchDir = newDirectory();
    Directory facetDir = newDirectory();
    DirectoryTaxonomyWriter facetWriter = new DirectoryTaxonomyWriter(facetDir);
    IndexWriter indexWriter = new IndexWriter(searchDir, newIndexWriterConfig(new MockAnalyzer(random())));
    FacetsConfig facetsConfig = new FacetsConfig();

    // Index a single document carrying one value in each of two dimensions.
    Document document = new Document();
    document.add(new FacetField("a", "1"));
    document.add(new FacetField("b", "1"));
    indexWriter.addDocument(facetsConfig.build(facetWriter, document));

    DirectoryReader reader = DirectoryReader.open(indexWriter);
    DirectoryTaxonomyReader facetReader = new DirectoryTaxonomyReader(facetWriter);
    Facets allFacets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(reader), facetReader, facetsConfig);

    // Two identical requests against the same Facets instance must agree.
    List<FacetResult> firstCall = allFacets.getAllDims(10);
    List<FacetResult> secondCall = allFacets.getAllDims(10);
    assertEquals("calling getFacetResults twice should return the .equals()=true result", firstCall, secondCall);

    indexWriter.close();
    IOUtils.close(facetWriter, facetReader, facetDir, reader, searchDir);
}
Example usage of org.apache.lucene.facet.FacetField in the Apache lucene-solr project, from class TestTaxonomyFacetCounts, method testCountRoot:
public void testCountRoot() throws Exception {
    // LUCENE-4882: FacetsAccumulator threw NPE if a FacetRequest was defined on CP.EMPTY
    Directory searchDir = newDirectory();
    Directory facetDir = newDirectory();
    DirectoryTaxonomyWriter facetWriter = new DirectoryTaxonomyWriter(facetDir);
    IndexWriter indexWriter = new IndexWriter(searchDir, newIndexWriterConfig(new MockAnalyzer(random())));
    FacetsConfig facetsConfig = new FacetsConfig();

    // Index a random number (at least 30) of identical two-dimension documents.
    int remaining = atLeast(30);
    while (remaining-- > 0) {
        Document document = new Document();
        document.add(new FacetField("a", "1"));
        document.add(new FacetField("b", "1"));
        indexWriter.addDocument(facetsConfig.build(facetWriter, document));
    }

    DirectoryReader reader = DirectoryReader.open(indexWriter);
    DirectoryTaxonomyReader facetReader = new DirectoryTaxonomyReader(facetWriter);
    Facets allFacets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(reader), facetReader, facetsConfig);

    // Every dimension appears in every document, so each dim count equals numDocs.
    for (FacetResult result : allFacets.getAllDims(10)) {
        assertEquals(reader.numDocs(), result.value.intValue());
    }

    indexWriter.close();
    IOUtils.close(facetWriter, facetReader, facetDir, reader, searchDir);
}
Example usage of org.apache.lucene.facet.FacetField in the Apache lucene-solr project, from class TestTaxonomyFacetCounts, method testManyFacetsInOneDocument:
// LUCENE-4583: make sure if we require > 32 KB for one
// document, we don't hit exc when using Facet42DocValuesFormat
public void testManyFacetsInOneDocument() throws Exception {
    assumeTrue("default Codec doesn't support huge BinaryDocValues", TestUtil.fieldSupportsHugeBinaryDocValues(FacetsConfig.DEFAULT_INDEX_FIELD_NAME));
    Directory indexDir = newDirectory();
    Directory taxonomyDir = newDirectory();
    IndexWriterConfig writerConfig = newIndexWriterConfig(new MockAnalyzer(random()));
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), indexDir, writerConfig);
    DirectoryTaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxonomyDir, IndexWriterConfig.OpenMode.CREATE);

    FacetsConfig facetsConfig = new FacetsConfig();
    facetsConfig.setMultiValued("dim", true);

    // A single document holding tens of thousands of distinct labels in one dimension,
    // which forces the per-document facet payload past 32 KB.
    int labelCount = TestUtil.nextInt(random(), 40000, 100000);
    Document document = new Document();
    document.add(newTextField("field", "text", Field.Store.NO));
    for (int label = 0; label < labelCount; label++) {
        document.add(new FacetField("dim", "" + label));
    }
    indexWriter.addDocument(facetsConfig.build(taxonomyWriter, document));

    // NRT open
    IndexSearcher searcher = newSearcher(indexWriter.getReader());
    // NRT open
    TaxonomyReader taxonomyReader = new DirectoryTaxonomyReader(taxonomyWriter);

    Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxonomyReader, facetsConfig);
    FacetResult result = facets.getTopChildren(Integer.MAX_VALUE, "dim");
    assertEquals(labelCount, result.labelValues.length);

    // Each label must appear exactly once, each with a count of 1.
    Set<String> seenLabels = new HashSet<>();
    for (LabelAndValue labelValue : result.labelValues) {
        seenLabels.add(labelValue.label);
        assertEquals(1, labelValue.value.intValue());
    }
    assertEquals(labelCount, seenLabels.size());

    indexWriter.close();
    IOUtils.close(searcher.getIndexReader(), taxonomyWriter, taxonomyReader, indexDir, taxonomyDir);
}
Example usage of org.apache.lucene.facet.FacetField in the Apache lucene-solr project, from class IndexAndTaxonomyReplicationClientTest, method newDocument:
// Builds a one-field faceted document whose label under dimension "A" is the
// given id rendered in hexadecimal; uses the enclosing test's FacetsConfig.
private Document newDocument(TaxonomyWriter taxoWriter, int id) throws IOException {
    String hexLabel = Integer.toString(id, 16);
    Document document = new Document();
    document.add(new FacetField("A", hexLabel));
    return config.build(taxoWriter, document);
}
Example usage of org.apache.lucene.facet.FacetField in the Apache lucene-solr project, from class TestConcurrentFacetedIndexing, method testConcurrency:
// Stress test: several threads concurrently add randomly-faceted documents through
// one shared IndexWriter + DirectoryTaxonomyWriter, recording every category path
// (and all its prefixes) they create; afterwards the taxonomy is checked for an
// exact category count and consistent parent ordinals.
public void testConcurrency() throws Exception {
// Shared countdown of documents still to index across all worker threads.
final AtomicInteger numDocs = new AtomicInteger(atLeast(10000));
final Directory indexDir = newDirectory();
final Directory taxoDir = newDirectory();
// Every category path string the workers added; used later to verify the taxonomy.
final ConcurrentHashMap<String, String> values = new ConcurrentHashMap<>();
final IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(null));
final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE, newTaxoWriterCache(numDocs.get()));
final Thread[] indexThreads = new Thread[atLeast(4)];
final FacetsConfig config = new FacetsConfig();
// Dimensions l1.0 .. l1.9 are hierarchical and multi-valued.
for (int i = 0; i < 10; i++) {
config.setHierarchical("l1." + i, true);
config.setMultiValued("l1." + i, true);
}
for (int i = 0; i < indexThreads.length; i++) {
indexThreads[i] = new Thread() {
@Override
public void run() {
Random random = random();
// Each worker claims documents off the shared counter until it hits zero.
while (numDocs.decrementAndGet() > 0) {
try {
Document doc = new Document();
// 1-3 random categories per document
int numCats = random.nextInt(3) + 1;
while (numCats-- > 0) {
FacetField ff = newCategory();
doc.add(ff);
FacetLabel label = new FacetLabel(ff.dim, ff.path);
// add all prefixes to values
int level = label.length;
while (level > 0) {
String s = FacetsConfig.pathToString(label.components, level);
values.put(s, s);
--level;
}
}
iw.addDocument(config.build(tw, doc));
} catch (IOException e) {
// Workers can't throw checked exceptions; surface failures as runtime errors.
throw new RuntimeException(e);
}
}
}
};
}
for (Thread t : indexThreads) t.start();
for (Thread t : indexThreads) t.join();
DirectoryTaxonomyReader tr = new DirectoryTaxonomyReader(tw);
// +1 for root category
if (values.size() + 1 != tr.getSize()) {
// Diagnose the mismatch: list every recorded path missing from the taxonomy.
for (String value : values.keySet()) {
FacetLabel label = new FacetLabel(FacetsConfig.stringToPath(value));
if (tr.getOrdinal(label) == -1) {
System.out.println("FAIL: path=" + label + " not recognized");
}
}
fail("mismatch number of categories");
}
// Verify every recorded path exists and its parent chain is wired correctly.
int[] parents = tr.getParallelTaxonomyArrays().parents();
for (String cat : values.keySet()) {
FacetLabel cp = new FacetLabel(FacetsConfig.stringToPath(cat));
assertTrue("category not found " + cp, tr.getOrdinal(cp) > 0);
int level = cp.length;
// for root, parent is always virtual ROOT (ord=0)
int parentOrd = 0;
FacetLabel path = null;
for (int i = 0; i < level; i++) {
path = cp.subpath(i + 1);
int ord = tr.getOrdinal(path);
assertEquals("invalid parent for cp=" + path, parentOrd, parents[ord]);
// next level should have this parent
parentOrd = ord;
}
}
iw.close();
IOUtils.close(tw, tr, taxoDir, indexDir);
}
Aggregations — end of the collected FacetField usage examples.