Example usage of org.apache.lucene.index.RandomIndexWriter in the lucene-solr project (Apache): the testEmpty method of the TestSuggestField class.
@Test
public void testEmpty() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  // A prefix query against an index with no documents must return zero hits.
  // try-with-resources guarantees the writer and reader are closed even when an
  // assertion fails; the original leaked both on test failure. Resources are
  // closed in reverse declaration order (reader first, then writer), matching
  // the original explicit close order.
  try (RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
       DirectoryReader reader = iw.getReader()) {
    SuggestIndexSearcher suggestIndexSearcher = new SuggestIndexSearcher(reader);
    PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "ab"));
    TopSuggestDocs lookupDocs = suggestIndexSearcher.suggest(query, 3, false);
    assertThat(lookupDocs.totalHits, equalTo(0));
  }
}
Example usage of org.apache.lucene.index.RandomIndexWriter in the lucene-solr project (Apache): the testExtremeDeduplication method of the TestSuggestField class.
public void testExtremeDeduplication() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  // Index a large number of documents that all share the suggestion "abc";
  // deduplication must collapse them to a single entry carrying the best weight.
  final int numDuplicates = atLeast(5000);
  int maxWeight = Integer.MIN_VALUE;
  for (int i = 0; i < numDuplicates; i++) {
    Document duplicate = new Document();
    int weight = TestUtil.nextInt(random(), 10, 100);
    maxWeight = Math.max(weight, maxWeight);
    duplicate.add(new SuggestField("suggest_field", "abc", weight));
    iw.addDocument(duplicate);
    if (rarely()) {
      iw.commit();
    }
  }
  // One distinct low-weight suggestion so the result set has two entries.
  Document distinct = new Document();
  distinct.add(new SuggestField("suggest_field", "abd", 7));
  iw.addDocument(distinct);
  if (random().nextBoolean()) {
    iw.forceMerge(1);
  }
  DirectoryReader reader = iw.getReader();
  // Expected order: the deduplicated "abc" (best weight) first, then "abd".
  Entry[] expected = {new Entry("abc", maxWeight), new Entry("abd", 7)};
  SuggestIndexSearcher searcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "a"));
  TopSuggestDocsCollector collector = new TopSuggestDocsCollector(2, true);
  searcher.suggest(query, collector);
  TopSuggestDocs lookupDocs = collector.get();
  assertSuggestions(lookupDocs, expected);
  reader.close();
  iw.close();
}
Example usage of org.apache.lucene.index.RandomIndexWriter in the lucene-solr project (Apache): the testSuggestOnAllDeletedDocuments method of the TestSuggestField class.
@Test
public void testSuggestOnAllDeletedDocuments() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  // Intentionally a plain IndexWriter rather than RandomIndexWriter.
  IndexWriter iw = new IndexWriter(dir, iwcWithSuggestField(analyzer, "suggest_field"));
  int numDocs = Math.min(1000, atLeast(10));
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    doc.add(new SuggestField("suggest_field", "abc_" + i, i));
    // Marker field so every document can be removed with one deleteDocuments call.
    doc.add(newStringField("delete", "delete", Field.Store.NO));
    iw.addDocument(doc);
    if (usually()) {
      iw.commit();
    }
  }
  iw.deleteDocuments(new Term("delete", "delete"));
  DirectoryReader reader = DirectoryReader.open(iw);
  SuggestIndexSearcher searcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
  TopSuggestDocs suggest = searcher.suggest(query, numDocs, false);
  // Every document was deleted, so the suggester must surface nothing.
  assertThat(suggest.totalHits, equalTo(0));
  reader.close();
  iw.close();
}
Example usage of org.apache.lucene.index.RandomIndexWriter in the lucene-solr project (Apache): the testNoOrds method of the TestCompressingTermVectorsFormat class.
// https://issues.apache.org/jira/browse/LUCENE-5156
public void testNoOrds() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  // Index a single document with term vectors enabled on field "foo".
  FieldType vectorsType = new FieldType(TextField.TYPE_NOT_STORED);
  vectorsType.setStoreTermVectors(true);
  Document doc = new Document();
  doc.add(new Field("foo", "this is a test", vectorsType));
  writer.addDocument(doc);
  LeafReader leafReader = getOnlyLeafReader(writer.getReader());
  Terms vector = leafReader.getTermVector(0, "foo");
  assertNotNull(vector);
  TermsEnum termsEnum = vector.iterator();
  assertEquals(SeekStatus.FOUND, termsEnum.seekCeil(new BytesRef("this")));
  // Term-vector TermsEnums do not support ordinals: both ord() and
  // seekExact(long) must throw UnsupportedOperationException.
  try {
    termsEnum.ord();
    fail();
  } catch (UnsupportedOperationException expected) {
    // expected exception
  }
  try {
    termsEnum.seekExact(0);
    fail();
  } catch (UnsupportedOperationException expected) {
    // expected exception
  }
  leafReader.close();
  writer.close();
  dir.close();
}
use of org.apache.lucene.index.RandomIndexWriter in project lucene-solr by apache.
the class TestMultiMMap method assertChunking.
private void assertChunking(Random random, int chunkSize) throws Exception {
  Path path = createTempDir("mmap" + chunkSize);
  MMapDirectory mmapDir = new MMapDirectory(path, chunkSize);
  // we will map a lot, try to turn on the unmap hack
  if (MMapDirectory.UNMAP_SUPPORTED) {
    mmapDir.setUseUnmap(true);
  }
  MockDirectoryWrapper dir = new MockDirectoryWrapper(random, mmapDir);
  RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
  // Reuse a single Document and its two Fields; only the values change per add.
  Field idField = newStringField("docid", "0", Field.Store.YES);
  Field junkField = newStringField("junk", "", Field.Store.YES);
  Document doc = new Document();
  doc.add(idField);
  doc.add(junkField);
  final int docCount = 100;
  for (int i = 0; i < docCount; i++) {
    idField.setStringValue(Integer.toString(i));
    junkField.setStringValue(TestUtil.randomUnicodeString(random));
    writer.addDocument(doc);
  }
  IndexReader reader = writer.getReader();
  writer.close();
  // Random point-reads across the index exercise reads that straddle chunk
  // boundaries of the memory-mapped files.
  int numChecks = atLeast(100);
  for (int i = 0; i < numChecks; i++) {
    int docID = random.nextInt(docCount);
    assertEquals(Integer.toString(docID), reader.document(docID).get("docid"));
  }
  reader.close();
  dir.close();
}
Aggregations