Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
The class TestNRTCachingDirectory, method testNRTAndCommit.
public void testNRTAndCommit() throws Exception {
  Directory dir = newDirectory();
  NRTCachingDirectory cachedDir = new NRTCachingDirectory(dir, 2.0, 25.0);
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  IndexWriterConfig conf = newIndexWriterConfig(analyzer);
  RandomIndexWriter w = new RandomIndexWriter(random(), cachedDir, conf);
  final LineFileDocs docs = new LineFileDocs(random());
  final int numDocs = TestUtil.nextInt(random(), 100, 400);
  if (VERBOSE) {
    System.out.println("TEST: numDocs=" + numDocs);
  }
  final List<BytesRef> ids = new ArrayList<>();
  DirectoryReader r = null;
  for (int docCount = 0; docCount < numDocs; docCount++) {
    final Document doc = docs.nextDoc();
    ids.add(new BytesRef(doc.get("docid")));
    w.addDocument(doc);
    if (random().nextInt(20) == 17) {
      if (r == null) {
        r = DirectoryReader.open(w.w);
      } else {
        final DirectoryReader r2 = DirectoryReader.openIfChanged(r);
        if (r2 != null) {
          r.close();
          r = r2;
        }
      }
      assertEquals(1 + docCount, r.numDocs());
      final IndexSearcher s = newSearcher(r);
      // Just make sure search can run; we can't assert
      // totHits since it could be 0
      TopDocs hits = s.search(new TermQuery(new Term("body", "the")), 10);
      // System.out.println("tot hits " + hits.totalHits);
    }
  }
  if (r != null) {
    r.close();
  }
  // Close should force cache to clear since all files are sync'd
  w.close();
  final String[] cachedFiles = cachedDir.listCachedFiles();
  for (String file : cachedFiles) {
    System.out.println("FAIL: cached file " + file + " remains after sync");
  }
  assertEquals(0, cachedFiles.length);
  r = DirectoryReader.open(dir);
  for (BytesRef id : ids) {
    assertEquals(1, r.docFreq(new Term("docid", id)));
  }
  r.close();
  cachedDir.close();
  docs.close();
}
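The test above drives NRTCachingDirectory through the test framework (RandomIndexWriter, MockAnalyzer, LineFileDocs). A minimal sketch of the same near-real-time pattern outside the test framework, assuming a filesystem index path and StandardAnalyzer (both are illustrative choices, not part of the test):

import java.nio.file.Paths;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NRTCachingDirectory;

public class NRTCachingSketch {
  public static void main(String[] args) throws Exception {
    // Cache newly flushed segments in RAM: merges up to 2 MB, at most 25 MB cached.
    Directory base = FSDirectory.open(Paths.get("/tmp/nrt-index")); // illustrative path
    NRTCachingDirectory cachedDir = new NRTCachingDirectory(base, 2.0, 25.0);

    IndexWriter writer = new IndexWriter(cachedDir, new IndexWriterConfig(new StandardAnalyzer()));
    Document doc = new Document();
    doc.add(new TextField("body", "the quick brown fox", Field.Store.NO));
    writer.addDocument(doc);

    // Open a near-real-time reader from the writer, then refresh it cheaply.
    DirectoryReader reader = DirectoryReader.open(writer);
    IndexSearcher searcher = new IndexSearcher(reader);
    // ... run searches ...
    DirectoryReader newer = DirectoryReader.openIfChanged(reader);
    if (newer != null) {
      reader.close();
      reader = newer;
    }

    reader.close();
    writer.close();   // commits and syncs; the cache empties, as the test asserts
    cachedDir.close();
  }
}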
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
The class TestNearSpansOrdered, method setUp.
@Override
public void setUp() throws Exception {
  super.setUp();
  directory = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
      newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
  for (int i = 0; i < docFields.length; i++) {
    Document doc = new Document();
    doc.add(newTextField(FIELD, docFields[i], Field.Store.NO));
    writer.addDocument(doc);
  }
  writer.forceMerge(1);
  reader = writer.getReader();
  writer.close();
  searcher = newSearcher(getOnlyLeafReader(reader));
}
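This setUp only builds a single-segment index over docFields; the tests then run ordered span-near queries against searcher. A minimal sketch of such a query (the terms and slop are illustrative, not taken from the test data):

// Span classes live in org.apache.lucene.search.spans.
// Matches documents where "w1" is followed by "w2" with no gap, in order.
SpanQuery q = new SpanNearQuery(
    new SpanQuery[] {
      new SpanTermQuery(new Term(FIELD, "w1")),
      new SpanTermQuery(new Term(FIELD, "w2"))
    },
    0,      // slop
    true);  // in order
TopDocs hits = searcher.search(q, 10);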
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
The class TestSpanContainQuery, method setUp.
@Override
public void setUp() throws Exception {
  super.setUp();
  directory = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
      newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
  for (int i = 0; i < docFields.length; i++) {
    Document doc = new Document();
    doc.add(newTextField(field, docFields[i], Field.Store.YES));
    writer.addDocument(doc);
  }
  writer.forceMerge(1);
  reader = writer.getReader();
  writer.close();
  searcher = newSearcher(getOnlyLeafReader(reader));
}
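The fixture built here backs tests for span-containing and span-within queries. A hedged sketch of both query types run against this searcher (terms and slop are assumptions, not the test's values):

// Span classes live in org.apache.lucene.search.spans.
SpanQuery big = new SpanNearQuery(
    new SpanQuery[] {
      new SpanTermQuery(new Term(field, "w1")),
      new SpanTermQuery(new Term(field, "w3"))
    },
    5, true);
SpanQuery little = new SpanTermQuery(new Term(field, "w2"));

// Spans of "big" that enclose a match of "little":
Query containing = new SpanContainingQuery(big, little);
// Spans of "little" that fall inside a match of "big":
Query within = new SpanWithinQuery(big, little);

TopDocs hits = searcher.search(containing, 10);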
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
The class TestLockFactory, method testCustomLockFactory.
// Verify: we can provide our own LockFactory implementation, the right
// methods are called at the right time, locks are created, etc.
public void testCustomLockFactory() throws IOException {
  MockLockFactory lf = new MockLockFactory();
  Directory dir = new MockDirectoryWrapper(random(), new RAMDirectory(lf));
  IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
  // add 100 documents (so that commit lock is used)
  for (int i = 0; i < 100; i++) {
    addDoc(writer);
  }
  // Both write lock and commit lock should have been created:
  assertEquals("# of unique locks created (after instantiating IndexWriter)", 1, lf.locksCreated.size());
  writer.close();
}
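MockLockFactory is a test-local helper (not shown here) that records each lock it hands out. A comparable counting factory could be sketched as below; the class and field names are assumptions, not Lucene APIs:

import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.SingleInstanceLockFactory;

// Hypothetical counting factory, not the MockLockFactory used by the test.
class CountingLockFactory extends LockFactory {
  final LockFactory delegate = new SingleInstanceLockFactory();
  final Set<String> locksCreated = Collections.synchronizedSet(new HashSet<>());

  @Override
  public Lock obtainLock(Directory dir, String lockName) throws IOException {
    locksCreated.add(lockName);          // remember every lock name requested
    return delegate.obtainLock(dir, lockName);
  }
}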
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
The class TestLockFactory, method testRAMDirectoryNoLocking.
// Verify: we can use the NoLockFactory with RAMDirectory w/ no
// exceptions raised:
// Verify: NoLockFactory allows two IndexWriters
public void testRAMDirectoryNoLocking() throws IOException {
  MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(NoLockFactory.INSTANCE));
  IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
  // required so the second open succeeds
  writer.commit();
  // Create a 2nd IndexWriter. This is normally not allowed but it should run through since we're not
  // using any locks:
  IndexWriter writer2 = null;
  try {
    writer2 = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
  } catch (Exception e) {
    e.printStackTrace(System.out);
    fail("Should not have hit an IOException with no locking");
  }
  writer.close();
  if (writer2 != null) {
    writer2.close();
  }
}
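For contrast, with a directory that does use locking, a second IndexWriter on the same directory is refused. A minimal sketch of that failure case (not part of this test; RAMDirectory's default SingleInstanceLockFactory is assumed):

Directory lockedDir = new RAMDirectory(); // uses SingleInstanceLockFactory by default
IndexWriter first = new IndexWriter(lockedDir, new IndexWriterConfig(new MockAnalyzer(random())));
try {
  // Second writer on the same directory: the write lock is already held.
  new IndexWriter(lockedDir, new IndexWriterConfig(new MockAnalyzer(random())));
  fail("expected LockObtainFailedException");
} catch (LockObtainFailedException expected) {
  // expected: only one IndexWriter may hold the write lock at a time
}
first.close();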