Use of org.apache.lucene.store.RAMDirectory in the Apache lucene-solr project:
class TestMultiTermsEnum, method testNoTermsInField.
// LUCENE-6826: addIndexes from a reader containing a field with no terms
// used to fail ("<- bang" marks the call that reproduced the crash).
public void testNoTermsInField() throws Exception {
  Directory directory = new RAMDirectory();
  IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(new MockAnalyzer(random())));
  Document document = new Document();
  // StringField is indexed but the "deleted" field ends up with no terms in
  // the migrated reader, which is the condition under test.
  document.add(new StringField("deleted", "0", Field.Store.YES));
  writer.addDocument(document);
  DirectoryReader reader = DirectoryReader.open(writer);
  writer.close();
  Directory directory2 = new RAMDirectory();
  writer = new IndexWriter(directory2, new IndexWriterConfig(new MockAnalyzer(random())));
  List<LeafReaderContext> leaves = reader.leaves();
  CodecReader[] codecReaders = new CodecReader[leaves.size()];
  for (int i = 0; i < leaves.size(); i++) {
    codecReaders[i] = new MigratingCodecReader((CodecReader) leaves.get(i).reader());
  }
  // <- bang
  writer.addIndexes(codecReaders);
  // Fix: directory2 was previously leaked (only writer, reader, directory
  // were closed).
  IOUtils.close(writer, reader, directory, directory2);
}
Use of org.apache.lucene.store.RAMDirectory in the Apache lucene-solr project:
class TestKeywordAnalyzer, method testMutipleDocument.
/*
public void testPerFieldAnalyzer() throws Exception {
PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
analyzer.addAnalyzer("partnum", new KeywordAnalyzer());
QueryParser queryParser = new QueryParser("description", analyzer);
Query query = queryParser.parse("partnum:Q36 AND SPACE");
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("Q36 kept as-is",
"+partnum:Q36 +space", query.toString("description"));
assertEquals("doc found!", 1, hits.length);
}
*/
/**
 * Verifies that KeywordAnalyzer indexes each "partnum" value as a single
 * unanalyzed token, so exact-term lookups for "Q36" and "Q37" each match a
 * document.
 *
 * <p>NOTE(review): the method name carries a historical typo ("Mutiple");
 * it is kept because renaming would change the test's public identity.
 */
public void testMutipleDocument() throws Exception {
  RAMDirectory dir = new RAMDirectory();
  Analyzer analyzer = new KeywordAnalyzer();
  IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
  Document doc = new Document();
  doc.add(new TextField("partnum", "Q36", Field.Store.YES));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new TextField("partnum", "Q37", Field.Store.YES));
  writer.addDocument(doc);
  writer.close();
  IndexReader reader = DirectoryReader.open(dir);
  PostingsEnum td = TestUtil.docs(random(), reader, "partnum", new BytesRef("Q36"), null, 0);
  assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
  td = TestUtil.docs(random(), reader, "partnum", new BytesRef("Q37"), null, 0);
  assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
  // Fix: reader and dir were previously never closed (resource leak).
  reader.close();
  dir.close();
  analyzer.close();
}
Use of org.apache.lucene.store.RAMDirectory in the Apache lucene-solr project:
class StemmerTestBase, method init.
/**
 * Loads the given affix and dictionary resources from the classpath and
 * initializes the shared {@code stemmer}.
 *
 * @param ignoreCase   passed through to the Hunspell {@code Dictionary}
 * @param affix        classpath resource name of the affix file
 * @param dictionaries classpath resource names of one or more dictionaries
 * @throws IllegalArgumentException if no dictionary name is given
 * @throws FileNotFoundException    if any resource cannot be found
 */
static void init(boolean ignoreCase, String affix, String... dictionaries) throws IOException, ParseException {
  if (dictionaries.length == 0) {
    throw new IllegalArgumentException("there must be at least one dictionary");
  }
  InputStream affixStream = StemmerTestBase.class.getResourceAsStream(affix);
  if (affixStream == null) {
    throw new FileNotFoundException("file not found: " + affix);
  }
  InputStream[] dictStreams = new InputStream[dictionaries.length];
  // The dictionary-loading loop sits inside the try so that affixStream and
  // any already-opened dictionary streams are closed even when a later
  // resource is missing (previously they leaked on that path).
  try {
    for (int i = 0; i < dictionaries.length; i++) {
      dictStreams[i] = StemmerTestBase.class.getResourceAsStream(dictionaries[i]);
      if (dictStreams[i] == null) {
        // Fix: the message previously concatenated dictStreams[i], which is
        // always null here; report the missing resource name instead.
        throw new FileNotFoundException("file not found: " + dictionaries[i]);
      }
    }
    Dictionary dictionary = new Dictionary(new RAMDirectory(), "dictionary", affixStream, Arrays.asList(dictStreams), ignoreCase);
    stemmer = new Stemmer(dictionary);
  } finally {
    IOUtils.closeWhileHandlingException(affixStream);
    IOUtils.closeWhileHandlingException(dictStreams);
  }
}
Use of org.apache.lucene.store.RAMDirectory in the Apache lucene-solr project:
class QueryAutoStopWordAnalyzerTest, method setUp.
@Override
public void setUp() throws Exception {
  super.setUp();
  // Build a 200-doc index in which both fields cycle through their fixture
  // value arrays; "repetitiveField" thereby accumulates high-frequency terms
  // for the stop-word detection under test.
  dir = new RAMDirectory();
  appAnalyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
  IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(appAnalyzer));
  final int numDocs = 200;
  for (int docId = 0; docId < numDocs; docId++) {
    Document document = new Document();
    document.add(new TextField("variedField",
        variedFieldValues[docId % variedFieldValues.length], Field.Store.YES));
    document.add(new TextField("repetitiveField",
        repetitiveFieldValues[docId % repetitiveFieldValues.length], Field.Store.YES));
    indexWriter.addDocument(document);
  }
  indexWriter.close();
  reader = DirectoryReader.open(dir);
}
Use of org.apache.lucene.store.RAMDirectory in the Apache lucene-solr project:
class TestTransactions, method testTransactions.
public void testTransactions() throws Throwable {
  // We can't use a non-RAM dir on Windows, because this test needs to double-write.
  MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random(), new RAMDirectory());
  MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random(), new RAMDirectory());
  // Identical failure setup for both directories.
  for (MockDirectoryWrapper mockDir : new MockDirectoryWrapper[] {dir1, dir2}) {
    mockDir.failOn(new RandomFailure());
    mockDir.setFailOnOpenInput(false);
    // We throw exceptions in deleteFile, which creates leftover files:
    mockDir.setAssertNoUnrefencedFilesOnClose(false);
  }
  initIndex(dir1);
  initIndex(dir2);
  // One indexer plus two searchers; the shared array lets each thread see
  // its siblings, exactly as the original wiring did.
  TimedThread[] threads = new TimedThread[3];
  int slot = 0;
  IndexerThread indexer = new IndexerThread(this, dir1, dir2, threads);
  threads[slot++] = indexer;
  indexer.start();
  SearcherThread firstSearcher = new SearcherThread(this, dir1, dir2, threads);
  threads[slot++] = firstSearcher;
  firstSearcher.start();
  SearcherThread secondSearcher = new SearcherThread(this, dir1, dir2, threads);
  threads[slot++] = secondSearcher;
  secondSearcher.start();
  for (TimedThread thread : threads) {
    thread.join();
  }
  for (TimedThread thread : threads) {
    assertFalse(thread.failed);
  }
  dir1.close();
  dir2.close();
}
Aggregations