Use of org.apache.lucene.store.RAMDirectory in the project lucene-solr by Apache.
From the class TestTransactions, method testTransactions.
public void testTransactions() throws Throwable {
  // Runs one indexer thread that double-writes to two directories, plus two
  // searcher threads that concurrently read both indexes, all while random
  // failures are injected; at the end no thread may have recorded a failure.
  // We can't use a non-RAM dir on Windows, because this test needs to double-write.
  MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random(), new RAMDirectory());
  MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random(), new RAMDirectory());
  dir1.failOn(new RandomFailure());
  dir2.failOn(new RandomFailure());
  dir1.setFailOnOpenInput(false);
  dir2.setFailOnOpenInput(false);
  // We throw exceptions in deleteFile, which creates leftover files:
  dir1.setAssertNoUnrefencedFilesOnClose(false);
  dir2.setAssertNoUnrefencedFilesOnClose(false);
  initIndex(dir1);
  initIndex(dir2);
  TimedThread[] threads = new TimedThread[3];
  int numThread = 0;
  IndexerThread indexerThread = new IndexerThread(this, dir1, dir2, threads);
  threads[numThread++] = indexerThread;
  indexerThread.start();
  SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
  threads[numThread++] = searcherThread1;
  searcherThread1.start();
  SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
  threads[numThread++] = searcherThread2;
  searcherThread2.start();
  for (int i = 0; i < numThread; i++) {
    threads[i].join();
  }
  // assertFalse is the idiomatic form of assertTrue(!...) and gives a clearer
  // failure message.
  for (int i = 0; i < numThread; i++) {
    assertFalse(threads[i].failed);
  }
  dir1.close();
  dir2.close();
}
Use of org.apache.lucene.store.RAMDirectory in the project lucene-solr by Apache.
From the class TestTryDelete, method createIndex.
/**
 * Builds a small in-memory index of 10 documents, each with a stored
 * {@code "foo"} field holding the document's ordinal ("0".."9").
 *
 * @return the populated (and committed) directory
 * @throws IOException if indexing fails
 */
private static Directory createIndex() throws IOException {
  Directory directory = new RAMDirectory();
  // try-with-resources guarantees the writer (and its write lock) is released
  // even if addDocument/commit throws; the original leaked the writer on failure.
  try (IndexWriter writer = getWriter(directory)) {
    for (int i = 0; i < 10; i++) {
      Document doc = new Document();
      doc.add(new StringField("foo", String.valueOf(i), Store.YES));
      writer.addDocument(doc);
    }
    writer.commit();
  }
  return directory;
}
Use of org.apache.lucene.store.RAMDirectory in the project lucene-solr by Apache.
From the class TestSloppyPhraseQuery, method checkPhraseQuery.
private float checkPhraseQuery(Document doc, PhraseQuery query, int slop, int expectedNumResults) throws Exception {
  // Rebuild the incoming phrase query with the requested slop applied.
  PhraseQuery.Builder rebuilt = new PhraseQuery.Builder();
  Term[] queryTerms = query.getTerms();
  int[] termPositions = query.getPositions();
  for (int idx = 0; idx < queryTerms.length; idx++) {
    rebuilt.add(queryTerms[idx], termPositions[idx]);
  }
  rebuilt.setSlop(slop);
  query = rebuilt.build();
  // Index the single document into a fresh RAM-backed directory and search it.
  MockDirectoryWrapper directory = new MockDirectoryWrapper(random(), new RAMDirectory());
  RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
  indexWriter.addDocument(doc);
  IndexReader indexReader = indexWriter.getReader();
  IndexSearcher indexSearcher = newSearcher(indexReader);
  MaxFreqCollector collector = new MaxFreqCollector();
  indexSearcher.search(query, collector);
  assertEquals("slop: " + slop + " query: " + query + " doc: " + doc + " Wrong number of hits", expectedNumResults, collector.totalHits);
  //QueryUtils.check(query,searcher);
  indexWriter.close();
  indexReader.close();
  directory.close();
  // with these different tokens/distributions/lengths.. otherwise this test is very fragile.
  return collector.max;
}
Use of org.apache.lucene.store.RAMDirectory in the project geode by Apache.
From the class RawIndexRepositoryFactory, method computeIndexRepository.
@Override
public IndexRepository computeIndexRepository(final Integer bucketId, LuceneSerializer serializer, LuceneIndexImpl index, PartitionedRegion userRegion, IndexRepository oldRepository) throws IOException {
  // Replaces any prior repository for this bucket with a fresh one, backed by
  // an on-disk NIOFSDirectory when the index is persistent, else a RAMDirectory.
  if (oldRepository != null) {
    oldRepository.cleanup();
  }
  LuceneRawIndex indexForRaw = (LuceneRawIndex) index;
  BucketRegion dataBucket = getMatchingBucket(userRegion, bucketId);
  Directory dir;
  if (indexForRaw.withPersistence()) {
    String bucketLocation = LuceneServiceImpl.getUniqueIndexName(index.getName(), index.getRegionPath() + "_" + bucketId);
    File location = new File(index.getName(), bucketLocation);
    // Fail fast if the directory can't be created: the original ignored the
    // mkdirs() result and would only fail later with a less useful error.
    if (!location.exists() && !location.mkdirs() && !location.isDirectory()) {
      throw new IOException("Could not create index directory " + location.getAbsolutePath());
    }
    dir = new NIOFSDirectory(location.toPath());
  } else {
    dir = new RAMDirectory();
  }
  IndexWriterConfig config = new IndexWriterConfig(indexForRaw.getAnalyzer());
  IndexWriter writer;
  try {
    writer = new IndexWriter(dir, config);
  } catch (IOException e) {
    // Don't leak the Directory (and its file handles) if the writer fails to open.
    dir.close();
    throw e;
  }
  return new IndexRepositoryImpl(null, writer, serializer, indexForRaw.getIndexStats(), dataBucket, null, "");
}
Use of org.apache.lucene.store.RAMDirectory in the project lucene-solr by Apache.
From the class TestKeywordAnalyzer, method testMutipleDocument.
/*
public void testPerFieldAnalyzer() throws Exception {
PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
analyzer.addAnalyzer("partnum", new KeywordAnalyzer());
QueryParser queryParser = new QueryParser("description", analyzer);
Query query = queryParser.parse("partnum:Q36 AND SPACE");
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("Q36 kept as-is",
"+partnum:Q36 +space", query.toString("description"));
assertEquals("doc found!", 1, hits.length);
}
*/
public void testMutipleDocument() throws Exception {
  // Verifies that KeywordAnalyzer indexes each "partnum" value as a single
  // untokenized term: postings must exist for both exact values.
  RAMDirectory dir = new RAMDirectory();
  Analyzer analyzer = new KeywordAnalyzer();
  IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
  Document doc = new Document();
  doc.add(new TextField("partnum", "Q36", Field.Store.YES));
  writer.addDocument(doc);
  doc = new Document();
  doc.add(new TextField("partnum", "Q37", Field.Store.YES));
  writer.addDocument(doc);
  writer.close();
  IndexReader reader = DirectoryReader.open(dir);
  PostingsEnum td = TestUtil.docs(random(), reader, "partnum", new BytesRef("Q36"), null, 0);
  assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
  td = TestUtil.docs(random(), reader, "partnum", new BytesRef("Q37"), null, 0);
  assertTrue(td.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
  // The original leaked the reader and the directory; close every resource.
  reader.close();
  dir.close();
  analyzer.close();
}
Aggregations