Example usage of org.apache.lucene.store.RAMDirectory in the lucene-solr project (Apache): class TestPackedInts, method testSave.
public void testSave() throws IOException {
final int valueCount = TestUtil.nextInt(random(), 1, 2048);
for (int bpv = 1; bpv <= 64; ++bpv) {
final int maxValue = (int) Math.min(PackedInts.maxValue(31), PackedInts.maxValue(bpv));
final RAMDirectory directory = new RAMDirectory();
List<PackedInts.Mutable> packedInts = createPackedInts(valueCount, bpv);
for (PackedInts.Mutable mutable : packedInts) {
for (int i = 0; i < mutable.size(); ++i) {
mutable.set(i, random().nextInt(maxValue));
}
IndexOutput out = directory.createOutput("packed-ints.bin", IOContext.DEFAULT);
mutable.save(out);
out.close();
IndexInput in = directory.openInput("packed-ints.bin", IOContext.DEFAULT);
PackedInts.Reader reader = PackedInts.getReader(in);
assertEquals(valueCount, reader.size());
if (mutable instanceof Packed64SingleBlock) {
// make sure that we used the right format so that the reader has
// the same performance characteristics as the mutable that has been
// serialized
assertTrue(reader instanceof Packed64SingleBlock);
} else {
assertFalse(reader instanceof Packed64SingleBlock);
}
for (int i = 0; i < valueCount; ++i) {
assertEquals(mutable.get(i), reader.get(i));
}
in.close();
directory.deleteFile("packed-ints.bin");
}
directory.close();
}
}
Example usage of org.apache.lucene.store.RAMDirectory in the lucene-solr project (Apache): class TestMemoryIndexAgainstRAMDir, method assertAgainstRAMDirectory.
/**
 * Builds a document out of random terms, indexes it both in a
 * RAMDirectory-backed index and in the given {@link MemoryIndex}, and then
 * verifies the two indexes agree on every query.
 */
public void assertAgainstRAMDirectory(MemoryIndex memory) throws Exception {
  memory.reset();

  // up to 250 random terms per field (scaled by the test multiplier)
  StringBuilder fooText = new StringBuilder();
  for (int remaining = random().nextInt(250 * RANDOM_MULTIPLIER); remaining > 0; --remaining) {
    fooText.append(" ").append(randomTerm());
  }
  StringBuilder termText = new StringBuilder();
  for (int remaining = random().nextInt(250 * RANDOM_MULTIPLIER); remaining > 0; --remaining) {
    termText.append(" ").append(randomTerm());
  }

  Directory ramdir = new RAMDirectory();
  Analyzer analyzer = randomAnalyzer();
  IndexWriterConfig config = new IndexWriterConfig(analyzer)
      .setCodec(TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat()));
  IndexWriter writer = new IndexWriter(ramdir, config);
  Document doc = new Document();
  doc.add(newTextField("foo", fooText.toString(), Field.Store.NO));
  doc.add(newTextField("term", termText.toString(), Field.Store.NO));
  writer.addDocument(doc);
  writer.close();

  // mirror the same content into the MemoryIndex under test
  memory.addField("foo", fooText.toString(), analyzer);
  memory.addField("term", termText.toString(), analyzer);

  LeafReader memoryReader = (LeafReader) memory.createSearcher().getIndexReader();
  TestUtil.checkReader(memoryReader);
  DirectoryReader competitor = DirectoryReader.open(ramdir);
  duellReaders(competitor, memoryReader);
  IOUtils.close(memoryReader, competitor);
  assertAllQueries(memory, ramdir, analyzer);
  ramdir.close();
}
Example usage of org.apache.lucene.store.RAMDirectory in the lucene-solr project (Apache): class TestDirectoryTaxonomyReader, method testOpenIfChangedAndRefCount.
@Test
public void testOpenIfChangedAndRefCount() throws Exception {
  Directory dir = new RAMDirectory(); // no need for random directories here

  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
  taxoWriter.addCategory(new FacetLabel("a"));
  taxoWriter.commit();

  // a freshly opened reader starts with a single reference
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(dir);
  assertEquals("wrong refCount", 1, taxoReader.getRefCount());

  // an explicit incRef bumps the count
  taxoReader.incRef();
  assertEquals("wrong refCount", 2, taxoReader.getRefCount());

  // committing a new category makes openIfChanged return a fresh reader
  taxoWriter.addCategory(new FacetLabel("a", "b"));
  taxoWriter.commit();
  TaxonomyReader reopened = TaxonomyReader.openIfChanged(taxoReader);
  assertNotNull(reopened);
  taxoReader.close();
  taxoReader = reopened;

  // the reopened reader carries its own refCount, starting back at 1
  assertEquals("wrong refCount", 1, taxoReader.getRefCount());

  taxoWriter.close();
  taxoReader.close();
  dir.close();
}
Example usage of org.apache.lucene.store.RAMDirectory in the lucene-solr project (Apache): class PayloadHelper, method setUp.
/**
 * Sets up a RAMDirectory (wrapped in a MockDirectoryWrapper) and adds
 * documents — using English.intToEnglish() — with the fields {@code FIELD},
 * {@code MULTI_FIELD} and {@code NO_PAYLOAD_FIELD}, analyzed by the
 * PayloadAnalyzer.
 *
 * @param similarity The Similarity class to use in the Searcher
 * @param numDocs The num docs to add
 * @return An IndexSearcher
 */
// TODO: randomize
public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) throws IOException {
  Directory directory = new MockDirectoryWrapper(random, new RAMDirectory());
  PayloadAnalyzer analyzer = new PayloadAnalyzer();
  // TODO randomize this
  IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(analyzer).setSimilarity(similarity));
  for (int docId = 0; docId < numDocs; docId++) {
    // deterministic text for each doc; MULTI_FIELD repeats it twice
    String english = English.intToEnglish(docId);
    Document doc = new Document();
    doc.add(new TextField(FIELD, english, Field.Store.YES));
    doc.add(new TextField(MULTI_FIELD, english + " " + english, Field.Store.YES));
    doc.add(new TextField(NO_PAYLOAD_FIELD, english, Field.Store.YES));
    writer.addDocument(doc);
  }
  writer.forceMerge(1);
  reader = DirectoryReader.open(writer);
  writer.close();

  IndexSearcher searcher = LuceneTestCase.newSearcher(LuceneTestCase.getOnlyLeafReader(reader));
  searcher.setSimilarity(similarity);
  return searcher;
}
Example usage of org.apache.lucene.store.RAMDirectory in the gerrit project (GerritCodeReview): class DocIndexer, method index.
/**
 * Builds an in-memory Lucene index over the configured input documentation files.
 *
 * <p>For each non-empty input file, the first line (or the second line, when the
 * first is an "[[anchor]]" tag) is taken as the document title; the whole file
 * body is indexed as the searchable text, and the mapped output file name is
 * stored as the document URL.
 *
 * @return the populated in-memory index
 * @throws IOException if an input file cannot be read or the index cannot be written
 */
private RAMDirectory index() throws IOException, UnsupportedEncodingException, FileNotFoundException {
  RAMDirectory directory = new RAMDirectory();
  IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer(CharArraySet.EMPTY_SET));
  config.setOpenMode(OpenMode.CREATE);
  config.setCommitOnClose(true);
  try (IndexWriter iwriter = new IndexWriter(directory, config)) {
    for (String inputFile : inputFiles) {
      File file = new File(inputFile);
      if (file.length() == 0) {
        continue; // nothing to index
      }

      String title;
      try (BufferedReader titleReader =
          new BufferedReader(new InputStreamReader(Files.newInputStream(file.toPath()), UTF_8))) {
        title = titleReader.readLine();
        if (title != null && title.startsWith("[[")) {
          // Generally the first line of the txt is the title. In a few cases the
          // first line is a "[[tag]]" and the second line is the title.
          title = titleReader.readLine();
        }
      }
      if (title == null) {
        // A file whose only line is an "[[anchor]]" tag has no title;
        // SECTION_HEADER.matcher(null) would otherwise throw NPE. Skip it.
        continue;
      }
      Matcher matcher = SECTION_HEADER.matcher(title);
      if (matcher.matches()) {
        title = matcher.group(1);
      }

      String outputFile = AsciiDoctor.mapInFileToOutFile(inputFile, inExt, outExt);
      // Read the body as UTF-8 explicitly; FileReader would use the platform
      // default charset, inconsistent with how the title is decoded above.
      try (InputStreamReader reader =
          new InputStreamReader(Files.newInputStream(file.toPath()), UTF_8)) {
        Document doc = new Document();
        doc.add(new TextField(Constants.DOC_FIELD, reader));
        doc.add(new StringField(Constants.URL_FIELD, prefix + outputFile, Field.Store.YES));
        doc.add(new TextField(Constants.TITLE_FIELD, title, Field.Store.YES));
        iwriter.addDocument(doc);
      }
    }
  }
  return directory;
}
End of aggregated RAMDirectory usage examples.