Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
From the class TestBackwardsCompatibility, method testUpgradeThenMultipleCommits.
// LUCENE-5907
public void testUpgradeThenMultipleCommits() throws Exception {
  for (String name : oldNames) {
    Directory dir = newDirectory(oldIndexDirs.get(name));
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
        .setOpenMode(OpenMode.APPEND));
    writer.addDocument(new Document());
    writer.commit();
    writer.addDocument(new Document());
    writer.commit();
    writer.close();
    dir.close();
  }
}
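The snippet relies on two fixtures that are not shown here: oldNames (the list of back-compat index version names) and oldIndexDirs (a map from version name to a Directory holding that version's index). A minimal sketch of how such fixtures are commonly populated follows; the "index.<name>.zip" resource naming and the exact helper calls are assumptions, not the class's verbatim setup:

  // Hedged sketch (assumption) of the oldNames/oldIndexDirs setup: each version
  // name maps to a Directory over an unzipped back-compat index. createTempDir
  // and getDataInputStream are test-framework/test-class helpers; the zip
  // resource name is hypothetical.
  Map<String, Directory> oldIndexDirs = new HashMap<>();
  for (String name : oldNames) {
    Path tmp = createTempDir(name); // LuceneTestCase temp-dir helper
    TestUtil.unzip(getDataInputStream("index." + name + ".zip"), tmp);
    oldIndexDirs.put(name, newFSDirectory(tmp));
  }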
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
From the class TestBackwardsCompatibility, method testCreateIndexWithDocValuesUpdates.
// Creates an index with DocValues updates
public void testCreateIndexWithDocValuesUpdates() throws Exception {
  Path indexDir = getIndexDir().resolve("dvupdates");
  Files.deleteIfExists(indexDir);
  Directory dir = newFSDirectory(indexDir);
  IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
      .setUseCompoundFile(false)
      .setMergePolicy(NoMergePolicy.INSTANCE);
  IndexWriter writer = new IndexWriter(dir, conf);
  // create an index with a few doc-values fields, some with updates and some without
  for (int i = 0; i < 30; i++) {
    Document doc = new Document();
    doc.add(new StringField("id", "" + i, Field.Store.NO));
    doc.add(new NumericDocValuesField("ndv1", i));
    doc.add(new NumericDocValuesField("ndv1_c", i * 2));
    doc.add(new NumericDocValuesField("ndv2", i * 3));
    doc.add(new NumericDocValuesField("ndv2_c", i * 6));
    doc.add(new BinaryDocValuesField("bdv1", toBytes(i)));
    doc.add(new BinaryDocValuesField("bdv1_c", toBytes(i * 2)));
    doc.add(new BinaryDocValuesField("bdv2", toBytes(i * 3)));
    doc.add(new BinaryDocValuesField("bdv2_c", toBytes(i * 6)));
    writer.addDocument(doc);
    if ((i + 1) % 10 == 0) {
      // flush every 10 docs
      writer.commit();
    }
  }
  // first segment: no updates
  // second segment: update two fields, same gen
  updateNumeric(writer, "10", "ndv1", "ndv1_c", 100L);
  updateBinary(writer, "11", "bdv1", "bdv1_c", 100L);
  writer.commit();
  // third segment: update a few fields, different gens, few docs
  updateNumeric(writer, "20", "ndv1", "ndv1_c", 100L);
  updateBinary(writer, "21", "bdv1", "bdv1_c", 100L);
  writer.commit();
  // update the field again
  updateNumeric(writer, "22", "ndv1", "ndv1_c", 200L);
  writer.commit();
  writer.close();
  dir.close();
}
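The helpers updateNumeric, updateBinary, and toBytes are referenced but not shown. A plausible sketch is below, assuming each "_c" control field always stores twice its base field's value, matching the i vs. i * 2 pattern in the creation loop above; it is not the verbatim source:

  // Hedged sketch of the update helpers (assumption): each update rewrites a
  // field f and its control field cf together so the cf == 2 * f invariant
  // survives any number of doc-values updates.
  private void updateNumeric(IndexWriter writer, String id, String f, String cf, long value) throws IOException {
    writer.updateNumericDocValue(new Term("id", id), f, value);
    writer.updateNumericDocValue(new Term("id", id), cf, value * 2);
  }

  private void updateBinary(IndexWriter writer, String id, String f, String cf, long value) throws IOException {
    writer.updateBinaryDocValue(new Term("id", id), f, toBytes(value));
    writer.updateBinaryDocValue(new Term("id", id), cf, toBytes(value * 2));
  }

  // toBytes could be as simple as a big-endian encoding of the long (the actual
  // encoding in the test class may differ):
  private static BytesRef toBytes(long value) {
    byte[] b = new byte[Long.BYTES];
    for (int i = Long.BYTES - 1; i >= 0; i--) {
      b[i] = (byte) value;
      value >>>= 8;
    }
    return new BytesRef(b);
  }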
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
From the class TestBackwardsCompatibility, method changeIndexWithAdds.
public void changeIndexWithAdds(Random random, Directory dir, Version nameVersion) throws IOException {
  SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
  assertEquals(nameVersion, infos.getCommitLuceneVersion());
  assertEquals(nameVersion, infos.getMinSegmentLuceneVersion());
  // open writer
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
      .setOpenMode(OpenMode.APPEND)
      .setMergePolicy(newLogMergePolicy()));
  // add 10 docs
  for (int i = 0; i < 10; i++) {
    addDoc(writer, 35 + i);
  }
  // make sure writer sees right total -- writer seems not to know about deletes in .del?
  final int expected = 45;
  assertEquals("wrong doc count", expected, writer.numDocs());
  writer.close();
  // make sure searching sees right # hits
  IndexReader reader = DirectoryReader.open(dir);
  IndexSearcher searcher = newSearcher(reader);
  ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
  Document d = searcher.getIndexReader().document(hits[0].doc);
  assertEquals("wrong first document", "0", d.get("id"));
  doTestHits(hits, 44, searcher.getIndexReader());
  reader.close();
  // fully merge
  writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
      .setOpenMode(OpenMode.APPEND)
      .setMergePolicy(newLogMergePolicy()));
  writer.forceMerge(1);
  writer.close();
  reader = DirectoryReader.open(dir);
  searcher = newSearcher(reader);
  hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
  assertEquals("wrong number of hits", 44, hits.length);
  d = searcher.doc(hits[0].doc);
  doTestHits(hits, 44, searcher.getIndexReader());
  assertEquals("wrong first document", "0", d.get("id"));
  reader.close();
}
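The addDoc helper is not shown. A minimal sketch is below, inferred from what the searches above expect: a "content" field containing the term "aaa" and a stored "id" field. The real helper in the test class may add more fields; treat this as an assumption:

  // Hedged sketch (assumption) of addDoc: index the term "aaa" so the
  // TermQuery above matches, and store the id so d.get("id") works.
  private void addDoc(IndexWriter writer, int id) throws IOException {
    Document doc = new Document();
    doc.add(new TextField("content", "aaa", Field.Store.NO));
    doc.add(new StringField("id", Integer.toString(id), Field.Store.YES));
    writer.addDocument(doc);
  }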
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
From the class TestBackwardsCompatibility, method testDocValuesUpdates.
public void testDocValuesUpdates() throws Exception {
  Path oldIndexDir = createTempDir("dvupdates");
  TestUtil.unzip(getDataInputStream(dvUpdatesIndex), oldIndexDir);
  Directory dir = newFSDirectory(oldIndexDir);
  verifyUsesDefaultCodec(dir, dvUpdatesIndex);
  verifyDocValues(dir);
  // update fields and verify index
  IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(dir, conf);
  updateNumeric(writer, "1", "ndv1", "ndv1_c", 300L);
  updateNumeric(writer, "1", "ndv2", "ndv2_c", 300L);
  updateBinary(writer, "1", "bdv1", "bdv1_c", 300L);
  updateBinary(writer, "1", "bdv2", "bdv2_c", 300L);
  writer.commit();
  verifyDocValues(dir);
  // merge all segments
  writer.forceMerge(1);
  writer.commit();
  verifyDocValues(dir);
  writer.close();
  dir.close();
}
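verifyDocValues is not shown either. A rough sketch of the kind of invariant it could assert is below: for every live document, each "_c" control field holds exactly twice its base field's value, which stays true no matter how many updates have been applied. The field pairing and the iterator-style doc-values API are assumptions:

  // Hedged sketch (assumption) of one check verifyDocValues could perform,
  // shown for the ndv1/ndv1_c pair only.
  private void verifyNdv1Pair(Directory dir) throws IOException {
    try (DirectoryReader reader = DirectoryReader.open(dir)) {
      for (LeafReaderContext ctx : reader.leaves()) {
        NumericDocValues ndv = ctx.reader().getNumericDocValues("ndv1");
        NumericDocValues ctrl = ctx.reader().getNumericDocValues("ndv1_c");
        for (int doc = 0; doc < ctx.reader().maxDoc(); doc++) {
          if (ndv.advanceExact(doc) && ctrl.advanceExact(doc)) {
            assertEquals(ndv.longValue() * 2, ctrl.longValue());
          }
        }
      }
    }
  }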
Use of org.apache.lucene.analysis.MockAnalyzer in project lucene-solr by apache.
From the class TestBackwardsCompatibility, method testCreateMoreTermsIndex.
public void testCreateMoreTermsIndex() throws Exception {
  Path indexDir = getIndexDir().resolve("moreterms");
  Files.deleteIfExists(indexDir);
  Directory dir = newFSDirectory(indexDir);
  LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy();
  mp.setNoCFSRatio(1.0);
  mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  // TODO: remove randomness
  IndexWriterConfig conf = new IndexWriterConfig(analyzer)
      .setMergePolicy(mp)
      .setUseCompoundFile(false);
  IndexWriter writer = new IndexWriter(dir, conf);
  LineFileDocs docs = new LineFileDocs(null);
  for (int i = 0; i < 50; i++) {
    writer.addDocument(docs.nextDoc());
  }
  docs.close();
  writer.close();
  dir.close();
  // Gives you time to copy the index out!: (there is also
  // a test option to not remove temp dir...):
  Thread.sleep(100000);
}
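Before an index generated this way is copied out and zipped up as a back-compat test resource, it is worth confirming it is well-formed. A small hedged sketch, not part of the method above, that reopens the directory and runs CheckIndex through the test framework:

  // Hedged sketch (addition, not from the original test): TestUtil.checkIndex
  // runs CheckIndex over the directory and fails if any segment is corrupt.
  Directory check = newFSDirectory(indexDir);
  TestUtil.checkIndex(check);
  check.close();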