Use of org.apache.lucene.util.LineFileDocs in project lucene-solr by apache.
The class TestSuggestField, method testRealisticKeys:
@Test
public void testRealisticKeys() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwcWithSuggestField(analyzer, "suggest_field"));
  LineFileDocs lineFileDocs = new LineFileDocs(random());
  int num = Math.min(1000, atLeast(100));
  Map<String, Integer> mappings = new HashMap<>();
  for (int i = 0; i < num; i++) {
    Document document = lineFileDocs.nextDoc();
    String title = document.getField("title").stringValue();
    int weight = random().nextInt(Integer.MAX_VALUE);
    // Titles can repeat across line file docs; remember the highest weight seen per title.
    Integer prevWeight = mappings.get(title);
    if (prevWeight == null || prevWeight < weight) {
      mappings.put(title, weight);
    }
    Document doc = new Document();
    doc.add(new SuggestField("suggest_field", title, weight));
    iw.addDocument(doc);
    if (rarely()) {
      iw.commit();
    }
  }
  DirectoryReader reader = iw.getReader();
  SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
  for (Map.Entry<String, Integer> entry : mappings.entrySet()) {
    String title = entry.getKey();
    PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", title));
    TopSuggestDocs suggest = indexSearcher.suggest(query, mappings.size(), false);
    assertTrue(suggest.totalHits > 0);
    // A suggestion's score is its indexed weight, so the top weight recorded
    // for this title must show up among the returned suggestions.
    boolean matched = false;
    for (ScoreDoc scoreDoc : suggest.scoreDocs) {
      matched = Float.compare(scoreDoc.score, (float) entry.getValue()) == 0;
      if (matched) {
        break;
      }
    }
    assertTrue("at least one of the entries should have the score", matched);
  }
  reader.close();
  iw.close();
}
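For context, LineFileDocs is a test-framework utility that streams pre-built Documents (the snippet above reads its "title" field) from a bundled line file, one document per line. A minimal standalone sketch of its use, assuming the lucene test-framework jar is on the classpath; the class name LineFileDocsDemo and the fixed seed are illustrative:

import java.util.Random;
import org.apache.lucene.document.Document;
import org.apache.lucene.util.LineFileDocs;

public class LineFileDocsDemo {
  public static void main(String[] args) throws Exception {
    // A seeded Random makes the document stream reproducible across runs.
    LineFileDocs docs = new LineFileDocs(new Random(42));
    try {
      for (int i = 0; i < 3; i++) {
        Document doc = docs.nextDoc();
        System.out.println(doc.getField("title").stringValue());
      }
    } finally {
      docs.close(); // releases the underlying file handle
    }
  }
}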
Use of org.apache.lucene.util.LineFileDocs in project lucene-solr by apache.
The class BaseTestCheckIndex, method testChecksumsOnlyVerbose:
public void testChecksumsOnlyVerbose(Directory dir) throws IOException {
  LineFileDocs lf = new LineFileDocs(random());
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(analyzer));
  for (int i = 0; i < 100; i++) {
    iw.addDocument(lf.nextDoc());
  }
  iw.addDocument(new Document());
  iw.commit();
  iw.close();
  lf.close();
  ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
  CheckIndex checker = new CheckIndex(dir);
  checker.setInfoStream(new PrintStream(bos, true, IOUtils.UTF_8));
  if (VERBOSE) checker.setInfoStream(System.out);
  CheckIndex.Status indexStatus = checker.checkIndex();
  assertTrue(indexStatus.clean);
  checker.close();
  analyzer.close();
}
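The same CheckIndex API works outside a test harness. A minimal sketch that verifies an existing on-disk index, assuming its path is passed as the first program argument; CheckIndexDemo and the argument handling are illustrative, not part of the test above:

import java.io.PrintStream;
import java.nio.file.Paths;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class CheckIndexDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = FSDirectory.open(Paths.get(args[0]));
    CheckIndex checker = new CheckIndex(dir);
    try {
      // Route the per-segment report to stdout instead of a buffer.
      checker.setInfoStream(new PrintStream(System.out, true, "UTF-8"));
      CheckIndex.Status status = checker.checkIndex();
      System.out.println(status.clean ? "index is clean" : "index is corrupt");
    } finally {
      checker.close();
      dir.close();
    }
  }
}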
Use of org.apache.lucene.util.LineFileDocs in project lucene-solr by apache.
The class TestIndexWriterWithThreads, method testRollbackAndCommitWithThreads:
// LUCENE-4147
public void testRollbackAndCommitWithThreads() throws Exception {
  final BaseDirectoryWrapper d = newDirectory();
  final int threadCount = TestUtil.nextInt(random(), 2, 6);
  final AtomicReference<IndexWriter> writerRef = new AtomicReference<>();
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  writerRef.set(new IndexWriter(d, newIndexWriterConfig(analyzer)));
  // Make initial commit so the test doesn't trip "corrupt first commit" when virus checker refuses to delete partial segments_N file:
  writerRef.get().commit();
  final LineFileDocs docs = new LineFileDocs(random());
  final Thread[] threads = new Thread[threadCount];
  final int iters = atLeast(100);
  final AtomicBoolean failed = new AtomicBoolean();
  final Lock rollbackLock = new ReentrantLock();
  final Lock commitLock = new ReentrantLock();
  for (int threadID = 0; threadID < threadCount; threadID++) {
    threads[threadID] = new Thread() {
      @Override
      public void run() {
        for (int iter = 0; iter < iters && !failed.get(); iter++) {
          //final int x = random().nextInt(5);
          final int x = random().nextInt(3);
          try {
            switch (x) {
              case 0:
                rollbackLock.lock();
                if (VERBOSE) {
                  System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now rollback");
                }
                try {
                  writerRef.get().rollback();
                  if (VERBOSE) {
                    System.out.println("TEST: " + Thread.currentThread().getName() + ": rollback done; now open new writer");
                  }
                  writerRef.set(new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))));
                } finally {
                  rollbackLock.unlock();
                }
                break;
              case 1:
                commitLock.lock();
                if (VERBOSE) {
                  System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now commit");
                }
                try {
                  if (random().nextBoolean()) {
                    writerRef.get().prepareCommit();
                  }
                  writerRef.get().commit();
                } catch (AlreadyClosedException | NullPointerException ace) {
                  // ok
                } finally {
                  commitLock.unlock();
                }
                break;
              case 2:
                if (VERBOSE) {
                  System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now add");
                }
                try {
                  writerRef.get().addDocument(docs.nextDoc());
                } catch (AlreadyClosedException | NullPointerException | AssertionError ace) {
                  // ok
                }
                break;
            }
          } catch (Throwable t) {
            failed.set(true);
            throw new RuntimeException(t);
          }
        }
      }
    };
    threads[threadID].start();
  }
  for (int threadID = 0; threadID < threadCount; threadID++) {
    threads[threadID].join();
  }
  assertTrue(!failed.get());
  writerRef.get().close();
  d.close();
}
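The core trick here is that rollback() closes the writer, so sibling threads holding a stale reference must treat AlreadyClosedException as benign and re-fetch from the AtomicReference. A stripped-down sketch of just that race, with the test-framework helpers replaced by plain Lucene classes; RollbackRaceDemo and the iteration counts are illustrative:

import java.util.concurrent.atomic.AtomicReference;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class RollbackRaceDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    AtomicReference<IndexWriter> writerRef = new AtomicReference<>(
        new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer())));

    // One thread rolls the writer back (which closes it) and publishes a fresh one.
    Thread roller = new Thread(() -> {
      try {
        writerRef.get().rollback();
        writerRef.set(new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer())));
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    });
    roller.start();

    // Meanwhile, indexing threads must tolerate hitting the closed instance.
    for (int i = 0; i < 100; i++) {
      try {
        writerRef.get().addDocument(new Document());
      } catch (AlreadyClosedException expected) {
        // The writer we fetched was rolled back underneath us;
        // the next writerRef.get() observes the replacement.
      }
    }

    roller.join();
    writerRef.get().close();
    dir.close();
  }
}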
Use of org.apache.lucene.util.LineFileDocs in project lucene-solr by apache.
The class TestNorms, method buildIndex:
// TODO: create a testNormsNotPresent ourselves by adding/deleting/merging docs
public void buildIndex(Directory dir) throws IOException {
  Random random = random();
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
  IndexWriterConfig config = newIndexWriterConfig(analyzer);
  Similarity provider = new MySimProvider();
  config.setSimilarity(provider);
  RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
  final LineFileDocs docs = new LineFileDocs(random);
  int num = atLeast(100);
  for (int i = 0; i < num; i++) {
    Document doc = docs.nextDoc();
    int boost = TestUtil.nextInt(random, 1, 255);
    // Encode the desired boost as the field length: the token appears `boost` times.
    String value = IntStream.range(0, boost).mapToObj(k -> Integer.toString(boost)).collect(Collectors.joining(" "));
    Field f = new TextField(BYTE_TEST_FIELD, value, Field.Store.YES);
    doc.add(f);
    writer.addDocument(doc);
    doc.removeField(BYTE_TEST_FIELD);
    if (rarely()) {
      writer.commit();
    }
  }
  writer.commit();
  writer.close();
  docs.close();
}
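The IntStream line does the heavy lifting: it builds a field value whose token count equals the random boost, which the test's custom MySimProvider similarity presumably maps back to the norm. A tiny sketch of just that string construction; RepeatedTokenDemo is illustrative:

import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class RepeatedTokenDemo {
  public static void main(String[] args) {
    int boost = 3;
    // Joins the token "3" three times -> "3 3 3", i.e. a field of length 3.
    String value = IntStream.range(0, boost)
        .mapToObj(k -> Integer.toString(boost))
        .collect(Collectors.joining(" "));
    System.out.println(value);
  }
}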
Use of org.apache.lucene.util.LineFileDocs in project lucene-solr by apache.
The class TestNRTReplication, method testReplicateForceMerge:
@Nightly
public void testReplicateForceMerge() throws Exception {
  Path primaryPath = createTempDir("primary");
  NodeProcess primary = startNode(-1, 0, primaryPath, -1, false);
  Path replicaPath = createTempDir("replica");
  NodeProcess replica = startNode(primary.tcpPort, 1, replicaPath, -1, false);
  sendReplicasToPrimary(primary, replica);
  // Index 10 docs into primary:
  LineFileDocs docs = new LineFileDocs(random());
  Connection primaryC = new Connection(primary.tcpPort);
  primaryC.out.writeByte(SimplePrimaryNode.CMD_INDEXING);
  for (int i = 0; i < 10; i++) {
    Document doc = docs.nextDoc();
    primary.addOrUpdateDocument(primaryC, doc, false);
  }
  // Refresh primary, which also pushes to replica:
  long primaryVersion1 = primary.flush(0);
  assertTrue(primaryVersion1 > 0);
  // Index 10 more docs into primary:
  for (int i = 0; i < 10; i++) {
    Document doc = docs.nextDoc();
    primary.addOrUpdateDocument(primaryC, doc, false);
  }
  // Refresh primary, which also pushes to replica:
  long primaryVersion2 = primary.flush(0);
  assertTrue(primaryVersion2 > primaryVersion1);
  primary.forceMerge(primaryC);
  // Refresh primary, which also pushes to replica:
  long primaryVersion3 = primary.flush(0);
  assertTrue(primaryVersion3 > primaryVersion2);
  // Wait for replica to show the change
  waitForVersionAndHits(replica, primaryVersion3, 20);
  primaryC.close();
  replica.close();
  primary.close();
}
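NodeProcess, Connection, and the CMD_* constants are helpers private to Lucene's NRT replication tests; the operation the primary performs under the hood is the standard IndexWriter force merge. A minimal sketch of that call in isolation; ForceMergeDemo and the sample document are illustrative:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class ForceMergeDemo {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new RAMDirectory()) {
      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
      Document doc = new Document();
      doc.add(new TextField("body", "hello world", Field.Store.NO));
      w.addDocument(doc);
      w.commit();
      // Rewrite the index down to a single segment; in the NRT replication
      // setup, the next flush then ships the merged segment to replicas.
      w.forceMerge(1);
      w.close();
    }
  }
}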