Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
From class LuceneTests, method testCleanIndex:
public void testCleanIndex() throws IOException {
    MockDirectoryWrapper dir = newMockDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig();
    iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
    iwc.setMergePolicy(NoMergePolicy.INSTANCE);
    iwc.setMaxBufferedDocs(2);
    IndexWriter writer = new IndexWriter(dir, iwc);
    Document doc = new Document();
    doc.add(new TextField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
    writer.addDocument(doc);
    writer.commit();
    doc = new Document();
    doc.add(new TextField("id", "2", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(new TextField("id", "3", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
    writer.addDocument(doc);
    writer.commit();
    doc = new Document();
    doc.add(new TextField("id", "4", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
    writer.addDocument(doc);
    writer.deleteDocuments(new Term("id", "2"));
    writer.commit();
    try (DirectoryReader open = DirectoryReader.open(writer)) {
        assertEquals(3, open.numDocs());
        assertEquals(1, open.numDeletedDocs());
        assertEquals(4, open.maxDoc());
    }
    writer.close();
    if (random().nextBoolean()) {
        for (String file : dir.listAll()) {
            if (file.startsWith("_1")) {
                // delete a random file
                dir.deleteFile(file);
                break;
            }
        }
    }
    Lucene.cleanLuceneIndex(dir);
    if (dir.listAll().length > 0) {
        for (String file : dir.listAll()) {
            if (file.startsWith("extra") == false) {
                assertEquals(file, "write.lock");
            }
        }
    }
    dir.close();
}
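The test relies on Lucene.cleanLuceneIndex(dir) wiping the index even after a segment file has been deleted, leaving at most write.lock (plus any "extra" files the mock directory adds). A minimal sketch of one way such a cleanup can be implemented; this is an assumption for illustration, and the actual Elasticsearch implementation may differ:

// Sketch only: opening an IndexWriter with OpenMode.CREATE drops all
// existing segments, emptying the index while keeping the write lock.
static void wipeIndex(Directory dir) throws IOException {
    IndexWriterConfig iwc = new IndexWriterConfig()
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
        .setCommitOnClose(true);
    try (IndexWriter writer = new IndexWriter(dir, iwc)) {
        // nothing else to do: close() commits a new, empty segments file
    }
}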
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
From class ShardCoreKeyMapTests, method testBasics:
public void testBasics() throws IOException {
    Directory dir1 = newDirectory();
    RandomIndexWriter w1 = new RandomIndexWriter(random(), dir1);
    w1.addDocument(new Document());
    Directory dir2 = newDirectory();
    RandomIndexWriter w2 = new RandomIndexWriter(random(), dir2);
    w2.addDocument(new Document());
    Directory dir3 = newDirectory();
    RandomIndexWriter w3 = new RandomIndexWriter(random(), dir3);
    w3.addDocument(new Document());
    ShardId shardId1 = new ShardId("index1", "_na_", 1);
    ShardId shardId2 = new ShardId("index1", "_na_", 3);
    ShardId shardId3 = new ShardId("index2", "_na_", 2);
    ElasticsearchDirectoryReader reader1 = ElasticsearchDirectoryReader.wrap(w1.getReader(), shardId1);
    ElasticsearchDirectoryReader reader2 = ElasticsearchDirectoryReader.wrap(w2.getReader(), shardId2);
    ElasticsearchDirectoryReader reader3 = ElasticsearchDirectoryReader.wrap(w3.getReader(), shardId3);
    ShardCoreKeyMap map = new ShardCoreKeyMap();
    for (DirectoryReader reader : Arrays.asList(reader1, reader2, reader3)) {
        for (LeafReaderContext ctx : reader.leaves()) {
            map.add(ctx.reader());
        }
    }
    assertEquals(3, map.size());
    // Adding them back is a no-op
    for (LeafReaderContext ctx : reader1.leaves()) {
        map.add(ctx.reader());
    }
    assertEquals(3, map.size());
    for (LeafReaderContext ctx : reader2.leaves()) {
        assertEquals(shardId2, map.getShardId(ctx.reader().getCoreCacheKey()));
    }
    w1.addDocument(new Document());
    ElasticsearchDirectoryReader newReader1 = ElasticsearchDirectoryReader.wrap(w1.getReader(), shardId1);
    reader1.close();
    reader1 = newReader1;
    // same for reader2, but with a force merge to trigger evictions
    w2.addDocument(new Document());
    w2.forceMerge(1);
    ElasticsearchDirectoryReader newReader2 = ElasticsearchDirectoryReader.wrap(w2.getReader(), shardId2);
    reader2.close();
    reader2 = newReader2;
    for (DirectoryReader reader : Arrays.asList(reader1, reader2, reader3)) {
        for (LeafReaderContext ctx : reader.leaves()) {
            map.add(ctx.reader());
        }
    }
    final Set<Object> index1Keys = new HashSet<>();
    for (DirectoryReader reader : Arrays.asList(reader1, reader2)) {
        for (LeafReaderContext ctx : reader.leaves()) {
            index1Keys.add(ctx.reader().getCoreCacheKey());
        }
    }
    index1Keys.removeAll(map.getCoreKeysForIndex("index1"));
    assertEquals(Collections.emptySet(), index1Keys);
    reader1.close();
    w1.close();
    reader2.close();
    w2.close();
    reader3.close();
    w3.close();
    assertEquals(0, map.size());
    dir1.close();
    dir2.close();
    dir3.close();
}
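ShardCoreKeyMap exists so that node-level caches keyed by a segment's core cache key can be cleared per index or shard when it goes away. A hedged usage sketch; only getCoreKeysForIndex comes from the test above, the cache itself is hypothetical:

// Hypothetical node-level cache keyed by segment core cache key.
Map<Object, Object> cacheByCoreKey = new ConcurrentHashMap<>();
// When "index1" is removed, drop every cached entry for its segments:
for (Object coreKey : map.getCoreKeysForIndex("index1")) {
    cacheByCoreKey.remove(coreKey);
}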
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
From class VersionsTests, method testNestedDocuments:
public void testNestedDocuments() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
    List<Document> docs = new ArrayList<>();
    for (int i = 0; i < 4; ++i) {
        // Nested
        Document doc = new Document();
        doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
        docs.add(doc);
    }
    // Root
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE));
    NumericDocValuesField version = new NumericDocValuesField(VersionFieldMapper.NAME, 5L);
    doc.add(version);
    docs.add(doc);
    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5L));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5L));
    version.setLongValue(6L);
    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    version.setLongValue(7L);
    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    directoryReader = reopen(directoryReader);
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(7L));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(7L));
    writer.deleteDocuments(new Term(UidFieldMapper.NAME, "1"));
    directoryReader = reopen(directoryReader);
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), nullValue());
    directoryReader.close();
    writer.close();
    dir.close();
}
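The ordering in this test matters: Lucene indexes a nested block as the child documents followed by the root document, and updateDocuments replaces the entire block atomically under the given term, which is why the version field on the root is enough to version the whole block. A minimal generic sketch of that pattern; the field names here are illustrative, not taken from the test:

List<Document> block = new ArrayList<>();
Document child = new Document();
child.add(new StringField("type", "child", Field.Store.NO));
block.add(child);
Document parent = new Document();
parent.add(new StringField("id", "1", Field.Store.NO));
parent.add(new NumericDocValuesField("version", 1L));
block.add(parent); // the root document must come last in the block
writer.updateDocuments(new Term("id", "1"), block); // atomically replaces the previous block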
Use of org.apache.lucene.index.DirectoryReader in project ansj_seg by NLPchina.
From class HeightLightTest, method search:
private static void search(Analyzer analyzer, Query query) throws IOException {
    DirectoryReader directoryReader = DirectoryReader.open(directory);
    // search the index
    IndexSearcher isearcher = new IndexSearcher(directoryReader);
    System.out.println(query);
    TopDocs hits = isearcher.search(query, 5);
    for (int i = 0; i < hits.scoreDocs.length; i++) {
        int docId = hits.scoreDocs[i].doc;
        Document document = isearcher.doc(docId);
        System.out.println(toHighlighter(analyzer, query, document));
    }
}
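The toHighlighter helper is defined elsewhere in HeightLightTest and is not shown here. A plausible sketch of such a helper using Lucene's highlighter module, assuming the indexed field is named "text" and is stored; the project's actual implementation may differ:

// Sketch, not the project's code: wrap the best-scoring fragment in HTML tags.
private static String toHighlighter(Analyzer analyzer, Query query, Document document) {
    String text = document.get("text"); // assumes a stored "text" field
    QueryScorer scorer = new QueryScorer(query);
    Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), scorer);
    try {
        return highlighter.getBestFragment(analyzer, "text", text);
    } catch (IOException | InvalidTokenOffsetsException e) {
        throw new RuntimeException(e);
    }
}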
Use of org.apache.lucene.index.DirectoryReader in project ansj_seg by NLPchina.
From class IndexTest, method search:
private void search(Analyzer queryAnalyzer, Directory directory, String queryStr) throws CorruptIndexException, IOException, ParseException {
    IndexSearcher isearcher;
    DirectoryReader directoryReader = DirectoryReader.open(directory);
    // search the index
    isearcher = new IndexSearcher(directoryReader);
    QueryParser tq = new QueryParser("text", queryAnalyzer);
    Query query = tq.parse(queryStr);
    System.out.println(query);
    TopDocs hits = isearcher.search(query, 5);
    System.out.println(queryStr + ": found " + hits.totalHits + " matching documents!");
    for (int i = 0; i < hits.scoreDocs.length; i++) {
        int docId = hits.scoreDocs[i].doc;
        Document document = isearcher.doc(docId);
        System.out.println(toHighlighter(queryAnalyzer, query, document));
    }
}
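For this search to find anything, the index must have been written through a compatible analyzer, with the "text" field stored so the highlighter can read it back. A minimal sketch of the indexing side under those assumptions:

// Sketch: index and query analyzers should match for terms to line up.
IndexWriterConfig iwc = new IndexWriterConfig(queryAnalyzer);
try (IndexWriter writer = new IndexWriter(directory, iwc)) {
    Document doc = new Document();
    // Field.Store.YES so the original text is retrievable for highlighting
    doc.add(new TextField("text", "example content to index", Field.Store.YES));
    writer.addDocument(doc);
    writer.commit();
}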