Use of org.apache.lucene.index.DirectoryReader in project lucene-solr by apache.
The class DirectUpdateHandler2, method mergeIndexes.
@Override
public int mergeIndexes(MergeIndexesCommand cmd) throws IOException {
  mergeIndexesCommands.mark();
  int rc;
  log.info("start " + cmd);
  List<DirectoryReader> readers = cmd.readers;
  if (readers != null && readers.size() > 0) {
    // flatten the incoming readers to their leaves and wrap each one so it
    // can be passed to IndexWriter.addIndexes(CodecReader...)
    List<CodecReader> mergeReaders = new ArrayList<>();
    for (DirectoryReader reader : readers) {
      for (LeafReaderContext leaf : reader.leaves()) {
        mergeReaders.add(SlowCodecReaderWrapper.wrap(leaf.reader()));
      }
    }
    RefCounted<IndexWriter> iw = solrCoreState.getIndexWriter(core);
    try {
      iw.get().addIndexes(mergeReaders.toArray(new CodecReader[mergeReaders.size()]));
    } finally {
      iw.decref();
    }
    rc = 1;
  } else {
    rc = 0;
  }
  log.info("end_mergeIndexes");
  // TODO: consider soft commit issues
  if (rc == 1 && commitTracker.getTimeUpperBound() > 0) {
    commitTracker.scheduleCommitWithin(commitTracker.getTimeUpperBound());
  } else if (rc == 1 && softCommitTracker.getTimeUpperBound() > 0) {
    softCommitTracker.scheduleCommitWithin(softCommitTracker.getTimeUpperBound());
  }
  return rc;
}
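As a usage sketch (not from the Solr sources): a caller opens a DirectoryReader over each source directory and passes the list in a MergeIndexesCommand. Here core, sourceDirs, and req are assumptions for illustration, standing in for a SolrCore, the source index directories, and a SolrQueryRequest.

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.apache.solr.update.MergeIndexesCommand;

List<DirectoryReader> readers = new ArrayList<>();
for (Directory sourceDir : sourceDirs) { // sourceDirs: hypothetical source index directories
  readers.add(DirectoryReader.open(sourceDir));
}
try {
  // returns 1 if anything was merged, 0 if the reader list was null or empty
  int rc = core.getUpdateHandler().mergeIndexes(new MergeIndexesCommand(readers, req));
} finally {
  IOUtils.close(readers); // mergeIndexes leaves the source readers open
}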
Use of org.apache.lucene.index.DirectoryReader in project lucene-solr by apache.
The class TestBooleanSimilarity, method testPhraseScoreIsEqualToBoost.
public void testPhraseScoreIsEqualToBoost() throws IOException {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir,
      newIndexWriterConfig().setSimilarity(new BooleanSimilarity()));
  Document doc = new Document();
  doc.add(new TextField("foo", "bar baz quux", Store.NO));
  w.addDocument(doc);
  DirectoryReader reader = w.getReader();
  w.close();
  IndexSearcher searcher = newSearcher(reader);
  searcher.setSimilarity(new BooleanSimilarity());
  // "bar ... quux" matches the indexed text within a slop of 2
  PhraseQuery query = new PhraseQuery(2, "foo", "bar", "quux");
  TopDocs topDocs = searcher.search(query, 2);
  assertEquals(1, topDocs.totalHits);
  // BooleanSimilarity scores every match exactly 1.0...
  assertEquals(1f, topDocs.scoreDocs[0].score, 0f);
  // ...so a boosted query scores exactly its boost
  topDocs = searcher.search(new BoostQuery(query, 7), 2);
  assertEquals(1, topDocs.totalHits);
  assertEquals(7f, topDocs.scoreDocs[0].score, 0f);
  reader.close();
  dir.close();
}
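The same invariant holds for other query types under BooleanSimilarity, not just phrases. A minimal sketch, assuming a searcher over an index that contains the term foo:bar; the helper name scoreOf is made up:

import java.io.IOException;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.similarities.BooleanSimilarity;

static float scoreOf(IndexSearcher searcher, Query query) throws IOException {
  searcher.setSimilarity(new BooleanSimilarity());
  return searcher.search(query, 1).scoreDocs[0].score;
}

// scoreOf(searcher, new TermQuery(new Term("foo", "bar")))                     -> 1f
// scoreOf(searcher, new BoostQuery(new TermQuery(new Term("foo", "bar")), 7f)) -> 7f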
Use of org.apache.lucene.index.DirectoryReader in project lucene-solr by apache.
The class TestCachedOrdinalsReader, method testWithThreads.
@Test
public void testWithThreads() throws Exception {
  // LUCENE-5303: OrdinalsCache used the ThreadLocal BinaryDV instead of reader.getCoreCacheKey().
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();
  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter writer = new IndexWriter(indexDir, conf);
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
  FacetsConfig config = new FacetsConfig();
  Document doc = new Document();
  doc.add(new FacetField("A", "1"));
  writer.addDocument(config.build(taxoWriter, doc));
  doc = new Document();
  doc.add(new FacetField("A", "2"));
  writer.addDocument(config.build(taxoWriter, doc));
  final DirectoryReader reader = DirectoryReader.open(writer);
  final CachedOrdinalsReader ordsReader =
      new CachedOrdinalsReader(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME));
  Thread[] threads = new Thread[3];
  for (int i = 0; i < threads.length; i++) {
    threads[i] = new Thread("CachedOrdsThread-" + i) {
      @Override
      public void run() {
        for (LeafReaderContext context : reader.leaves()) {
          try {
            ordsReader.getReader(context);
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        }
      }
    };
  }
  long ramBytesUsed = 0;
  // run the threads one at a time: if the cache were keyed per thread rather than
  // per core cache key, ramBytesUsed would grow with each additional thread
  for (Thread t : threads) {
    t.start();
    t.join();
    if (ramBytesUsed == 0) {
      ramBytesUsed = ordsReader.ramBytesUsed();
    } else {
      assertEquals(ramBytesUsed, ordsReader.ramBytesUsed());
    }
  }
  writer.close();
  IOUtils.close(taxoWriter, reader, indexDir, taxoDir);
}
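For reference, the read path each thread exercises looks roughly like this in isolation. A sketch assuming a taxonomy-faceted reader as built above; the docID 0 and the scratch buffer are illustrative:

import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.CachedOrdinalsReader;
import org.apache.lucene.facet.taxonomy.DocValuesOrdinalsReader;
import org.apache.lucene.facet.taxonomy.OrdinalsReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.IntsRef;

CachedOrdinalsReader ords =
    new CachedOrdinalsReader(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME));
for (LeafReaderContext context : reader.leaves()) {
  // cached per segment core key, so repeated calls (and other threads) share one copy
  OrdinalsReader.OrdinalsSegmentReader segmentOrds = ords.getReader(context);
  IntsRef ordinals = new IntsRef();
  segmentOrds.get(0, ordinals); // taxonomy ordinals of the segment's first document
}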
Use of org.apache.lucene.index.DirectoryReader in project lucene-solr by apache.
The class TestOrdinalMappingLeafReader, method verifyResults.
private void verifyResults(Directory indexDir, Directory taxoDir) throws IOException {
  DirectoryReader indexReader = DirectoryReader.open(indexDir);
  DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
  IndexSearcher searcher = newSearcher(indexReader);
  FacetsCollector collector = new FacetsCollector();
  FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, collector);
  // tag facets, indexed into the custom "$tags" field
  Facets tagFacets = new FastTaxonomyFacetCounts("$tags", taxoReader, facetConfig, collector);
  FacetResult result = tagFacets.getTopChildren(10, "tag");
  for (LabelAndValue lv : result.labelValues) {
    if (VERBOSE) {
      System.out.println(lv);
    }
    assertEquals(NUM_DOCS, lv.value.intValue());
  }
  // id facets, indexed into the default field
  Facets idFacets = new FastTaxonomyFacetCounts(taxoReader, facetConfig, collector);
  FacetResult idResult = idFacets.getTopChildren(10, "id");
  assertEquals(NUM_DOCS, idResult.childCount);
  // each "id" appears twice
  assertEquals(NUM_DOCS * 2, idResult.value);
  BinaryDocValues bdv = MultiDocValues.getBinaryValues(indexReader, "bdv");
  BinaryDocValues cbdv = MultiDocValues.getBinaryValues(indexReader, "cbdv");
  for (int i = 0; i < indexReader.maxDoc(); i++) {
    assertEquals(i, bdv.nextDoc());
    assertEquals(i, cbdv.nextDoc());
    assertEquals(Integer.parseInt(cbdv.binaryValue().utf8ToString()),
        Integer.parseInt(bdv.binaryValue().utf8ToString()) * 2);
  }
  IOUtils.close(indexReader, taxoReader);
}
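verifyResults inspects an index produced by merging a source index into a destination while remapping taxonomy ordinals through OrdinalMappingLeafReader. A sketch of that merge step, assuming source/destination directories, a destination writer pair, and the test's facetConfig; this mirrors the approach of TaxonomyMergeUtils.merge but is illustrative, not the test's exact code:

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.facet.taxonomy.OrdinalMappingLeafReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.CodecReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SlowCodecReaderWrapper;

// merge the source taxonomy into the destination, recording old -> new ordinals
DirectoryTaxonomyWriter.OrdinalMap map = new DirectoryTaxonomyWriter.MemoryOrdinalMap();
destTaxoWriter.addTaxonomy(srcTaxoDir, map);
int[] ordinalMap = map.getMap();

// copy the source segments, rewriting each facet ordinal through the map
DirectoryReader srcReader = DirectoryReader.open(srcIndexDir);
List<CodecReader> wrapped = new ArrayList<>();
for (LeafReaderContext leaf : srcReader.leaves()) {
  wrapped.add(SlowCodecReaderWrapper.wrap(
      new OrdinalMappingLeafReader(leaf.reader(), ordinalMap, facetConfig)));
}
destIndexWriter.addIndexes(wrapped.toArray(new CodecReader[0]));
srcReader.close();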
Use of org.apache.lucene.index.DirectoryReader in project lucene-solr by apache.
The class TestTaxonomyFacetCounts, method testSeparateIndexedFields.
public void testSeparateIndexedFields() throws Exception {
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
  IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
  FacetsConfig config = new FacetsConfig();
  // route dimension "b" into its own doc-values field instead of the default one
  config.setIndexFieldName("b", "$b");
  for (int i = atLeast(30); i > 0; --i) {
    Document doc = new Document();
    doc.add(new StringField("f", "v", Field.Store.NO));
    doc.add(new FacetField("a", "1"));
    doc.add(new FacetField("b", "1"));
    iw.addDocument(config.build(taxoWriter, doc));
  }
  DirectoryReader r = DirectoryReader.open(iw);
  DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
  FacetsCollector sfc = new FacetsCollector();
  newSearcher(r).search(new MatchAllDocsQuery(), sfc);
  Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, sfc);
  Facets facets2 = getTaxonomyFacetCounts(taxoReader, config, sfc, "$b");
  assertEquals(r.maxDoc(), facets1.getTopChildren(10, "a").value.intValue());
  assertEquals(r.maxDoc(), facets2.getTopChildren(10, "b").value.intValue());
  iw.close();
  IOUtils.close(taxoWriter, taxoReader, taxoDir, r, indexDir);
}
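Outside the test's getTaxonomyFacetCounts helper, the same field routing applies with the public counting classes. A sketch, assuming a taxoReader, the config above, and a populated FacetsCollector fc:

import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;

// counts dimensions stored in the default index field (dimension "a" here)
Facets defaultField = new FastTaxonomyFacetCounts(taxoReader, config, fc);
// counts dimensions routed to "$b" (dimension "b" here); asking the wrong
// Facets instance for a dimension yields no counts
Facets customField = new FastTaxonomyFacetCounts("$b", taxoReader, config, fc);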