Example usage of org.apache.lucene.index.TieredMergePolicy in the apache/lucene-solr project: class TestConfig, method testDefaults.
// If defaults change, add test methods to cover each version
@Test
public void testDefaults() throws Exception {
// Tally of how many default settings this test asserts on; compared at the
// end against the number of entries SolrIndexConfig.toMap() reports.
int numDefaultsTested = 0;
// Subset of the tested defaults whose expected value is null; null entries are
// absent from the toMap() result, so they are added back in the final check.
int numNullDefaults = 0;
// Load a config that specifies nothing, so every setting takes its default.
SolrConfig sc = new SolrConfig(new SolrResourceLoader(TEST_PATH().resolve("collection1")), "solrconfig-defaults.xml", null);
SolrIndexConfig sic = sc.indexConfig;
++numDefaultsTested;
assertEquals("default useCompoundFile", false, sic.useCompoundFile);
++numDefaultsTested;
assertEquals("default maxBufferedDocs", -1, sic.maxBufferedDocs);
++numDefaultsTested;
assertEquals("default ramBufferSizeMB", 100.0D, sic.ramBufferSizeMB, 0.0D);
++numDefaultsTested;
assertEquals("default writeLockTimeout", -1, sic.writeLockTimeout);
++numDefaultsTested;
assertEquals("default LockType", DirectoryFactory.LOCK_TYPE_NATIVE, sic.lockType);
++numDefaultsTested;
assertEquals("default infoStream", InfoStream.NO_OUTPUT, sic.infoStream);
++numDefaultsTested;
assertNotNull("default metrics", sic.metricsInfo);
++numDefaultsTested;
++numNullDefaults;
assertNull("default mergePolicyFactoryInfo", sic.mergePolicyFactoryInfo);
++numDefaultsTested;
++numNullDefaults;
assertNull("default mergeSchedulerInfo", sic.mergeSchedulerInfo);
++numDefaultsTested;
++numNullDefaults;
assertNull("default mergedSegmentWarmerInfo", sic.mergedSegmentWarmerInfo);
// NOTE(review): indexSchema is never read afterwards, and this uses the
// solrConfig field rather than the local sc — presumably only the act of
// building the schema matters here; confirm, or drop the unused local.
IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig);
// Verify the defaults materialize into the expected Lucene IndexWriterConfig:
// TieredMergePolicy + ConcurrentMergeScheduler, no segment warmer.
IndexWriterConfig iwc = sic.toIndexWriterConfig(h.getCore());
assertNotNull("null mp", iwc.getMergePolicy());
assertTrue("mp is not TieredMergePolicy", iwc.getMergePolicy() instanceof TieredMergePolicy);
assertNotNull("null ms", iwc.getMergeScheduler());
assertTrue("ms is not CMS", iwc.getMergeScheduler() instanceof ConcurrentMergeScheduler);
assertNull("non-null mergedSegmentWarmer", iwc.getMergedSegmentWarmer());
// Guard: if a new setting appears in SolrIndexConfig's map without a matching
// assertion above, this count comparison fails and the test must be extended.
final int numDefaultsMapped = sic.toMap(new LinkedHashMap<>()).size();
assertEquals("numDefaultsTested vs. numDefaultsMapped+numNullDefaults =" + sic.toMap(new LinkedHashMap<>()).keySet(), numDefaultsTested, numDefaultsMapped + numNullDefaults);
}
Example usage of org.apache.lucene.index.TieredMergePolicy in the apache/lucene-solr project: class WrapperMergePolicyFactoryTest, method testProperlyInitializesWrappedMergePolicy.
public void testProperlyInitializesWrappedMergePolicy() {
// Derive target values from a stock TieredMergePolicy so the test never
// accidentally asserts on the built-in defaults.
final TieredMergePolicy baseline = new TieredMergePolicy();
final int expectedMaxMergeAtOnce = baseline.getMaxMergeAtOnce() * 2;
final double expectedMaxMergedSegmentMB = baseline.getMaxMergedSegmentMB() * 10;
// Configure the wrapper to build a TieredMergePolicy under the "test" prefix
// with both properties overridden.
final MergePolicyFactoryArgs wrapperArgs = new MergePolicyFactoryArgs();
wrapperArgs.add(WrapperMergePolicyFactory.WRAPPED_PREFIX, "test");
wrapperArgs.add("test.class", TieredMergePolicyFactory.class.getName());
wrapperArgs.add("test.maxMergeAtOnce", expectedMaxMergeAtOnce);
wrapperArgs.add("test.maxMergedSegmentMB", expectedMaxMergedSegmentMB);
// The default wrapped policy must never be consulted when an explicit wrapped
// class is configured; blow up if the factory asks for it.
MergePolicyFactory factory = new DefaultingWrapperMergePolicyFactory(resourceLoader, wrapperArgs, null) {
@Override
protected MergePolicy getDefaultWrappedMergePolicy() {
throw new IllegalStateException("Should not have reached here!");
}
};
final MergePolicy produced = factory.getMergePolicy();
assertSame(produced.getClass(), TieredMergePolicy.class);
final TieredMergePolicy producedTmp = (TieredMergePolicy) produced;
assertEquals("maxMergeAtOnce", expectedMaxMergeAtOnce, producedTmp.getMaxMergeAtOnce());
assertEquals("maxMergedSegmentMB", expectedMaxMergedSegmentMB, producedTmp.getMaxMergedSegmentMB(), 0.0d);
}
Example usage of org.apache.lucene.index.TieredMergePolicy in the apache/lucene-solr project: class TestSearcherTaxonomyManager, method testNRT.
// Stress-tests SearcherTaxonomyManager under near-real-time use: one thread
// indexes documents with facets while another repeatedly refreshes the
// manager, and the main thread concurrently acquires searcher/taxonomy pairs
// and runs faceted searches against them.
public void testNRT() throws Exception {
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
// Don't allow tiny maxBufferedDocs; it can make this
// test too slow:
iwc.setMaxBufferedDocs(Math.max(500, iwc.getMaxBufferedDocs()));
// MockRandom/AlcoholicMergePolicy are too slow:
TieredMergePolicy tmp = new TieredMergePolicy();
tmp.setFloorSegmentMB(.001);
iwc.setMergePolicy(tmp);
final IndexWriter w = new IndexWriter(dir, iwc);
final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
final FacetsConfig config = new FacetsConfig();
config.setMultiValued("field", true);
// Shared flag: the indexer thread sets this once it has indexed ordLimit
// facets, which stops both the reopener thread and the search loop below.
final AtomicBoolean stop = new AtomicBoolean();
// How many unique facets to index before stopping:
final int ordLimit = TEST_NIGHTLY ? 100000 : 6000;
Thread indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);
final SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);
// Background thread that refreshes the manager at random short intervals,
// racing against the indexer and the searches in the main thread.
Thread reopener = new Thread() {
@Override
public void run() {
while (!stop.get()) {
try {
// Sleep for up to 20 msec:
Thread.sleep(random().nextInt(20));
if (VERBOSE) {
System.out.println("TEST: reopen");
}
mgr.maybeRefresh();
if (VERBOSE) {
System.out.println("TEST: reopen done");
}
} catch (Exception ioe) {
throw new RuntimeException(ioe);
}
}
}
};
reopener.setName("reopener");
reopener.start();
indexer.setName("indexer");
indexer.start();
try {
// Main-thread search loop: acquire a consistent searcher+taxonomy pair,
// run a faceted MatchAllDocsQuery, and always release the pair.
while (!stop.get()) {
SearcherAndTaxonomy pair = mgr.acquire();
try {
//System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
FacetsCollector sfc = new FacetsCollector();
pair.searcher.search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
FacetResult result = facets.getTopChildren(10, "field");
// Only assert once some documents are visible; early acquires may
// see an empty index.
if (pair.searcher.getIndexReader().numDocs() > 0) {
//System.out.println(pair.taxonomyReader.getSize());
assertTrue(result.childCount > 0);
assertTrue(result.labelValues.length > 0);
}
//if (VERBOSE) {
//System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
//}
} finally {
mgr.release(pair);
}
}
} finally {
// Always join both background threads, even if an assertion failed above.
indexer.join();
reopener.join();
}
if (VERBOSE) {
System.out.println("TEST: now stop");
}
w.close();
IOUtils.close(mgr, tw, taxoDir, dir);
}
Example usage of org.apache.lucene.index.TieredMergePolicy in the apache/lucene-solr project: class TestIDVersionPostingsFormat, method testMoreThanOneDocPerIDTwoSegments.
// Verifies that indexing two documents with the same ID into two separate
// segments fails at merge time: forceMerge(1) must surface an
// IllegalArgumentException (directly, or wrapped, depending on the scheduler).
public void testMoreThanOneDocPerIDTwoSegments() throws Exception {
Directory directory = newDirectory();
IndexWriterConfig config = newIndexWriterConfig(new MockAnalyzer(random()));
config.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat()));
config.setMergePolicy(new TieredMergePolicy());
MergeScheduler scheduler = config.getMergeScheduler();
// With a concurrent scheduler the merge failure happens on a background
// thread, so check the exception type inside the scheduler's handler.
if (scheduler instanceof ConcurrentMergeScheduler) {
config.setMergeScheduler(new ConcurrentMergeScheduler() {
@Override
protected void handleMergeException(Directory dir, Throwable exc) {
assertTrue(exc instanceof IllegalArgumentException);
}
});
}
IndexWriter writer = new IndexWriter(directory, config);
// First segment: one document with id=17.
Document firstDoc = new Document();
firstDoc.add(makeIDField("id", 17));
writer.addDocument(firstDoc);
writer.commit();
// Second segment: a duplicate of the same id, then force the two segments
// to merge, which must fail.
Document duplicateDoc = new Document();
duplicateDoc.add(makeIDField("id", 17));
try {
writer.addDocument(duplicateDoc);
writer.commit();
writer.forceMerge(1);
fail("didn't hit exception");
} catch (IllegalArgumentException iae) {
// expected: SMS will hit this
} catch (IOException | IllegalStateException exc) {
// expected
assertTrue(exc.getCause() instanceof IllegalArgumentException);
}
writer.rollback();
directory.close();
}
Aggregations