Example 6 with NRTCachingDirectory

Use of org.apache.lucene.store.NRTCachingDirectory in the project lucene-solr by apache.

From the class HdfsDirectoryFactory, method create.

@Override
protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
    assert params != null : "init must be called before create";
    LOG.info("creating directory factory for path {}", path);
    Configuration conf = getConf();
    if (metrics == null) {
        metrics = MetricsHolder.metrics;
    }
    boolean blockCacheEnabled = getConfig(BLOCKCACHE_ENABLED, true);
    boolean blockCacheGlobal = getConfig(BLOCKCACHE_GLOBAL, true);
    boolean blockCacheReadEnabled = getConfig(BLOCKCACHE_READ_ENABLED, true);
    final HdfsDirectory hdfsDir;
    final Directory dir;
    if (blockCacheEnabled && dirContext != DirContext.META_DATA) {
        int numberOfBlocksPerBank = getConfig(NUMBEROFBLOCKSPERBANK, 16384);
        int blockSize = BlockDirectory.BLOCK_SIZE;
        int bankCount = getConfig(BLOCKCACHE_SLAB_COUNT, 1);
        boolean directAllocation = getConfig(BLOCKCACHE_DIRECT_MEMORY_ALLOCATION, true);
        int slabSize = numberOfBlocksPerBank * blockSize;
        LOG.info("Number of slabs of block cache [{}] with direct memory allocation set to [{}]", bankCount, directAllocation);
        LOG.info("Block cache target memory usage, slab size of [{}] will allocate [{}] slabs and use ~[{}] bytes", new Object[] { slabSize, bankCount, ((long) bankCount * (long) slabSize) });
        int bsBufferSize = params.getInt("solr.hdfs.blockcache.bufferstore.buffersize", blockSize);
        // this is actually total size
        int bsBufferCount = params.getInt("solr.hdfs.blockcache.bufferstore.buffercount", 0);
        BlockCache blockCache = getBlockDirectoryCache(numberOfBlocksPerBank, blockSize, bankCount, directAllocation, slabSize, bsBufferSize, bsBufferCount, blockCacheGlobal);
        Cache cache = new BlockDirectoryCache(blockCache, path, metrics, blockCacheGlobal);
        int readBufferSize = params.getInt("solr.hdfs.blockcache.read.buffersize", blockSize);
        hdfsDir = new HdfsDirectory(new Path(path), lockFactory, conf, readBufferSize);
        dir = new BlockDirectory(path, hdfsDir, cache, null, blockCacheReadEnabled, false, cacheMerges, cacheReadOnce);
    } else {
        hdfsDir = new HdfsDirectory(new Path(path), conf);
        dir = hdfsDir;
    }
    if (params.getBool(LOCALITYMETRICS_ENABLED, false)) {
        LocalityHolder.reporter.registerDirectory(hdfsDir);
    }
    boolean nrtCachingDirectory = getConfig(NRTCACHINGDIRECTORY_ENABLE, true);
    if (nrtCachingDirectory) {
        double nrtCacheMaxMergeSizeMB = getConfig(NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 16);
        double nrtCacheMaxCacheMB = getConfig(NRTCACHINGDIRECTORY_MAXCACHEMB, 192);
        return new NRTCachingDirectory(dir, nrtCacheMaxMergeSizeMB, nrtCacheMaxCacheMB);
    }
    return dir;
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) Directory(org.apache.lucene.store.Directory) NRTCachingDirectory(org.apache.lucene.store.NRTCachingDirectory) HdfsDirectory(org.apache.solr.store.hdfs.HdfsDirectory) BlockDirectory(org.apache.solr.store.blockcache.BlockDirectory) BlockCache(org.apache.solr.store.blockcache.BlockCache) BlockDirectoryCache(org.apache.solr.store.blockcache.BlockDirectoryCache) Cache(org.apache.solr.store.blockcache.Cache)
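
For context on the final wrapping step: NRTCachingDirectory keeps small, newly flushed files in RAM and only writes them through to the delegate Directory when they are too large or the cache budget is exhausted, which speeds up near-real-time reopen cycles. A minimal standalone sketch of the same wrapping pattern the factory uses above (the index path and size thresholds here are illustrative assumptions, not values taken from HdfsDirectoryFactory):

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.NRTCachingDirectory;

public class NrtWrapSketch {
    public static Directory openNrtWrapped() throws IOException {
        // Delegate directory; any Directory implementation works here.
        Directory delegate = FSDirectory.open(Paths.get("/tmp/example-index")); // hypothetical path
        // Cache flushed files of up to 5 MB each, within a total RAM budget of 60 MB.
        double maxMergeSizeMB = 5.0;
        double maxCachedMB = 60.0;
        return new NRTCachingDirectory(delegate, maxMergeSizeMB, maxCachedMB);
    }
}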

Example 7 with NRTCachingDirectory

Use of org.apache.lucene.store.NRTCachingDirectory in the project lucene-solr by apache.

From the class TestBinaryDocValuesUpdates, method testIOContext.

@Test
public void testIOContext() throws Exception {
    // LUCENE-5591: make sure we pass an IOContext with an approximate
    // segmentSize in FlushInfo
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    // we want a single large enough segment so that a doc-values update writes a large file
    conf.setMergePolicy(NoMergePolicy.INSTANCE);
    // manually flush
    conf.setMaxBufferedDocs(Integer.MAX_VALUE);
    conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
    IndexWriter writer = new IndexWriter(dir, conf);
    for (int i = 0; i < 100; i++) {
        writer.addDocument(doc(i));
    }
    writer.commit();
    writer.close();
    // maxMergeSizeMB = 100, but maxCachedMB is one byte expressed in MB,
    // so no flushed file is small enough to stay in the RAM cache
    NRTCachingDirectory cachingDir = new NRTCachingDirectory(dir, 100, 1 / (1024. * 1024.));
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    // we want a single large enough segment so that a doc-values update writes a large file
    conf.setMergePolicy(NoMergePolicy.INSTANCE);
    // manually flush
    conf.setMaxBufferedDocs(Integer.MAX_VALUE);
    conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
    writer = new IndexWriter(cachingDir, conf);
    writer.updateBinaryDocValue(new Term("id", "doc-0"), "val", toBytes(100L));
    // opening a reader on the writer forces the pending doc-values update to flush
    DirectoryReader reader = DirectoryReader.open(writer);
    assertEquals(0, cachingDir.listCachedFiles().length);
    IOUtils.close(reader, writer, cachingDir);
}
Also used : MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) Directory(org.apache.lucene.store.Directory) NRTCachingDirectory(org.apache.lucene.store.NRTCachingDirectory) Test(org.junit.Test)
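
The odd-looking maxCachedMB of 1 / (1024. * 1024.) is the whole point of this test: it sets the RAM budget to exactly one byte, so no real file can stay in the cache. A short sketch of that arithmetic (variable names are mine, not from the test):

double maxCachedMB = 1 / (1024. * 1024.);                  // one byte, expressed in MB
long maxCachedBytes = (long) (maxCachedMB * 1024 * 1024);  // == 1
// Because IndexWriter passes an IOContext whose FlushInfo carries an
// approximate segment size (LUCENE-5591), NRTCachingDirectory can see that
// the flushed doc-values file exceeds this one-byte budget and writes it
// straight to the delegate directory; hence the assertion that
// listCachedFiles() is empty.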

Example 8 with NRTCachingDirectory

Use of org.apache.lucene.store.NRTCachingDirectory in the project lucene-solr by apache.

From the class TestNumericDocValuesUpdates, method testIOContext.

@Test
public void testIOContext() throws Exception {
    // LUCENE-5591: make sure we pass an IOContext with an approximate
    // segmentSize in FlushInfo
    Directory dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
    // we want a single large enough segment so that a doc-values update writes a large file
    conf.setMergePolicy(NoMergePolicy.INSTANCE);
    // manually flush
    conf.setMaxBufferedDocs(Integer.MAX_VALUE);
    conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
    IndexWriter writer = new IndexWriter(dir, conf);
    for (int i = 0; i < 100; i++) {
        writer.addDocument(doc(i));
    }
    writer.commit();
    writer.close();
    // maxMergeSizeMB = 100, but maxCachedMB is one byte expressed in MB,
    // so no flushed file is small enough to stay in the RAM cache
    NRTCachingDirectory cachingDir = new NRTCachingDirectory(dir, 100, 1 / (1024. * 1024.));
    conf = newIndexWriterConfig(new MockAnalyzer(random()));
    // we want a single large enough segment so that a doc-values update writes a large file
    conf.setMergePolicy(NoMergePolicy.INSTANCE);
    // manually flush
    conf.setMaxBufferedDocs(Integer.MAX_VALUE);
    conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
    writer = new IndexWriter(cachingDir, conf);
    writer.updateNumericDocValue(new Term("id", "doc-0"), "val", 100L);
    // opening a reader on the writer forces the pending doc-values update to flush
    DirectoryReader reader = DirectoryReader.open(writer);
    assertEquals(0, cachingDir.listCachedFiles().length);
    IOUtils.close(reader, writer, cachingDir);
}
Also used : MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) Directory(org.apache.lucene.store.Directory) NRTCachingDirectory(org.apache.lucene.store.NRTCachingDirectory) Test(org.junit.Test)
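
As a hypothetical follow-up (not part of the test), the updated value could be read back through the doc-values API just before the final close. This sketch assumes the Lucene 7-era iterator API and that "doc-0" keeps docID 0, since documents were added in order and NoMergePolicy prevents reordering:

// Hypothetical follow-up: verify the numeric doc-values update took effect.
// Requires org.apache.lucene.index.MultiDocValues and NumericDocValues.
NumericDocValues dv = MultiDocValues.getNumericValues(reader, "val");
if (dv != null && dv.advanceExact(0)) { // docID 0 is "doc-0" under the assumptions above
    assertEquals(100L, dv.longValue());
}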

Aggregations

NRTCachingDirectory (org.apache.lucene.store.NRTCachingDirectory): 8
Directory (org.apache.lucene.store.Directory): 7
MockAnalyzer (org.apache.lucene.analysis.MockAnalyzer): 3
IndexWriter (org.apache.lucene.index.IndexWriter): 3
Test (org.junit.Test): 3
ArrayList (java.util.ArrayList): 2
Configuration (org.apache.hadoop.conf.Configuration): 2
Path (org.apache.hadoop.fs.Path): 2
IndexWriterConfig (org.apache.lucene.index.IndexWriterConfig): 2
BlockCache (org.apache.solr.store.blockcache.BlockCache): 2
BlockDirectory (org.apache.solr.store.blockcache.BlockDirectory): 2
BlockDirectoryCache (org.apache.solr.store.blockcache.BlockDirectoryCache): 2
Cache (org.apache.solr.store.blockcache.Cache): 2
IOException (java.io.IOException): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
LuceneIndexWriter (org.apache.jackrabbit.oak.plugins.index.lucene.writer.LuceneIndexWriter): 1
Document (org.apache.lucene.document.Document): 1
TextField (org.apache.lucene.document.TextField): 1
IndexCommit (org.apache.lucene.index.IndexCommit): 1
KeepOnlyLastCommitDeletionPolicy (org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy): 1