Example 1 with HdfsDirectory

Use of org.apache.solr.store.hdfs.HdfsDirectory in project lucene-solr by apache.

From class BlockDirectory, method getFileModified:

private long getFileModified(String name) throws IOException {
    if (in instanceof FSDirectory) {
        // Local filesystem case: resolve the file under the wrapped directory and stat it.
        File directory = ((FSDirectory) in).getDirectory().toFile();
        File file = new File(directory, name);
        if (!file.exists()) {
            throw new FileNotFoundException("File [" + name + "] not found");
        }
        return file.lastModified();
    } else if (in instanceof HdfsDirectory) {
        // HDFS case: delegate to HdfsDirectory's own modification-time lookup.
        return ((HdfsDirectory) in).fileModified(name);
    } else {
        throw new UnsupportedOperationException();
    }
}
Also used: FileNotFoundException(java.io.FileNotFoundException), HdfsDirectory(org.apache.solr.store.hdfs.HdfsDirectory), FSDirectory(org.apache.lucene.store.FSDirectory), File(java.io.File)
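
A minimal usage sketch (not from the project) of the HDFS branch above: open an HdfsDirectory directly and query fileModified for each index file, the same call BlockDirectory delegates to. The namenode URI and index path are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.solr.store.hdfs.HdfsDirectory;

public class FileModifiedSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder configuration; real deployments point this at their HDFS settings.
        Configuration conf = new Configuration();
        try (HdfsDirectory dir = new HdfsDirectory(new Path("hdfs://namenode:8020/solr/core1/index"), conf)) {
            for (String name : dir.listAll()) {
                // fileModified is the same lookup used in getFileModified above.
                System.out.println(name + " last modified: " + dir.fileModified(name));
            }
        }
    }
}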

Example 2 with HdfsDirectory

Use of org.apache.solr.store.hdfs.HdfsDirectory in project lucene-solr by apache.

From class CheckHdfsIndexTest, method setUp:

@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster);
    conf.setBoolean("fs.hdfs.impl.disable.cache", true);
    directory = new HdfsDirectory(path, conf);
}
Also used: Configuration(org.apache.hadoop.conf.Configuration), HdfsDirectory(org.apache.solr.store.hdfs.HdfsDirectory), Before(org.junit.Before)
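
Outside the test harness, the same two-step setup (client Configuration, then HdfsDirectory) composes with a plain Lucene IndexWriter. A hedged, self-contained sketch; the HDFS URI is a placeholder, and IndexWriter, IndexWriterConfig, and StandardAnalyzer all appear in this page's aggregations.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.solr.store.hdfs.HdfsDirectory;

public class HdfsIndexingSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setBoolean("fs.hdfs.impl.disable.cache", true); // as in the test above
        try (HdfsDirectory dir = new HdfsDirectory(new Path("hdfs://namenode:8020/solr/test-index"), conf);
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new TextField("body", "hello hdfs", Field.Store.YES));
            writer.addDocument(doc);
            writer.commit();
        }
    }
}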

Example 3 with HdfsDirectory

Use of org.apache.solr.store.hdfs.HdfsDirectory in project lucene-solr by apache.

From class CheckHdfsIndex, method doMain:

// actual main: returns exit code instead of terminating JVM (for easy testing)
@SuppressForbidden(reason = "System.out required: command line tool")
protected static int doMain(String[] args) throws IOException, InterruptedException {
    CheckIndex.Options opts;
    try {
        opts = CheckIndex.parseOptions(args);
    } catch (IllegalArgumentException e) {
        System.out.println(e.getMessage());
        return 1;
    }
    if (!CheckIndex.assertsOn()) {
        System.out.println("\nNOTE: testing will be more thorough if you run java with '-ea:org.apache.lucene...', so assertions are enabled");
    }
    if (opts.getDirImpl() != null) {
        System.out.println("\nIgnoring specified -dir-impl, instead using " + HdfsDirectory.class.getSimpleName());
    }
    System.out.println("\nOpening index @ " + opts.getIndexPath() + "\n");
    Directory directory;
    try {
        directory = new HdfsDirectory(new Path(opts.getIndexPath()), getConf());
    } catch (IOException e) {
        System.out.println("ERROR: could not open hdfs directory \"" + opts.getIndexPath() + "\"; exiting");
        e.printStackTrace(System.out);
        return 1;
    }
    try (Directory dir = directory;
        CheckIndex checker = new CheckIndex(dir)) {
        opts.setOut(System.out);
        return checker.doCheck(opts);
    }
}
Also used: Path(org.apache.hadoop.fs.Path), HdfsDirectory(org.apache.solr.store.hdfs.HdfsDirectory), IOException(java.io.IOException), CheckIndex(org.apache.lucene.index.CheckIndex), Directory(org.apache.lucene.store.Directory), SuppressForbidden(org.apache.lucene.util.SuppressForbidden)
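
A hedged sketch of driving doMain programmatically, as the "for easy testing" comment above suggests. Since doMain is protected, the caller below is assumed to live in the same package as CheckHdfsIndex; the index path is a placeholder.

public class CheckHdfsIndexRunner {
    public static void main(String[] args) throws Exception {
        // doMain returns an exit code instead of terminating the JVM: 0 on success, 1 on failure.
        int rc = CheckHdfsIndex.doMain(new String[] { "hdfs://namenode:8020/solr/collection1/index" });
        System.out.println("CheckHdfsIndex returned " + rc);
    }
}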

Example 4 with HdfsDirectory

Use of org.apache.solr.store.hdfs.HdfsDirectory in project lucene-solr by apache.

From class HdfsDirectoryFactory, method move:

@Override
public void move(Directory fromDir, Directory toDir, String fileName, IOContext ioContext) throws IOException {
    Directory baseFromDir = getBaseDir(fromDir);
    Directory baseToDir = getBaseDir(toDir);
    if (baseFromDir instanceof HdfsDirectory && baseToDir instanceof HdfsDirectory) {
        // Both directories live in HDFS, so the file can be renamed in place
        // instead of being copied and deleted through the generic path below.
        Path dir1 = ((HdfsDirectory) baseFromDir).getHdfsDirPath();
        Path dir2 = ((HdfsDirectory) baseToDir).getHdfsDirPath();
        Path file1 = new Path(dir1, fileName);
        Path file2 = new Path(dir2, fileName);
        FileContext fileContext = FileContext.getFileContext(getConf());
        fileContext.rename(file1, file2);
        return;
    }
    super.move(fromDir, toDir, fileName, ioContext);
}
Also used: Path(org.apache.hadoop.fs.Path), HdfsDirectory(org.apache.solr.store.hdfs.HdfsDirectory), FileContext(org.apache.hadoop.fs.FileContext), Directory(org.apache.lucene.store.Directory), NRTCachingDirectory(org.apache.lucene.store.NRTCachingDirectory), BlockDirectory(org.apache.solr.store.blockcache.BlockDirectory)
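
A hedged usage sketch: assuming fromDir and toDir were produced by an initialized HdfsDirectoryFactory, getBaseDir can unwrap any NRTCachingDirectory or BlockDirectory layers down to HdfsDirectory, and the move degenerates to a single HDFS rename. The segment file name is illustrative, not taken from the project.

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.solr.core.HdfsDirectoryFactory;

public class MoveSketch {
    static void moveSegmentFile(HdfsDirectoryFactory factory, Directory fromDir, Directory toDir)
            throws java.io.IOException {
        // "_0.si" is an illustrative segment file name; IOContext.DEFAULT is the standard Lucene context.
        factory.move(fromDir, toDir, "_0.si", IOContext.DEFAULT);
    }
}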

Example 5 with HdfsDirectory

Use of org.apache.solr.store.hdfs.HdfsDirectory in project lucene-solr by apache.

From class HdfsDirectoryFactory, method create:

@Override
protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
    assert params != null : "init must be called before create";
    LOG.info("creating directory factory for path {}", path);
    Configuration conf = getConf();
    if (metrics == null) {
        metrics = MetricsHolder.metrics;
    }
    boolean blockCacheEnabled = getConfig(BLOCKCACHE_ENABLED, true);
    boolean blockCacheGlobal = getConfig(BLOCKCACHE_GLOBAL, true);
    boolean blockCacheReadEnabled = getConfig(BLOCKCACHE_READ_ENABLED, true);
    final HdfsDirectory hdfsDir;
    final Directory dir;
    if (blockCacheEnabled && dirContext != DirContext.META_DATA) {
        int numberOfBlocksPerBank = getConfig(NUMBEROFBLOCKSPERBANK, 16384);
        int blockSize = BlockDirectory.BLOCK_SIZE;
        int bankCount = getConfig(BLOCKCACHE_SLAB_COUNT, 1);
        boolean directAllocation = getConfig(BLOCKCACHE_DIRECT_MEMORY_ALLOCATION, true);
        int slabSize = numberOfBlocksPerBank * blockSize;
        LOG.info("Number of slabs of block cache [{}] with direct memory allocation set to [{}]", bankCount, directAllocation);
        LOG.info("Block cache target memory usage, slab size of [{}] will allocate [{}] slabs and use ~[{}] bytes", new Object[] { slabSize, bankCount, ((long) bankCount * (long) slabSize) });
        int bsBufferSize = params.getInt("solr.hdfs.blockcache.bufferstore.buffersize", blockSize);
        // this is actually total size
        int bsBufferCount = params.getInt("solr.hdfs.blockcache.bufferstore.buffercount", 0);
        BlockCache blockCache = getBlockDirectoryCache(numberOfBlocksPerBank, blockSize, bankCount, directAllocation, slabSize, bsBufferSize, bsBufferCount, blockCacheGlobal);
        Cache cache = new BlockDirectoryCache(blockCache, path, metrics, blockCacheGlobal);
        int readBufferSize = params.getInt("solr.hdfs.blockcache.read.buffersize", blockSize);
        hdfsDir = new HdfsDirectory(new Path(path), lockFactory, conf, readBufferSize);
        dir = new BlockDirectory(path, hdfsDir, cache, null, blockCacheReadEnabled, false, cacheMerges, cacheReadOnce);
    } else {
        hdfsDir = new HdfsDirectory(new Path(path), conf);
        dir = hdfsDir;
    }
    if (params.getBool(LOCALITYMETRICS_ENABLED, false)) {
        LocalityHolder.reporter.registerDirectory(hdfsDir);
    }
    boolean nrtCachingDirectory = getConfig(NRTCACHINGDIRECTORY_ENABLE, true);
    if (nrtCachingDirectory) {
        double nrtCacheMaxMergeSizeMB = getConfig(NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 16);
        double nrtCacheMaxCacheMB = getConfig(NRTCACHINGDIRECTORY_MAXCACHEMB, 192);
        return new NRTCachingDirectory(dir, nrtCacheMaxMergeSizeMB, nrtCacheMaxCacheMB);
    }
    return dir;
}
Also used: Path(org.apache.hadoop.fs.Path), BlockDirectory(org.apache.solr.store.blockcache.BlockDirectory), Configuration(org.apache.hadoop.conf.Configuration), BlockDirectoryCache(org.apache.solr.store.blockcache.BlockDirectoryCache), BlockCache(org.apache.solr.store.blockcache.BlockCache), HdfsDirectory(org.apache.solr.store.hdfs.HdfsDirectory), Directory(org.apache.lucene.store.Directory), NRTCachingDirectory(org.apache.lucene.store.NRTCachingDirectory), Cache(org.apache.solr.store.blockcache.Cache)
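
A hedged sketch of wiring the factory by hand rather than through solrconfig.xml. The parameter names mirror the constants create() reads above; the URIs and values are placeholders, and the public get()/release() entry points (which call the protected create()) are assumptions about the surrounding CachingDirectoryFactory API rather than something shown on this page.

import org.apache.lucene.store.Directory;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.HdfsDirectoryFactory;

public class HdfsFactorySketch {
    public static void main(String[] args) throws Exception {
        HdfsDirectoryFactory factory = new HdfsDirectoryFactory();
        NamedList<Object> params = new NamedList<>();
        params.add("solr.hdfs.home", "hdfs://namenode:8020/solr"); // placeholder HDFS home
        params.add("solr.hdfs.blockcache.enabled", true);          // BLOCKCACHE_ENABLED above
        params.add("solr.hdfs.blockcache.slab.count", 4);          // BLOCKCACHE_SLAB_COUNT above
        factory.init(params);
        // get() is assumed to hand back the Directory built by create(); "hdfs" selects the HDFS lock type.
        Directory dir = factory.get("hdfs://namenode:8020/solr/core1/index", DirContext.DEFAULT, "hdfs");
        factory.release(dir);
        factory.close();
    }
}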

Aggregations

HdfsDirectory (org.apache.solr.store.hdfs.HdfsDirectory): 8 uses
Path (org.apache.hadoop.fs.Path): 6 uses
Directory (org.apache.lucene.store.Directory): 6 uses
IOException (java.io.IOException): 3 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 3 uses
Configuration (org.apache.hadoop.conf.Configuration): 2 uses
IndexReader (org.apache.lucene.index.IndexReader): 2 uses
NRTCachingDirectory (org.apache.lucene.store.NRTCachingDirectory): 2 uses
BlockDirectory (org.apache.solr.store.blockcache.BlockDirectory): 2 uses
File (java.io.File): 1 use
FileNotFoundException (java.io.FileNotFoundException): 1 use
CarbonTablePath (org.apache.carbondata.core.util.path.CarbonTablePath): 1 use
FileContext (org.apache.hadoop.fs.FileContext): 1 use
StandardAnalyzer (org.apache.lucene.analysis.standard.StandardAnalyzer): 1 use
CheckIndex (org.apache.lucene.index.CheckIndex): 1 use
IndexWriter (org.apache.lucene.index.IndexWriter): 1 use
IndexWriterConfig (org.apache.lucene.index.IndexWriterConfig): 1 use
IndexSearcher (org.apache.lucene.search.IndexSearcher): 1 use
FSDirectory (org.apache.lucene.store.FSDirectory): 1 use
RAMDirectory (org.apache.lucene.store.RAMDirectory): 1 use