
Example 1 with StandardTokenizerFactory

Use of org.apache.lucene.analysis.standard.StandardTokenizerFactory in the Apache lucene-solr project.

From the SolrSuggester class, method init:

/**
   * Uses the <code>config</code> and the <code>core</code> to initialize the underlying 
   * Lucene suggester.
   */
public String init(NamedList<?> config, SolrCore core) {
    LOG.info("init: " + config);
    // read the config
    name = config.get(NAME) != null ? (String) config.get(NAME) : DEFAULT_DICT_NAME;
    sourceLocation = (String) config.get(LOCATION);
    lookupImpl = (String) config.get(LOOKUP_IMPL);
    dictionaryImpl = (String) config.get(DICTIONARY_IMPL);
    String store = (String) config.get(STORE_DIR);
    if (lookupImpl == null) {
        lookupImpl = LookupFactory.DEFAULT_FILE_BASED_DICT;
        LOG.info("No " + LOOKUP_IMPL + " parameter was provided falling back to " + lookupImpl);
    }
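    // Analyzer wrapping StandardTokenizerFactory in a TokenizerChain; used later to tokenize context filter queries.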
    contextFilterQueryAnalyzer = new TokenizerChain(new StandardTokenizerFactory(Collections.EMPTY_MAP), null);
    // initialize appropriate lookup instance
    factory = core.getResourceLoader().newInstance(lookupImpl, LookupFactory.class);
    lookup = factory.create(config, core);
    if (lookup != null && lookup instanceof Closeable) {
        core.addCloseHook(new CloseHook() {

            @Override
            public void preClose(SolrCore core) {
                try {
                    ((Closeable) lookup).close();
                } catch (IOException e) {
                    LOG.warn("Could not close the suggester lookup.", e);
                }
            }

            @Override
            public void postClose(SolrCore core) {
            }
        });
    }
    // if store directory is provided make it or load up the lookup with its content
    if (store != null && !store.isEmpty()) {
        storeDir = new File(store);
        if (!storeDir.isAbsolute()) {
            storeDir = new File(core.getDataDir() + File.separator + storeDir);
        }
        if (!storeDir.exists()) {
            storeDir.mkdirs();
        } else if (getStoreFile().exists()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("attempt reload of the stored lookup from file " + getStoreFile());
            }
            try {
                lookup.load(new FileInputStream(getStoreFile()));
            } catch (IOException e) {
                LOG.warn("Loading stored lookup data failed, possibly not cached yet");
            }
        }
    }
    // dictionary configuration
    if (dictionaryImpl == null) {
        dictionaryImpl = (sourceLocation == null) ? DictionaryFactory.DEFAULT_INDEX_BASED_DICT : DictionaryFactory.DEFAULT_FILE_BASED_DICT;
        LOG.info("No " + DICTIONARY_IMPL + " parameter was provided falling back to " + dictionaryImpl);
    }
    dictionaryFactory = core.getResourceLoader().newInstance(dictionaryImpl, DictionaryFactory.class);
    dictionaryFactory.setParams(config);
    LOG.info("Dictionary loaded with params: " + config);
    return name;
}
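The line that makes this method an example of StandardTokenizerFactory is the construction of contextFilterQueryAnalyzer: the factory is wrapped in a TokenizerChain so that context filter queries can be tokenized later. The following stand-alone sketch is not part of the project; the class name and the sample query string are made up, and an explicit empty TokenFilterFactory array is passed instead of null. It shows what an analyzer built this way produces for a given input:

import java.util.Collections;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.solr.analysis.TokenizerChain;

public class ContextFilterTokenizeDemo {

    public static void main(String[] args) throws Exception {
        // Same construction as in init(), but with an explicit empty filter array.
        Analyzer analyzer = new TokenizerChain(
                new StandardTokenizerFactory(Collections.emptyMap()),
                new TokenFilterFactory[0]);
        try (TokenStream ts = analyzer.tokenStream("", "ctx:books AND ctx:fiction")) {
            CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
            ts.reset();
            while (ts.incrementToken()) {
                // One line per token emitted by the standard tokenizer.
                System.out.println(term.toString());
            }
            ts.end();
        }
        analyzer.close();
    }
}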
Also used in the snippet above: CloseHook (org.apache.solr.core.CloseHook), TokenizerChain (org.apache.solr.analysis.TokenizerChain), SolrCore (org.apache.solr.core.SolrCore), Closeable (java.io.Closeable), StandardTokenizerFactory (org.apache.lucene.analysis.standard.StandardTokenizerFactory), IOException (java.io.IOException), File (java.io.File), FileInputStream (java.io.FileInputStream).
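For reference, these are the import statements the snippet needs, reconstructed from the list above; java.util.Collections and org.apache.solr.common.util.NamedList are added because the method also references them, while LookupFactory and DictionaryFactory sit in the same package as SolrSuggester and need no import:

import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collections;

import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.SolrCore;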

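The config NamedList that init reads corresponds to a single suggester definition in solrconfig.xml. Below is a minimal sketch of calling it directly, assuming the stock suggester parameter names ("name", "lookupImpl", "dictionaryImpl", "storeDir", "sourceLocation"); the values are purely illustrative, 'core' stands for an already-initialized SolrCore, and real configurations usually carry additional factory-specific parameters (for example "field" for a document dictionary):

// Illustrative values only; the keys assume the stock suggester parameter names.
NamedList<Object> config = new NamedList<>();
config.add("name", "mySuggester");                          // otherwise DEFAULT_DICT_NAME is used
config.add("lookupImpl", "FuzzyLookupFactory");             // otherwise LookupFactory.DEFAULT_FILE_BASED_DICT
config.add("dictionaryImpl", "DocumentDictionaryFactory");  // otherwise chosen based on sourceLocation
config.add("storeDir", "suggester_store");                  // relative paths resolve against core.getDataDir()

SolrSuggester suggester = new SolrSuggester();
String registeredName = suggester.init(config, core);       // 'core' is an existing SolrCore instance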