Use of org.apache.solr.core.SolrResourceLoader in project ddf by codice.
From the class EmbeddedSolrFactory, method getEmbeddedSolrServer.
/**
 * Creates a new {@link EmbeddedSolrServer} using the Solr core and configuration file names,
 * schema and configuration file proxy provided.
 *
 * @param coreName name of the Solr core
 * @param solrConfigXml name of the Solr configuration file. Defaults to
 *     {@value HttpSolrClientFactory#DEFAULT_SOLRCONFIG_XML} if {@code null}.
 * @param schemaXml file name of the Solr core schema. Defaults to
 *     {@value HttpSolrClientFactory#DEFAULT_SCHEMA_XML} if {@code null}.
 * @param givenConfigFileProxy {@link ConfigurationFileProxy} instance to use. If {@code null},
 *     a new {@link ConfigurationFileProxy} will be used.
 * @return a new {@link EmbeddedSolrServer} instance
 */
public static EmbeddedSolrServer getEmbeddedSolrServer(
    String coreName,
    @Nullable String solrConfigXml,
    @Nullable String schemaXml,
    @Nullable ConfigurationFileProxy givenConfigFileProxy) {
  LOGGER.debug(
      "Retrieving embedded solr with the following properties: [{},{},{}]",
      solrConfigXml,
      schemaXml,
      givenConfigFileProxy);
  String solrConfigFileName = HttpSolrClientFactory.DEFAULT_SOLRCONFIG_XML;
  String schemaFileName = HttpSolrClientFactory.DEFAULT_SCHEMA_XML;
  if (isNotBlank(solrConfigXml)) {
    solrConfigFileName = solrConfigXml;
  }
  if (isNotBlank(schemaXml)) {
    schemaFileName = schemaXml;
  }
  ConfigurationFileProxy configProxy = givenConfigFileProxy;
  if (givenConfigFileProxy == null) {
    configProxy = new ConfigurationFileProxy(ConfigurationStore.getInstance());
  }
  configProxy.writeSolrConfiguration(coreName);
  File solrConfigFile = getConfigFile(solrConfigFileName, configProxy, coreName);
  File solrSchemaFile = getConfigFile(schemaFileName, configProxy, coreName);
  if (solrSchemaFile == null) {
    solrSchemaFile = getConfigFile("managed-schema", configProxy, coreName);
    if (solrSchemaFile == null) {
      throw new IllegalArgumentException("Unable to find Solr schema file.");
    }
  }
  File solrConfigHome = new File(solrConfigFile.getParent());
  ClassLoader tccl = Thread.currentThread().getContextClassLoader();
  try {
    Thread.currentThread().setContextClassLoader(EmbeddedSolrFactory.class.getClassLoader());
    // NamedSPILoader uses the thread context classloader to lookup
    // codecs, posting formats, and analyzers
    SolrConfig solrConfig =
        new SolrConfig(
            Paths.get(solrConfigHome.getParent()),
            solrConfigFileName,
            new InputSource(FileUtils.openInputStream(solrConfigFile)));
    IndexSchema indexSchema =
        new IndexSchema(
            solrConfig, schemaFileName, new InputSource(FileUtils.openInputStream(solrSchemaFile)));
    SolrResourceLoader loader =
        new SolrResourceLoader(Paths.get(solrConfigHome.getAbsolutePath()));
    SolrCoreContainer container = new SolrCoreContainer(loader);
    String dataDirPath = null;
    if (!ConfigurationStore.getInstance().isInMemory()) {
      File dataDir = configProxy.getDataDirectory();
      if (dataDir != null) {
        dataDirPath = Paths.get(dataDir.getAbsolutePath(), coreName, "data").toString();
        LOGGER.debug("Using data directory [{}]", dataDirPath);
      }
    } else {
      PluginInfo info = solrConfig.getPluginInfo(DirectoryFactory.class.getName());
      if (info != null && !"solr.RAMDirectoryFactory".equals(info.className)) {
        LOGGER.debug("Using in-memory configuration without RAMDirectoryFactory.");
      }
    }
    CoreDescriptor coreDescriptor =
        new CoreDescriptor(container, coreName, solrConfig.getResourceLoader().getInstancePath());
    SolrCore core =
        new SolrCore(
            coreName, dataDirPath, solrConfig, indexSchema, null, coreDescriptor, null, null, null);
    container.register(coreName, core, false, true);
    return new EmbeddedSolrServer(container, coreName);
  } catch (ParserConfigurationException | IOException | SAXException e) {
    throw new IllegalArgumentException(
        "Unable to parse Solr configuration file: " + solrConfigFileName, e);
  } finally {
    Thread.currentThread().setContextClassLoader(tccl);
  }
}
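For orientation, a minimal usage sketch of the factory above could look like the following. This is illustrative only, not code from the ddf project: the core name "example-core" and the "id" field are placeholders, and the null arguments fall back to the defaults described in the Javadoc.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrInputDocument;

static void embeddedServerSmokeTest() throws Exception {
  // Null config, schema, and proxy arguments use the documented defaults.
  try (EmbeddedSolrServer solr =
      EmbeddedSolrFactory.getEmbeddedSolrServer("example-core", null, null, null)) {
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "example-1");
    solr.add(doc);
    solr.commit();
    QueryResponse response = solr.query(new SolrQuery("id:example-1"));
    System.out.println("Found " + response.getResults().getNumFound() + " document(s)");
  }
}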
Use of org.apache.solr.core.SolrResourceLoader in project SearchServices by Alfresco.
From the class AbstractAlfrescoSolrTests, method createAlfrescoCore.
public static void createAlfrescoCore(String schema)
    throws ParserConfigurationException, IOException, SAXException {
  Properties properties = new Properties();
  properties.put("solr.tests.maxBufferedDocs", "1000");
  properties.put("solr.tests.maxIndexingThreads", "10");
  properties.put("solr.tests.ramBufferSizeMB", "1024");
  properties.put("solr.tests.mergeScheduler", "org.apache.lucene.index.ConcurrentMergeScheduler");
  properties.put("alfresco.acl.tracker.cron", "0/10 * * * * ? *");
  properties.put("alfresco.content.tracker.cron", "0/10 * * * * ? *");
  properties.put("alfresco.metadata.tracker.cron", "0/10 * * * * ? *");
  properties.put("alfresco.cascade.tracker.cron", "0/10 * * * * ? *");
  properties.put("alfresco.commit.tracker.cron", "0/10 * * * * ? *");
  if ("schema.xml".equalsIgnoreCase(schema)) {
    String templateName = "rerank";
    String templateNameSystemProperty = System.getProperty("templateName");
    if (StringUtils.isNotBlank(templateNameSystemProperty)) {
      templateName = templateNameSystemProperty;
    }
    FileUtils.copyFile(
        Paths.get(String.format(TEMPLATE_CONF, templateName) + schema).toFile(),
        Paths.get(TEST_SOLR_CONF + schema).toFile());
  }
  // The local test solrconfig with RAMDirectoryFactory and lockType of single.
  CoreContainer coreContainer = new CoreContainer(TEST_FILES_LOCATION);
  SolrResourceLoader resourceLoader =
      new SolrResourceLoader(Paths.get(TEST_SOLR_CONF), null, properties);
  solrConfig = new SolrConfig(resourceLoader, "solrconfig.xml", null);
  IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema(schema, solrConfig);
  log.info("################ Index schema:" + schema + ":" + indexSchema.getResourceName());
  TestCoresLocator locator =
      new TestCoresLocator(
          SolrTestCaseJ4.DEFAULT_TEST_CORENAME,
          "data",
          solrConfig.getResourceName(),
          indexSchema.getResourceName());
  NodeConfig nodeConfig =
      new NodeConfig.NodeConfigBuilder("name", coreContainer.getResourceLoader())
          .setUseSchemaCache(false)
          .setCoreAdminHandlerClass("org.alfresco.solr.AlfrescoCoreAdminHandler")
          .build();
  coreContainer.shutdown();
  try {
    h = new TestHarness(nodeConfig, locator);
    h.coreName = SolrTestCaseJ4.DEFAULT_TEST_CORENAME;
  } catch (Exception e) {
    log.info("we hit an issue", e);
  }
  lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
}
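A hypothetical subclass sketch showing how a test might bootstrap the core through createAlfrescoCore; the class name is made up and the "schema.xml" argument simply matches the branch shown above, so actual SearchServices tests may differ.

import org.junit.BeforeClass;

// Illustrative sketch only; not an actual SearchServices test class.
public class ExampleAlfrescoSolrTest extends AbstractAlfrescoSolrTests {

  @BeforeClass
  public static void loadCore() throws Exception {
    // Builds the SolrResourceLoader, SolrConfig, and IndexSchema shown above and
    // wires them into the shared TestHarness (h) and request factory (lrf).
    createAlfrescoCore("schema.xml");
  }
}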
Use of org.apache.solr.core.SolrResourceLoader in project SearchServices by Alfresco.
From the class AlfrescoSolrClusteringComponent, method inform.
@SuppressWarnings("unchecked")
@Override
public void inform(SolrCore core) {
  if (initParams != null) {
    log.info("Initializing Clustering Engines");
    // Our target list of engines, split into search-results and
    // document clustering.
    SolrResourceLoader loader = core.getResourceLoader();
    for (Map.Entry<String, Object> entry : initParams) {
      if ("engine".equals(entry.getKey())) {
        NamedList<Object> engineInitParams = (NamedList<Object>) entry.getValue();
        Boolean optional = engineInitParams.getBooleanArg("optional");
        optional = (optional == null ? Boolean.FALSE : optional);
        String engineClassName =
            StringUtils.defaultIfBlank(
                (String) engineInitParams.get("classname"),
                CarrotClusteringEngine.class.getName());
        // Instantiate the clustering engine and split to appropriate map.
        final ClusteringEngine engine = loader.newInstance(engineClassName, ClusteringEngine.class);
        final String name = StringUtils.defaultIfBlank(engine.init(engineInitParams, core), "");
        if (!engine.isAvailable()) {
          if (optional) {
            log.info("Optional clustering engine not available: " + name);
          } else {
            throw new SolrException(
                ErrorCode.SERVER_ERROR,
                "A required clustering engine failed to initialize, check the logs: " + name);
          }
        }
        final ClusteringEngine previousEntry;
        if (engine instanceof SearchClusteringEngine) {
          previousEntry = searchClusteringEngines.put(name, (SearchClusteringEngine) engine);
        } else if (engine instanceof DocumentClusteringEngine) {
          previousEntry = documentClusteringEngines.put(name, (DocumentClusteringEngine) engine);
        } else {
          log.warn("Unknown type of a clustering engine for class: " + engineClassName);
          continue;
        }
        if (previousEntry != null) {
          log.warn("Duplicate clustering engine component named '" + name + "'.");
        }
      }
    }
    // Set up the default engine key for both types of engines.
    setupDefaultEngine("search results clustering", searchClusteringEngines);
    setupDefaultEngine("document clustering", documentClusteringEngines);
    log.info("Finished Initializing Clustering Engines");
  }
}
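The SolrResourceLoader call at the heart of the method above is newInstance, which resolves a plugin class through the core's classloader and instantiates it. A stripped-down sketch of that pattern, separated from the clustering-specific bookkeeping, could look like this; the helper method name is an assumption, not SearchServices code.

import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.clustering.ClusteringEngine;

// Sketch only; the method name loadEngine is a placeholder.
ClusteringEngine loadEngine(SolrCore core, String engineClassName) {
  SolrResourceLoader loader = core.getResourceLoader();
  // newInstance resolves the class name via the core's resource loader (core lib dirs,
  // plugins, etc.) and constructs it with its no-arg constructor.
  return loader.newInstance(engineClassName, ClusteringEngine.class);
}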
Use of org.apache.solr.core.SolrResourceLoader in project Anserini by castorini.
From the class SolrEndToEndTest, method setUp.
@Before
@Override
public void setUp() throws Exception {
  super.setUp();
  final File solrHome = createTempDir().toFile();
  final File configSetBaseDir = new File(solrHome.toPath() + File.separator + "configsets");
  FileUtils.copyDirectory(
      getFile("solr/anserini"), new File(configSetBaseDir + File.separator + "anserini"));
  SolrResourceLoader loader = new SolrResourceLoader(solrHome.toPath());
  NodeConfig config =
      new NodeConfig.NodeConfigBuilder("embeddedSolrServerNode", loader.getInstancePath())
          .setConfigSetBaseDirectory(configSetBaseDir.getAbsolutePath())
          .build();
  client = new EmbeddedSolrServer(config, getCollectionName());
  LOG.info("Created Embedded Solr Server");
  CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create();
  createRequest.setCoreName(getCollectionName());
  createRequest.setConfigSet("anserini");
  createRequest.process(client);
  client.commit();
  LOG.info("Created Solr Core: " + getCollectionName());
  GenericObjectPoolConfig<SolrClient> poolConfig = new GenericObjectPoolConfig<>();
  // only 1 EmbeddedSolrServer instance will be created by getSolrClient
  poolConfig.setMaxTotal(1);
  poolConfig.setMinIdle(1);
  stubSolrPool = new GenericObjectPool<>(new StubSolrClientFactory(client), poolConfig);
}
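Once setUp completes, a test can borrow the pooled embedded client to index and query documents. A hedged sketch under that assumption follows; the field names and values are placeholders, not Anserini test data.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrInputDocument;

// Illustrative only; document fields and query are placeholders.
void indexAndQueryExample() throws Exception {
  SolrClient solr = stubSolrPool.borrowObject();
  try {
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "doc1");
    doc.addField("contents", "hello world");
    solr.add(getCollectionName(), doc);
    solr.commit(getCollectionName());
    long hits =
        solr.query(getCollectionName(), new SolrQuery("contents:hello"))
            .getResults()
            .getNumFound();
    LOG.info("Found " + hits + " document(s)");
  } finally {
    stubSolrPool.returnObject(solr);
  }
}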