Use of org.apache.solr.core.SolrResourceLoader in project lucene-solr (by apache):
class SchemaManager, method doOperations.
/**
 * Applies the given schema-change operations to a fresh copy of the managed schema and
 * persists the result — to ZooKeeper in cloud mode (retrying while another node races us,
 * until the timeout elapses) or to local storage otherwise.
 *
 * @param operations the schema commands to perform
 * @return a list of error strings; empty on success
 */
private List doOperations(List<CommandOperation> operations) throws InterruptedException, IOException, KeeperException {
  // The default timeout is 10 minutes when no BaseSolrResource.UPDATE_TIMEOUT_SECS is specified
  int timeout = req.getParams().getInt(BaseSolrResource.UPDATE_TIMEOUT_SECS, 600);
  // If BaseSolrResource.UPDATE_TIMEOUT_SECS is 0 or negative, fall back to the 10-minute default
  if (timeout < 1) {
    timeout = 600;
  }
  TimeOut timeOut = new TimeOut(timeout, TimeUnit.SECONDS);
  SolrCore core = req.getCore();
  String errorMsg = "Unable to persist managed schema. ";
  List errors = Collections.emptyList();
  int latestVersion = -1;
  synchronized (req.getSchema().getSchemaUpdateLock()) {
    while (!timeOut.hasTimedOut()) {
      // Re-read the schema each attempt so retries start from the latest version
      managedIndexSchema = getFreshManagedSchema(req.getCore());
      for (CommandOperation op : operations) {
        OpType opType = OpType.get(op.name);
        if (opType != null) {
          opType.perform(op, this);
        } else {
          op.addError("No such operation : " + op.name);
        }
      }
      errors = CommandOperation.captureErrors(operations);
      if (!errors.isEmpty())
        break;
      SolrResourceLoader loader = req.getCore().getResourceLoader();
      if (loader instanceof ZkSolrResourceLoader) {
        ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader) loader;
        StringWriter sw = new StringWriter();
        try {
          managedIndexSchema.persist(sw);
        } catch (IOException e) {
          // unlikely for an in-memory writer; preserve the cause for diagnostics
          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "unable to serialize schema", e);
        }
        try {
          latestVersion = ZkController.persistConfigResourceToZooKeeper(zkLoader, managedIndexSchema.getSchemaZkVersion(), managedIndexSchema.getResourceName(), sw.toString().getBytes(StandardCharsets.UTF_8), true);
          req.getCore().getCoreContainer().reload(req.getCore().getName());
          break;
        } catch (ZkController.ResourceModifiedInZkException e) {
          // Another node updated the schema first; loop and retry against the fresh version
          log.info("Schema was modified by another node. Retrying..");
        }
      } else {
        try {
          // only for non cloud stuff
          managedIndexSchema.persistManagedSchema(false);
          core.setLatestSchema(managedIndexSchema);
        } catch (SolrException e) {
          // log the underlying failure, not just the generic prefix
          log.warn(errorMsg, e);
          errors = singletonList(errorMsg + e.getMessage());
        }
        break;
      }
    }
  }
  if (req.getCore().getResourceLoader() instanceof ZkSolrResourceLoader) {
    // Don't block further schema updates while waiting for a pending update to propagate to other replicas.
    // This reduces the likelihood of a (time-limited) distributed deadlock during concurrent schema updates.
    waitForOtherReplicasToUpdate(timeOut, latestVersion);
  }
  if (errors.isEmpty() && timeOut.hasTimedOut()) {
    log.warn("{}Timed out.", errorMsg);
    errors = singletonList(errorMsg + "Timed out.");
  }
  return errors;
}
Use of org.apache.solr.core.SolrResourceLoader in project lucene-solr (by apache):
class InvokeOp, method invokeAClass.
/**
 * Instantiates the named {@link CoreAdminHandler.Invocable} through an appropriate
 * resource loader (the core's loader when a core is present, else the CoreContainer's
 * loader from the request context) and invokes it with the request.
 *
 * @param req the current request; supplies the loader and is passed to the invocable
 * @param c   name of the Invocable implementation class to load
 * @return the result map produced by the invocable
 * @throws IllegalStateException if no resource loader can be resolved from the request
 */
static Map<String, Object> invokeAClass(SolrQueryRequest req, String c) {
  SolrResourceLoader loader = null;
  if (req.getCore() != null) {
    loader = req.getCore().getResourceLoader();
  } else if (req.getContext().get(CoreContainer.class.getName()) != null) {
    CoreContainer cc = (CoreContainer) req.getContext().get(CoreContainer.class.getName());
    loader = cc.getResourceLoader();
  }
  if (loader == null) {
    // Fail with a clear message instead of an NPE when neither a core nor a
    // CoreContainer is available on the request.
    throw new IllegalStateException("No resource loader available to load class: " + c);
  }
  CoreAdminHandler.Invocable invokable = loader.newInstance(c, CoreAdminHandler.Invocable.class);
  Map<String, Object> result = invokable.invoke(req);
  log.info("Invocable_invoked {}", result);
  return result;
}
Use of org.apache.solr.core.SolrResourceLoader in project lucene-solr (by apache):
class TestWordDelimiterFilterFactory, method testCustomTypes.
@Test
public void testCustomTypes() throws Exception {
  // Input mixing currency, percentage and a hyphenated word.
  String input = "I borrowed $5,400.00 at 25% interest-rate";
  ResourceLoader resourceLoader = new SolrResourceLoader(TEST_PATH().resolve("collection1"));

  /* default behavior: no custom character-type mapping */
  Map<String, String> params = new HashMap<>();
  params.put("luceneMatchVersion", Version.LATEST.toString());
  params.put("generateWordParts", "1");
  params.put("generateNumberParts", "1");
  params.put("catenateWords", "1");
  params.put("catenateNumbers", "1");
  params.put("catenateAll", "0");
  params.put("splitOnCaseChange", "1");
  WordDelimiterFilterFactory defaultFactory = new WordDelimiterFilterFactory(params);
  defaultFactory.inform(resourceLoader);
  TokenStream stream = defaultFactory.create(whitespaceMockTokenizer(input));
  BaseTokenStreamTestCase.assertTokenStreamContents(stream, new String[] { "I", "borrowed", "5", "540000", "400", "00", "at", "25", "interest", "interestrate", "rate" });
  stream = defaultFactory.create(whitespaceMockTokenizer("foobar"));
  BaseTokenStreamTestCase.assertTokenStreamContents(stream, new String[] { "foo", "foobar", "bar" });

  /* custom behavior: wdftypes.txt remaps character types */
  params = new HashMap<>();
  params.put("luceneMatchVersion", Version.LATEST.toString());
  params.put("generateWordParts", "1");
  params.put("generateNumberParts", "1");
  params.put("catenateWords", "1");
  params.put("catenateNumbers", "1");
  params.put("catenateAll", "0");
  params.put("splitOnCaseChange", "1");
  params.put("types", "wdftypes.txt");
  WordDelimiterFilterFactory customFactory = new WordDelimiterFilterFactory(params);
  customFactory.inform(resourceLoader);
  stream = customFactory.create(whitespaceMockTokenizer(input));
  BaseTokenStreamTestCase.assertTokenStreamContents(stream, new String[] { "I", "borrowed", "$5,400.00", "at", "25%", "interest", "interestrate", "rate" });
  /* char > 0x7F case, which forces a larger byte[] type table.
     NOTE(review): this input looks like it should contain a non-ASCII character
     between "foo" and "bar" that was lost in extraction — verify against upstream. */
  stream = customFactory.create(whitespaceMockTokenizer("foobar"));
  BaseTokenStreamTestCase.assertTokenStreamContents(stream, new String[] { "foobar" });
}
Use of org.apache.solr.core.SolrResourceLoader in project ddf (by codice):
class EmbeddedSolrFactory, method getEmbeddedSolrServer.
/**
 * Creates a new {@link EmbeddedSolrServer} using the Solr core and configuration file names,
 * schema and configuration file proxy provided.
 *
 * @param coreName name of the Solr core
 * @param solrConfigXml name of the Solr configuration file. Defaults to
 * {@value HttpSolrClientFactory#DEFAULT_SOLRCONFIG_XML} if
 * {@code null}.
 * @param schemaXml file name of the Solr core schema. Defaults to
 * {@value HttpSolrClientFactory#DEFAULT_SCHEMA_XML} if
 * {@code null}.
 * @param givenConfigFileProxy {@link ConfigurationFileProxy} instance to use. If {@code null},
 * a new {@link ConfigurationFileProxy} will be used.
 * @return a new {@link EmbeddedSolrServer} instance
 * @throws IllegalArgumentException if the configuration or schema file cannot be found or parsed
 */
public static EmbeddedSolrServer getEmbeddedSolrServer(String coreName, @Nullable String solrConfigXml, @Nullable String schemaXml, @Nullable ConfigurationFileProxy givenConfigFileProxy) {
  LOGGER.debug("Retrieving embedded solr with the following properties: [{},{},{}]", solrConfigXml, schemaXml, givenConfigFileProxy);
  String solrConfigFileName = HttpSolrClientFactory.DEFAULT_SOLRCONFIG_XML;
  String schemaFileName = HttpSolrClientFactory.DEFAULT_SCHEMA_XML;
  if (isNotBlank(solrConfigXml)) {
    solrConfigFileName = solrConfigXml;
  }
  if (isNotBlank(schemaXml)) {
    schemaFileName = schemaXml;
  }
  ConfigurationFileProxy configProxy = givenConfigFileProxy;
  if (givenConfigFileProxy == null) {
    configProxy = new ConfigurationFileProxy(ConfigurationStore.getInstance());
  }
  configProxy.writeSolrConfiguration(coreName);
  File solrConfigFile = getConfigFile(solrConfigFileName, configProxy, coreName);
  if (solrConfigFile == null) {
    // Guard against an NPE below; mirrors the check done in FilteringSolrIndex.createSolrServer
    throw new IllegalArgumentException("Unable to find Solr configuration file: " + solrConfigFileName);
  }
  File solrSchemaFile = getConfigFile(schemaFileName, configProxy, coreName);
  if (solrSchemaFile == null) {
    // Fall back to a managed schema before giving up
    solrSchemaFile = getConfigFile("managed-schema", configProxy, coreName);
    if (solrSchemaFile == null) {
      throw new IllegalArgumentException("Unable to find Solr schema file.");
    }
  }
  File solrConfigHome = new File(solrConfigFile.getParent());
  ClassLoader tccl = Thread.currentThread().getContextClassLoader();
  try {
    Thread.currentThread().setContextClassLoader(EmbeddedSolrFactory.class.getClassLoader());
    // NamedSPILoader uses the thread context classloader to lookup
    // codecs, posting formats, and analyzers
    SolrConfig solrConfig;
    IndexSchema indexSchema;
    // Close the config/schema streams once parsing completes; they were previously leaked.
    // (Both constructors consume the InputSource eagerly — TODO confirm against the Solr version in use.)
    try (java.io.InputStream configIn = FileUtils.openInputStream(solrConfigFile)) {
      solrConfig = new SolrConfig(Paths.get(solrConfigHome.getParent()), solrConfigFileName, new InputSource(configIn));
    }
    try (java.io.InputStream schemaIn = FileUtils.openInputStream(solrSchemaFile)) {
      indexSchema = new IndexSchema(solrConfig, schemaFileName, new InputSource(schemaIn));
    }
    SolrResourceLoader loader = new SolrResourceLoader(Paths.get(solrConfigHome.getAbsolutePath()));
    SolrCoreContainer container = new SolrCoreContainer(loader);
    String dataDirPath = null;
    if (!ConfigurationStore.getInstance().isInMemory()) {
      File dataDir = configProxy.getDataDirectory();
      if (dataDir != null) {
        dataDirPath = Paths.get(dataDir.getAbsolutePath(), coreName, "data").toString();
        LOGGER.debug("Using data directory [{}]", dataDirPath);
      }
    } else {
      // In-memory mode: warn (at debug) if the config does not use RAMDirectoryFactory
      PluginInfo info = solrConfig.getPluginInfo(DirectoryFactory.class.getName());
      if (info != null && !"solr.RAMDirectoryFactory".equals(info.className)) {
        LOGGER.debug("Using in-memory configuration without RAMDirectoryFactory.");
      }
    }
    CoreDescriptor coreDescriptor = new CoreDescriptor(container, coreName, solrConfig.getResourceLoader().getInstancePath());
    SolrCore core = new SolrCore(coreName, dataDirPath, solrConfig, indexSchema, null, coreDescriptor, null, null, null);
    container.register(coreName, core, false, true);
    return new EmbeddedSolrServer(container, coreName);
  } catch (ParserConfigurationException | IOException | SAXException e) {
    throw new IllegalArgumentException("Unable to parse Solr configuration file: " + solrConfigFileName, e);
  } finally {
    // Always restore the caller's context classloader
    Thread.currentThread().setContextClassLoader(tccl);
  }
}
Use of org.apache.solr.core.SolrResourceLoader in project ddf (by codice):
class FilteringSolrIndex, method createSolrServer.
/**
 * Creates an in-memory {@link EmbeddedSolrServer} for the given core using the
 * in-memory solrconfig and default schema resolved through the configuration proxy.
 *
 * @param coreName    name of the Solr core to create and register
 * @param configProxy proxy used to locate the config and schema files
 * @return a new {@link EmbeddedSolrServer} backed by the registered core
 * @throws IllegalArgumentException if the config/schema files cannot be found or parsed
 */
private static EmbeddedSolrServer createSolrServer(String coreName, ConfigurationFileProxy configProxy) {
  File configFile = getConfigFile(IMMEMORY_SOLRCONFIG_XML, configProxy, coreName);
  if (configFile == null) {
    throw new IllegalArgumentException("Unable to find Solr configuration file");
  }
  File schemaFile = getConfigFile(DEFAULT_SCHEMA_XML, configProxy, coreName);
  if (schemaFile == null) {
    throw new IllegalArgumentException("Unable to find Solr schema file");
  }
  File solrConfigHome = new File(configFile.getParent());
  ClassLoader tccl = Thread.currentThread().getContextClassLoader();
  try {
    Thread.currentThread().setContextClassLoader(EmbeddedSolrFactory.class.getClassLoader());
    SolrConfig solrConfig;
    // Close the config/schema streams once parsing completes; they were previously leaked.
    // (Both constructors consume the InputSource eagerly — TODO confirm against the Solr version in use.)
    try (java.io.InputStream configIn = FileUtils.openInputStream(configFile)) {
      solrConfig = new SolrConfig(Paths.get(solrConfigHome.getParent()), IMMEMORY_SOLRCONFIG_XML, new InputSource(configIn));
    }
    if (indexSchema == null) {
      // NOTE(review): lazy init of a shared field without synchronization — presumably
      // only called from a single thread; verify before relying on this in parallel.
      try (java.io.InputStream schemaIn = FileUtils.openInputStream(schemaFile)) {
        indexSchema = new IndexSchema(solrConfig, DEFAULT_SCHEMA_XML, new InputSource(schemaIn));
      }
    }
    SolrResourceLoader loader = new SolrResourceLoader(Paths.get(solrConfigHome.getAbsolutePath()));
    SolrCoreContainer container = new SolrCoreContainer(loader);
    CoreDescriptor coreDescriptor = new CoreDescriptor(container, coreName, solrConfig.getResourceLoader().getInstancePath());
    SolrCore core = new SolrCore(coreName, null, solrConfig, indexSchema, null, coreDescriptor, null, null, null);
    container.register(coreName, core, false, true);
    return new EmbeddedSolrServer(container, coreName);
  } catch (ParserConfigurationException | SAXException | IOException e) {
    throw new IllegalArgumentException("Unable to parse Solr configuration file", e);
  } finally {
    // Always restore the caller's context classloader
    Thread.currentThread().setContextClassLoader(tccl);
  }
}
Aggregations