Use of org.apache.solr.core.CoreContainer in project lucene-solr by apache.
The class SolrGraphiteReporterTest, method testReporter:
@Test
public void testReporter() throws Exception {
  int jmxReporter = JmxUtil.findFirstMBeanServer() != null ? 1 : 0;
  Path home = Paths.get(TEST_HOME());
  // define these properties, they are used in solrconfig.xml
  System.setProperty("solr.test.sys.prop1", "propone");
  System.setProperty("solr.test.sys.prop2", "proptwo");
  MockGraphite mock = new MockGraphite();
  try {
    mock.start();
    Thread.sleep(1000);
    // define the port where MockGraphite is running
    System.setProperty("mock-graphite-port", String.valueOf(mock.port));
    String solrXml = FileUtils.readFileToString(Paths.get(home.toString(), "solr-graphitereporter.xml").toFile(), "UTF-8");
    NodeConfig cfg = SolrXmlConfig.fromString(new SolrResourceLoader(home), solrXml);
    CoreContainer cc = createCoreContainer(cfg, new TestHarness.TestCoresLocator(DEFAULT_TEST_CORENAME, initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml"));
    h.coreName = DEFAULT_TEST_CORENAME;
    SolrMetricManager metricManager = cc.getMetricManager();
    Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.node");
    assertEquals(1 + jmxReporter, reporters.size());
    SolrMetricReporter reporter = reporters.get("test");
    assertNotNull(reporter);
    assertTrue(reporter instanceof SolrGraphiteReporter);
    Thread.sleep(5000);
    assertTrue(mock.lines.size() >= 3);
    String[] frozenLines = (String[]) mock.lines.toArray(new String[mock.lines.size()]);
    for (String line : frozenLines) {
      assertTrue(line, line.startsWith("test.solr.node.CONTAINER.cores."));
    }
  } finally {
    mock.close();
  }
}
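The MockGraphite helper referenced above is not shown in this excerpt. A minimal sketch of such a line-collecting stub, assuming a single reporter connection and the field names the test relies on (port, lines), could look like this:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Hypothetical stand-in for the MockGraphite helper used in the test above:
// a tiny TCP server that records every line a Graphite reporter sends to it.
class MockGraphite extends Thread {
  final List<String> lines = new CopyOnWriteArrayList<>();
  private final ServerSocket server;
  final int port;

  MockGraphite() throws Exception {
    server = new ServerSocket(0); // bind to a free port
    port = server.getLocalPort();
  }

  @Override
  public void run() {
    // handle a single reporter connection; enough for the assertions above
    try (Socket socket = server.accept();
         BufferedReader in = new BufferedReader(
             new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        lines.add(line); // one Graphite metric per line
      }
    } catch (Exception e) {
      // socket closed during shutdown; ignore
    }
  }

  void close() throws Exception {
    server.close();
  }
}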
Use of org.apache.solr.core.CoreContainer in project lucene-solr by apache.
The class CleanupOldIndexTest, method test:
@Test
public void test() throws Exception {
  CollectionAdminRequest.createCollection(COLLECTION, "conf1", 1, 2).processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT);
  // TODO make this configurable on StoppableIndexingThread
  cluster.getSolrClient().setDefaultCollection(COLLECTION);
  int[] maxDocList = new int[] { 300, 500, 700 };
  int maxDoc = maxDocList[random().nextInt(maxDocList.length - 1)];
  StoppableIndexingThread indexThread = new StoppableIndexingThread(null, cluster.getSolrClient(), "1", true, maxDoc, 1, true);
  indexThread.start();
  // give some time to index...
  int[] waitTimes = new int[] { 3000, 4000 };
  Thread.sleep(waitTimes[random().nextInt(waitTimes.length - 1)]);
  // create some "old" index directories
  JettySolrRunner jetty = cluster.getRandomJetty(random());
  CoreContainer coreContainer = jetty.getCoreContainer();
  File dataDir = null;
  try (SolrCore solrCore = coreContainer.getCore(coreContainer.getCoreDescriptors().get(0).getName())) {
    dataDir = new File(solrCore.getDataDir());
  }
  assertTrue(dataDir.isDirectory());
  long msInDay = 60 * 60 * 24L;
  String timestamp1 = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date(1 * msInDay));
  String timestamp2 = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date(2 * msInDay));
  File oldIndexDir1 = new File(dataDir, "index." + timestamp1);
  FileUtils.forceMkdir(oldIndexDir1);
  File oldIndexDir2 = new File(dataDir, "index." + timestamp2);
  FileUtils.forceMkdir(oldIndexDir2);
  // verify the "old" index directories exist
  assertTrue(oldIndexDir1.isDirectory());
  assertTrue(oldIndexDir2.isDirectory());
  // bring shard replica down
  ChaosMonkey.stop(jetty);
  // wait a moment - lets allow some docs to be indexed so replication time is non 0
  Thread.sleep(waitTimes[random().nextInt(waitTimes.length - 1)]);
  // bring shard replica up
  ChaosMonkey.start(jetty);
  // make sure replication can start
  Thread.sleep(3000);
  // stop indexing threads
  indexThread.safeStop();
  indexThread.join();
  cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, (n, c) -> DocCollection.isFullyActive(n, c, 1, 2));
  assertTrue(!oldIndexDir1.isDirectory());
  assertTrue(!oldIndexDir2.isDirectory());
}
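The try-with-resources block above is the usual way to borrow a SolrCore from a CoreContainer. A small hypothetical helper (class and method names are assumptions, not part of the test) that reuses the same pattern to list leftover index.<timestamp> directories:

import java.io.File;
import java.io.FileFilter;

import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;

// Hypothetical helper, not part of CleanupOldIndexTest: resolve a core's data
// directory from its CoreContainer and list any leftover "index.<timestamp>"
// directories that old-index cleanup is expected to remove.
final class OldIndexDirs {
  static File[] find(CoreContainer coreContainer, String coreName) {
    File dataDir;
    // getCore() increments the core's reference count; try-with-resources
    // guarantees the matching close(), just as in the test above
    try (SolrCore core = coreContainer.getCore(coreName)) {
      dataDir = new File(core.getDataDir());
    }
    return dataDir.listFiles(new FileFilter() {
      @Override
      public boolean accept(File file) {
        return file.isDirectory() && file.getName().startsWith("index.");
      }
    });
  }
}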
Use of org.apache.solr.core.CoreContainer in project lucene-solr by apache.
The class ClusterStateUpdateTest, method setUp:
@Override
public void setUp() throws Exception {
  super.setUp();
  System.setProperty("zkClientTimeout", "3000");
  File tmpDir = createTempDir("zkData").toFile();
  zkDir = tmpDir.getAbsolutePath();
  zkServer = new ZkTestServer(zkDir);
  zkServer.run();
  System.setProperty("zkHost", zkServer.getZkAddress());
  AbstractZkTestCase.buildZooKeeper(zkServer.getZkHost(), zkServer.getZkAddress(), "solrconfig.xml", "schema.xml");
  log.info("####SETUP_START " + getTestName());
  dataDir1 = new File(tmpDir + File.separator + "data1");
  dataDir1.mkdirs();
  dataDir2 = new File(tmpDir + File.separator + "data2");
  dataDir2.mkdirs();
  dataDir3 = new File(tmpDir + File.separator + "data3");
  dataDir3.mkdirs();
  dataDir4 = new File(tmpDir + File.separator + "data4");
  dataDir4.mkdirs();
  // set some system properties for use by tests
  System.setProperty("solr.test.sys.prop1", "propone");
  System.setProperty("solr.test.sys.prop2", "proptwo");
  System.setProperty("solr.solr.home", TEST_HOME());
  System.setProperty("hostPort", "1661");
  System.setProperty("solr.data.dir", ClusterStateUpdateTest.this.dataDir1.getAbsolutePath());
  container1 = new CoreContainer(solrHomeDirectory.getAbsolutePath());
  container1.load();
  System.clearProperty("hostPort");
  System.setProperty("hostPort", "1662");
  System.setProperty("solr.data.dir", ClusterStateUpdateTest.this.dataDir2.getAbsolutePath());
  container2 = new CoreContainer(solrHomeDirectory.getAbsolutePath());
  container2.load();
  System.clearProperty("hostPort");
  System.setProperty("hostPort", "1663");
  System.setProperty("solr.data.dir", ClusterStateUpdateTest.this.dataDir3.getAbsolutePath());
  container3 = new CoreContainer(solrHomeDirectory.getAbsolutePath());
  container3.load();
  System.clearProperty("hostPort");
  System.clearProperty("solr.solr.home");
  log.info("####SETUP_END " + getTestName());
}
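A plausible counterpart to this setUp, sketched here rather than copied from the original test, shuts the containers down before the ZooKeeper test server and clears the system properties the setup installed:

@Override
public void tearDown() throws Exception {
  // shut the CoreContainers down first so they can deregister cleanly
  if (container1 != null) container1.shutdown();
  if (container2 != null) container2.shutdown();
  if (container3 != null) container3.shutdown();
  if (zkServer != null) zkServer.shutdown();
  System.clearProperty("zkClientTimeout");
  System.clearProperty("zkHost");
  System.clearProperty("solr.test.sys.prop1");
  System.clearProperty("solr.test.sys.prop2");
  System.clearProperty("solr.data.dir");
  super.tearDown();
}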
Use of org.apache.solr.core.CoreContainer in project jackrabbit-oak by apache.
The class EmbeddedSolrServerProvider, method createSolrServer:
private SolrServer createSolrServer() throws Exception {
  log.info("creating new embedded solr server with config: {}", solrServerConfiguration);
  String solrHomePath = solrServerConfiguration.getSolrHomePath();
  String coreName = solrServerConfiguration.getCoreName();
  EmbeddedSolrServerConfiguration.HttpConfiguration httpConfiguration = solrServerConfiguration.getHttpConfiguration();
  if (solrHomePath != null && coreName != null) {
    checkSolrConfiguration(solrHomePath, coreName);
    if (httpConfiguration != null) {
      if (log.isInfoEnabled()) {
        log.info("starting embedded Solr server with http bindings");
      }
      ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(JettySolrRunner.class.getClassLoader());
      Integer httpPort = httpConfiguration.getHttpPort();
      String context = httpConfiguration.getContext();
      JettySolrRunner jettySolrRunner = null;
      try {
        jettySolrRunner = new JettySolrRunner(solrHomePath, context, httpPort, "solrconfig.xml", "schema.xml", true);
        if (log.isInfoEnabled()) {
          log.info("Jetty runner instantiated");
        }
        jettySolrRunner.start(true);
        if (log.isInfoEnabled()) {
          log.info("Jetty runner started");
        }
      } catch (Exception t) {
        if (log.isErrorEnabled()) {
          log.error("an error has occurred while starting Solr Jetty", t);
        }
      } finally {
        if (jettySolrRunner != null && !jettySolrRunner.isRunning()) {
          try {
            jettySolrRunner.stop();
            if (log.isInfoEnabled()) {
              log.info("Jetty runner stopped");
            }
          } catch (Exception e) {
            if (log.isErrorEnabled()) {
              log.error("error while stopping the Jetty runner", e);
            }
          }
        }
        Thread.currentThread().setContextClassLoader(classLoader);
      }
      if (log.isInfoEnabled()) {
        log.info("starting HTTP Solr server");
      }
      return new HttpWithJettySolrServer(SolrServerConfigurationDefaults.LOCAL_BASE_URL + ':' + httpPort + context + '/' + coreName, jettySolrRunner);
    } else {
      ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader(CoreContainer.class.getClassLoader());
      CoreContainer coreContainer = new CoreContainer(solrHomePath);
      try {
        if (!coreContainer.isLoaded(coreName)) {
          coreContainer.load();
        }
      } catch (Exception e) {
        log.error("cannot load core {}, shutting down embedded Solr..", coreName, e);
        try {
          coreContainer.shutdown();
        } catch (Exception se) {
          log.error("could not shutdown embedded Solr", se);
        }
        return null;
      } finally {
        Thread.currentThread().setContextClassLoader(classLoader);
      }
      EmbeddedSolrServer server = new EmbeddedSolrServer(coreContainer, coreName);
      if (server.ping().getStatus() == 0) {
        return server;
      } else {
        throw new IOException("the embedded Solr server is not alive");
      }
    }
  } else {
    throw new Exception("SolrServer configuration proprties not set");
  }
}
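For the embedded branch, a minimal standalone usage sketch may help; the solr home path and core name below are placeholders, and the String-based CoreContainer constructor is assumed to match the Solr version used above:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.core.CoreContainer;

// Minimal sketch of an EmbeddedSolrServer backed by a CoreContainer;
// "/path/to/solr/home" and "oak" are placeholder values.
public class EmbeddedSolrExample {
  public static void main(String[] args) throws Exception {
    CoreContainer coreContainer = new CoreContainer("/path/to/solr/home");
    coreContainer.load();
    EmbeddedSolrServer server = new EmbeddedSolrServer(coreContainer, "oak");
    try {
      QueryResponse response = server.query(new SolrQuery("*:*"));
      System.out.println("numFound=" + response.getResults().getNumFound());
    } finally {
      // shutting the container down also releases the cores the server uses
      coreContainer.shutdown();
    }
  }
}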
Use of org.apache.solr.core.CoreContainer in project stanbol by apache.
The class SolrDispatchFilterComponent, method activate:
@Activate
protected void activate(ComponentContext context) throws ConfigurationException, ServletException {
  this.context = context;
  BundleContext bc = context.getBundleContext();
  Object value = context.getProperties().get(PROPERTY_SERVER_NAME);
  if (value == null || value.toString().isEmpty()) {
    throw new ConfigurationException(PROPERTY_SERVER_NAME, "The configured CoreContainer name MUST NOT be NULL nor empty!");
  }
  serverName = value.toString();
  String filterString = String.format("(&(%s=%s)(%s=%s))", Constants.OBJECTCLASS, CoreContainer.class.getName(), SolrConstants.PROPERTY_SERVER_NAME, serverName);
  try {
    tracker = new ServiceTracker(bc, bc.createFilter(filterString), trackerCustomizer);
  } catch (InvalidSyntaxException e) {
    throw new ConfigurationException(PROPERTY_SERVER_NAME, "Unable to build Filter for parsed CoreContainer name '" + serverName + "'", e);
  }
  value = context.getProperties().get(PROPERTY_PREFIX_PATH);
  final String prefixPath;
  if (value != null) {
    prefix = value.toString();
    if (prefix.charAt(0) != '/') {
      prefix = '/' + prefix;
    }
    prefixPath = prefix;
    if (!prefix.endsWith("*")) {
      //TODO: check if this is a good idea
      prefix = prefix + "/.*";
    }
  } else {
    prefixPath = null;
    prefix = "/.*";
  }
  filterPrpoerties = new Hashtable<String, Object>();
  if (prefixPath != null) {
    filterPrpoerties.put("path-prefix", prefixPath);
  }
  //now start tracking! ...
  // ... as soon as the first CoreContainer is tracked the Filter will
  // be created and added to the ExtHttpService
  tracker.open();
}
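A plausible companion deactivate method, sketched here rather than taken from the Stanbol source, closes the tracker so the component stops tracking CoreContainer services and drops the state built up during activation:

@Deactivate
protected void deactivate(ComponentContext context) {
  if (tracker != null) {
    // closing the tracker triggers removedService(..) for any tracked CoreContainer
    tracker.close();
    tracker = null;
  }
  filterPrpoerties = null;
  serverName = null;
  prefix = null;
  this.context = null;
}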