Use of org.apache.solr.core.CoreContainer in the lucene-solr project (Apache).
From the class TestManagedSchemaThreadSafety, method createZkController:
/**
 * Builds a mocked {@link ZkController} (as a {@link MockZkController}) backed by the
 * supplied {@link SolrZkClient}. The controller's {@code pathExists} is stubbed to
 * delegate to the real client but may throw a single simulated
 * {@link KeeperException.SessionExpiredException} (chosen randomly via {@code rarely()}),
 * either before or after the real ZooKeeper call, so callers' retry-on-expiry
 * handling gets exercised.
 *
 * @param client the real ZK client used to answer {@code pathExists} calls
 * @return a mock controller whose core container always reports not-shut-down
 */
private ZkController createZkController(SolrZkClient client) throws KeeperException, InterruptedException {
// CALLS_REAL_METHODS: unstubbed CoreContainer methods fall through to the real implementation
CoreContainer mockAlwaysUpCoreContainer = mock(CoreContainer.class, Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS));
// Allow retry on session expiry
when(mockAlwaysUpCoreContainer.isShutDown()).thenReturn(Boolean.FALSE);
MockZkController zkController = mock(MockZkController.class, Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS));
when(zkController.getCoreContainer()).thenReturn(mockAlwaysUpCoreContainer);
when(zkController.getZkClient()).thenReturn(client);
Mockito.doAnswer(new Answer<Boolean>() {
// flips to true once the simulated expiry has fired, so at most one failure is injected
volatile boolean sessionExpired = false;
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
String path = (String) invocation.getArguments()[0];
// may throw BEFORE the real existence check (expiry on the way in)...
perhapsExpired();
Boolean exists = client.exists(path, true);
// ...or AFTER it succeeded (expiry on the way out)
perhapsExpired();
return exists;
}
// Throws SessionExpiredException at most once, randomly gated by rarely()
private void perhapsExpired() throws SessionExpiredException {
if (!sessionExpired && rarely()) {
sessionExpired = true;
throw new KeeperException.SessionExpiredException();
}
}
}).when(zkController).pathExists(Mockito.anyString());
return zkController;
}
Use of org.apache.solr.core.CoreContainer in the lucene-solr project (Apache).
From the class SolrCmdDistributorTest, method test:
/**
 * End-to-end exercise of {@link SolrCmdDistributor}: distributes adds, a delete,
 * and commits across the control node and real clients, verifying document counts
 * after each phase; then fans out a randomized batch of adds to random node subsets
 * and verifies (via registered commit callbacks) that the final distributed commit
 * fired exactly once per shard and produced no deletes. Finishes by delegating to
 * the focused retry/searcher sub-tests.
 */
@Test
@ShardsFixed(num = 4)
public void test() throws Exception {
del("*:*");
SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);
ModifiableSolrParams params = new ModifiableSolrParams();
List<Node> nodes = new ArrayList<>();
// phase 1: the only target node is the control client
ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, ((HttpSolrClient) controlClient).getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
// add one doc to controlClient
AddUpdateCommand cmd = new AddUpdateCommand(null);
cmd.solrDoc = sdoc("id", id.incrementAndGet());
params = new ModifiableSolrParams();
cmdDistrib.distribAdd(cmd, nodes, params);
CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
cmdDistrib.distribCommit(ccmd, nodes, params);
cmdDistrib.finish();
List<Error> errors = cmdDistrib.getErrors();
assertEquals(errors.toString(), 0, errors.size());
long numFound = controlClient.query(new SolrQuery("*:*")).getResults().getNumFound();
assertEquals(1, numFound);
// phase 2: the first real client joins the target list (nodes now = control + client)
HttpSolrClient client = (HttpSolrClient) clients.get(0);
nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, client.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
// add another 2 docs to control and 3 to client
cmdDistrib = new SolrCmdDistributor(updateShardHandler);
// NOTE(review): cmd is reused here with a new doc - assumes the previous distribAdd fully consumed it
cmd.solrDoc = sdoc("id", id.incrementAndGet());
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
cmdDistrib.distribAdd(cmd, nodes, params);
// id2 is remembered so it can be deleted in phase 3 below
int id2 = id.incrementAndGet();
AddUpdateCommand cmd2 = new AddUpdateCommand(null);
cmd2.solrDoc = sdoc("id", id2);
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
cmdDistrib.distribAdd(cmd2, nodes, params);
AddUpdateCommand cmd3 = new AddUpdateCommand(null);
cmd3.solrDoc = sdoc("id", id.incrementAndGet());
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
// cmd3 goes only to the client node, which is why the client ends up with 3 docs vs control's 3 total
cmdDistrib.distribAdd(cmd3, Collections.singletonList(nodes.get(1)), params);
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
cmdDistrib.distribCommit(ccmd, nodes, params);
cmdDistrib.finish();
errors = cmdDistrib.getErrors();
assertEquals(errors.toString(), 0, errors.size());
SolrDocumentList results = controlClient.query(new SolrQuery("*:*")).getResults();
numFound = results.getNumFound();
assertEquals(results.toString(), 3, numFound);
numFound = client.query(new SolrQuery("*:*")).getResults().getNumFound();
assertEquals(3, numFound);
// now delete doc 2 which is on both control and client1
DeleteUpdateCommand dcmd = new DeleteUpdateCommand(null);
dcmd.id = Integer.toString(id2);
cmdDistrib = new SolrCmdDistributor(updateShardHandler);
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
cmdDistrib.distribDelete(dcmd, nodes, params);
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
cmdDistrib.distribCommit(ccmd, nodes, params);
cmdDistrib.finish();
errors = cmdDistrib.getErrors();
assertEquals(errors.toString(), 0, errors.size());
results = controlClient.query(new SolrQuery("*:*")).getResults();
numFound = results.getNumFound();
assertEquals(results.toString(), 2, numFound);
numFound = client.query(new SolrQuery("*:*")).getResults().getNumFound();
// NOTE(review): the failure message reuses the CONTROL results' toString while asserting the
// client's count - misleading on failure, though the assertion itself is correct
assertEquals(results.toString(), 2, numFound);
// optimize so later numDocs/maxDoc comparison is meaningful (deletes purged)
for (SolrClient c : clients) {
c.optimize();
//System.out.println(clients.get(0).request(new LukeRequest()));
}
// phase 3: many adds, each sent to a random non-empty subset of clients
cmdDistrib = new SolrCmdDistributor(updateShardHandler);
int cnt = atLeast(303);
for (int i = 0; i < cnt; i++) {
nodes.clear();
for (SolrClient c : clients) {
// coin-flip: skip roughly half the clients for each doc
if (random().nextBoolean()) {
continue;
}
HttpSolrClient httpClient = (HttpSolrClient) c;
nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
}
AddUpdateCommand c = new AddUpdateCommand(null);
c.solrDoc = sdoc("id", id.incrementAndGet());
// all clients may have been skipped; only distribute when at least one target exists
if (nodes.size() > 0) {
params = new ModifiableSolrParams();
cmdDistrib.distribAdd(c, nodes, params);
}
}
// rebuild nodes to cover ALL clients so the final commit reaches every shard
nodes.clear();
for (SolrClient c : clients) {
HttpSolrClient httpClient = (HttpSolrClient) c;
nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps)));
}
// count postCommit callbacks across every core to verify one commit per shard
final AtomicInteger commits = new AtomicInteger();
for (JettySolrRunner jetty : jettys) {
CoreContainer cores = jetty.getCoreContainer();
try (SolrCore core = cores.getCore("collection1")) {
core.getUpdateHandler().registerCommitCallback(new SolrEventListener() {
@Override
public void init(NamedList args) {
}
@Override
public void postSoftCommit() {
}
@Override
public void postCommit() {
commits.incrementAndGet();
}
@Override
public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {
}
});
}
}
params = new ModifiableSolrParams();
params.set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
cmdDistrib.distribCommit(ccmd, nodes, params);
cmdDistrib.finish();
// exactly one postCommit per shard => the distributed commit hit every node once
assertEquals(getShardCount(), commits.get());
for (SolrClient c : clients) {
NamedList<Object> resp = c.request(new LukeRequest());
// since the pre-phase-3 optimize purged deletes, numDocs must equal maxDoc
assertEquals("SOLR-3428: We only did adds - there should be no deletes", ((NamedList<Object>) resp.get("index")).get("numDocs"), ((NamedList<Object>) resp.get("index")).get("maxDoc"));
}
// focused sub-tests for retry behavior and searcher handling
testMaxRetries();
testOneRetry();
testRetryNodeAgainstBadAddress();
testRetryNodeWontRetrySocketError();
testDistribOpenSearcher();
}
Use of org.apache.solr.core.CoreContainer in the lucene-solr project (Apache).
From the class TestBulkSchemaAPI, method assertFieldSimilarity:
/**
 * Whitebox check of the Similarity configured for {@code fieldname} in the latest
 * schema of the "collection1" core (per {@link SolrCore#getLatestSchema}).
 *
 * Verifies the schema uses a {@link SchemaSimilarityFactory} whose main Similarity
 * is a {@link PerFieldSimilarityWrapper}, asserts the per-field Similarity has the
 * expected class, and then runs every supplied validator against it.
 *
 * @param fieldname  the schema field whose Similarity is inspected
 * @param expected   the exact class the per-field Similarity must have
 * @param validators optional extra checks applied to the per-field Similarity
 */
@SafeVarargs
private static <T extends Similarity> void assertFieldSimilarity(String fieldname, Class<T> expected, Consumer<T>... validators) {
CoreContainer container = jetty.getCoreContainer();
try (SolrCore core = container.getCore("collection1")) {
SimilarityFactory factory = core.getLatestSchema().getSimilarityFactory();
assertNotNull(factory);
assertTrue("test only works with SchemaSimilarityFactory", factory instanceof SchemaSimilarityFactory);
Similarity globalSim = core.getLatestSchema().getSimilarity();
assertNotNull(globalSim);
// sanity check the factory vs the sim in use - also verifies inform() was called on the factory, otherwise exception
assertEquals(globalSim, factory.getSimilarity());
assertTrue("test only works with PerFieldSimilarityWrapper, SchemaSimilarityFactory redefined?", globalSim instanceof PerFieldSimilarityWrapper);
Similarity perFieldSim = ((PerFieldSimilarityWrapper) globalSim).get(fieldname);
assertEquals("wrong sim for field=" + fieldname, expected, perFieldSim.getClass());
for (Consumer<T> validator : validators) {
validator.accept((T) perFieldSim);
}
}
}
Use of org.apache.solr.core.CoreContainer in the lucene-solr project (Apache).
From the class ChangedSchemaMergeTest, method testOptimizeDiffSchemas:
/**
 * Verifies that a core can index and commit under one schema (with Similarity
 * {@code simfac1}), hot-swap to a different schema (without the "which" field,
 * Similarity {@code simfac2}), then keep indexing and finally optimize - i.e. that
 * segments written under different schemas merge cleanly.
 */
@Test
public void testOptimizeDiffSchemas() throws Exception {
// load up a core (why not put it on disk?)
CoreContainer cc = init();
try (SolrCore changed = cc.getCore("changed")) {
assertSimilarity(changed, simfac1);
// add some documents
addDoc(changed, "id", "1", "which", "15", "text", "some stuff with which");
addDoc(changed, "id", "2", "which", "15", "text", "some stuff with which");
addDoc(changed, "id", "3", "which", "15", "text", "some stuff with which");
addDoc(changed, "id", "4", "which", "15", "text", "some stuff with which");
// NOTE(review): this request is never close()d; SolrQueryRequest typically holds a
// searcher reference - confirm whether a close is needed here
SolrQueryRequest req = new LocalSolrQueryRequest(changed, new NamedList<>());
changed.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
// write the new schema out and make it current
FileUtils.writeStringToFile(schemaFile, withoutWhich, StandardCharsets.UTF_8);
IndexSchema iSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", changed.getSolrConfig());
changed.setLatestSchema(iSchema);
assertSimilarity(changed, simfac2);
// sanity check our sanity check
assertFalse("test is broken: both simfacs are the same", simfac1.equals(simfac2));
// id "1" overwrites the earlier doc; "5" is new - both use the schema without "which"
addDoc(changed, "id", "1", "text", "some stuff without which");
addDoc(changed, "id", "5", "text", "some stuff without which");
// plain commit, then an optimizing commit (second arg true) to force the cross-schema merge
changed.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
changed.getUpdateHandler().commit(new CommitUpdateCommand(req, true));
} catch (Throwable e) {
// log before rethrowing so the failure isn't hidden by a shutdown exception in finally
log.error("Test exception, logging so not swallowed if there is a (finally) shutdown exception: " + e.getMessage(), e);
throw e;
} finally {
if (cc != null)
cc.shutdown();
}
}
Use of org.apache.solr.core.CoreContainer in the lucene-solr project (Apache).
From the class ChangedSchemaMergeTest, method init:
/**
 * Builds and loads a fresh {@link CoreContainer} over {@code solrHomeDirectory}
 * containing a single "changed" core whose schema.xml is overwritten with
 * {@code withWhich}. Also records the written schema file in {@code schemaFile}
 * so tests can rewrite it later.
 *
 * @return the loaded container; callers are responsible for shutting it down
 */
private CoreContainer init() throws Exception {
// minimal solr.xml so the container can start and discover cores
File solrXmlFile = new File(solrHomeDirectory, "solr.xml");
FileUtils.write(solrXmlFile, "<solr></solr>", StandardCharsets.UTF_8);
// set up the single core's directory with a minimal configuration
File coreDir = new File(solrHomeDirectory, "changed");
copyMinConf(coreDir, "name=changed");
// overlay the minimal conf with our local schema; remember the file for later rewrites
schemaFile = new File(new File(coreDir, "conf"), "schema.xml");
FileUtils.writeStringToFile(schemaFile, withWhich, StandardCharsets.UTF_8);
final CoreContainer container = new CoreContainer(solrHomeDirectory.getAbsolutePath());
container.load();
return container;
}
Aggregations