Usage of org.apache.solr.client.solrj.impl.HttpSolrClient in the lucene-solr project (Apache).
Class TestSolrCLIRunExample, method testExample:
/**
 * Runs the given bin/solr example via {@link SolrCLI.RunExampleTool} on a freshly
 * selected free port, asserts the example's Solr home directory was created, and —
 * for the "techproducts" example — that the expected 32 sample docs are queryable.
 * The started instance is stopped at the end of each pass. Two passes are run so
 * re-running an example against an existing example directory is also exercised.
 *
 * @param exampleName name of the example to run (e.g. "techproducts", "schemaless")
 * @throws Exception if the tool fails to run or an assertion trips
 */
protected void testExample(String exampleName) throws Exception {
File solrHomeDir = new File(ExternalPaths.SERVER_HOME);
if (!solrHomeDir.isDirectory())
fail(solrHomeDir.getAbsolutePath() + " not found and is required to run this test!");
Path tmpDir = createTempDir();
File solrExampleDir = tmpDir.toFile();
File solrServerDir = solrHomeDir.getParentFile();
for (int pass = 0; pass < 2; pass++) {
// need a port to start the example server on; NOTE(review): there is an inherent
// race between closing this probe socket and the example server binding the port
int bindPort = -1;
try (ServerSocket socket = new ServerSocket(0)) {
bindPort = socket.getLocalPort();
}
// parameterized logging instead of string concatenation
log.info("Selected port {} to start {} example Solr instance on ...", bindPort, exampleName);
String[] toolArgs = new String[] { "-e", exampleName, "-serverDir", solrServerDir.getAbsolutePath(), "-exampleDir", solrExampleDir.getAbsolutePath(), "-p", String.valueOf(bindPort) };
// capture tool output to stdout
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name());
RunExampleExecutor executor = new RunExampleExecutor(stdoutSim);
closeables.add(executor);
SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, System.in, stdoutSim);
try {
final int status = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs));
assertEquals("it should be ok " + tool + " " + Arrays.toString(toolArgs), 0, status);
} catch (Exception e) {
// include the tool's captured stdout so the failure is diagnosable from the log
log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}",
e, baos.toString(StandardCharsets.UTF_8.name()));
throw e;
}
String toolOutput = baos.toString(StandardCharsets.UTF_8.name());
// dump all the output written by the SolrCLI commands to stdout
//System.out.println("\n\n"+toolOutput+"\n\n");
File exampleSolrHomeDir = new File(solrExampleDir, exampleName + "/solr");
assertTrue(exampleSolrHomeDir.getAbsolutePath() + " not found! run " + exampleName + " example failed; output: " + toolOutput, exampleSolrHomeDir.isDirectory());
if ("techproducts".equals(exampleName)) {
// try-with-resources instead of manual try/finally close
try (HttpSolrClient solrClient = getHttpSolrClient("http://localhost:" + bindPort + "/solr/" + exampleName)) {
SolrQuery query = new SolrQuery("*:*");
QueryResponse qr = solrClient.query(query);
long numFound = qr.getResults().getNumFound();
if (numFound == 0) {
// brief wait in case of timing issue in getting the new docs committed
log.warn("Going to wait for 1 second before re-trying query for techproduct example docs ...");
try {
Thread.sleep(1000);
} catch (InterruptedException ignore) {
// restore the interrupt status instead of clearing it (Thread.interrupted()
// only clears the flag; callers up the stack would never see the interrupt)
Thread.currentThread().interrupt();
}
numFound = solrClient.query(query).getResults().getNumFound();
}
assertEquals("expected 32 docs in the " + exampleName + " example but found " + numFound + ", output: " + toolOutput, 32, numFound);
}
}
// stop the test instance
executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p " + bindPort));
}
}
Usage of org.apache.solr.client.solrj.impl.HttpSolrClient in the lucene-solr project (Apache).
Class TopicStream, method getPersistedCheckpoints:
/**
 * Loads previously persisted topic checkpoints from the checkpoint collection into
 * {@code this.checkpoints}. The checkpoint document is fetched by {@code id} from the
 * first ACTIVE replica (on a live node) found across the collection's slices; once one
 * replica has been consulted, the search stops entirely (see the labeled break below).
 *
 * @throws IOException wrapping any failure while fetching or parsing the checkpoint doc
 */
private void getPersistedCheckpoints() throws IOException {
ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
Collection<Slice> slices = CloudSolrStream.getSlices(checkpointCollection, zkStateReader, false);
ClusterState clusterState = zkStateReader.getClusterState();
Set<String> liveNodes = clusterState.getLiveNodes();
// labeled loop so we can stop after consulting exactly one replica
OUTER: for (Slice slice : slices) {
Collection<Replica> replicas = slice.getReplicas();
for (Replica replica : replicas) {
// only query a replica that is ACTIVE and hosted on a live node
if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
HttpSolrClient httpClient = streamContext.getSolrClientCache().getHttpSolrClient(replica.getCoreUrl());
try {
SolrDocument doc = httpClient.getById(id);
if (doc != null) {
// unchecked cast: "checkpoint_ss" is expected to be a multi-valued string field
List<String> checkpoints = (List<String>) doc.getFieldValue("checkpoint_ss");
for (String checkpoint : checkpoints) {
// each entry is "<shardId>~<offset>"; split and store as shardId -> offset
String[] pair = checkpoint.split("~");
this.checkpoints.put(pair[0], Long.parseLong(pair[1]));
}
}
} catch (Exception e) {
// wrap everything (HTTP, parse, cast failures) as IOException for the caller
throw new IOException(e);
}
// exit both loops: the checkpoint doc has been read (or was absent) — do not
// query further replicas or slices
break OUTER;
}
}
}
}
Usage of org.apache.solr.client.solrj.impl.HttpSolrClient in the lucene-solr project (Apache).
Class TestRemoteStreaming, method makeDeleteAllUrl:
/**
 * Compose a url that if you get it, it will delete all the data.
 *
 * @return an /update URL whose stream.body is a URL-encoded delete-all command
 * @throws UnsupportedEncodingException never in practice, since UTF-8 is always supported
 */
private String makeDeleteAllUrl() throws UnsupportedEncodingException {
  final HttpSolrClient client = (HttpSolrClient) getSolrClient();
  final String encodedDeleteAll =
      URLEncoder.encode("<delete><query>*:*</query></delete>", "UTF-8");
  return client.getBaseURL() + "/update?commit=true&stream.body=" + encodedDeleteAll;
}
Usage of org.apache.solr.client.solrj.impl.HttpSolrClient in the lucene-solr project (Apache).
Class TestRemoteStreaming, method testStreamUrl:
@Test
public void testStreamUrl() throws Exception {
  // Ask /debug/dump to fetch a CSV export of every doc id via stream.url,
  // then verify the known doc id shows up in the dumped response.
  HttpSolrClient solrClient = (HttpSolrClient) getSolrClient();
  String baseUrl = solrClient.getBaseURL();
  String csvSelectUrl = baseUrl + "/select?q=*:*&fl=id&wt=csv";
  String dumpUrl =
      baseUrl + "/debug/dump?wt=xml&stream.url=" + URLEncoder.encode(csvSelectUrl, "UTF-8");
  String responseBody = getUrlForString(dumpUrl);
  assertTrue(responseBody.contains("1234"));
  //System.out.println(responseBody);
}
Usage of org.apache.solr.client.solrj.impl.HttpSolrClient in the Gora project (Apache).
Class SolrStore, method initialize:
/**
 * Initialize the data store by reading the credentials, setting the client's properties up and
 * reading the mapping file. Initialize is called when then the call to
 * {@link org.apache.gora.store.DataStoreFactory#createDataStore} is made.
 *
 * @param keyClass        key class for this data store
 * @param persistentClass persistent (value) class for this data store
 * @param properties      configuration used to locate the mapping file, Solr URL,
 *                        SolrJ implementation ("http", "cloud", "concurrent",
 *                        "loadbalance") and optional basic-auth credentials
 */
@Override
public void initialize(Class<K> keyClass, Class<T> persistentClass, Properties properties) {
  super.initialize(keyClass, persistentClass, properties);
  try {
    String mappingFile = DataStoreFactory.getMappingFile(properties, this, DEFAULT_MAPPING_FILE);
    mapping = readMapping(mappingFile);
  } catch (IOException e) {
    LOG.error(e.getMessage(), e);
  }
  SolrClientUrl = DataStoreFactory.findProperty(properties, this, SOLR_URL_PROPERTY, null);
  solrConfig = DataStoreFactory.findProperty(properties, this, SOLR_CONFIG_PROPERTY, null);
  solrSchema = DataStoreFactory.findProperty(properties, this, SOLR_SCHEMA_PROPERTY, null);
  solrJServerImpl = DataStoreFactory.findProperty(properties, this, SOLR_SOLRJSERVER_IMPL, "http");
  serverUserAuth = DataStoreFactory.findBooleanProperty(properties, this, SOLR_SERVER_USER_AUTH, "false");
  if (serverUserAuth) {
    serverUsername = DataStoreFactory.findProperty(properties, this, SOLR_SERVER_USERNAME, null);
    serverPassword = DataStoreFactory.findProperty(properties, this, SOLR_SERVER_PASSWORD, null);
  }
  // parameterized logging instead of string concatenation
  LOG.info("Using Solr server at {}", SolrClientUrl);
  String solrJServerType = ((solrJServerImpl == null || solrJServerImpl.equals("")) ? "http" : solrJServerImpl);
  // normalize once instead of re-lowercasing on every comparison
  String serverType = solrJServerType.toLowerCase(Locale.getDefault());
  // HttpSolrClient - denoted by "http" in properties
  if (serverType.equals("http")) {
    LOG.info("Using HttpSolrClient Solrj implementation.");
    this.adminServer = new HttpSolrClient(SolrClientUrl);
    this.server = new HttpSolrClient(SolrClientUrl + "/" + mapping.getCoreName());
    if (serverUserAuth) {
      HttpClientUtil.setBasicAuth((DefaultHttpClient) ((HttpSolrClient) adminServer).getHttpClient(), serverUsername, serverPassword);
      HttpClientUtil.setBasicAuth((DefaultHttpClient) ((HttpSolrClient) server).getHttpClient(), serverUsername, serverPassword);
    }
    // CloudSolrClient - denoted by "cloud" in properties
  } else if (serverType.equals("cloud")) {
    LOG.info("Using CloudSolrClient Solrj implementation.");
    this.adminServer = new CloudSolrClient(SolrClientUrl);
    this.server = new CloudSolrClient(SolrClientUrl + "/" + mapping.getCoreName());
    if (serverUserAuth) {
      HttpClientUtil.setBasicAuth((DefaultHttpClient) ((CloudSolrClient) adminServer).getLbClient().getHttpClient(), serverUsername, serverPassword);
      HttpClientUtil.setBasicAuth((DefaultHttpClient) ((CloudSolrClient) server).getLbClient().getHttpClient(), serverUsername, serverPassword);
    }
    // ConcurrentUpdateSolrClient - denoted by "concurrent" in properties
  } else if (serverType.equals("concurrent")) {
    LOG.info("Using ConcurrentUpdateSolrClient Solrj implementation.");
    this.adminServer = new ConcurrentUpdateSolrClient(SolrClientUrl, 1000, 10);
    this.server = new ConcurrentUpdateSolrClient(SolrClientUrl + "/" + mapping.getCoreName(), 1000, 10);
    // LBHttpSolrClient - denoted by "loadbalance" in properties
  } else if (serverType.equals("loadbalance")) {
    LOG.info("Using LBHttpSolrClient Solrj implementation.");
    String[] solrUrlElements = StringUtils.split(SolrClientUrl);
    try {
      this.adminServer = new LBHttpSolrClient(solrUrlElements);
    } catch (MalformedURLException e) {
      LOG.error(e.getMessage(), e);
      throw new RuntimeException(e);
    }
    // BUG FIX: the original concatenated the String[] reference itself with the core
    // name ("[Ljava.lang.String;@<hash>/core"), producing a single garbage URL.
    // Append the core name to each load-balanced URL instead.
    String[] coreUrlElements = new String[solrUrlElements.length];
    for (int i = 0; i < solrUrlElements.length; i++) {
      coreUrlElements[i] = solrUrlElements[i] + "/" + mapping.getCoreName();
    }
    try {
      this.server = new LBHttpSolrClient(coreUrlElements);
    } catch (MalformedURLException e) {
      LOG.error(e.getMessage(), e);
      throw new RuntimeException(e);
    }
    if (serverUserAuth) {
      HttpClientUtil.setBasicAuth((DefaultHttpClient) ((LBHttpSolrClient) adminServer).getHttpClient(), serverUsername, serverPassword);
      HttpClientUtil.setBasicAuth((DefaultHttpClient) ((LBHttpSolrClient) server).getHttpClient(), serverUsername, serverPassword);
    }
  }
  if (autoCreateSchema) {
    createSchema();
  }
  // Each of the following properties is optional; on a malformed value the field keeps
  // its previously-initialized default and a warning is logged.
  String batchSizeString = DataStoreFactory.findProperty(properties, this, SOLR_BATCH_SIZE_PROPERTY, null);
  if (batchSizeString != null) {
    try {
      batchSize = Integer.parseInt(batchSizeString);
    } catch (NumberFormatException nfe) {
      LOG.warn("Invalid batch size '{}', using default {}", batchSizeString, DEFAULT_BATCH_SIZE);
    }
  }
  batch = new ArrayList<>(batchSize);
  String commitWithinString = DataStoreFactory.findProperty(properties, this, SOLR_COMMIT_WITHIN_PROPERTY, null);
  if (commitWithinString != null) {
    try {
      commitWithin = Integer.parseInt(commitWithinString);
    } catch (NumberFormatException nfe) {
      LOG.warn("Invalid commit within '{}' , using default {}", commitWithinString, DEFAULT_COMMIT_WITHIN);
    }
  }
  String resultsSizeString = DataStoreFactory.findProperty(properties, this, SOLR_RESULTS_SIZE_PROPERTY, null);
  if (resultsSizeString != null) {
    try {
      resultsSize = Integer.parseInt(resultsSizeString);
    } catch (NumberFormatException nfe) {
      LOG.warn("Invalid results size '{}' , using default {}", resultsSizeString, DEFAULT_RESULTS_SIZE);
    }
  }
}
Aggregations