Use of org.apache.solr.client.solrj.request.ContentStreamUpdateRequest in project streamline by hortonworks.
From the class StreamlineSolrJsonMapper, method createSolrRequest.
private SolrRequest<UpdateResponse> createSolrRequest(String json) {
    final ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(jsonUpdateUrl);
    final ContentStream cs = new ContentStreamBase.StringStream(json, CONTENT_TYPE);
    request.addContentStream(cs);
    LOG.debug("Request generated with JSON: {}", json);
    return request;
}
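For reference, a minimal, self-contained sketch of how a request built this way might be executed against a Solr core. The client URL, update path, and JSON payload are illustrative assumptions, not part of the Streamline code:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.util.ContentStreamBase;

public class JsonUpdateSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical core URL; adjust to the actual deployment.
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build()) {
            // Same pattern as createSolrRequest above: stream a JSON body to an update handler.
            ContentStreamUpdateRequest request = new ContentStreamUpdateRequest("/update/json/docs");
            request.addContentStream(new ContentStreamBase.StringStream(
                    "{\"id\":\"1\",\"title\":\"hello\"}", "application/json"));
            UpdateResponse response = request.process(client);
            client.commit(); // make the document visible to searches
            System.out.println("update took " + response.getElapsedTime() + "ms");
        }
    }
}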
Use of org.apache.solr.client.solrj.request.ContentStreamUpdateRequest in project lucene-solr by apache.
From the class SolrCloudExampleTest, method testLoadDocsIntoGettingStartedCollection.
@Test
public void testLoadDocsIntoGettingStartedCollection() throws Exception {
    waitForThingsToLevelOut(30000);
    log.info("testLoadDocsIntoGettingStartedCollection initialized OK ... running test logic");
    String testCollectionName = "gettingstarted";
    File data_driven_schema_configs = new File(ExternalPaths.SCHEMALESS_CONFIGSET);
    assertTrue(data_driven_schema_configs.getAbsolutePath() + " not found!", data_driven_schema_configs.isDirectory());
    Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
    if (liveNodes.isEmpty())
        fail("No live nodes found! Cannot create a collection until there is at least 1 live node in the cluster.");
    String firstLiveNode = liveNodes.iterator().next();
    String solrUrl = cloudClient.getZkStateReader().getBaseUrlForNodeName(firstLiveNode);
    // create the gettingstarted collection just like the bin/solr script would do
    String[] args = new String[] {
        "-name", testCollectionName,
        "-shards", "2",
        "-replicationFactor", "2",
        "-confname", testCollectionName,
        "-confdir", "data_driven_schema_configs",
        "-configsetsDir", data_driven_schema_configs.getParentFile().getParentFile().getAbsolutePath(),
        "-solrUrl", solrUrl
    };
    // NOTE: not calling SolrCLI.main as the script does, because it calls System.exit, which is a no-no in a JUnit test
    SolrCLI.CreateCollectionTool tool = new SolrCLI.CreateCollectionTool();
    CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
    log.info("Creating the '" + testCollectionName + "' collection using SolrCLI with: " + solrUrl);
    tool.runTool(cli);
    assertTrue("Collection '" + testCollectionName + "' doesn't exist after trying to create it!",
        cloudClient.getZkStateReader().getClusterState().hasCollection(testCollectionName));
    // verify the collection is usable ...
    ensureAllReplicasAreActive(testCollectionName, "shard1", 2, 2, 20);
    ensureAllReplicasAreActive(testCollectionName, "shard2", 2, 2, 10);
    cloudClient.setDefaultCollection(testCollectionName);
    int invalidToolExitStatus = 1;
    assertEquals("Collection '" + testCollectionName + "' created even though it already existed",
        invalidToolExitStatus, tool.runTool(cli));
    // now index docs like bin/post would do, but we can't use SimplePostTool because it uses System.exit when
    // it encounters an error, which JUnit doesn't like ...
    log.info("Created collection, now posting example docs!");
    File exampleDocsDir = new File(ExternalPaths.SOURCE_HOME, "example/exampledocs");
    assertTrue(exampleDocsDir.getAbsolutePath() + " not found!", exampleDocsDir.isDirectory());
    List<File> xmlFiles = Arrays.asList(exampleDocsDir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith(".xml");
        }
    }));
    // force a deterministic random ordering of the files so seeds reproduce regardless of platform/filesystem
    Collections.sort(xmlFiles, (o1, o2) -> {
        // don't rely on File.compareTo; its behavior varies by OS
        return o1.getName().compareTo(o2.getName());
    });
    Collections.shuffle(xmlFiles, new Random(random().nextLong()));
    // if you add/remove example XML docs, you'll have to fix these expected values
    int expectedXmlFileCount = 14;
    int expectedXmlDocCount = 32;
    assertEquals("Unexpected # of example XML files in " + exampleDocsDir.getAbsolutePath(),
        expectedXmlFileCount, xmlFiles.size());
    for (File xml : xmlFiles) {
        ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
        req.addFile(xml, "application/xml");
        log.info("POSTing " + xml.getAbsolutePath());
        cloudClient.request(req);
    }
    cloudClient.commit();
    int numFound = 0;
    // give the update a chance to take effect
    for (int idx = 0; idx < 100; ++idx) {
        QueryResponse qr = cloudClient.query(new SolrQuery("*:*"));
        numFound = (int) qr.getResults().getNumFound();
        if (numFound == expectedXmlDocCount)
            break;
        Thread.sleep(100);
    }
    assertEquals("*:* found unexpected number of documents", expectedXmlDocCount, numFound);
    log.info("Updating Config for " + testCollectionName);
    doTestConfigUpdate(testCollectionName, solrUrl);
    log.info("Running healthcheck for " + testCollectionName);
    doTestHealthcheck(testCollectionName, cloudClient.getZkHost());
    // verify the delete action works too
    log.info("Running delete for " + testCollectionName);
    doTestDeleteAction(testCollectionName, solrUrl);
    log.info("testLoadDocsIntoGettingStartedCollection succeeded ... shutting down now!");
}
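The indexing loop is the heart of the ContentStreamUpdateRequest usage in this test. Distilled into a standalone sketch, with the client construction and file path assumed for illustration:

import java.io.File;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

public class PostXmlSketch {
    public static void main(String[] args) throws Exception {
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/gettingstarted").build()) {
            ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
            req.addFile(new File("example/exampledocs/money.xml"), "application/xml"); // any one example doc
            client.request(req); // stream the file body to the update handler
            client.commit();     // the test commits once after posting all files
        }
    }
}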
Use of org.apache.solr.client.solrj.request.ContentStreamUpdateRequest in project lucene-solr by apache.
From the class BasicDistributedZkTest, method testNumberOfCommitsWithCommitAfterAdd.
private void testNumberOfCommitsWithCommitAfterAdd() throws SolrServerException, IOException {
    log.info("### STARTING testNumberOfCommitsWithCommitAfterAdd");
    long startCommits = getNumCommits((HttpSolrClient) clients.get(0));
    ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update");
    up.addFile(getFile("books_numeric_ids.csv"), "application/csv");
    up.setCommitWithin(900000);
    up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
    NamedList<Object> result = clients.get(0).request(up);
    long endCommits = getNumCommits((HttpSolrClient) clients.get(0));
    assertEquals(startCommits + 1L, endCommits);
}
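Note the interplay here: commitWithin is set to 15 minutes (900000 ms), far beyond the test window, so the single extra commit asserted at the end can only come from the explicit COMMIT action attached to the request. A standalone sketch of the same pattern, with the client URL and file location assumed:

import java.io.File;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

public class CsvCommitSketch {
    public static void main(String[] args) throws Exception {
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build()) {
            ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update");
            up.addFile(new File("books_numeric_ids.csv"), "application/csv"); // hypothetical local copy
            up.setCommitWithin(900000); // fallback: commit within 15 minutes if nothing else commits first
            // attach an explicit commit (waitFlush=true, waitSearcher=true) so the docs are
            // searchable as soon as the request returns
            up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            up.process(client);
        }
    }
}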
Use of org.apache.solr.client.solrj.request.ContentStreamUpdateRequest in project camel by apache.
From the class SolrProducer, method insert.
private void insert(Exchange exchange, SolrClient solrServer) throws Exception {
    Object body = exchange.getIn().getBody();
    boolean invalid = false;
    if (body instanceof WrappedFile) {
        body = ((WrappedFile<?>) body).getFile();
    }
    if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class))) {
        String mimeType = exchange.getIn().getHeader(Exchange.CONTENT_TYPE, String.class);
        ContentStreamUpdateRequest updateRequest = new ContentStreamUpdateRequest(getRequestHandler());
        updateRequest.addFile((File) body, mimeType);
        for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) {
            if (entry.getKey().startsWith(SolrConstants.PARAM)) {
                String paramName = entry.getKey().substring(SolrConstants.PARAM.length());
                updateRequest.setParam(paramName, entry.getValue().toString());
            }
        }
        updateRequest.process(solrServer);
    } else {
        if (body instanceof File) {
            MimetypesFileTypeMap mimeTypesMap = new MimetypesFileTypeMap();
            String mimeType = mimeTypesMap.getContentType((File) body);
            ContentStreamUpdateRequest updateRequest = new ContentStreamUpdateRequest(getRequestHandler());
            updateRequest.addFile((File) body, mimeType);
            for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) {
                if (entry.getKey().startsWith(SolrConstants.PARAM)) {
                    String paramName = entry.getKey().substring(SolrConstants.PARAM.length());
                    updateRequest.setParam(paramName, entry.getValue().toString());
                }
            }
            updateRequest.process(solrServer);
        } else if (body instanceof SolrInputDocument) {
            UpdateRequest updateRequest = new UpdateRequest(getRequestHandler());
            updateRequest.add((SolrInputDocument) body);
            updateRequest.process(solrServer);
        } else if (body instanceof List<?>) {
            List<?> list = (List<?>) body;
            if (list.size() > 0 && list.get(0) instanceof SolrInputDocument) {
                UpdateRequest updateRequest = new UpdateRequest(getRequestHandler());
                updateRequest.add((List<SolrInputDocument>) list);
                updateRequest.process(solrServer);
            } else {
                invalid = true;
            }
        } else {
            boolean hasSolrHeaders = false;
            for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) {
                if (entry.getKey().startsWith(SolrConstants.FIELD)) {
                    hasSolrHeaders = true;
                    break;
                }
            }
            if (hasSolrHeaders) {
                UpdateRequest updateRequest = new UpdateRequest(getRequestHandler());
                SolrInputDocument doc = new SolrInputDocument();
                for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) {
                    if (entry.getKey().startsWith(SolrConstants.FIELD)) {
                        String fieldName = entry.getKey().substring(SolrConstants.FIELD.length());
                        doc.setField(fieldName, entry.getValue());
                    }
                }
                updateRequest.add(doc);
                updateRequest.process(solrServer);
            } else if (body instanceof String) {
                String bodyAsString = (String) body;
                if (!bodyAsString.startsWith("<add")) {
                    bodyAsString = "<add>" + bodyAsString + "</add>";
                }
                DirectXmlRequest xmlRequest = new DirectXmlRequest(getRequestHandler(), bodyAsString);
                solrServer.request(xmlRequest);
            } else {
                invalid = true;
            }
        }
    }
    if (invalid) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unable to find data in Exchange to update Solr");
    }
}
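The producer dispatches on the body type: files go through ContentStreamUpdateRequest, SolrInputDocument instances (single or in a list) through UpdateRequest, header-defined fields become a synthesized document, and raw XML strings become a DirectXmlRequest. For the SolrInputDocument branch, the equivalent standalone SolrJ usage looks roughly like this (client URL, handler path, and field values are assumptions):

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;

public class DocUpdateSketch {
    public static void main(String[] args) throws Exception {
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build()) {
            SolrInputDocument doc = new SolrInputDocument();
            doc.setField("id", "42");
            doc.setField("title", "indexed via UpdateRequest");
            UpdateRequest update = new UpdateRequest("/update"); // stands in for getRequestHandler()
            update.add(doc);
            update.process(client);
            client.commit();
        }
    }
}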
Use of org.apache.solr.client.solrj.request.ContentStreamUpdateRequest in project mycore by MyCoRe-Org.
From the class MCRSolrFileIndexHandler, method index.
@Override
public void index() throws SolrServerException, IOException {
    String solrID = file.toUri().toString();
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Solr: indexing file \"{}\"", file);
    }
    /* create the update request object */
    ContentStreamUpdateRequest updateRequest = new ContentStreamUpdateRequest(EXTRACT_PATH);
    updateRequest.addContentStream(getStream());
    /* set the additional parameters */
    updateRequest.setParams(getSolrParams(file, attrs));
    updateRequest.setCommitWithin(getCommitWithin());
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Solr: sending binary data ({} ({}), size is {}) to solr server.", file, solrID,
            MCRUtils.getSizeFormatted(attrs.size()));
    }
    long t = System.currentTimeMillis();
    /* actually send the request */
    getSolrClient().request(updateRequest);
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Solr: sending binary data \"{} ({})\" done in {}ms", file, solrID,
            System.currentTimeMillis() - t);
    }
}
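EXTRACT_PATH points at Solr's extracting request handler (Solr Cell), conventionally mounted at /update/extract. A minimal standalone sketch of the same pattern, with every concrete value assumed for illustration:

import java.io.File;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

public class ExtractSketch {
    public static void main(String[] args) throws Exception {
        try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build()) {
            ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");
            req.addFile(new File("/tmp/report.pdf"), "application/pdf"); // hypothetical file
            req.setParam("literal.id", "file-report-pdf"); // stored verbatim alongside the extracted text
            req.setCommitWithin(10000); // let Solr commit within 10 seconds, as the handler above does
            client.request(req);
        }
    }
}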