use of org.apache.solr.client.solrj.SolrServerException in project YCSB by brianfrankcooper.
the class SolrClient method update.
/**
* Update a record in the database. Any field/value pairs in the specified values HashMap will be
* written into the record with the specified record key, overwriting any existing values with the
* same field name.
*
* @param table
* The name of the table
* @param key
* The record key of the record to write.
* @param values
* A HashMap of field/value pairs to update in the record
* @return Zero on success, a non-zero error code on error. See this class's description for a
* discussion of error codes.
*/
@Override
public Status update(String table, String key, HashMap<String, ByteIterator> values) {
  try {
    SolrInputDocument updatedDoc = new SolrInputDocument();
    updatedDoc.addField("id", key);
    // Wrap each value in a {"set": value} map so Solr applies an atomic update
    // to the individual fields instead of replacing the whole document.
    for (Entry<String, String> entry : StringByteIterator.getStringMap(values).entrySet()) {
      updatedDoc.addField(entry.getKey(), Collections.singletonMap("set", entry.getValue()));
    }
    UpdateResponse writeResponse;
    if (batchMode) {
      // In batch mode, defer the commit to Solr via the commitWithin time.
      writeResponse = client.add(table, updatedDoc, commitTime);
    } else {
      writeResponse = client.add(table, updatedDoc);
      client.commit(table);
    }
    return checkStatus(writeResponse.getStatus());
  } catch (IOException | SolrServerException e) {
    e.printStackTrace();
  }
  return Status.ERROR;
}
use of org.apache.solr.client.solrj.SolrServerException in project YCSB by brianfrankcooper.
the class SolrClient method scan.
/**
* Perform a range scan for a set of records in the database. Each field/value pair from the
* result will be stored in a HashMap.
*
* @param table
* The name of the table
* @param startkey
* The record key of the first record to read.
* @param recordcount
* The number of records to read
* @param fields
* The list of fields to read, or null for all of them
* @param result
* A Vector of HashMaps, where each HashMap is a set of field/value pairs for one record
* @return Zero on success, a non-zero error code on error. See this class's description for a
* discussion of error codes.
*/
@Override
public Status scan(String table, String startkey, int recordcount, Set<String> fields,
    Vector<HashMap<String, ByteIterator>> result) {
  try {
    boolean returnFields = false;
    String[] fieldList = null;
    if (fields != null) {
      returnFields = true;
      fieldList = fields.toArray(new String[fields.size()]);
    }
    SolrQuery query = new SolrQuery();
    query.setQuery("*:*");
    // Restrict the match-all query to keys at or after startkey with a filter query.
    query.setParam("fq", "id:[ " + startkey + " TO * ]");
    if (returnFields) {
      query.setFields(fieldList);
    }
    query.setRows(recordcount);
    final QueryResponse response = client.query(table, query);
    SolrDocumentList results = response.getResults();
    for (SolrDocument hit : results) {
      // Capacity hint: one slot per returned field of this document.
      HashMap<String, ByteIterator> entry = new HashMap<>(hit.getFieldNames().size());
      for (String field : hit.getFieldNames()) {
        entry.put(field, new StringByteIterator(String.valueOf(hit.getFirstValue(field))));
      }
      result.add(entry);
    }
    return checkStatus(response.getStatus());
  } catch (IOException | SolrServerException e) {
    e.printStackTrace();
  }
  return Status.ERROR;
}
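One caveat worth flagging: the scan relies on Solr returning hits in key order, but with q=*:* and no sort clause the result order is not guaranteed (all documents score equally, so order falls back to index order). A small sketch, not part of the YCSB source, of how the query could pin that ordering with the same SolrQuery API:

SolrQuery query = new SolrQuery("*:*");
query.setParam("fq", "id:[ " + startkey + " TO * ]");
// Sort by the unique key so the range scan returns records in a deterministic order.
query.setSort("id", SolrQuery.ORDER.asc);
query.setRows(recordcount);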
use of org.apache.solr.client.solrj.SolrServerException in project nifi by apache.
the class GetSolr method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
  final ComponentLog logger = getLogger();
  final AtomicBoolean continuePaging = new AtomicBoolean(true);
  final SolrQuery solrQuery = new SolrQuery();
  try {
    if (id_field == null) {
      id_field = getFieldNameOfUniqueKey();
    }
    final String dateField = context.getProperty(DATE_FIELD).getValue();
    final Map<String, String> stateMap = new HashMap<>();
    stateMap.putAll(context.getStateManager().getState(Scope.CLUSTER).toMap());
    solrQuery.setQuery("*:*");
    final String query = context.getProperty(SOLR_QUERY).getValue();
    if (!StringUtils.isBlank(query) && !query.equals("*:*")) {
      solrQuery.addFilterQuery(query);
    }
    // Only fetch documents newer than the date recorded in cluster state.
    final StringBuilder automatedFilterQuery = new StringBuilder()
        .append(dateField).append(":[").append(stateMap.get(STATE_MANAGER_FILTER)).append(" TO *]");
    solrQuery.addFilterQuery(automatedFilterQuery.toString());
    final List<String> fieldList = new ArrayList<>();
    final String returnFields = context.getProperty(RETURN_FIELDS).getValue();
    if (!StringUtils.isBlank(returnFields)) {
      fieldList.addAll(Arrays.asList(returnFields.trim().split("[,]")));
      if (!fieldList.contains(dateField)) {
        fieldList.add(dateField);
        dateFieldNotInSpecifiedFieldsList.set(true);
      }
      for (String returnField : fieldList) {
        solrQuery.addField(returnField.trim());
      }
    }
    solrQuery.setParam(CursorMarkParams.CURSOR_MARK_PARAM, stateMap.get(STATE_MANAGER_CURSOR_MARK));
    solrQuery.setRows(context.getProperty(BATCH_SIZE).asInteger());
    // Cursor paging requires a deterministic sort that includes the unique key field.
    final StringBuilder sortClause = new StringBuilder()
        .append(dateField).append(" asc,").append(id_field).append(" asc");
    solrQuery.setParam("sort", sortClause.toString());
    while (continuePaging.get()) {
      final QueryRequest req = new QueryRequest(solrQuery);
      if (isBasicAuthEnabled()) {
        req.setBasicAuthCredentials(getUsername(), getPassword());
      }
      logger.debug(solrQuery.toQueryString());
      final QueryResponse response = req.process(getSolrClient());
      final SolrDocumentList documentList = response.getResults();
      if (response.getResults().size() > 0) {
        final SolrDocument lastSolrDocument = documentList.get(response.getResults().size() - 1);
        final String latestDateValue = df.format(lastSolrDocument.get(dateField));
        final String newCursorMark = response.getNextCursorMark();
        solrQuery.setParam(CursorMarkParams.CURSOR_MARK_PARAM, newCursorMark);
        stateMap.put(STATE_MANAGER_CURSOR_MARK, newCursorMark);
        stateMap.put(STATE_MANAGER_FILTER, latestDateValue);
        FlowFile flowFile = session.create();
        flowFile = session.putAttribute(flowFile, "solrQuery", solrQuery.toString());
        if (context.getProperty(RETURN_TYPE).getValue().equals(MODE_XML.getValue())) {
          if (dateFieldNotInSpecifiedFieldsList.get()) {
            for (SolrDocument doc : response.getResults()) {
              doc.removeFields(dateField);
            }
          }
          flowFile = session.write(flowFile, SolrUtils.getOutputStreamCallbackToTransformSolrResponseToXml(response));
          flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), "application/xml");
        } else {
          final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
          final RecordSchema schema = writerFactory.getSchema(null, null);
          final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(), schema);
          final StringBuffer mimeType = new StringBuffer();
          flowFile = session.write(flowFile, new OutputStreamCallback() {
            @Override
            public void process(final OutputStream out) throws IOException {
              try {
                final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out);
                writer.write(recordSet);
                writer.flush();
                mimeType.append(writer.getMimeType());
              } catch (SchemaNotFoundException e) {
                throw new ProcessException("Could not parse Solr response", e);
              }
            }
          });
          flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), mimeType.toString());
        }
        session.transfer(flowFile, REL_SUCCESS);
      }
      // A short page means the result set is drained; stop paging.
      continuePaging.set(response.getResults().size() == Integer.parseInt(context.getProperty(BATCH_SIZE).getValue()));
    }
    context.getStateManager().setState(stateMap, Scope.CLUSTER);
  } catch (SolrServerException | SchemaNotFoundException | IOException e) {
    context.yield();
    session.rollback();
    logger.error("Failed to execute query {} due to {}", new Object[] { solrQuery.toString(), e }, e);
    throw new ProcessException(e);
  } catch (final Throwable t) {
    context.yield();
    session.rollback();
    logger.error("Failed to execute query {} due to {}", new Object[] { solrQuery.toString(), t }, t);
    throw t;
  }
}
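The paging loop above uses Solr's cursorMark deep-paging idiom: each response carries a nextCursorMark that feeds the next request. GetSolr stops when a page comes back short; the canonical idiom instead stops when the cursor mark stops changing. A self-contained sketch of that bare idiom, assuming a hypothetical collection "logs" with a "timestamp" field and "id" as the uniqueKey:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.params.CursorMarkParams;

public class CursorPagingSketch {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
      SolrQuery query = new SolrQuery("*:*");
      query.setRows(100);
      // The sort must be deterministic and must include the uniqueKey field.
      query.setSort("timestamp", SolrQuery.ORDER.asc).addSort("id", SolrQuery.ORDER.asc);
      String cursorMark = CursorMarkParams.CURSOR_MARK_START;
      boolean done = false;
      while (!done) {
        query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
        QueryResponse response = client.query("logs", query);
        for (SolrDocument doc : response.getResults()) {
          System.out.println(doc.getFieldValue("id"));
        }
        String nextCursorMark = response.getNextCursorMark();
        // An unchanged cursor mark means the result set is exhausted.
        done = cursorMark.equals(nextCursorMark);
        cursorMark = nextCursorMark;
      }
    }
  }
}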
use of org.apache.solr.client.solrj.SolrServerException in project nifi by apache.
the class PutSolrContentStream method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
  FlowFile flowFile = session.get();
  if (flowFile == null) {
    return;
  }
  final AtomicReference<Exception> error = new AtomicReference<>(null);
  final AtomicReference<Exception> connectionError = new AtomicReference<>(null);
  final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
  final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFile).getValue();
  final Long commitWithin = context.getProperty(COMMIT_WITHIN).evaluateAttributeExpressions(flowFile).asLong();
  final String contentStreamPath = context.getProperty(CONTENT_STREAM_PATH).evaluateAttributeExpressions(flowFile).getValue();
  final MultiMapSolrParams requestParams = new MultiMapSolrParams(getRequestParams(context, flowFile));
  StopWatch timer = new StopWatch(true);
  session.read(flowFile, new InputStreamCallback() {
    @Override
    public void process(final InputStream in) throws IOException {
      ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(contentStreamPath);
      request.setParams(new ModifiableSolrParams());
      // add the extra params, don't use 'set' in case of repeating params
      Iterator<String> paramNames = requestParams.getParameterNamesIterator();
      while (paramNames.hasNext()) {
        String paramName = paramNames.next();
        for (String paramValue : requestParams.getParams(paramName)) {
          request.getParams().add(paramName, paramValue);
        }
      }
      // specify the collection for SolrCloud
      if (isSolrCloud) {
        request.setParam(COLLECTION_PARAM_NAME, collection);
      }
      if (commitWithin != null && commitWithin > 0) {
        request.setParam(COMMIT_WITHIN_PARAM_NAME, commitWithin.toString());
      }
      // if a username and password were provided then pass them for basic auth
      if (isBasicAuthEnabled()) {
        request.setBasicAuthCredentials(getUsername(), getPassword());
      }
      try (final BufferedInputStream bufferedIn = new BufferedInputStream(in)) {
        // add the FlowFile's content on the UpdateRequest
        request.addContentStream(new ContentStreamBase() {
          @Override
          public InputStream getStream() throws IOException {
            return bufferedIn;
          }

          @Override
          public String getContentType() {
            return context.getProperty(CONTENT_TYPE).evaluateAttributeExpressions().getValue();
          }
        });
        UpdateResponse response = request.process(getSolrClient());
        getLogger().debug("Got {} response from Solr", new Object[] { response.getStatus() });
      } catch (SolrException e) {
        error.set(e);
      } catch (SolrServerException e) {
        if (causedByIOException(e)) {
          connectionError.set(e);
        } else {
          error.set(e);
        }
      } catch (IOException e) {
        connectionError.set(e);
      }
    }
  });
  timer.stop();
  if (error.get() != null) {
    getLogger().error("Failed to send {} to Solr due to {}; routing to failure", new Object[] { flowFile, error.get() });
    session.transfer(flowFile, REL_FAILURE);
  } else if (connectionError.get() != null) {
    getLogger().error("Failed to send {} to Solr due to {}; routing to connection_failure", new Object[] { flowFile, connectionError.get() });
    flowFile = session.penalize(flowFile);
    session.transfer(flowFile, REL_CONNECTION_FAILURE);
  } else {
    StringBuilder transitUri = new StringBuilder("solr://");
    transitUri.append(getSolrLocation());
    if (isSolrCloud) {
      transitUri.append(":").append(collection);
    }
    final long duration = timer.getDuration(TimeUnit.MILLISECONDS);
    session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true);
    getLogger().info("Successfully sent {} to Solr in {} millis", new Object[] { flowFile, duration });
    session.transfer(flowFile, REL_SUCCESS);
  }
}
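The processor wraps the incoming FlowFile content in a ContentStreamBase so it can be streamed to any Solr update handler. A minimal file-based sketch of the same ContentStreamUpdateRequest, assuming a hypothetical core at http://localhost:8983/solr/docs and a local data.json file (both placeholders):

import java.io.File;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.response.UpdateResponse;

public class ContentStreamSketch {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/docs").build()) {
      ContentStreamUpdateRequest request = new ContentStreamUpdateRequest("/update/json/docs");
      request.addFile(new File("data.json"), "application/json");
      request.setParam("commitWithin", "5000"); // let Solr commit within 5 seconds
      UpdateResponse response = request.process(client);
      System.out.println("status: " + response.getStatus());
    }
  }
}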
use of org.apache.solr.client.solrj.SolrServerException in project nifi by apache.
the class TestPutSolrContentStream method testUpdateWithSolrJson.
@Test
public void testUpdateWithSolrJson() throws IOException, SolrServerException {
  final SolrClient solrClient = createEmbeddedSolrClient(DEFAULT_SOLR_CORE);
  final TestableProcessor proc = new TestableProcessor(solrClient);
  final TestRunner runner = createDefaultTestRunner(proc);
  runner.setProperty(PutSolrContentStream.CONTENT_STREAM_PATH, "/update/json/docs");
  runner.setProperty("json.command", "false");
  try (FileInputStream fileIn = new FileInputStream(SOLR_JSON_MULTIPLE_DOCS_FILE)) {
    runner.enqueue(fileIn);
    runner.run(1, false);
    runner.assertTransferCount(PutSolrContentStream.REL_FAILURE, 0);
    runner.assertTransferCount(PutSolrContentStream.REL_CONNECTION_FAILURE, 0);
    runner.assertTransferCount(PutSolrContentStream.REL_SUCCESS, 1);
    verifySolrDocuments(proc.getSolrClient(), Arrays.asList(expectedDoc1, expectedDoc2));
  } finally {
    try {
      proc.getSolrClient().close();
    } catch (Exception e) {
      // best-effort cleanup; a failed close should not fail the test
    }
  }
}
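createEmbeddedSolrClient here is a helper from the NiFi test sources. For reference, a minimal sketch of one way to build such a client with SolrJ's EmbeddedSolrServer, assuming a hypothetical solr home directory at src/test/resources/solr that contains the core's configuration:

import java.nio.file.Paths;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;

public class EmbeddedClientSketch {
  public static SolrClient createEmbeddedSolrClient(String coreName) {
    // Boots an in-process Solr from the given solr home; no HTTP server involved.
    return new EmbeddedSolrServer(Paths.get("src/test/resources/solr"), coreName);
  }
}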