Use of org.apache.solr.common.SolrInputDocument in project titan by thinkaurelius: the class SolrIndex, method mutate.
@Override
public void mutate(Map<String, Map<String, IndexMutation>> mutations, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException {
    logger.debug("Mutating SOLR");
    try {
        for (Map.Entry<String, Map<String, IndexMutation>> stores : mutations.entrySet()) {
            String collectionName = stores.getKey();
            String keyIdField = getKeyFieldId(collectionName);
            List<String> deleteIds = new ArrayList<String>();
            Collection<SolrInputDocument> changes = new ArrayList<SolrInputDocument>();
            for (Map.Entry<String, IndexMutation> entry : stores.getValue().entrySet()) {
                String docId = entry.getKey();
                IndexMutation mutation = entry.getValue();
                Preconditions.checkArgument(!(mutation.isNew() && mutation.isDeleted()));
                Preconditions.checkArgument(!mutation.isNew() || !mutation.hasDeletions());
                Preconditions.checkArgument(!mutation.isDeleted() || !mutation.hasAdditions());
                // Handle any deletions
                if (mutation.hasDeletions()) {
                    if (mutation.isDeleted()) {
                        logger.trace("Deleting entire document {}", docId);
                        deleteIds.add(docId);
                    } else {
                        HashSet<IndexEntry> fieldDeletions = Sets.newHashSet(mutation.getDeletions());
                        if (mutation.hasAdditions()) {
                            for (IndexEntry indexEntry : mutation.getAdditions()) {
                                fieldDeletions.remove(indexEntry);
                            }
                        }
                        deleteIndividualFieldsFromIndex(collectionName, keyIdField, docId, fieldDeletions);
                    }
                }
                // Handle any additions
                if (mutation.hasAdditions()) {
                    int ttl = mutation.determineTTL();
                    SolrInputDocument doc = new SolrInputDocument();
                    doc.setField(keyIdField, docId);
                    boolean isNewDoc = mutation.isNew();
                    if (isNewDoc)
                        logger.trace("Adding new document {}", docId);
                    for (IndexEntry e : mutation.getAdditions()) {
                        final Object fieldValue = convertValue(e.value);
                        // For existing documents, wrap the value in a {"set": value} map so Solr
                        // applies an atomic field update instead of replacing the whole document.
                        doc.setField(e.field, isNewDoc ? fieldValue : new HashMap<String, Object>(1) {
                            {
                                put("set", fieldValue);
                            }
                        });
                    }
                    if (ttl > 0) {
                        Preconditions.checkArgument(isNewDoc, "Solr only supports TTL on new documents [%s]", docId);
                        doc.setField(ttlField, String.format("+%dSECONDS", ttl));
                    }
                    changes.add(doc);
                }
            }
            commitDeletes(collectionName, deleteIds);
            commitDocumentChanges(collectionName, changes);
        }
    } catch (IllegalArgumentException e) {
        throw new PermanentBackendException("Unable to complete query on Solr.", e);
    } catch (Exception e) {
        throw storageException(e);
    }
}
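The commitDeletes and commitDocumentChanges helpers are not part of this excerpt. The following is only a sketch of how such helpers might flush the accumulated work with SolrJ, assuming the same solrClient field and newUpdateRequest() factory used in deleteIndividualFieldsFromIndex below; the actual Titan implementation may differ.

// Hypothetical sketch, not the Titan source: flush the per-collection work gathered by mutate().
private void commitDeletes(String collectionName, List<String> deleteIds) throws SolrServerException, IOException {
    if (deleteIds.isEmpty())
        return;
    UpdateRequest request = newUpdateRequest();
    request.deleteById(deleteIds); // delete whole documents by id
    solrClient.request(request, collectionName);
}

private void commitDocumentChanges(String collectionName, Collection<SolrInputDocument> changes) throws SolrServerException, IOException {
    if (changes.isEmpty())
        return;
    UpdateRequest request = newUpdateRequest();
    request.add(changes); // covers both new documents and atomic ("set") updates
    solrClient.request(request, collectionName);
}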
Use of org.apache.solr.common.SolrInputDocument in project titan by thinkaurelius: the class SolrIndex, method deleteIndividualFieldsFromIndex.
private void deleteIndividualFieldsFromIndex(String collectionName, String keyIdField, String docId, HashSet<IndexEntry> fieldDeletions) throws SolrServerException, IOException {
    if (fieldDeletions.isEmpty())
        return;
    // Atomic-update map: "set" with a null value removes the field from the document.
    Map<String, String> fieldDeletes = new HashMap<String, String>(1) {
        {
            put("set", null);
        }
    };
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField(keyIdField, docId);
    StringBuilder sb = new StringBuilder();
    for (IndexEntry fieldToDelete : fieldDeletions) {
        doc.addField(fieldToDelete.field, fieldDeletes);
        sb.append(fieldToDelete).append(",");
    }
    if (logger.isTraceEnabled())
        logger.trace("Deleting individual fields [{}] for document {}", sb.toString(), docId);
    UpdateRequest singleDocument = newUpdateRequest();
    singleDocument.add(doc);
    solrClient.request(singleDocument, collectionName);
}
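For reference, a minimal, self-contained sketch of the same field-removal idiom outside Titan; the Solr URL, collection name, document id, and field name here are made up for illustration.

import java.util.Collections;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;

public class RemoveFieldExample {
    public static void main(String[] args) throws Exception {
        SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build();
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "doc-1");
        // {"set": null} on an existing document removes the field via an atomic update.
        doc.addField("age", Collections.singletonMap("set", null));
        UpdateRequest request = new UpdateRequest();
        request.add(doc);
        request.process(client, "myCollection");
        client.commit("myCollection");
        client.close();
    }
}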
Use of org.apache.solr.common.SolrInputDocument in project YCSB by brianfrankcooper: the class SolrClient, method insert.
/**
 * Insert a record in the database. Any field/value pairs in the specified values HashMap will be
 * written into the record with the specified record key.
 *
 * @param table
 *          The name of the table
 * @param key
 *          The record key of the record to insert.
 * @param values
 *          A HashMap of field/value pairs to insert in the record
 * @return The result of the operation: Status.OK on success, Status.ERROR on error.
 */
@Override
public Status insert(String table, String key, HashMap<String, ByteIterator> values) {
    try {
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", key);
        for (Entry<String, String> entry : StringByteIterator.getStringMap(values).entrySet()) {
            doc.addField(entry.getKey(), entry.getValue());
        }
        UpdateResponse response;
        if (batchMode) {
            // In batch mode, rely on Solr's commitWithin window instead of an explicit commit.
            response = client.add(table, doc, commitTime);
        } else {
            response = client.add(table, doc);
            client.commit(table);
        }
        return checkStatus(response.getStatus());
    } catch (IOException | SolrServerException e) {
        e.printStackTrace();
    }
    return Status.ERROR;
}
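A hedged usage sketch of this binding: db is a hypothetical, already-initialized instance of the YCSB SolrClient class, and the table, key, and field values are illustrative.

HashMap<String, ByteIterator> values = new HashMap<String, ByteIterator>();
values.put("field0", new StringByteIterator("hello"));
values.put("field1", new StringByteIterator("world"));
// Builds a SolrInputDocument with id "user1000" plus the two fields and adds it to the table's collection.
Status result = db.insert("usertable", "user1000", values);
// result is Status.OK when Solr reports status 0 for the add, Status.ERROR otherwise.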
Use of org.apache.solr.common.SolrInputDocument in project YCSB by brianfrankcooper: the class SolrClient, method update.
/**
 * Update a record in the database. Any field/value pairs in the specified values HashMap will be
 * written into the record with the specified record key, overwriting any existing values with the
 * same field name.
 *
 * @param table
 *          The name of the table
 * @param key
 *          The record key of the record to write.
 * @param values
 *          A HashMap of field/value pairs to update in the record
 * @return The result of the operation: Status.OK on success, Status.ERROR on error.
 */
@Override
public Status update(String table, String key, HashMap<String, ByteIterator> values) {
    try {
        SolrInputDocument updatedDoc = new SolrInputDocument();
        updatedDoc.addField("id", key);
        for (Entry<String, String> entry : StringByteIterator.getStringMap(values).entrySet()) {
            // Wrap each value in a {"set": value} map so Solr performs an atomic update of
            // that field rather than overwriting the whole document.
            updatedDoc.addField(entry.getKey(), Collections.singletonMap("set", entry.getValue()));
        }
        UpdateResponse writeResponse;
        if (batchMode) {
            writeResponse = client.add(table, updatedDoc, commitTime);
        } else {
            writeResponse = client.add(table, updatedDoc);
            client.commit(table);
        }
        return checkStatus(writeResponse.getStatus());
    } catch (IOException | SolrServerException e) {
        e.printStackTrace();
    }
    return Status.ERROR;
}
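The same Collections.singletonMap idiom supports Solr's other atomic-update modifiers besides "set". A small fragment as a sketch; it assumes the same imports as the method above, and the field names are illustrative rather than taken from YCSB.

SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "user1000");
doc.addField("field0", Collections.singletonMap("set", "newValue")); // replace the field value
doc.addField("visits", Collections.singletonMap("inc", 1));          // increment a numeric field
doc.addField("tags", Collections.singletonMap("add", "solr"));       // append to a multi-valued field
doc.addField("legacy", Collections.singletonMap("set", null));       // remove the field entirely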
Use of org.apache.solr.common.SolrInputDocument in project gora by apache: the class SolrStore, method put.
@Override
public void put(K key, T persistent) {
    Schema schema = persistent.getSchema();
    if (!persistent.isDirty()) {
        // nothing to do
        return;
    }
    SolrInputDocument doc = new SolrInputDocument();
    // add primary key
    doc.addField(mapping.getPrimaryKey(), key);
    // populate the doc
    List<Field> fields = schema.getFields();
    for (Field field : fields) {
        String sf = mapping.getSolrField(field.name());
        // mapping won't find the primary key field, so skip it
        if (sf == null) {
            continue;
        }
        Schema fieldSchema = field.schema();
        Object v = persistent.get(field.pos());
        if (v == null) {
            continue;
        }
        v = serializeFieldValue(fieldSchema, v);
        doc.addField(sf, v);
    }
    LOG.info("Putting DOCUMENT: " + doc);
    batch.add(doc);
    // flush the batch once it reaches the configured size
    if (batch.size() >= batchSize) {
        try {
            add(batch, commitWithin);
            batch.clear();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
        }
    }
}
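The add(batch, commitWithin) helper is not shown in this excerpt. Below is only a sketch of how such a batch flush might look with SolrJ, assuming a client field named solrClient; the actual Gora implementation may differ.

// Hypothetical sketch, not the Gora source: submit the buffered documents in one request.
private void add(ArrayList<SolrInputDocument> batch, int commitWithin) throws SolrServerException, IOException {
    if (commitWithin <= 0) {
        // no commitWithin configured: add the documents and commit explicitly
        solrClient.add(batch);
        solrClient.commit();
    } else {
        // let Solr commit within the configured window instead of committing per batch
        UpdateRequest request = new UpdateRequest();
        request.setCommitWithin(commitWithin);
        request.add(batch);
        request.process(solrClient);
    }
}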