Usage example of org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key in the Apache jackrabbit-oak project.
From class MongoDocumentStore, method remove:
/**
 * Removes the documents identified by the keys of {@code toRemove}, applying
 * the per-document conditions from the map values. Deletes are batched into
 * a single {@code $or} query of at most {@code IN_CLAUSE_BATCH_SIZE} entries
 * per round-trip. For the NODES collection, the cache entries of each batch
 * are invalidated even when the remove call fails, because some documents of
 * the batch may already have been deleted on the server.
 *
 * @param collection the collection to remove documents from
 * @param toRemove maps a document id to the conditions that must hold for it
 * @return the number of documents reported removed by the server
 */
@Override
public <T extends Document> int remove(Collection<T> collection, Map<String, Map<Key, Condition>> toRemove) {
    log("remove", toRemove);
    int removed = 0;
    DBCollection dbCollection = getDBCollection(collection);
    Stopwatch watch = startWatch();
    try {
        List<String> idsInBatch = Lists.newArrayList();
        List<DBObject> queriesInBatch = Lists.newArrayList();
        for (Iterator<Entry<String, Map<Key, Condition>>> it = toRemove.entrySet().iterator(); it.hasNext();) {
            Entry<String, Map<Key, Condition>> entry = it.next();
            idsInBatch.add(entry.getKey());
            queriesInBatch.add(createQueryForUpdate(entry.getKey(), entry.getValue()).get());
            // flush when the batch is full or this was the last entry
            boolean flush = queriesInBatch.size() == IN_CLAUSE_BATCH_SIZE || !it.hasNext();
            if (flush) {
                DBObject orQuery = new BasicDBObject();
                orQuery.put(QueryOperators.OR, queriesInBatch);
                try {
                    removed += dbCollection.remove(orQuery).getN();
                } catch (Exception e) {
                    throw DocumentStoreException.convert(e, "Remove failed for " + queriesInBatch);
                } finally {
                    // invalidate even on failure: part of the batch may be gone
                    if (collection == Collection.NODES) {
                        invalidateCache(idsInBatch);
                    }
                }
                idsInBatch.clear();
                queriesInBatch.clear();
            }
        }
    } finally {
        stats.doneRemove(watch.elapsed(TimeUnit.NANOSECONDS), collection, removed);
    }
    return removed;
}
Usage example of org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key in the Apache jackrabbit-oak project.
From class RDBDocumentStore, method delete:
/**
 * Deletes the documents identified by the keys of {@code toRemove}, applying
 * the per-document conditions from the map values. Work is split into chunks
 * of at most 64 conditional deletes per SQL statement; each chunk runs on its
 * own read-write connection, commits independently, and reports its timing to
 * the stats collector.
 *
 * @param collection the collection to delete from
 * @param toRemove maps a document id to the conditions that must hold for it
 * @return the total number of rows deleted across all chunks
 */
private <T extends Document> int delete(Collection<T> collection, Map<String, Map<Key, Condition>> toRemove) {
    int totalDeleted = 0;
    RDBTableMetaData tmd = getTable(collection);
    Map<String, Map<Key, Condition>> chunk = Maps.newHashMap();
    for (Iterator<Entry<String, Map<Key, Condition>>> it = toRemove.entrySet().iterator(); it.hasNext();) {
        Entry<String, Map<Key, Condition>> entry = it.next();
        chunk.put(entry.getKey(), entry.getValue());
        // issue the statement once the chunk is full or input is exhausted
        if (chunk.size() == 64 || !it.hasNext()) {
            Connection connection = null;
            int deletedInChunk = 0;
            Stopwatch watch = startWatch();
            try {
                connection = this.ch.getRWConnection();
                deletedInChunk = db.delete(connection, tmd, chunk);
                totalDeleted += deletedInChunk;
                connection.commit();
            } catch (Exception ex) {
                Set<String> ids = chunk.keySet();
                throw handleException("deleting " + ids, ex, collection, ids);
            } finally {
                this.ch.closeConnection(connection);
                stats.doneRemove(watch.elapsed(TimeUnit.NANOSECONDS), collection, deletedInChunk);
            }
            chunk.clear();
        }
    }
    return totalDeleted;
}
Usage example of org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key in the Apache jackrabbit-oak project.
From class RDBDocumentStoreJDBC, method delete:
/**
 * Deletes the documents with the given IDs, applying the per-document
 * conditions on the MODIFIED column. Only two condition types are
 * supported: EQUALS with a {@code Long} value (rendered as
 * {@code MODIFIED=?}) and EXISTS (rendered as
 * {@code MODIFIED is not null}). All documents are deleted with a single
 * statement whose WHERE clause OR-combines the per-document predicates.
 *
 * @param connection the connection to execute the delete on (not committed here)
 * @param tmd metadata of the table to delete from
 * @param toDelete maps a document id to its delete conditions
 * @return the number of rows deleted, as reported by the database
 * @throws DocumentStoreException on an unsupported condition
 * @throws SQLException on database errors
 */
public int delete(Connection connection, RDBTableMetaData tmd, Map<String, Map<Key, Condition>> toDelete) throws SQLException, DocumentStoreException {
    // Guard: an empty map would otherwise render "delete from T where "
    // (empty WHERE clause), which is invalid SQL and fails at the database.
    if (toDelete.isEmpty()) {
        return 0;
    }
    String or = "";
    StringBuilder whereClause = new StringBuilder();
    for (Entry<String, Map<Key, Condition>> entry : toDelete.entrySet()) {
        whereClause.append(or);
        or = " or ";
        whereClause.append("ID=?");
        for (Entry<Key, Condition> c : entry.getValue().entrySet()) {
            // conditions are only supported on the MODIFIED column
            if (!c.getKey().getName().equals(MODIFIED)) {
                throw new DocumentStoreException("Unsupported condition: " + c);
            }
            whereClause.append(" and MODIFIED");
            if (c.getValue().type == Condition.Type.EQUALS && c.getValue().value instanceof Long) {
                whereClause.append("=?");
            } else if (c.getValue().type == Condition.Type.EXISTS) {
                whereClause.append(" is not null");
            } else {
                throw new DocumentStoreException("Unsupported condition: " + c);
            }
        }
    }
    PreparedStatement stmt = connection.prepareStatement("delete from " + tmd.getName() + " where " + whereClause);
    try {
        // bind parameters in the same traversal order used to build the clause
        int i = 1;
        for (Entry<String, Map<Key, Condition>> entry : toDelete.entrySet()) {
            setIdInStatement(tmd, stmt, i++, entry.getKey());
            for (Entry<Key, Condition> c : entry.getValue().entrySet()) {
                // only EQUALS produced a "?" placeholder above
                if (c.getValue().type == Condition.Type.EQUALS) {
                    stmt.setLong(i++, (Long) c.getValue().value);
                }
            }
        }
        return stmt.executeUpdate();
    } finally {
        stmt.close();
    }
}
Usage example of org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key in the Apache jackrabbit-oak project.
From class RDBDocumentStore, method hasChangesToCollisions:
/**
 * Returns whether the given update sets an entry in the document's
 * "collisions" map. Always returns {@code false} when cmodcount support
 * ({@code USECMODCOUNT}) is disabled.
 *
 * @param update the update to inspect; must not be {@code null}
 * @return {@code true} iff a SET_MAP_ENTRY change targets the collisions map
 */
private static boolean hasChangesToCollisions(UpdateOp update) {
    if (!USECMODCOUNT) {
        return false;
    }
    for (Entry<Key, Operation> change : checkNotNull(update).getChanges().entrySet()) {
        boolean setsMapEntry = change.getValue().type == Operation.Type.SET_MAP_ENTRY;
        if (setsMapEntry && NodeDocument.COLLISIONS.equals(change.getKey().getName())) {
            return true;
        }
    }
    return false;
}
Usage example of org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key in the Apache jackrabbit-oak project.
From class RDBDocumentSerializer, method asString:
/**
 * Serializes the changes in the {@link UpdateOp} into a JSON array; each
 * entry is another JSON array holding operation, key, revision (when
 * present), and value. Operation codes: {@code "+"} increment, {@code "="}
 * set / set map entry, {@code "M"} max, {@code "*"} remove / remove map
 * entry.
 *
 * @param update the update whose changes are serialized
 * @param columnProperties names of properties stored in dedicated table
 *        columns; non-revisioned changes to these are skipped
 * @return the JSON array as a string
 * @throws DocumentStoreException on an operation type with no JSON encoding
 */
public String asString(UpdateOp update, Set<String> columnProperties) {
    StringBuilder sb = new StringBuilder("[");
    String separator = "";
    for (Map.Entry<Key, Operation> change : update.getChanges().entrySet()) {
        Key key = change.getKey();
        Operation op = change.getValue();
        // properties serialized into special columns are not repeated here
        if (columnProperties.contains(key.getName()) && key.getRevision() == null) {
            continue;
        }
        sb.append(separator).append("[");
        switch (op.type) {
            case INCREMENT:
                sb.append("\"+\",");
                break;
            case SET:
            case SET_MAP_ENTRY:
                sb.append("\"=\",");
                break;
            case MAX:
                sb.append("\"M\",");
                break;
            case REMOVE:
            case REMOVE_MAP_ENTRY:
                sb.append("\"*\",");
                break;
            default:
                throw new DocumentStoreException("Can't serialize " + update.toString() + " for JSON append");
        }
        appendJsonString(sb, key.getName());
        sb.append(",");
        Revision rev = key.getRevision();
        if (rev != null) {
            appendJsonString(sb, rev.toString());
            sb.append(",");
        }
        appendJsonValue(sb, op.value);
        sb.append("]");
        separator = ",";
    }
    return sb.append("]").toString();
}
Aggregations