Example use of org.apache.jackrabbit.oak.plugins.document.DocumentStoreException in the Apache jackrabbit-oak project: class RDBDocumentSerializer, method fromRow.
/**
 * Reconstructs a {@link Document} based on the persisted {@link RDBRow}.
 * <p>
 * The row carries the document in up to two places: the BDATA blob column
 * (decompressed via {@code fromBlobData}) may hold a base JSON document, and
 * the VARCHAR DATA column holds either a full JSON document or the string
 * literal {@code "blob"} (indicating the body lives in BDATA). In both cases
 * DATA may be followed by comma-separated JSON arrays of update operations
 * which are replayed on top of the base document via {@code applyUpdate}.
 * Indexed metadata columns (modified, modcount, etc.) are copied first.
 *
 * @param collection the collection the document belongs to
 * @param row the persisted row to deserialize
 * @return the reconstructed document, never {@code null}
 * @throws DocumentStoreException if the persisted data cannot be parsed
 *         (e.g. truncated DATA column or inconsistent blob marker)
 */
@Nonnull
public <T extends Document> T fromRow(@Nonnull Collection<T> collection, @Nonnull RDBRow row) throws DocumentStoreException {
    final String charData = row.getData();
    checkNotNull(charData, "RDBRow.getData() is null for collection " + collection + ", id: " + row.getId());
    T doc = collection.newDocument(store);
    doc.put(ID, row.getId());
    // copy indexed metadata columns; LONG_UNSET marks a NULL column value
    if (row.getModified() != RDBRow.LONG_UNSET) {
        doc.put(MODIFIED, row.getModified());
    }
    if (row.getModcount() != RDBRow.LONG_UNSET) {
        doc.put(MODCOUNT, row.getModcount());
    }
    // collision modcount is only maintained when the store is configured for it
    if (RDBDocumentStore.USECMODCOUNT && row.getCollisionsModcount() != RDBRow.LONG_UNSET) {
        doc.put(CMODCOUNT, row.getCollisionsModcount());
    }
    if (row.hasBinaryProperties() != null) {
        doc.put(HASBINARY, row.hasBinaryProperties().longValue());
    }
    if (row.deletedOnce() != null) {
        doc.put(DELETEDONCE, row.deletedOnce().booleanValue());
    }
    byte[] bdata = row.getBdata();
    boolean blobInUse = false;
    JsopTokenizer json;
    // update operations
    // step 1: if the BDATA blob is present, it contains the base document as
    // a single JSON object
    try {
        if (bdata != null && bdata.length != 0) {
            String s = fromBlobData(bdata);
            json = new JsopTokenizer(s);
            json.read('{');
            readDocumentFromJson(json, doc);
            json.read(JsopReader.END);
            blobInUse = true;
        }
    } catch (Exception ex) {
        throw new DocumentStoreException(ex);
    }
    json = new JsopTokenizer(charData);
    // start processing the VARCHAR data
    // step 2: DATA must be consistent with the blob: either a full JSON
    // document (no blob), or the literal "blob" (blob present), optionally
    // followed by ",<update-op-array>,..." entries
    try {
        int next = json.read();
        if (next == '{') {
            if (blobInUse) {
                throw new DocumentStoreException("expected literal \"blob\" but found: " + row.getData());
            }
            readDocumentFromJson(json, doc);
        } else if (next == JsopReader.STRING) {
            if (!blobInUse) {
                throw new DocumentStoreException("did not expect \"blob\" here: " + row.getData());
            }
            if (!"blob".equals(json.getToken())) {
                throw new DocumentStoreException("expected string literal \"blob\"");
            }
        } else {
            throw new DocumentStoreException("unexpected token " + next + " in " + row.getData());
        }
        next = json.read();
        if (next == ',') {
            // step 3: replay appended update operations (each is a JSON array
            // of [opcode, key, (revision,) value] entries) on the base document
            do {
                Object ob = JSON.parse(json);
                if (!(ob instanceof List)) {
                    throw new DocumentStoreException("expected array but got: " + ob);
                }
                List<List<Object>> update = (List<List<Object>>) ob;
                for (List<Object> op : update) {
                    applyUpdate(doc, update, op);
                }
            } while (json.matches(','));
        }
        json.read(JsopReader.END);
        return doc;
    } catch (Exception ex) {
        String message = String.format("Error processing persisted data for document '%s'", row.getId());
        // heuristic: well-formed DATA ends in '}', '"' or ']'; anything else
        // suggests the column was truncated by the database
        if (charData.length() > 0) {
            int last = charData.charAt(charData.length() - 1);
            if (last != '}' && last != '"' && last != ']') {
                message += " (DATA column might be truncated)";
            }
        }
        LOG.error(message, ex);
        throw new DocumentStoreException(message, ex);
    }
}
Example use of org.apache.jackrabbit.oak.plugins.document.DocumentStoreException in the Apache jackrabbit-oak project: class RDBDocumentSerializer, method applyUpdate.
/**
 * Applies a single serialized update operation to the given document.
 * <p>
 * An operation is a JSON array of the form {@code [opcode, key, value]}
 * (non-revisioned) or {@code [opcode, key, revision, value]} (revisioned).
 * Supported opcodes: {@code "="} set, {@code "*"} remove, {@code "+"}
 * increment a Long (non-revisioned only), {@code "M"} keep the maximum of
 * old and new value (non-revisioned only).
 *
 * @param doc the document to modify in place
 * @param updateString the complete update list; used in error messages only
 * @param op the single operation to apply
 * @throws DocumentStoreException on an unsupported opcode or a revisioned
 *         {@code "+"}/{@code "M"} operation
 */
private <T extends Document> void applyUpdate(T doc, List<?> updateString, List<Object> op) {
    String opcode = op.get(0).toString();
    String key = op.get(1).toString();
    Revision rev = null;
    Object value = null;
    if (op.size() == 3) {
        // [opcode, key, value]
        value = op.get(2);
    } else {
        // [opcode, key, revision, value]
        rev = Revision.fromString(op.get(2).toString());
        value = op.get(3);
    }
    Object old = doc.get(key);
    if ("=".equals(opcode)) {
        if (rev == null) {
            doc.put(key, value);
        } else {
            // revisioned set: store into the per-revision sub-map, creating
            // it lazily with the store's revision comparator
            @SuppressWarnings("unchecked")
            Map<Revision, Object> m = (Map<Revision, Object>) old;
            if (m == null) {
                m = new TreeMap<Revision, Object>(comparator);
                doc.put(key, m);
            }
            m.put(rev, value);
        }
    } else if ("*".equals(opcode)) {
        if (rev == null) {
            doc.remove(key);
        } else {
            // revisioned remove: drop only the entry for this revision
            @SuppressWarnings("unchecked")
            Map<Revision, Object> m = (Map<Revision, Object>) old;
            if (m != null) {
                m.remove(rev);
            }
        }
    } else if ("+".equals(opcode)) {
        if (rev == null) {
            // increment: missing property counts as 0
            Long x = (Long) value;
            if (old == null) {
                old = 0L;
            }
            doc.put(key, ((Long) old) + x);
        } else {
            throw new DocumentStoreException("unexpected operation " + op + " in: " + updateString);
        }
    } else if ("M".equals(opcode)) {
        if (rev == null) {
            // max: only overwrite when the new value compares greater
            @SuppressWarnings("unchecked")
            Comparable<Object> newValue = (Comparable<Object>) value;
            if (old == null || newValue.compareTo(old) > 0) {
                doc.put(key, value);
            }
        } else {
            throw new DocumentStoreException("unexpected operation " + op + " in: " + updateString);
        }
    } else {
        throw new DocumentStoreException("unexpected operation " + op + " in: " + updateString);
    }
}
Example use of org.apache.jackrabbit.oak.plugins.document.DocumentStoreException in the Apache jackrabbit-oak project: class RDBDocumentStore, method asBytes.
/**
 * Serializes the given string to UTF-8 bytes, gzip-compressing the result
 * unless compression is disabled via {@code NOGZIP}.
 *
 * @param data the string to serialize (must not be {@code null})
 * @return the UTF-8 bytes, gzipped unless {@code NOGZIP} is set
 * @throws DocumentStoreException if compression fails
 */
public static byte[] asBytes(String data) {
    // StandardCharsets.UTF_8 is guaranteed to exist on every JVM, so the
    // checked UnsupportedEncodingException of getBytes(String) — and its
    // unreachable catch block — is avoided entirely.
    byte[] bytes = data.getBytes(java.nio.charset.StandardCharsets.UTF_8);
    if (NOGZIP) {
        return bytes;
    }
    // presize to the character count as a rough upper-bound estimate
    ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length());
    // try-with-resources guarantees the deflater's native resources are
    // released even if write() fails
    try (GZIPOutputStream gos = new GZIPOutputStream(bos) {
        {
            // TODO: make this configurable
            this.def.setLevel(Deflater.BEST_SPEED);
        }
    }) {
        gos.write(bytes);
    } catch (IOException ex) {
        LOG.error("Error while gzipping contents", ex);
        throw new DocumentStoreException(ex);
    }
    // safe to read after close: GZIPOutputStream.close() flushes the trailer
    return bos.toByteArray();
}
Example use of org.apache.jackrabbit.oak.plugins.document.DocumentStoreException in the Apache jackrabbit-oak project: class RDBDocumentStore, method readDocumentsUncached.
/**
 * Reads the given set of documents directly from the database, bypassing
 * all caches, and returns them keyed by document id.
 *
 * @param collection the collection to read from
 * @param keys the ids of the documents to fetch
 * @return map of id to document for every row found
 * @throws DocumentStoreException if the database read fails
 */
private <T extends Document> Map<String, T> readDocumentsUncached(Collection<T> collection, Set<String> keys) {
    RDBTableMetaData tmd = getTable(collection);
    Map<String, T> documents = new HashMap<String, T>();
    Connection con = null;
    try {
        con = this.ch.getROConnection();
        List<RDBRow> rows = db.read(con, tmd, keys);
        // null out each slot as it is consumed so already-converted rows
        // become eligible for GC while the rest of the list is processed
        for (int i = 0, n = rows.size(); i < n; i++) {
            RDBRow row = rows.set(i, null);
            T doc = convertFromDBObject(collection, row);
            documents.put(doc.getId(), doc);
        }
        con.commit();
    } catch (Exception ex) {
        throw new DocumentStoreException(ex);
    } finally {
        this.ch.closeConnection(con);
    }
    return documents;
}
Example use of org.apache.jackrabbit.oak.plugins.document.DocumentStoreException in the Apache jackrabbit-oak project: class RDBDocumentStore, method internalCreateOrUpdate.
/**
 * Creates or updates the document addressed by the given update operation.
 * <p>
 * If no document exists yet and {@code allowCreate} is set, a new document
 * is inserted; a concurrent insert by another party is detected via the
 * failed insert and handled by falling back to an update. Otherwise the
 * existing document is updated with up to {@code RETRIES} attempts.
 *
 * @param collection the collection to operate on
 * @param update the update operation to apply
 * @param allowCreate whether a missing document may be created
 * @param checkConditions whether the update's conditions must hold
 * @return the document version prior to this operation ({@code null} when a
 *         new document was created, when conditions were not met, or when
 *         the document is absent and creation is not allowed)
 * @throws DocumentStoreException if the update is marked new but the
 *         document exists, insert/update fails, or a race is lost
 */
@CheckForNull
private <T extends Document> T internalCreateOrUpdate(Collection<T> collection, UpdateOp update, boolean allowCreate, boolean checkConditions) {
    T oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
    if (oldDoc == null) {
        if (!allowCreate) {
            return null;
        } else if (!update.isNew()) {
            // caller expected the document to exist already
            throw new DocumentStoreException("Document does not exist: " + update.getId());
        }
        // create path: build the new document locally, then insert
        T doc = collection.newDocument(this);
        if (checkConditions && !checkConditions(doc, update.getConditions())) {
            return null;
        }
        addUpdateCounters(update);
        UpdateUtils.applyChanges(doc, update);
        try {
            Stopwatch watch = startWatch();
            if (!insertDocuments(collection, Collections.singletonList(doc))) {
                throw new DocumentStoreException("Can't insert the document: " + doc.getId());
            }
            if (collection == Collection.NODES) {
                nodesCache.putIfAbsent((NodeDocument) doc);
            }
            stats.doneFindAndModify(watch.elapsed(TimeUnit.NANOSECONDS), collection, update.getId(), true, true, 0);
            // oldDoc is null here: a successful create reports no prior version
            return oldDoc;
        } catch (DocumentStoreException ex) {
            // may have failed due to a race condition; try update instead
            // this is an edge case, so it's ok to bypass the cache
            // (avoiding a race condition where the DB is already updated
            // but the cache is not)
            oldDoc = readDocumentUncached(collection, update.getId(), null);
            if (oldDoc == null) {
                // something else went wrong
                LOG.error("insert failed, but document " + update.getId() + " is not present, aborting", ex);
                throw (ex);
            }
            return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
        }
    } else {
        // update path: the document already exists (at least in cache)
        T result = internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
        if (allowCreate && result == null) {
            // TODO OAK-2655 need to implement some kind of retry
            LOG.error("update of " + update.getId() + " failed, race condition?");
            throw new DocumentStoreException("update of " + update.getId() + " failed, race condition?");
        }
        return result;
    }
}
Aggregations