Use of org.apache.jackrabbit.oak.commons.json.JsopTokenizer in the jackrabbit-oak project by Apache.
Class NodeDocument, method fromString.
/**
 * Parses the given JSON string into a {@link NodeDocument} associated with
 * the supplied store. The document is sealed before it is returned.
 *
 * @param store the store the resulting document belongs to
 * @param s the JSON representation of the document
 * @return the sealed document
 */
public static NodeDocument fromString(DocumentStore store, String s) {
    JsopTokenizer tokenizer = new JsopTokenizer(s);
    NodeDocument document = new NodeDocument(store);
    // consume key/value pairs until the end of the input
    for (;;) {
        if (tokenizer.matches(JsopReader.END)) {
            break;
        }
        String key = tokenizer.readString();
        tokenizer.read(':');
        if (tokenizer.matches(JsopReader.END)) {
            // input ended right after the colon; stop without a value
            break;
        }
        document.put(key, fromJson(tokenizer));
        // swallow a separating comma, if one is present
        tokenizer.matches(',');
    }
    document.seal();
    return document;
}
Use of org.apache.jackrabbit.oak.commons.json.JsopTokenizer in the jackrabbit-oak project by Apache.
Class RDBDocumentSerializer, method fromRow.
/**
 * Reconstructs a {@link Document} based on the persisted {@link RDBRow}.
 * <p>
 * The document is assembled from three sources, in order: the row's scalar
 * columns (id, modified, modcount, collisions modcount, binary/deleted
 * flags), the optional BLOB column (full JSON document), and the VARCHAR
 * data column. The VARCHAR column either holds the full JSON document
 * itself, or the string literal "blob" (meaning the BLOB column was used)
 * optionally followed by comma-separated JSON arrays of update operations
 * that are applied on top.
 *
 * @param collection the collection the document belongs to
 * @param row the persisted row
 * @return the reconstructed document
 * @throws DocumentStoreException if the persisted data is malformed or
 *         inconsistent (e.g. BLOB present but VARCHAR not "blob")
 */
@Nonnull
public <T extends Document> T fromRow(@Nonnull Collection<T> collection, @Nonnull RDBRow row) throws DocumentStoreException {
final String charData = row.getData();
checkNotNull(charData, "RDBRow.getData() is null for collection " + collection + ", id: " + row.getId());
T doc = collection.newDocument(store);
doc.put(ID, row.getId());
// scalar columns are only copied when actually set in the row
if (row.getModified() != RDBRow.LONG_UNSET) {
doc.put(MODIFIED, row.getModified());
}
if (row.getModcount() != RDBRow.LONG_UNSET) {
doc.put(MODCOUNT, row.getModcount());
}
if (RDBDocumentStore.USECMODCOUNT && row.getCollisionsModcount() != RDBRow.LONG_UNSET) {
doc.put(CMODCOUNT, row.getCollisionsModcount());
}
if (row.hasBinaryProperties() != null) {
doc.put(HASBINARY, row.hasBinaryProperties().longValue());
}
if (row.deletedOnce() != null) {
doc.put(DELETEDONCE, row.deletedOnce().booleanValue());
}
byte[] bdata = row.getBdata();
boolean blobInUse = false;
JsopTokenizer json;
// update operations
// if the BLOB column is populated, it contains the full JSON document
try {
if (bdata != null && bdata.length != 0) {
String s = fromBlobData(bdata);
json = new JsopTokenizer(s);
json.read('{');
readDocumentFromJson(json, doc);
json.read(JsopReader.END);
blobInUse = true;
}
} catch (Exception ex) {
throw new DocumentStoreException(ex);
}
json = new JsopTokenizer(charData);
// start processing the VARCHAR data
try {
int next = json.read();
if (next == '{') {
// VARCHAR holds the full document; must not coexist with BLOB data
if (blobInUse) {
throw new DocumentStoreException("expected literal \"blob\" but found: " + row.getData());
}
readDocumentFromJson(json, doc);
} else if (next == JsopReader.STRING) {
// a string token here must be the "blob" marker, and only when the
// BLOB column was actually used
if (!blobInUse) {
throw new DocumentStoreException("did not expect \"blob\" here: " + row.getData());
}
if (!"blob".equals(json.getToken())) {
throw new DocumentStoreException("expected string literal \"blob\"");
}
} else {
throw new DocumentStoreException("unexpected token " + next + " in " + row.getData());
}
next = json.read();
if (next == ',') {
// apply the trailing update operations (JSON arrays) in order
do {
Object ob = JSON.parse(json);
if (!(ob instanceof List)) {
throw new DocumentStoreException("expected array but got: " + ob);
}
List<List<Object>> update = (List<List<Object>>) ob;
for (List<Object> op : update) {
applyUpdate(doc, update, op);
}
} while (json.matches(','));
}
json.read(JsopReader.END);
return doc;
} catch (Exception ex) {
String message = String.format("Error processing persisted data for document '%s'", row.getId());
if (charData.length() > 0) {
// a well-formed JSON document/marker ends in '}', '"' or ']';
// anything else hints at truncation by the database column limit
int last = charData.charAt(charData.length() - 1);
if (last != '}' && last != '"' && last != ']') {
message += " (DATA column might be truncated)";
}
}
LOG.error(message, ex);
throw new DocumentStoreException(message, ex);
}
}
Use of org.apache.jackrabbit.oak.commons.json.JsopTokenizer in the jackrabbit-oak project by Apache.
Class ChangeSet, method fromString.
/**
 * Deserializes a {@link ChangeSet} from its JSON representation. Fields
 * that are absent from the JSON object stay at their defaults (0 for
 * maxPathDepth, null for the sets).
 *
 * @param json the JSON representation of a change set
 * @return the deserialized change set
 */
public static ChangeSet fromString(String json) {
    JsopReader reader = new JsopTokenizer(json);
    int maxPathDepth = 0;
    Set<String> parentPaths = null;
    Set<String> parentNodeNames = null;
    Set<String> parentNodeTypes = null;
    Set<String> propertyNames = null;
    Set<String> allNodeTypes = null;
    reader.read('{');
    if (!reader.matches('}')) {
        // non-empty object: read key/value entries until no comma follows
        for (;;) {
            String key = reader.readString();
            reader.read(':');
            if ("maxPathDepth".equals(key)) {
                maxPathDepth = Integer.parseInt(reader.read(JsopReader.NUMBER));
            } else {
                // every other known field is a JSON array of strings
                Set<String> values = readArrayAsSet(reader);
                if ("parentPaths".equals(key)) {
                    parentPaths = values;
                } else if ("parentNodeNames".equals(key)) {
                    parentNodeNames = values;
                } else if ("parentNodeTypes".equals(key)) {
                    parentNodeTypes = values;
                } else if ("propertyNames".equals(key)) {
                    propertyNames = values;
                } else if ("allNodeTypes".equals(key)) {
                    allNodeTypes = values;
                }
            }
            if (!reader.matches(',')) {
                break;
            }
        }
        reader.read('}');
    }
    reader.read(JsopReader.END);
    return new ChangeSet(maxPathDepth, parentPaths, parentNodeNames, parentNodeTypes, propertyNames, allNodeTypes);
}
Use of org.apache.jackrabbit.oak.commons.json.JsopTokenizer in the jackrabbit-oak project by Apache.
Class AbstractBlobStoreTest, method extractFiles.
/**
 * Extracts the files described by a JSON listing blob into a target
 * directory. The listing (read from the store via {@code listingId}) is a
 * JSON object mapping relative file paths to blob ids; each referenced
 * blob is read fully and written to the corresponding file under
 * {@code target}, creating parent directories as needed.
 *
 * @param store the blob store to read the listing and file contents from
 * @param listingId the id of the blob holding the JSON listing
 * @param target the directory to extract the files into
 * @throws IOException if reading a blob or writing a file fails
 */
public static void extractFiles(BlobStore store, String listingId, String target) throws IOException {
    String listing = new String(BlobStoreInputStream.readFully(store, listingId), "UTF-8");
    JsopTokenizer t = new JsopTokenizer(listing);
    File targetDir = new File(target);
    // best effort: failures surface later when the first file is written
    targetDir.mkdirs();
    t.read('{');
    if (!t.matches('}')) {
        do {
            String file = t.readString();
            t.read(':');
            String id = t.readString();
            byte[] data = BlobStoreInputStream.readFully(store, id);
            File outFile = new File(targetDir, file);
            outFile.getParentFile().mkdirs();
            // try-with-resources guarantees the stream is closed even if
            // write() throws (the original manual try/finally is error-prone)
            try (FileOutputStream out = new FileOutputStream(outFile)) {
                out.write(data);
            }
        } while (t.matches(','));
    }
    t.read('}');
}
Use of org.apache.jackrabbit.oak.commons.json.JsopTokenizer in the jackrabbit-oak project by Apache.
Class BlobCollector, method loadValue.
/**
 * Parses a serialized property value and collects any binary values it
 * contains as {@link ReferencedBlob}s.
 *
 * @param v the serialized property value (single value or JSON array)
 * @param blobs the collection the referenced blobs are added to
 * @param nodeId the id of the node the property belongs to
 */
private void loadValue(String v, Collection<ReferencedBlob> blobs, String nodeId) {
    JsopReader parser = new JsopTokenizer(v);
    if (parser.matches('[')) {
        // multi-valued property: collect every element of a BINARIES array
        PropertyState property = DocumentPropertyState.readArrayProperty("x", nodeStore, parser);
        if (property.getType() == Type.BINARIES) {
            for (int i = 0; i < property.count(); i++) {
                blobs.add(new ReferencedBlob(property.getValue(Type.BINARY, i), nodeId));
            }
        }
    } else {
        // single-valued property: collect it only if it is a binary
        PropertyState property = DocumentPropertyState.readProperty("x", nodeStore, parser);
        if (property.getType() == Type.BINARY) {
            blobs.add(new ReferencedBlob(property.getValue(Type.BINARY), nodeId));
        }
    }
}
Aggregations