Use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by Apache.
The class NodeDocumentTest, method getNewestRevisionTooExpensive.
@Test
public void getNewestRevisionTooExpensive() throws Exception {
    final int NUM_CHANGES = 200;
    final Set<String> prevDocCalls = newHashSet();
    // wrap the store so every read of a previous (split) document is recorded
    DocumentStore store = new MemoryDocumentStore() {
        @Override
        public <T extends Document> T find(Collection<T> collection, String key) {
            if (Utils.getPathFromId(key).startsWith("p")) {
                prevDocCalls.add(key);
            }
            return super.find(collection, key);
        }
    };
    DocumentNodeStore ns = new DocumentMK.Builder()
            .setDocumentStore(store).setAsyncDelay(0).getNodeStore();
    // create test data
    for (int i = 0; i < NUM_CHANGES; i++) {
        NodeBuilder builder = ns.getRoot().builder();
        if (builder.hasChildNode("test")) {
            builder.child("test").remove();
            builder.child("foo").remove();
        } else {
            builder.child("test");
            builder.child("foo");
        }
        merge(ns, builder);
        // occasionally split the document to build up previous documents
        if (Math.random() < 0.2) {
            RevisionVector head = ns.getHeadRevision();
            NodeDocument doc = ns.getDocumentStore().find(
                    NODES, Utils.getIdFromPath("/test"));
            for (UpdateOp op : SplitOperations.forDocument(
                    doc, ns, head, NO_BINARY, 2)) {
                store.createOrUpdate(NODES, op);
            }
        }
    }
    NodeDocument doc = ns.getDocumentStore().find(NODES, Utils.getIdFromPath("/test"));
    // get most recent previous doc
    NodeDocument prev = doc.getAllPreviousDocs().next();
    // simulate a change revision within the range of
    // the most recent previous document
    Iterable<Revision> changes = prev.getAllChanges();
    Revision baseRev = Iterables.getLast(changes);
    Revision changeRev = new Revision(baseRev.getTimestamp(), 1000, ns.getClusterId());
    // reset calls to previous documents
    prevDocCalls.clear();
    doc.getNewestRevision(ns, new RevisionVector(baseRev), changeRev,
            null, new HashSet<Revision>());
    // must not read all previous docs
    assertTrue("too many calls for previous documents: " + prevDocCalls,
            prevDocCalls.size() <= 5);
    ns.dispose();
}
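The merge(ns, builder) call refers to a helper defined elsewhere in NodeDocumentTest and not shown in this snippet. A minimal sketch of such a helper, assuming the conventional EmptyHook.INSTANCE and CommitInfo.EMPTY arguments used throughout the Oak test suite:

// Sketch of the merge helper used above (assumed, not part of the excerpt):
// commits the builder's changes with no commit hook and empty commit info.
private static NodeState merge(NodeStore store, NodeBuilder builder)
        throws CommitFailedException {
    return store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
}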
Use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by Apache.
The class ManyClusterNodesTest, method before.
@Before
public void before() throws Exception {
    ds = new MemoryDocumentStore();
    // start NUM_CLUSTER_NODES cluster nodes, all backed by the same in-memory document store
    for (int i = 0; i < NUM_CLUSTER_NODES; i++) {
        stores.add(builderProvider.newBuilder()
                .setClusterId(i + 1)
                .setDocumentStore(ds)
                .setAsyncDelay(0)
                .getNodeStore());
        stores.get(i).runBackgroundOperations();
    }
}
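This before() method relies on fields declared elsewhere in ManyClusterNodesTest. A plausible set of declarations, assuming the DocumentMKBuilderProvider JUnit rule used by other Oak tests; the value of NUM_CLUSTER_NODES is a placeholder, not taken from the excerpt:

// Assumed fixture fields (not part of the excerpt); NUM_CLUSTER_NODES is a placeholder value.
private static final int NUM_CLUSTER_NODES = 8;

@Rule
public DocumentMKBuilderProvider builderProvider = new DocumentMKBuilderProvider();

private MemoryDocumentStore ds;
private final List<DocumentNodeStore> stores = new ArrayList<>();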
Use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by Apache.
The class MergeRetryTest, method retryInMemory.
/**
 * Test for OAK-1198
 */
@Test
public void retryInMemory() throws Exception {
    MemoryDocumentStore ds = new MemoryDocumentStore();
    MemoryBlobStore bs = new MemoryBlobStore();
    // two cluster nodes sharing the same document and blob stores
    DocumentNodeStore ns1 = createMK(1, 1000, ds, bs);
    DocumentNodeStore ns2 = createMK(2, 1000, ds, bs);
    try {
        NodeBuilder builder1 = ns1.getRoot().builder();
        builder1.child("bar");
        NodeBuilder builder2 = ns2.getRoot().builder();
        builder2.child("qux");
        // concurrent changes from two cluster nodes exercise the merge retry logic (OAK-1198)
        ns1.merge(builder1, HOOK, CommitInfo.EMPTY);
        ns2.merge(builder2, HOOK, CommitInfo.EMPTY);
    } finally {
        ns1.dispose();
        ns2.dispose();
    }
}
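createMK(...) and HOOK are defined elsewhere in MergeRetryTest; HOOK is presumably a CommitHook constant. A sketch of the helper under that assumption, wiring the shared in-memory document and blob stores into a DocumentNodeStore with the given cluster id and async delay:

// Assumed shape of the createMK helper (not shown in the excerpt).
private DocumentNodeStore createMK(int clusterId, int asyncDelay,
                                   DocumentStore ds, BlobStore bs) {
    return new DocumentMK.Builder()
            .setDocumentStore(ds)
            .setBlobStore(bs)
            .setClusterId(clusterId)
            .setAsyncDelay(asyncDelay)
            .getNodeStore();
}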
Use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by Apache.
The class NodeDocumentTest, method splitCollisions.
@Test
public void splitCollisions() throws Exception {
    MemoryDocumentStore docStore = new MemoryDocumentStore();
    String id = Utils.getIdFromPath("/");
    NodeDocument doc = new NodeDocument(docStore);
    doc.put(Document.ID, id);
    UpdateOp op = new UpdateOp(id, false);
    // add one more committed revision than the split threshold,
    // each paired with a collision marker
    for (int i = 0; i < NodeDocument.NUM_REVS_THRESHOLD + 1; i++) {
        Revision r = Revision.newRevision(1);
        NodeDocument.setRevision(op, r, "c");
        NodeDocument.addCollision(op, r, Revision.newRevision(1));
    }
    UpdateUtils.applyChanges(doc, op);
    RevisionVector head = DummyRevisionContext.INSTANCE.getHeadRevision();
    doc.split(DummyRevisionContext.INSTANCE, head, NO_BINARY);
}
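Note that split(...) only computes the required update operations; nothing is written back unless the caller applies them. If one wanted to persist the result, the pattern from the first snippet applies; a sketch, assuming the standard Collection.NODES:

// Sketch: persist the computed split operations (assumed follow-up, not in the original test).
for (UpdateOp splitOp : doc.split(DummyRevisionContext.INSTANCE, head, NO_BINARY)) {
    docStore.createOrUpdate(Collection.NODES, splitOp);
}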
Use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by Apache.
The class RDBExport, method main.
public static void main(String[] args) throws ClassNotFoundException, SQLException, IOException {
    String url = null, user = null, pw = null, table = "nodes", query = null, dumpfile = null, lobdir = null;
    List<String> fieldList = Collections.emptyList();
    Format format = Format.JSON;
    PrintStream out = System.out;
    Set<String> excl = new HashSet<String>();
    excl.add(Document.ID);
    RDBDocumentSerializer ser = new RDBDocumentSerializer(new MemoryDocumentStore(), excl);
    String columns = null;
    String param = null;
    try {
        // simple hand-rolled argument parsing; every value option consumes the next argument
        for (int i = 0; i < args.length; i++) {
            param = args[i];
            if ("-u".equals(param) || "--username".equals(param)) {
                user = args[++i];
            } else if ("-p".equals(param) || "--password".equals(param)) {
                pw = args[++i];
            } else if ("-c".equals(param) || "--collection".equals(param)) {
                table = args[++i];
            } else if ("-j".equals(param) || "--jdbc-url".equals(param)) {
                url = args[++i];
            } else if ("-q".equals(param) || "--query".equals(param)) {
                query = args[++i];
            } else if ("-o".equals(param) || "--out".equals(param)) {
                OutputStream os = new FileOutputStream(args[++i]);
                out = new PrintStream(os, true, "UTF-8");
            } else if ("--from-db2-dump".equals(param)) {
                dumpfile = args[++i];
            } else if ("--lobdir".equals(param)) {
                lobdir = args[++i];
            } else if ("--jsonArray".equals(param)) {
                format = Format.JSONARRAY;
            } else if ("--csv".equals(param)) {
                format = Format.CSV;
            } else if ("--columns".equals(param)) {
                columns = args[++i];
            } else if ("--fields".equals(param)) {
                String fields = args[++i];
                fieldList = Arrays.asList(fields.split(","));
            } else if ("--version".equals(param)) {
                System.out.println(RDBExport.class.getName() + " version " + OakVersion.getVersion());
                System.exit(0);
            } else if ("--help".equals(param)) {
                printHelp();
                System.exit(0);
            } else {
                System.err.println(RDBExport.class.getName() + ": invalid parameter " + args[i]);
                printUsage();
                System.exit(2);
            }
        }
    } catch (IndexOutOfBoundsException ex) {
        System.err.println(RDBExport.class.getName() + ": value missing for parameter " + param);
        printUsage();
        System.exit(2);
    }
    if (format == Format.CSV && fieldList.isEmpty()) {
        System.err.println(RDBExport.class.getName() + ": csv output requires specification of field list");
        System.exit(2);
    }
    // JSON output always needs the document id; prepend "_id" if the field list lacks it
    if ((format == Format.JSON || format == Format.JSONARRAY) && !fieldList.isEmpty() && !fieldList.contains("_id")) {
        fieldList = new ArrayList<String>(fieldList);
        fieldList.add(0, "_id");
    }
    if (dumpfile == null && url == null) {
        System.err.println(RDBExport.class.getName() + ": must use either dump file or JDBC URL");
        printUsage();
        System.exit(2);
    } else if (dumpfile != null) {
        columns = (columns == null) ? "id, modified, hasbinary, deletedonce, cmodcount, modcount, dsize, data, bdata" : columns;
        List<String> columnList = Arrays.asList(columns.toLowerCase(Locale.ENGLISH).replace(" ", "").split(","));
        dumpFile(dumpfile, lobdir, format, out, fieldList, columnList, ser);
    } else {
        if (columns != null) {
            System.err.println(RDBExport.class.getName() + ": column names ignored when using JDBC");
        }
        dumpJDBC(url, user, pw, table, query, format, out, fieldList, ser);
    }
    out.flush();
    out.close();
}
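For illustration, exporting the nodes table of a hypothetical PostgreSQL repository as a JSON array could look like the following; the JDBC URL, credentials, and output file name are placeholders, not values from the excerpt:

// Illustrative invocation of RDBExport.main; all values below are placeholders.
public class RDBExportDemo {
    public static void main(String[] args) throws Exception {
        RDBExport.main(new String[] {
                "--jdbc-url", "jdbc:postgresql://localhost:5432/oak",
                "--username", "oak",
                "--password", "secret",
                "--collection", "nodes",
                "--jsonArray",
                "--out", "nodes.json" });
    }
}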