Search in sources :

Example 66 with MemoryDocumentStore

use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by apache.

The class NodeDocumentTest, method getNewestRevisionTooExpensive.

@Test
public void getNewestRevisionTooExpensive() throws Exception {
    final int NUM_CHANGES = 200;
    // Records the ids of previous (split) documents read during the test.
    final Set<String> prevDocCalls = newHashSet();
    // Wrap the store so every lookup of a previous document is recorded.
    DocumentStore store = new MemoryDocumentStore() {

        @Override
        public <T extends Document> T find(Collection<T> collection, String key) {
            // Ids of previous documents map to paths starting with "p".
            if (Utils.getPathFromId(key).startsWith("p")) {
                prevDocCalls.add(key);
            }
            return super.find(collection, key);
        }
    };
    DocumentNodeStore ns = new DocumentMK.Builder().setDocumentStore(store).setAsyncDelay(0).getNodeStore();
    // Fixed seed instead of Math.random(): keeps the test deterministic
    // and reproducible while still interleaving split operations with
    // regular commits roughly every fifth iteration.
    java.util.Random random = new java.util.Random(42);
    // create test data: toggle existence of /test and /foo so that every
    // merge adds a change to the /test document
    for (int i = 0; i < NUM_CHANGES; i++) {
        NodeBuilder builder = ns.getRoot().builder();
        if (builder.hasChildNode("test")) {
            builder.child("test").remove();
            builder.child("foo").remove();
        } else {
            builder.child("test");
            builder.child("foo");
        }
        merge(ns, builder);
        if (random.nextDouble() < 0.2) {
            // occasionally split the /test document to build up a chain
            // of previous documents
            RevisionVector head = ns.getHeadRevision();
            NodeDocument doc = ns.getDocumentStore().find(NODES, Utils.getIdFromPath("/test"));
            for (UpdateOp op : SplitOperations.forDocument(doc, ns, head, NO_BINARY, 2)) {
                store.createOrUpdate(NODES, op);
            }
        }
    }
    NodeDocument doc = ns.getDocumentStore().find(NODES, Utils.getIdFromPath("/test"));
    // get most recent previous doc
    NodeDocument prev = doc.getAllPreviousDocs().next();
    // simulate a change revision within the range of
    // the most recent previous document
    Iterable<Revision> changes = prev.getAllChanges();
    Revision baseRev = Iterables.getLast(changes);
    Revision changeRev = new Revision(baseRev.getTimestamp(), 1000, ns.getClusterId());
    // reset calls to previous documents
    prevDocCalls.clear();
    doc.getNewestRevision(ns, new RevisionVector(baseRev), changeRev, null, new HashSet<Revision>());
    // must not read all previous docs; only a small, bounded number of
    // lookups is acceptable for a change near the head of the history
    assertTrue("too many calls for previous documents: " + prevDocCalls, prevDocCalls.size() <= 5);
    ns.dispose();
}
Also used : MemoryDocumentStore(org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore) Utils.getRootDocument(org.apache.jackrabbit.oak.plugins.document.util.Utils.getRootDocument) NodeBuilder(org.apache.jackrabbit.oak.spi.state.NodeBuilder) MemoryDocumentStore(org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore) Test(org.junit.Test)

Example 67 with MemoryDocumentStore

use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by apache.

The class ManyClusterNodesTest, method before.

@Before
public void before() throws Exception {
    // All cluster nodes share one in-memory document store.
    ds = new MemoryDocumentStore();
    for (int n = 0; n < NUM_CLUSTER_NODES; n++) {
        // Cluster ids are 1-based; index n is 0-based.
        stores.add(builderProvider.newBuilder()
                .setClusterId(n + 1)
                .setDocumentStore(ds)
                .setAsyncDelay(0)
                .getNodeStore());
        // Run background ops immediately so the new node publishes its state.
        stores.get(stores.size() - 1).runBackgroundOperations();
    }
}
Also used : MemoryDocumentStore(org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore) Before(org.junit.Before)

Example 68 with MemoryDocumentStore

use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by apache.

The class MergeRetryTest, method retryInMemory.

/**
 * Test for OAK-1198: merging non-conflicting changes from two cluster
 * nodes sharing the same in-memory stores must succeed.
 */
@Test
public void retryInMemory() throws Exception {
    MemoryDocumentStore documentStore = new MemoryDocumentStore();
    MemoryBlobStore blobStore = new MemoryBlobStore();
    // Two cluster nodes on top of the same backing stores.
    DocumentNodeStore store1 = createMK(1, 1000, documentStore, blobStore);
    DocumentNodeStore store2 = createMK(2, 1000, documentStore, blobStore);
    try {
        // Independent children created on different cluster nodes.
        NodeBuilder b1 = store1.getRoot().builder();
        b1.child("bar");
        NodeBuilder b2 = store2.getRoot().builder();
        b2.child("qux");
        // Both merges must go through despite the concurrent changes.
        store1.merge(b1, HOOK, CommitInfo.EMPTY);
        store2.merge(b2, HOOK, CommitInfo.EMPTY);
    } finally {
        store1.dispose();
        store2.dispose();
    }
}
Also used : MemoryDocumentStore(org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore) MemoryBlobStore(org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore) NodeBuilder(org.apache.jackrabbit.oak.spi.state.NodeBuilder) Test(org.junit.Test)

Example 69 with MemoryDocumentStore

use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by apache.

The class NodeDocumentTest, method splitCollisions.

@Test
public void splitCollisions() throws Exception {
    MemoryDocumentStore store = new MemoryDocumentStore();
    String rootId = Utils.getPathFromId("/");
    NodeDocument doc = new NodeDocument(store);
    doc.put(Document.ID, rootId);
    UpdateOp op = new UpdateOp(rootId, false);
    // Add one revision/collision pair more than the split threshold so a
    // subsequent split is triggered.
    int remaining = NodeDocument.NUM_REVS_THRESHOLD + 1;
    while (remaining-- > 0) {
        Revision r = Revision.newRevision(1);
        NodeDocument.setRevision(op, r, "c");
        NodeDocument.addCollision(op, r, Revision.newRevision(1));
    }
    UpdateUtils.applyChanges(doc, op);
    RevisionVector head = DummyRevisionContext.INSTANCE.getHeadRevision();
    // Must not throw even though the document only contains collisions.
    doc.split(DummyRevisionContext.INSTANCE, head, NO_BINARY);
}
Also used : MemoryDocumentStore(org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore) Test(org.junit.Test)

Example 70 with MemoryDocumentStore

use of org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore in project jackrabbit-oak by apache.

The class RDBExport, method main.

// Command-line entry point: exports an RDB "nodes" table (or a DB2 dump
// file) as JSON, a JSON array, or CSV. Parses the arguments, then
// dispatches either to dumpFile() or dumpJDBC().
public static void main(String[] args) throws ClassNotFoundException, SQLException, IOException {
    String url = null, user = null, pw = null, table = "nodes", query = null, dumpfile = null, lobdir = null;
    List<String> fieldList = Collections.emptyList();
    Format format = Format.JSON;
    PrintStream out = System.out;
    // _id is handled separately by the serializer, so exclude it here.
    Set<String> excl = new HashSet<String>();
    excl.add(Document.ID);
    RDBDocumentSerializer ser = new RDBDocumentSerializer(new MemoryDocumentStore(), excl);
    String columns = null;
    String param = null;
    try {
        // NOTE: options with values consume the next argument via ++i;
        // a missing value surfaces as IndexOutOfBoundsException below.
        for (int i = 0; i < args.length; i++) {
            param = args[i];
            if ("-u".equals(param) || "--username".equals(param)) {
                user = args[++i];
            } else if ("-p".equals(param) || "--password".equals(param)) {
                pw = args[++i];
            } else if ("-c".equals(param) || "--collection".equals(param)) {
                table = args[++i];
            } else if ("-j".equals(param) || "--jdbc-url".equals(param)) {
                url = args[++i];
            } else if ("-q".equals(param) || "--query".equals(param)) {
                query = args[++i];
            } else if ("-o".equals(param) || "--out".equals(param)) {
                // redirect output to a file, auto-flushing, UTF-8 encoded
                OutputStream os = new FileOutputStream(args[++i]);
                out = new PrintStream(os, true, "UTF-8");
            } else if ("--from-db2-dump".equals(param)) {
                dumpfile = args[++i];
            } else if ("--lobdir".equals(param)) {
                lobdir = args[++i];
            } else if ("--jsonArray".equals(param)) {
                format = Format.JSONARRAY;
            } else if ("--csv".equals(param)) {
                format = Format.CSV;
            } else if ("--columns".equals(param)) {
                columns = args[++i];
            } else if ("--fields".equals(param)) {
                // comma-separated list of document fields to export
                String fields = args[++i];
                fieldList = Arrays.asList(fields.split(","));
            } else if ("--version".equals(param)) {
                System.out.println(RDBExport.class.getName() + " version " + OakVersion.getVersion());
                System.exit(0);
            } else if ("--help".equals(param)) {
                printHelp();
                System.exit(0);
            } else {
                System.err.println(RDBExport.class.getName() + ": invalid parameter " + args[i]);
                printUsage();
                System.exit(2);
            }
        }
    } catch (IndexOutOfBoundsException ex) {
        // args[++i] ran past the end: the last option is missing its value
        System.err.println(RDBExport.class.getName() + ": value missing for parameter " + param);
        printUsage();
        System.exit(2);
    }
    // CSV has no self-describing structure, so an explicit field list is required.
    if (format == Format.CSV && fieldList.isEmpty()) {
        System.err.println(RDBExport.class.getName() + ": csv output requires specification of field list");
        System.exit(2);
    }
    // JSON output with fieldList missing "_id": prepend it, since JSON
    // records are keyed by id (fieldList may be immutable, so copy first)
    if ((format == Format.JSON || format == Format.JSONARRAY) && !fieldList.isEmpty() && !fieldList.contains("_id")) {
        fieldList = new ArrayList<String>(fieldList);
        fieldList.add(0, "_id");
    }
    if (dumpfile == null && url == null) {
        // exactly one input source (dump file or JDBC URL) must be given
        System.err.println(RDBExport.class.getName() + ": must use either dump file or JDBC URL");
        printUsage();
        System.exit(2);
    } else if (dumpfile != null) {
        // dump-file mode: default column layout matches the RDB nodes table
        columns = (columns == null) ? "id, modified, hasbinary, deletedonce, cmodcount, modcount, dsize, data, bdata" : columns;
        List<String> columnList = Arrays.asList(columns.toLowerCase(Locale.ENGLISH).replace(" ", "").split(","));
        dumpFile(dumpfile, lobdir, format, out, fieldList, columnList, ser);
    } else {
        // JDBC mode: column names come from the result set, not from --columns
        if (columns != null) {
            System.err.println(RDBExport.class.getName() + ": column names ignored when using JDBC");
        }
        dumpJDBC(url, user, pw, table, query, format, out, fieldList, ser);
    }
    out.flush();
    out.close();
}
Also used : PrintStream(java.io.PrintStream) MemoryDocumentStore(org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) FileOutputStream(java.io.FileOutputStream) ArrayList(java.util.ArrayList) List(java.util.List) HashSet(java.util.HashSet)

Aggregations

MemoryDocumentStore (org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore)132 Test (org.junit.Test)113 NodeBuilder (org.apache.jackrabbit.oak.spi.state.NodeBuilder)72 Clock (org.apache.jackrabbit.oak.stats.Clock)21 NodeState (org.apache.jackrabbit.oak.spi.state.NodeState)19 Before (org.junit.Before)13 CommitFailedException (org.apache.jackrabbit.oak.api.CommitFailedException)11 MemoryBlobStore (org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore)10 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)9 Utils.getRootDocument (org.apache.jackrabbit.oak.plugins.document.util.Utils.getRootDocument)9 Random (java.util.Random)8 CONSTRAINT (org.apache.jackrabbit.oak.api.CommitFailedException.CONSTRAINT)8 DocumentNodeStore (org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore)7 ChildNodeEntry (org.apache.jackrabbit.oak.spi.state.ChildNodeEntry)6 ArrayList (java.util.ArrayList)5 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)5 StringSort (org.apache.jackrabbit.oak.commons.sort.StringSort)5 List (java.util.List)4 Semaphore (java.util.concurrent.Semaphore)4 VersionGCStats (org.apache.jackrabbit.oak.plugins.document.VersionGarbageCollector.VersionGCStats)4