Use of org.apache.jackrabbit.oak.plugins.document.DocumentMK in project jackrabbit-oak by Apache.
Class ConcurrentWriteMultipleMkMongoTest, method doTest.
private void doTest(int numberOfNodes) throws Exception {
    int numberOfChildren = 10;
    int numberOfMks = 3;
    String[] prefixes = new String[] { "a", "b", "c", "d", "e", "f" };
    ExecutorService executor = Executors.newFixedThreadPool(numberOfMks);
    List<DocumentMK> mks = new ArrayList<DocumentMK>();
    for (int i = 0; i < numberOfMks; i++) {
        String diff = buildPyramidDiff("/", 0, numberOfChildren, numberOfNodes, prefixes[i], new StringBuilder()).toString();
        DocumentMK mk = new DocumentMK.Builder().open();
        mks.add(mk);
        GenericWriteTask task = new GenericWriteTask("mk-" + i, mk, diff, 10);
        executor.execute(task);
    }
    executor.shutdown();
    executor.awaitTermination(10, TimeUnit.MINUTES);
    for (DocumentMK mk : mks) {
        mk.dispose();
    }
}
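GenericWriteTask and buildPyramidDiff are helpers defined elsewhere in the test class and are not shown by this snippet. A minimal sketch of what the task might look like, assuming the MicroKernel-style DocumentMK.commit(rootPath, jsonDiff, baseRevisionId, message) call and treating the last constructor argument as a batching hint; this is illustrative, not the actual test code:

// Illustrative sketch only: the real GenericWriteTask lives in the Oak test sources.
// Assumption: DocumentMK.commit(rootPath, jsonDiff, baseRevisionId, message) applies a JSOP diff,
// with a null base revision meaning "commit against the current head".
private static class GenericWriteTask implements Runnable {

    private final String name;
    private final DocumentMK mk;
    private final String diff;
    private final int batchSize; // the meaning of the original "10" argument is an assumption

    GenericWriteTask(String name, DocumentMK mk, String diff, int batchSize) {
        this.name = name;
        this.mk = mk;
        this.diff = diff;
        this.batchSize = batchSize;
    }

    @Override
    public void run() {
        // commit the prepared pyramid diff against the current head revision
        mk.commit("/", diff, null, "written by " + name);
    }
}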
Use of org.apache.jackrabbit.oak.plugins.document.DocumentMK in project jackrabbit-oak by Apache.
Class ConcurrentAddNodesClusterIT, method initRepository.
private static void initRepository() throws Exception {
    MongoConnection con = createConnection();
    DocumentMK mk = new DocumentMK.Builder().setMongoDB(con.getDB()).setClusterId(1).open();
    Repository repository = new Jcr(mk.getNodeStore()).createRepository();
    Session session = repository.login(new SimpleCredentials("admin", "admin".toCharArray()));
    session.logout();
    dispose(repository);
    // closes connection as well
    mk.dispose();
}
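The same pattern can be repeated with a different cluster id to attach a second node to the same MongoDB database. A sketch assuming the createConnection() helper used above; it is not part of the original test:

// Sketch: a second cluster node against the same MongoDB, distinguished only by its cluster id.
MongoConnection con2 = createConnection();
DocumentMK mk2 = new DocumentMK.Builder().setMongoDB(con2.getDB()).setClusterId(2).open();
Repository repository2 = new Jcr(mk2.getNodeStore()).createRepository();
Session session2 = repository2.login(new SimpleCredentials("admin", "admin".toCharArray()));
try {
    // both cluster nodes now share the same backing document store
    session2.getRootNode();
} finally {
    session2.logout();
    dispose(repository2);
    // closes the Mongo connection as well
    mk2.dispose();
}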
Use of org.apache.jackrabbit.oak.plugins.document.DocumentMK in project jackrabbit-oak by Apache.
Class DocumentClusterIT, method initRepository.
/**
 * Initialise the repository.
 *
 * @param clazz the current class; used for logging. Cannot be null.
 * @param repos list to which the created repository is added. Cannot be null.
 * @param mks list to which the created MK is added. Cannot be null.
 * @param clusterId the cluster ID to use. Must be greater than 0.
 * @param asyncDelay the async delay to set. For the default, use {@link #NOT_PROVIDED}.
 * @throws Exception
 */
protected void initRepository(@Nonnull final Class<?> clazz, @Nonnull final List<Repository> repos,
                              @Nonnull final List<DocumentMK> mks, final int clusterId,
                              final int asyncDelay) throws Exception {
    DocumentMK.Builder builder = new DocumentMK.Builder();
    builder.setMongoDB(createConnection(checkNotNull(clazz)).getDB());
    if (asyncDelay != NOT_PROVIDED) {
        builder.setAsyncDelay(asyncDelay);
    }
    builder.setClusterId(clusterId);
    DocumentMK mk = builder.open();
    Jcr j = getJcr(mk.getNodeStore());
    Set<IndexEditorProvider> ieps = additionalIndexEditorProviders();
    if (ieps != null) {
        for (IndexEditorProvider p : ieps) {
            j = j.with(p);
        }
    }
    if (isAsyncIndexing()) {
        j = j.withAsyncIndexing();
    }
    Repository repository = j.createRepository();
    checkNotNull(repos).add(repository);
    checkNotNull(mks).add(mk);
}
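A caller typically invokes this once per cluster node; a minimal sketch assuming a two-node setup with the default async delay (the loop bounds are illustrative, not part of the original class):

// Sketch: initialising a two-node cluster with the helper above.
List<Repository> repos = new ArrayList<Repository>();
List<DocumentMK> mks = new ArrayList<DocumentMK>();
for (int clusterId = 1; clusterId <= 2; clusterId++) {
    initRepository(getClass(), repos, mks, clusterId, NOT_PROVIDED);
}
// ... exercise repos.get(0) and repos.get(1) against the shared store ...
for (Repository r : repos) {
    dispose(r);
}
for (DocumentMK mk : mks) {
    mk.dispose();
}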
Use of org.apache.jackrabbit.oak.plugins.document.DocumentMK in project jackrabbit-oak by Apache.
Class DocumentClusterIT, method before.
@Before
public void before() throws Exception {
    dropDB(this.getClass());
    List<Repository> rs = new ArrayList<Repository>();
    List<DocumentMK> ds = new ArrayList<DocumentMK>();
    initRepository(this.getClass(), rs, ds, 1, NOT_PROVIDED);
    Repository repository = rs.get(0);
    DocumentMK mk = ds.get(0);
    Session session = repository.login(ADMIN);
    session.logout();
    dispose(repository);
    // closes connection as well
    mk.dispose();
}
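ADMIN is a credentials constant inherited from the test base class and is not shown here. A plausible shape, assuming the same admin/admin login used by the snippets above (the exact definition is an assumption):

// Assumption: ADMIN mirrors the admin/admin SimpleCredentials used elsewhere in these tests.
static final Credentials ADMIN = new SimpleCredentials("admin", "admin".toCharArray());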
Use of org.apache.jackrabbit.oak.plugins.document.DocumentMK in project jackrabbit-oak by Apache.
Class DocumentClusterIT, method alignCluster.
/**
 * <p>
 * Ensures that the cluster is aligned by running all the background operations.
 * </p>
 *
 * @param mks the list of {@link DocumentMK} composing the cluster. Cannot be null.
 */
static void alignCluster(@Nonnull final List<DocumentMK> mks) {
    // in a first round let all MKs run their background update
    for (DocumentMK mk : mks) {
        mk.getNodeStore().runBackgroundOperations();
    }
    String id = Utils.getIdFromPath("/");
    // in the second round each MK will pick up changes from the others
    for (DocumentMK mk : mks) {
        // invalidate root document to make sure background read
        // is forced to fetch the document from the store
        mk.getDocumentStore().invalidateCache(Collections.singleton(id));
        mk.getNodeStore().runBackgroundOperations();
    }
}
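In a test, alignCluster is typically called after writing on one cluster node so the change becomes visible on the others. A minimal sketch assuming the repos/mks lists and ADMIN credentials from the snippets above; node and path names are illustrative:

// Sketch: write on node 1, align the cluster, then read on node 2. Not part of the original class.
Session s1 = repos.get(0).login(ADMIN);
s1.getRootNode().addNode("visible-everywhere");
s1.save();
s1.logout();

// run the background update/read cycles on every DocumentMK in the cluster
alignCluster(mks);

Session s2 = repos.get(1).login(ADMIN);
try {
    // the node written on the first cluster node is now visible here
    s2.getRootNode().getNode("visible-everywhere");
} finally {
    s2.logout();
}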