Usage example of org.apache.jackrabbit.oak.spi.state.ApplyDiff in the Apache jackrabbit-oak project — class ExternalIdentityRepositoryInitializer, method initialize.
/**
 * Creates the index definitions required by the external identity support
 * ("rep:externalId" and "rep:externalPrincipalNames") if they do not exist yet.
 * <p>
 * The changes are staged on a throw-away {@link MemoryNodeStore} so that the
 * Root/Tree API (with namespace and type editors) can be used, and the
 * resulting diff is then replayed onto the given builder via {@link ApplyDiff}.
 *
 * @param builder the root node builder to initialize
 * @throws RuntimeException if index creation or the commit fails
 */
@Override
public void initialize(@Nonnull NodeBuilder builder) {
    NodeState base = builder.getNodeState();
    NodeStore store = new MemoryNodeStore(base);
    String errorMsg = "Failed to initialize external identity content.";
    try {
        Root root = RootFactory.createSystemRoot(store,
                new EditorHook(new CompositeEditorProvider(
                        new NamespaceEditorProvider(), new TypeEditorProvider())),
                null, null, null, null);
        // create index definition for "rep:externalId" and "rep:externalPrincipalNames"
        Tree rootTree = root.getTree(PathUtils.ROOT_PATH);
        checkState(rootTree.exists());
        Tree index = TreeUtil.getOrAddChild(rootTree, IndexConstants.INDEX_DEFINITIONS_NAME, JcrConstants.NT_UNSTRUCTURED);
        // the uniqueness constraint on rep:externalId is optional (OAK config flag)
        if (enforceUniqueIds && !index.hasChild("externalId")) {
            Tree definition = IndexUtils.createIndexDefinition(index, "externalId", true,
                    new String[] { ExternalIdentityConstants.REP_EXTERNAL_ID });
            definition.setProperty("info", "Oak index assuring uniqueness of rep:externalId properties.");
        }
        if (!index.hasChild("externalPrincipalNames")) {
            Tree definition = IndexUtils.createIndexDefinition(index, "externalPrincipalNames", false,
                    new String[] { ExternalIdentityConstants.REP_EXTERNAL_PRINCIPAL_NAMES });
            definition.setProperty("info", "Oak index used by the principal management provided by the external authentication module.");
        }
        if (root.hasPendingChanges()) {
            root.commit();
        }
    } catch (RepositoryException | CommitFailedException e) {
        // both failure modes are handled identically; multi-catch replaces the
        // two previously duplicated catch blocks
        log.error(errorMsg, e);
        throw new RuntimeException(e);
    }
    // replay the changes staged on the in-memory store onto the target builder
    NodeState target = store.getRoot();
    target.compareAgainstBaseState(base, new ApplyDiff(builder));
}
Usage example of org.apache.jackrabbit.oak.spi.state.ApplyDiff in the Apache jackrabbit-oak project — class PrivilegeInitializer, method initialize.
/**
 * Ensures the {@code rep:privileges} system node exists and is populated with
 * the built-in privilege definitions. Runs only once: if the privileges node
 * is already present, nothing is done.
 *
 * @param builder the root node builder to initialize
 * @throws RuntimeException if writing the built-in privilege definitions fails
 */
@Override
public void initialize(@Nonnull NodeBuilder builder) {
    NodeBuilder systemNode = builder.child(JcrConstants.JCR_SYSTEM);
    systemNode.setProperty(JcrConstants.JCR_PRIMARYTYPE, NodeTypeConstants.NT_REP_SYSTEM, Type.NAME);
    if (systemNode.hasChildNode(REP_PRIVILEGES)) {
        // privileges already registered; nothing to do
        return;
    }
    NodeBuilder privilegesNode = systemNode.child(REP_PRIVILEGES);
    privilegesNode.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_PRIVILEGES, Type.NAME);
    // squeeze node state before it is passed to store (OAK-2411)
    NodeState before = squeeze(builder.getNodeState());
    NodeStore scratchStore = new MemoryNodeStore(before);
    try {
        Root systemRoot = RootFactory.createSystemRoot(scratchStore, null, null, null, null, null);
        new PrivilegeDefinitionWriter(systemRoot).writeBuiltInDefinitions();
    } catch (RepositoryException e) {
        log.error("Failed to register built-in privileges", e);
        throw new RuntimeException(e);
    }
    // copy the definitions written on the scratch store back to the builder
    scratchStore.getRoot().compareAgainstBaseState(before, new ApplyDiff(builder));
}
Usage example of org.apache.jackrabbit.oak.spi.state.ApplyDiff in the Apache jackrabbit-oak project — class RepositorySidegrade, method migrateWithCheckpoints.
/**
 * Migrates the source store to the target store checkpoint-by-checkpoint,
 * applying incremental diffs instead of a full copy, then migrates the head
 * state and rewrites the checkpoint references stored under {@code /:async}.
 *
 * @return {@code true} if the checkpoint-based migration was performed,
 *         {@code false} if the source checkpoints could not be retrieved
 * @throws CommitFailedException if merging into the target store fails
 */
private boolean migrateWithCheckpoints() throws CommitFailedException {
    List<CheckpointRetriever.Checkpoint> checkpoints = CheckpointRetriever.getCheckpoints(source);
    if (checkpoints == null) {
        // checkpoints unavailable; caller falls back to another migration strategy
        return false;
    }
    // maps the async-indexer name -> new checkpoint name in the target store
    Map<String, String> nameToRevision = new LinkedHashMap<>();
    // maps old (source) checkpoint name -> new (target) checkpoint name
    Map<String, String> checkpointSegmentToDoc = new LinkedHashMap<>();
    NodeState previousRoot = EmptyNodeState.EMPTY_NODE;
    NodeBuilder targetRoot = target.getRoot().builder();
    for (CheckpointRetriever.Checkpoint checkpoint : checkpoints) {
        NodeState checkpointRoot = source.retrieve(checkpoint.getName());
        boolean tracePaths;
        if (previousRoot == EmptyNodeState.EMPTY_NODE) {
            // first checkpoint: everything is copied, so path tracing is enabled
            LOG.info("Migrating first checkpoint: {}", checkpoint.getName());
            tracePaths = true;
        } else {
            LOG.info("Applying diff to {}", checkpoint.getName());
            tracePaths = false;
        }
        NodeState currentRoot = wrapSource(checkpointRoot, tracePaths, true);
        NodeState baseRoot = previousRoot == EmptyNodeState.EMPTY_NODE ? previousRoot : wrapSource(previousRoot, false, true);
        // copy only the delta between the previous and current checkpoint
        currentRoot.compareAgainstBaseState(baseRoot, new ApplyDiff(targetRoot));
        target.merge(targetRoot, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        previousRoot = checkpointRoot;
        // recreate the checkpoint on the target with the remaining lifetime
        Map<String, String> checkpointInfo = source.checkpointInfo(checkpoint.getName());
        String newCheckpointName = target.checkpoint(checkpoint.getExpiryTime() - System.currentTimeMillis(), checkpointInfo);
        if (checkpointInfo.containsKey("name")) {
            nameToRevision.put(checkpointInfo.get("name"), newCheckpointName);
        }
        checkpointSegmentToDoc.put(checkpoint.getName(), newCheckpointName);
    }
    // migrate the current head on top of the last migrated checkpoint
    NodeState sourceRoot = source.getRoot();
    boolean tracePaths;
    if (previousRoot == EmptyNodeState.EMPTY_NODE) {
        LOG.info("No checkpoints found; migrating head");
        tracePaths = true;
    } else {
        LOG.info("Applying diff to head");
        tracePaths = false;
    }
    NodeState currentRoot = wrapSource(sourceRoot, tracePaths, true);
    NodeState baseRoot = previousRoot == EmptyNodeState.EMPTY_NODE ? previousRoot : wrapSource(previousRoot, false, true);
    currentRoot.compareAgainstBaseState(baseRoot, new ApplyDiff(targetRoot));
    // the async indexer records checkpoint names under /:async; rewrite them
    // to the names of the newly created target checkpoints
    LOG.info("Rewriting checkpoint names in /:async {}", nameToRevision);
    NodeBuilder async = targetRoot.getChildNode(":async");
    for (Map.Entry<String, String> e : nameToRevision.entrySet()) {
        async.setProperty(e.getKey(), e.getValue(), Type.STRING);
        // the "<name>-temp" property holds a multi-valued list of in-progress
        // checkpoint references; substitute old names in place where present
        PropertyState temp = async.getProperty(e.getKey() + "-temp");
        if (temp == null) {
            continue;
        }
        List<String> tempValues = Lists.newArrayList(temp.getValue(Type.STRINGS));
        for (Map.Entry<String, String> sToD : checkpointSegmentToDoc.entrySet()) {
            if (tempValues.contains(sToD.getKey())) {
                tempValues.set(tempValues.indexOf(sToD.getKey()), sToD.getValue());
            }
        }
        async.setProperty(e.getKey() + "-temp", tempValues, Type.STRINGS);
    }
    target.merge(targetRoot, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    return true;
}
Usage example of org.apache.jackrabbit.oak.spi.state.ApplyDiff in the Apache jackrabbit-oak project — class LuceneIndexAggregationTest2, method createRepository.
/**
 * Builds the test content repository with a Lucene index provider and an
 * {@link InitialContent} variant that additionally registers the node types
 * from {@code test_nodetypes.cnd} (staged on an in-memory store and replayed
 * onto the builder with {@link ApplyDiff}).
 *
 * @return the configured test {@link ContentRepository}
 */
@Override
protected ContentRepository createRepository() {
    LuceneIndexProvider provider = new LuceneIndexProvider();
    return new Oak().with(new InitialContent() {

        @Override
        public void initialize(@Nonnull NodeBuilder builder) {
            super.initialize(builder);
            // registering additional node types for wider testing;
            // try-with-resources replaces the previous manual close in finally.
            // NOTE(review): an IOException thrown on close is now routed to the
            // catch block (test failure) instead of being logged at debug —
            // acceptable for a classpath resource stream, confirm if in doubt.
            try (InputStream stream = LuceneIndexAggregationTest2.class.getResourceAsStream("test_nodetypes.cnd")) {
                NodeState base = builder.getNodeState();
                NodeStore store = new MemoryNodeStore(base);
                Root root = RootFactory.createSystemRoot(store,
                        new EditorHook(new CompositeEditorProvider(
                                new NamespaceEditorProvider(), new TypeEditorProvider())),
                        null, null, null, null);
                NodeTypeRegistry.register(root, stream, "testing node types");
                // replay the node-type registration onto the builder
                NodeState target = store.getRoot();
                target.compareAgainstBaseState(base, new ApplyDiff(builder));
            } catch (Exception e) {
                LOG.error("Error while registering required node types. Failing here", e);
                fail("Error while registering required node types");
            } finally {
                printNodeTypes(builder);
            }
        }
    }).with(new OpenSecurityProvider())
            .with(((QueryIndexProvider) provider.with(getNodeAggregator())))
            .with((Observer) provider)
            .with(new LuceneIndexEditorProvider())
            .createContentRepository();
}
Usage example of org.apache.jackrabbit.oak.spi.state.ApplyDiff in the Apache jackrabbit-oak project — class CompositeNodeStore, method merge.
/**
 * Merges the given composite builder: runs the commit hooks against the
 * current root, folds the processed state back into the builder, then merges
 * each mount's partial builder into its backing store and notifies observers.
 *
 * @param builder    must be a {@link CompositeNodeBuilder} rooted at "/"
 * @param commitHook the hook to process the commit with
 * @param info       the commit info passed to hooks, stores and observers
 * @return the new composite root state after the merge
 * @throws CommitFailedException if a hook or an underlying store rejects the commit
 */
@Override
public NodeState merge(NodeBuilder builder, CommitHook commitHook, CommitInfo info) throws CommitFailedException {
    checkArgument(builder instanceof CompositeNodeBuilder);
    CompositeNodeBuilder compositeBuilder = (CompositeNodeBuilder) builder;
    if (!PathUtils.denotesRoot(compositeBuilder.getPath())) {
        throw new IllegalArgumentException();
    }
    // run commit hooks, then apply whatever they changed onto the builder
    NodeState afterHooks = commitHook.processCommit(getRoot(), rebase(compositeBuilder), info);
    afterHooks.compareAgainstBaseState(builder.getNodeState(), new ApplyDiff(compositeBuilder));
    assertNoChangesOnReadOnlyMounts(compositeBuilder);
    // merge the accumulated changes into each mounted store individually
    Map<MountedNodeStore, NodeState> mergedStates = newHashMap();
    for (MountedNodeStore mount : ctx.getAllMountedNodeStores()) {
        NodeBuilder partialBuilder = compositeBuilder.getBuilders().get(mount);
        NodeState merged = mount.getNodeStore().merge(partialBuilder, EmptyHook.INSTANCE, info);
        mergedStates.put(mount, merged);
    }
    CompositeNodeState mergedRoot = createRootNodeState(mergedStates);
    for (Observer observer : observers) {
        observer.contentChanged(mergedRoot, info);
    }
    return mergedRoot;
}
Aggregations