use of org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl in project jackrabbit-oak by apache.
the class AbstractRestrictionProviderTest method before.
@Before
@Override
public void before() throws Exception {
    super.before();
    valueFactory = new ValueFactoryImpl(root, namePathMapper);
    globValue = valueFactory.createValue("*");
    nameValue = valueFactory.createValue("nt:file", PropertyType.NAME);
    nameValues = new Value[] {
            valueFactory.createValue("nt:folder", PropertyType.NAME),
            valueFactory.createValue("nt:file", PropertyType.NAME)
    };
    RestrictionDefinition glob = new RestrictionDefinitionImpl(REP_GLOB, Type.STRING, false);
    RestrictionDefinition nts = new RestrictionDefinitionImpl(REP_NT_NAMES, Type.NAMES, false);
    RestrictionDefinition mand = new RestrictionDefinitionImpl("mandatory", Type.BOOLEAN, true);
    supported = ImmutableMap.of(glob.getName(), glob, nts.getName(), nts, mand.getName(), mand);
    restrictionProvider = new TestProvider(supported);
}
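The ValueFactoryImpl built here converts JCR Values into the Oak property states that back restrictions, so the values created above can be fed straight into the provider under test. A minimal sketch of how this fixture might be exercised; the path "/testPath" and the assertions are illustrative and not part of the original test:

Restriction globRestriction = restrictionProvider.createRestriction("/testPath", REP_GLOB, globValue);
Restriction ntNamesRestriction = restrictionProvider.createRestriction("/testPath", REP_NT_NAMES, nameValues);
// both definitions were registered as supported in before(), so no exception is expected
assertEquals(REP_GLOB, globRestriction.getDefinition().getName());
assertFalse(globRestriction.getDefinition().isMandatory());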
use of org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl in project jackrabbit-oak by apache.
the class ItemNameRestrictionTest method before.
@Override
public void before() throws Exception {
    super.before();
    Tree rootTree = root.getTree("/");
    NodeUtil f = new NodeUtil(rootTree).getOrAddTree("a/d/b/e/c/f", NodeTypeConstants.NT_OAK_UNSTRUCTURED);
    NodeUtil c = f.getParent();
    c.setString("prop", "value");
    c.setString("a", "value");
    testPrincipal = getTestUser().getPrincipal();
    AccessControlManager acMgr = getAccessControlManager(root);
    JackrabbitAccessControlList acl = AccessControlUtils.getAccessControlList(acMgr, "/a");
    vf = new ValueFactoryImpl(root, NamePathMapper.DEFAULT);
    acl.addEntry(testPrincipal,
            privilegesFromNames(PrivilegeConstants.JCR_READ, PrivilegeConstants.REP_ADD_PROPERTIES,
                    PrivilegeConstants.JCR_ADD_CHILD_NODES, PrivilegeConstants.JCR_REMOVE_NODE),
            true,
            Collections.<String, Value>emptyMap(),
            ImmutableMap.of(AccessControlConstants.REP_ITEM_NAMES, new Value[] {
                    vf.createValue("a", PropertyType.NAME),
                    vf.createValue("b", PropertyType.NAME),
                    vf.createValue("c", PropertyType.NAME) }));
    acMgr.setPolicy(acl.getPath(), acl);
    UserManager uMgr = getUserManager(root);
    testGroup = uMgr.createGroup("testGroup" + UUID.randomUUID());
    root.commit();
    testSession = createTestSession();
}
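Because the ACE on /a carries a rep:itemNames restriction limited to the names a, b and c, the privileges granted above only apply to items with one of those names. A hypothetical read check against the restricted session could look like this; the exact trees asserted here are illustrative, not taken from the original test:

Root testRoot = testSession.getLatestRoot();
// "a" is one of the restricted item names, so the node at /a is readable
assertTrue(testRoot.getTree("/a").exists());
// "d" is not listed in rep:itemNames, so /a/d stays invisible to the test session
assertFalse(testRoot.getTree("/a/d").exists());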
use of org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl in project jackrabbit-oak by apache.
the class LdapLoginTestBase method testSyncUpdate.
@Test
public void testSyncUpdate() throws Exception {
    // create user upfront in order to test update mode
    Authorizable user = userManager.createUser(USER_ID, null);
    ExternalUser externalUser = idp.getUser(USER_ID);
    user.setProperty("rep:externalId",
            new ValueFactoryImpl(root, NamePathMapper.DEFAULT).createValue(externalUser.getExternalId().getString()));
    root.commit();
    ContentSession cs = null;
    try {
        cs = login(new SimpleCredentials(USER_ID, USER_PWD.toCharArray()));
        root.refresh();
        user = userManager.getAuthorizable(USER_ID);
        assertNotNull(user);
        assertTrue(user.hasProperty(USER_PROP));
        assertNull(userManager.getAuthorizable(GROUP_DN));
    } finally {
        if (cs != null) {
            cs.close();
        }
        options.clear();
    }
}
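The test creates the user up front and stamps it with rep:externalId so that the subsequent LDAP login runs the sync handler in update mode instead of creating a new authorizable. The property value is simply the serialized ExternalIdentityRef, which combines the external id with the provider name. A sketch of what such a value typically looks like; the DN and provider name below are made up:

// serialized form is "<externalId>;<providerName>", e.g. "uid=testUser,ou=users,o=example;ldap"
ExternalIdentityRef ref = new ExternalIdentityRef("uid=testUser,ou=users,o=example", "ldap");
Value externalId = new ValueFactoryImpl(root, NamePathMapper.DEFAULT).createValue(ref.getString());
user.setProperty("rep:externalId", externalId);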
use of org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl in project jackrabbit-oak by apache.
the class ExternalLoginModule method syncUser.
/**
 * Initiates synchronization of the external user.
 *
 * @param user the external user
 * @throws SyncException if an error occurs
 */
private void syncUser(@Nonnull ExternalUser user) throws SyncException {
    Root root = getRoot();
    if (root == null) {
        throw new SyncException("Cannot synchronize user. root == null");
    }
    UserManager userManager = getUserManager();
    if (userManager == null) {
        throw new SyncException("Cannot synchronize user. userManager == null");
    }
    int numAttempt = 0;
    while (numAttempt++ < MAX_SYNC_ATTEMPTS) {
        SyncContext context = null;
        try {
            DebugTimer timer = new DebugTimer();
            context = syncHandler.createContext(idp, userManager, new ValueFactoryImpl(root, NamePathMapper.DEFAULT));
            SyncResult syncResult = context.sync(user);
            timer.mark("sync");
            if (root.hasPendingChanges()) {
                root.commit();
                timer.mark("commit");
            }
            debug("syncUser({}) {}, status: {}", user.getId(), timer.getString(), syncResult.getStatus().toString());
            return;
        } catch (CommitFailedException e) {
            log.warn("User synchronization failed during commit: {}. (attempt {}/{})", e.toString(), numAttempt, MAX_SYNC_ATTEMPTS);
            root.refresh();
        } finally {
            if (context != null) {
                context.close();
            }
        }
    }
    throw new SyncException("User synchronization failed during commit after " + MAX_SYNC_ATTEMPTS + " attempts");
}
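The method retries the whole synchronization when the commit fails (for example because another cluster node synced the same user concurrently), refreshing the root before the next attempt. The ValueFactoryImpl is handed to the sync context so identity properties can be converted into JCR values against the current Root. Outside of the login module the same pattern can be reproduced with the default sync implementation; the following is a hypothetical standalone sketch, where idp, userManager, root and externalUser stand in for whatever the caller has at hand:

SyncContext ctx = new DefaultSyncContext(new DefaultSyncConfig(), idp, userManager,
        new ValueFactoryImpl(root, NamePathMapper.DEFAULT));
try {
    SyncResult result = ctx.sync(externalUser);
    if (root.hasPendingChanges()) {
        root.commit();
    }
} finally {
    ctx.close();
}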
use of org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl in project jackrabbit-oak by apache.
the class RepositoryUpgrade method copy.
/**
 * Copies the full content from the source to the target repository.
 * <p>
 * The source repository <strong>must not be modified</strong> while
 * the copy operation is running to avoid an inconsistent copy.
 * <p>
 * Note that both the source and the target repository must be closed
 * during the copy operation as this method requires exclusive access
 * to the repositories.
 *
 * @param initializer optional extra repository initializer to use
 * @throws RepositoryException if the copy operation fails
 */
public void copy(RepositoryInitializer initializer) throws RepositoryException {
    if (checkLongNames) {
        assertNoLongNames();
    }
    RepositoryConfig config = source.getRepositoryConfig();
    logger.info("Copying repository content from {} to Oak", config.getHomeDir());
    try {
        NodeBuilder targetBuilder = target.getRoot().builder();
        if (VersionHistoryUtil.getVersionStorage(targetBuilder).exists() && !versionCopyConfiguration.skipOrphanedVersionsCopy()) {
            logger.warn("The version storage on destination already exists. Orphaned version histories will be skipped.");
            versionCopyConfiguration.setCopyOrphanedVersions(null);
        }
        final Root upgradeRoot = new UpgradeRoot(targetBuilder);
        String workspaceName = source.getRepositoryConfig().getDefaultWorkspaceName();
        SecurityProviderImpl security = new SecurityProviderImpl(mapSecurityConfig(config.getSecurityConfig()));
        if (skipInitialization) {
            logger.info("Skipping the repository initialization");
        } else {
            // init target repository first
            logger.info("Initializing initial repository content from {}", config.getHomeDir());
            new InitialContent().initialize(targetBuilder);
            if (initializer != null) {
                initializer.initialize(targetBuilder);
            }
            logger.debug("InitialContent completed from {}", config.getHomeDir());
            for (SecurityConfiguration sc : security.getConfigurations()) {
                RepositoryInitializer ri = sc.getRepositoryInitializer();
                ri.initialize(targetBuilder);
                logger.debug("Repository initializer '" + ri.getClass().getName() + "' completed", config.getHomeDir());
            }
            for (SecurityConfiguration sc : security.getConfigurations()) {
                WorkspaceInitializer wi = sc.getWorkspaceInitializer();
                wi.initialize(targetBuilder, workspaceName);
                logger.debug("Workspace initializer '" + wi.getClass().getName() + "' completed", config.getHomeDir());
            }
        }
        HashBiMap<String, String> uriToPrefix = HashBiMap.create();
        logger.info("Copying registered namespaces");
        copyNamespaces(targetBuilder, uriToPrefix);
        logger.debug("Namespace registration completed.");
        if (skipInitialization) {
            logger.info("Skipping registering node types and privileges");
        } else {
            logger.info("Copying registered node types");
            NodeTypeManager ntMgr = new ReadWriteNodeTypeManager() {
                @Override
                protected Tree getTypes() {
                    return upgradeRoot.getTree(NODE_TYPES_PATH);
                }

                @Nonnull
                @Override
                protected Root getWriteRoot() {
                    return upgradeRoot;
                }
            };
            copyNodeTypes(ntMgr, new ValueFactoryImpl(upgradeRoot, NamePathMapper.DEFAULT));
            logger.debug("Node type registration completed.");
            // migrate privileges
            logger.info("Copying registered privileges");
            PrivilegeConfiguration privilegeConfiguration = security.getConfiguration(PrivilegeConfiguration.class);
            copyCustomPrivileges(privilegeConfiguration.getPrivilegeManager(upgradeRoot, NamePathMapper.DEFAULT));
            logger.debug("Privilege registration completed.");
            // Triggers compilation of type information, which we need for
            // the type predicates used by the bulk copy operations below.
            new TypeEditorProvider(false).getRootEditor(targetBuilder.getBaseState(), targetBuilder.getNodeState(), targetBuilder, null);
        }
        final NodeState reportingSourceRoot = ReportingNodeState.wrap(
                JackrabbitNodeState.createRootNodeState(source, workspaceName, targetBuilder.getNodeState(), uriToPrefix, copyBinariesByReference, skipOnError),
                new LoggingReporter(logger, "Migrating", LOG_NODE_COPY, -1));
        final NodeState sourceRoot;
        if (filterLongNames) {
            sourceRoot = NameFilteringNodeState.wrap(reportingSourceRoot);
        } else {
            sourceRoot = reportingSourceRoot;
        }
        final Stopwatch watch = Stopwatch.createStarted();
        logger.info("Copying workspace content");
        copyWorkspace(sourceRoot, targetBuilder, workspaceName);
        // on TarMK this call triggers the actual copy
        targetBuilder.getNodeState();
        logger.info("Upgrading workspace content completed in {}s ({})", watch.elapsed(TimeUnit.SECONDS), watch);
        if (!versionCopyConfiguration.skipOrphanedVersionsCopy()) {
            logger.info("Copying version storage");
            watch.reset().start();
            copyVersionStorage(targetBuilder, getVersionStorage(sourceRoot), getVersionStorage(targetBuilder), versionCopyConfiguration);
            // on TarMK this call triggers the actual copy
            targetBuilder.getNodeState();
            logger.info("Version storage copied in {}s ({})", watch.elapsed(TimeUnit.SECONDS), watch);
        } else {
            logger.info("Skipping the version storage as the copyOrphanedVersions is set to false");
        }
        watch.reset().start();
        logger.info("Applying default commit hooks");
        // TODO: default hooks?
        List<CommitHook> hooks = newArrayList();
        UserConfiguration userConf = security.getConfiguration(UserConfiguration.class);
        String groupsPath = userConf.getParameters().getConfigValue(UserConstants.PARAM_GROUP_PATH, UserConstants.DEFAULT_GROUP_PATH);
        String usersPath = userConf.getParameters().getConfigValue(UserConstants.PARAM_USER_PATH, UserConstants.DEFAULT_USER_PATH);
        // hooks specific to the upgrade, need to run first
        hooks.add(new EditorHook(new CompositeEditorProvider(
                new RestrictionEditorProvider(),
                new GroupEditorProvider(groupsPath),
                // copy referenced version histories
                new VersionableEditor.Provider(sourceRoot, workspaceName, versionCopyConfiguration),
                new SameNameSiblingsEditor.Provider(),
                AuthorizableFolderEditor.provider(groupsPath, usersPath))));
        // this editor works on the VersionableEditor output, so it can't be
        // a part of the same EditorHook
        hooks.add(new EditorHook(new VersionablePropertiesEditor.Provider()));
        // security-related hooks
        for (SecurityConfiguration sc : security.getConfigurations()) {
            hooks.addAll(sc.getCommitHooks(workspaceName));
        }
        if (customCommitHooks != null) {
            hooks.addAll(customCommitHooks);
        }
        // type validation, reference and indexing hooks
        hooks.add(new EditorHook(new CompositeEditorProvider(createTypeEditorProvider(), createIndexEditorProvider())));
        target.merge(targetBuilder, new LoggingCompositeHook(hooks, source, overrideEarlyShutdown()), CommitInfo.EMPTY);
        logger.info("Processing commit hooks completed in {}s ({})", watch.elapsed(TimeUnit.SECONDS), watch);
        logger.debug("Repository upgrade completed.");
    } catch (Exception e) {
        throw new RepositoryException("Failed to copy content", e);
    }
}
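Within this method the ValueFactoryImpl is only needed while registering node types, because the ReadWriteNodeTypeManager requires a JCR ValueFactory to build default values and constraints against the upgrade root. The copy itself is usually driven by a small piece of caller code; the following is a hypothetical sketch, where the source path is made up and a MemoryNodeStore stands in purely for illustration (a real migration would target a segment or document store):

RepositoryConfig config = RepositoryConfig.create(new File("/path/to/jackrabbit2/repository"));
RepositoryContext source = RepositoryContext.create(config);
try {
    NodeStore target = new MemoryNodeStore();
    new RepositoryUpgrade(source, target).copy(null);   // null: no extra initializer
} finally {
    source.getRepository().shutdown();
}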