Use of com.xpn.xwiki.XWikiContext in project xwiki-platform by xwiki.
In the class R40000XWIKI6990DataMigration, the method hibernateMigrate:
@Override
public void hibernateMigrate() throws DataMigrationException, XWikiException {
    final Map<Long, Long> docs = new HashMap<Long, Long>();
    final List<String> customMappedClasses = new ArrayList<String>();
    final Map<Long, Long> objs = new HashMap<Long, Long>();
    final Queue<Map<Long, Long>> stats = new LinkedList<Map<Long, Long>>();
    // Get ids conversion list
    getStore().executeRead(getXWikiContext(), new HibernateCallback<Object>() {
        private void fillDocumentIdConversion(Session session, Map<Long, Long> map) {
            String database = getXWikiContext().getWikiId();
            @SuppressWarnings("unchecked")
            List<Object[]> results = session.createQuery(
                "select doc.id, doc.space, doc.name, doc.defaultLanguage, doc.language from "
                    + XWikiDocument.class.getName() + " as doc").list();
            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String space = (String) result[1];
                String name = (String) result[2];
                String defaultLanguage = (String) result[3];
                String language = (String) result[4];
                // Use a real document, since we need the language to be appended.
                // TODO: Change this when the locale is integrated
                XWikiDocument doc = new XWikiDocument(new DocumentReference(database, space, name));
                doc.setDefaultLanguage(defaultLanguage);
                doc.setLanguage(language);
                long newId = doc.getId();
                if (oldId != newId) {
                    map.put(oldId, newId);
                }
            }
            logProgress("Retrieved %d document IDs to be converted.", map.size());
        }

        private void fillObjectIdConversion(Session session, Map<Long, Long> map) {
            @SuppressWarnings("unchecked")
            List<Object[]> results = session.createQuery(
                "select obj.id, obj.name, obj.className, obj.number from "
                    + BaseObject.class.getName() + " as obj").list();
            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String docName = (String) result[1];
                String className = (String) result[2];
                Integer number = (Integer) result[3];
                BaseObjectReference objRef = new BaseObjectReference(
                    R40000XWIKI6990DataMigration.this.resolver.resolve(className), number,
                    R40000XWIKI6990DataMigration.this.resolver.resolve(docName));
                long newId = Util.getHash(R40000XWIKI6990DataMigration.this.serializer.serialize(objRef));
                if (oldId != newId) {
                    map.put(oldId, newId);
                }
            }
            logProgress("Retrieved %d object IDs to be converted.", map.size());
        }

        private void fillCustomMappingMap(XWikiHibernateStore store, XWikiContext context) throws XWikiException {
            processCustomMappings(store, new CustomMappingCallback() {
                @Override
                public void processCustomMapping(XWikiHibernateStore store, String name, String mapping,
                    boolean hasDynamicMapping) throws XWikiException {
                    if (INTERNAL.equals(mapping) || hasDynamicMapping) {
                        customMappedClasses.add(name);
                    }
                }
            }, context);
            logProgress("Retrieved %d custom mapped classes to be processed.", customMappedClasses.size());
        }

        private void fillStatsConversionMap(Session session, Class<?> klass, Map<Long, Long> map) {
            @SuppressWarnings("unchecked")
            List<Object[]> results = session.createQuery(
                "select stats.id, stats.name, stats.number from " + klass.getName() + " as stats").list();
            for (Object[] result : results) {
                long oldId = (Long) result[0];
                String statsName = (String) result[1];
                Integer number = (Integer) result[2];
                // Do not try to convert broken records which would cause duplicated ids
                if (statsName != null && !statsName.startsWith(".") && !statsName.endsWith(".")) {
                    long newId = R40000XWIKI6990DataMigration.this.statsIdComputer.getId(statsName, number);
                    if (oldId != newId) {
                        map.put(oldId, newId);
                    }
                } else {
                    R40000XWIKI6990DataMigration.this.logger.debug(
                        "Skipping invalid statistical entry [{}] with name [{}]", oldId, statsName);
                }
            }
            String klassName = klass.getName().substring(klass.getName().lastIndexOf('.') + 1);
            logProgress("Retrieved %d %s statistics IDs to be converted.", map.size(),
                klassName.substring(0, klassName.length() - 5).toLowerCase());
        }

        @Override
        public Object doInHibernate(Session session) throws XWikiException {
            try {
                fillDocumentIdConversion(session, docs);
                fillObjectIdConversion(session, objs);
                // Retrieve custom mapped classes
                if (getStore() instanceof XWikiHibernateStore) {
                    fillCustomMappingMap((XWikiHibernateStore) getStore(), getXWikiContext());
                }
                // Retrieve statistics ID conversion
                for (Class<?> statsClass : STATS_CLASSES) {
                    Map<Long, Long> map = new HashMap<Long, Long>();
                    fillStatsConversionMap(session, statsClass, map);
                    stats.add(map);
                }
                session.clear();
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            return null;
        }
    });
    // Cache the configuration and the dialect
    this.configuration = getStore().getConfiguration();
    this.dialect = this.configuration.buildSettings().getDialect();
    // Check configuration for safe mode
    /* True if migration should use safe but slower non-bulk native updates. */
    boolean useSafeUpdates = "1".equals(getXWikiContext().getWiki().Param(
        "xwiki.store.migration." + this.getName() + ".safemode", "0"));
    // Use safe mode if the database has no temporary table supported by hibernate
    useSafeUpdates = useSafeUpdates
        || !this.configuration.buildSettings().getDialect().supportsTemporaryTables();
    // Proceed to document id conversion
    if (!docs.isEmpty()) {
        if (!useSafeUpdates) {
            // Pairs of (table, key) for tables that need manual updates
            final List<String[]> tableToProcess = new ArrayList<String[]>();
            for (Class<?> docClass : DOC_CLASSES) {
                tableToProcess.addAll(getAllTableToProcess(docClass.getName()));
            }
            for (Class<?> docClass : DOCLINK_CLASSES) {
                tableToProcess.addAll(getAllTableToProcess(docClass.getName(), "docId"));
            }
            logProgress("Converting %d document IDs in %d tables...", docs.size(), tableToProcess.size());
            final long[] times = new long[tableToProcess.size() + 1];
            try {
                getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                    @Override
                    public void doBulkIdUpdate() {
                        times[this.timer++] += insertIdUpdates(docs);
                        for (String[] table : tableToProcess) {
                            times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
                        }
                    }
                });
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            if (this.logger.isDebugEnabled()) {
                int timer = 0;
                this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
                for (String[] table : tableToProcess) {
                    this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                }
            }
        } else {
            final List<String[]> docsColl = new ArrayList<String[]>();
            for (Class<?> docClass : DOC_CLASSES) {
                docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
            }
            for (Class<?> docClass : DOCLINK_CLASSES) {
                docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
            }
            logProgress("Converting %d document IDs in %d tables and %d collection tables...", docs.size(),
                DOC_CLASSES.length + DOCLINK_CLASSES.length, docsColl.size());
            final long[] times = new long[DOC_CLASSES.length + DOCLINK_CLASSES.length + docsColl.size()];
            convertDbId(docs, new AbstractIdConversionHibernateCallback() {
                @Override
                public void doSingleUpdate() {
                    for (String[] coll : docsColl) {
                        times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                    }
                    for (Class<?> doclinkClass : DOCLINK_CLASSES) {
                        times[this.timer++] += executeIdUpdate(doclinkClass, DOCID);
                    }
                    times[this.timer++] += executeIdUpdate(XWikiLink.class, DOCID);
                    times[this.timer++] += executeIdUpdate(XWikiRCSNodeInfo.class, ID + '.' + DOCID);
                    times[this.timer++] += executeIdUpdate(XWikiDocument.class, ID);
                }
            });
            if (this.logger.isDebugEnabled()) {
                int timer = 0;
                for (String[] coll : docsColl) {
                    this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
                }
                for (Class<?> doclinkClass : DOCLINK_CLASSES) {
                    this.logger.debug("Time elapsed for {} class: {} ms", doclinkClass.getName(),
                        times[timer++] / 1000000);
                }
                this.logger.debug("Time elapsed for {} class: {} ms", XWikiRCSNodeInfo.class.getName(),
                    times[timer++] / 1000000);
                this.logger.debug("Time elapsed for {} class: {} ms", XWikiDocument.class.getName(),
                    times[timer++] / 1000000);
            }
        }
        logProgress("All document IDs have been converted successfully.");
    } else {
        logProgress("No document IDs to convert, skipping.");
    }
    // Proceed to object id conversion
    if (!objs.isEmpty()) {
        if (!useSafeUpdates) {
            // Pairs of (table, key) for tables that need manual updates
            final List<String[]> tableToProcess = new ArrayList<String[]>();
            PersistentClass objklass = getClassMapping(BaseObject.class.getName());
            tableToProcess.addAll(getCollectionProperties(objklass));
            for (Class<?> propertyClass : PROPERTY_CLASS) {
                tableToProcess.addAll(getAllTableToProcess(propertyClass.getName()));
            }
            for (String customClass : customMappedClasses) {
                tableToProcess.addAll(getAllTableToProcess(customClass));
            }
            tableToProcess.add(new String[] { objklass.getTable().getName(), getKeyColumnName(objklass) });
            logProgress("Converting %d object IDs in %d tables...", objs.size(), tableToProcess.size());
            final long[] times = new long[tableToProcess.size() + 1];
            try {
                getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                    @Override
                    public void doBulkIdUpdate() {
                        times[this.timer++] += insertIdUpdates(objs);
                        for (String[] table : tableToProcess) {
                            times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
                        }
                    }
                });
            } catch (Exception e) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
            }
            if (this.logger.isDebugEnabled()) {
                int timer = 0;
                this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
                for (String[] table : tableToProcess) {
                    this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                }
            }
        } else {
            // Names of property classes that need manual updates
            final List<String> classToProcess = new ArrayList<String>();
            // Names of custom mapped classes that need manual updates
            final List<String> customClassToProcess = new ArrayList<String>();
            // Pairs of (table, key) for collection tables that need manual updates
            final List<String[]> objsColl = new ArrayList<String[]>();
            objsColl.addAll(getCollectionProperties(getClassMapping(BaseObject.class.getName())));
            for (Class<?> propertyClass : PROPERTY_CLASS) {
                String className = propertyClass.getName();
                PersistentClass klass = getClassMapping(className);
                // Add collection tables that will not be updated by cascaded updates
                objsColl.addAll(getCollectionProperties(klass));
                // Skip classes that will be updated by cascaded updates
                if (!this.fkTables.contains(klass.getTable())) {
                    classToProcess.add(className);
                }
            }
            for (String customClass : customMappedClasses) {
                PersistentClass klass = getClassMapping(customClass);
                // Add collection tables that will not be updated by cascaded updates
                objsColl.addAll(getCollectionProperties(klass));
                // Skip classes that will be updated by cascaded updates
                if (!this.fkTables.contains(klass.getTable())) {
                    customClassToProcess.add(customClass);
                }
            }
            logProgress("Converting %d object IDs in %d tables, %d custom mapped tables and %d collection tables...",
                objs.size(), classToProcess.size() + 1, customClassToProcess.size(), objsColl.size());
            final long[] times = new long[classToProcess.size() + 1 + customClassToProcess.size() + objsColl.size()];
            convertDbId(objs, new AbstractIdConversionHibernateCallback() {
                @Override
                public void doSingleUpdate() {
                    for (String[] coll : objsColl) {
                        times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                    }
                    for (String customMappedClass : customClassToProcess) {
                        times[this.timer++] += executeIdUpdate(customMappedClass, ID);
                    }
                    for (String propertyClass : classToProcess) {
                        times[this.timer++] += executeIdUpdate(propertyClass, IDID);
                    }
                    times[this.timer++] += executeIdUpdate(BaseObject.class, ID);
                }
            });
            if (this.logger.isDebugEnabled()) {
                int timer = 0;
                for (String[] coll : objsColl) {
                    this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
                }
                for (String customMappedClass : customClassToProcess) {
                    this.logger.debug("Time elapsed for {} custom table: {} ms", customMappedClass,
                        times[timer++] / 1000000);
                }
                for (String propertyClass : classToProcess) {
                    this.logger.debug("Time elapsed for {} property table: {} ms", propertyClass,
                        times[timer++] / 1000000);
                }
                this.logger.debug("Time elapsed for {} class: {} ms", BaseObject.class.getName(),
                    times[timer++] / 1000000);
            }
        }
        logProgress("All object IDs have been converted successfully.");
    } else {
        logProgress("No object IDs to convert, skipping.");
    }
    // Proceed to statistics id conversions
    for (final Class<?> statsClass : STATS_CLASSES) {
        Map<Long, Long> map = stats.poll();
        String klassName = statsClass.getName().substring(statsClass.getName().lastIndexOf('.') + 1);
        klassName = klassName.substring(0, klassName.length() - 5).toLowerCase();
        if (!map.isEmpty()) {
            if (!useSafeUpdates) {
                final List<String[]> tableToProcess = new ArrayList<String[]>();
                final Map<Long, Long> statids = map;
                PersistentClass statklass = getClassMapping(statsClass.getName());
                tableToProcess.addAll(getCollectionProperties(statklass));
                tableToProcess.add(new String[] { statklass.getTable().getName(), getKeyColumnName(statklass) });
                logProgress("Converting %d %s statistics IDs in %d tables...", map.size(), klassName,
                    tableToProcess.size());
                final long[] times = new long[tableToProcess.size() + 1];
                try {
                    getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
                        @Override
                        public void doBulkIdUpdate() {
                            times[this.timer++] += insertIdUpdates(statids);
                            for (String[] table : tableToProcess) {
                                times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
                            }
                        }
                    });
                } catch (Exception e) {
                    throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                        XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
                }
                if (this.logger.isDebugEnabled()) {
                    int timer = 0;
                    this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
                    for (String[] table : tableToProcess) {
                        this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
                    }
                }
            } else {
                final List<String[]> statsColl = new ArrayList<String[]>();
                statsColl.addAll(getCollectionProperties(getClassMapping(statsClass.getName())));
                logProgress("Converting %d %s statistics IDs in 1 table and %d collection tables...", map.size(),
                    klassName, statsColl.size());
                final long[] times = new long[statsColl.size() + 1];
                convertDbId(map, new AbstractIdConversionHibernateCallback() {
                    @Override
                    public void doSingleUpdate() {
                        for (String[] coll : statsColl) {
                            times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
                        }
                        times[this.timer++] += executeIdUpdate(statsClass, ID);
                    }
                });
                if (this.logger.isDebugEnabled()) {
                    int timer = 0;
                    for (String[] coll : statsColl) {
                        this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
                    }
                    this.logger.debug("Time elapsed for {} class: {} ms", statsClass.getName(),
                        times[timer++] / 1000000);
                }
            }
            logProgress("All %s statistics IDs have been converted successfully.", klassName);
        } else {
            logProgress("No %s statistics IDs to convert, skipping.", klassName);
        }
    }
}
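The safe-mode switch read at the top of the method comes from the wiki configuration. A hypothetical xwiki.cfg entry forcing safe (non-bulk) updates for this migration would look like the line below; the exact key depends on the value returned by getName(), so the name segment used here is an assumption:

# Hypothetical xwiki.cfg entry; the migration name segment is an assumption based on getName().
xwiki.store.migration.R40000XWIKI6990.safemode=1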
Use of com.xpn.xwiki.XWikiContext in project xwiki-platform by xwiki.
In the class HibernateDataMigrationManager, the method initializeEmptyDB:
@Override
protected void initializeEmptyDB() throws DataMigrationException {
    final XWikiContext context = getXWikiContext();
    final XWikiHibernateBaseStore store = getStore();
    final Session originalSession = store.getSession(context);
    final Transaction originalTransaction = store.getTransaction(context);
    store.setSession(null, context);
    store.setTransaction(null, context);
    try {
        updateSchema(null);
        setDBVersion(getLatestVersion());
    } finally {
        store.setSession(originalSession, context);
        store.setTransaction(originalTransaction, context);
    }
}
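This snippet is a save/restore pattern: the session and transaction currently bound to the XWikiContext are detached before the schema update so that updateSchema(null) and setDBVersion(getLatestVersion()) run with their own Hibernate resources, and the finally block guarantees the original session and transaction are re-attached to the context even if schema creation throws.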
Use of com.xpn.xwiki.XWikiContext in project xwiki-platform by xwiki.
In the class DefaultLinkRefactoring, the method saveDocumentPreservingContentAuthor:
/**
 * HACK: Save the given document without changing the content author, because the document may lose or gain
 * programming and script rights as a consequence, and that is not the intent of this operation. Even though the
 * document content field was modified, the change is purely syntactic; the semantics are not affected, so it's not
 * clear whether the content author deserves to be updated or not (even without the side effects).
 *
 * @param document the document to be saved
 * @param comment the revision comment
 * @param minorEdit whether it's a minor edit or not
 * @throws XWikiException if saving the document fails
 */
private void saveDocumentPreservingContentAuthor(XWikiDocument document, String comment, boolean minorEdit)
    throws XWikiException {
    XWikiContext xcontext = this.xcontextProvider.get();
    // Preserve the content author.
    document.setContentDirty(false);
    // Make sure the version is incremented.
    document.setMetaDataDirty(true);
    document.setAuthorReference(xcontext.getUserReference());
    xcontext.getWiki().saveDocument(document, comment, minorEdit, xcontext);
}
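The hack relies on XWikiDocument's dirty flags: clearing the content-dirty flag keeps the stored content author, while setting the metadata-dirty flag still forces a new revision. A hypothetical call site, for instance right after rewriting links in a document, might look like this (the comment string is illustrative, not taken from the source):

// Hypothetical usage: persist a link update without touching the content author.
saveDocumentPreservingContentAuthor(document, "Updated links after a rename.", true);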
Use of com.xpn.xwiki.XWikiContext in project xwiki-platform by xwiki.
In the class RepositoryManager, the method resolveAuthorId:
private String resolveAuthorId(String authorName) {
    String[] authorElements = StringUtils.split(authorName, ' ');
    XWikiContext xcontext = this.xcontextProvider.get();
    String authorId = resolveAuthorIdOnWiki(xcontext.getWikiId(), authorName, authorElements, xcontext);
    if (authorId == null && !xcontext.isMainWiki()) {
        authorId = resolveAuthorIdOnWiki(xcontext.getMainXWiki(), authorName, authorElements, xcontext);
        if (authorId != null) {
            authorId = xcontext.getMainXWiki() + ':' + authorId;
        }
    }
    return authorId != null ? authorId : authorName;
}
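The resolution is a two-step fallback: the author is first looked up on the current wiki; if nothing matches and the current wiki is not the main wiki, the lookup is retried on the main wiki, and the result is prefixed with the main wiki name so the reference stays unambiguous across wikis. If neither lookup matches, the raw author name is returned as-is. For example, assuming a subwiki context and a profile found only on the main wiki, "John Doe" might resolve to something like "xwiki:XWiki.JohnDoe" (the exact serialized form depends on resolveAuthorIdOnWiki, so this value is illustrative).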
Use of com.xpn.xwiki.XWikiContext in project xwiki-platform by xwiki.
In the class RepositoryManager, the method importExtension:
public DocumentReference importExtension(String extensionId, ExtensionRepository repository, Type type)
    throws QueryException, XWikiException, ResolveException {
    TreeMap<Version, String> versions = new TreeMap<Version, String>();
    Version lastVersion = getVersions(extensionId, repository, type, versions);
    if (lastVersion == null) {
        throw new ExtensionNotFoundException(
            "Can't find any version for the extension [" + extensionId + "] on repository [" + repository + "]");
    } else if (versions.isEmpty()) {
        // If there is no valid version, import the last version
        versions.put(lastVersion, extensionId);
    } else {
        // Select the last valid version
        lastVersion = versions.lastKey();
    }
    Extension extension = repository.resolve(new ExtensionId(extensionId, lastVersion));
    // Get the versions of former ids (features)
    Collection<ExtensionId> features = extension.getExtensionFeatures();
    for (ExtensionId feature : features) {
        try {
            getVersions(feature.getId(), repository, type, versions);
        } catch (ResolveException e) {
            // Ignore
        }
    }
    XWikiContext xcontext = this.xcontextProvider.get();
    boolean needSave = false;
    XWikiDocument document = getExistingExtensionDocumentById(extensionId);
    if (document == null) {
        // Create a new document, probing for a free page name
        document = xcontext.getWiki().getDocument(
            new DocumentReference(xcontext.getWikiId(), Arrays.asList("Extension", extension.getName()), "WebHome"),
            xcontext);
        for (int i = 1; !document.isNew(); ++i) {
            document = xcontext.getWiki().getDocument(
                new DocumentReference(xcontext.getWikiId(),
                    Arrays.asList("Extension", extension.getName() + ' ' + i), "WebHome"), xcontext);
        }
        document.readFromTemplate(this.currentResolver.resolve(XWikiRepositoryModel.EXTENSION_TEMPLATEREFERENCE),
            xcontext);
        needSave = true;
    }
    // Update the document
    BaseObject extensionObject = document.getXObject(XWikiRepositoryModel.EXTENSION_CLASSREFERENCE);
    if (extensionObject == null) {
        extensionObject = document.newXObject(XWikiRepositoryModel.EXTENSION_CLASSREFERENCE, xcontext);
        needSave = true;
    }
    if (!StringUtils.equals(extensionId,
        getValue(extensionObject, XWikiRepositoryModel.PROP_EXTENSION_ID, (String) null))) {
        extensionObject.set(XWikiRepositoryModel.PROP_EXTENSION_ID, extensionId, xcontext);
        needSave = true;
    }
    // Update the extension information
    needSave |= updateExtension(extension, extensionObject, xcontext);
    // Proxy marker
    BaseObject extensionProxyObject = document.getXObject(XWikiRepositoryModel.EXTENSIONPROXY_CLASSREFERENCE);
    if (extensionProxyObject == null) {
        extensionProxyObject = document.newXObject(XWikiRepositoryModel.EXTENSIONPROXY_CLASSREFERENCE, xcontext);
        extensionProxyObject.setIntValue(XWikiRepositoryModel.PROP_PROXY_AUTOUPDATE, 1);
        needSave = true;
    }
    needSave |= update(extensionProxyObject, XWikiRepositoryModel.PROP_PROXY_REPOSITORYID,
        repository.getDescriptor().getId());
    needSave |= update(extensionProxyObject, XWikiRepositoryModel.PROP_PROXY_REPOSITORYTYPE,
        repository.getDescriptor().getType());
    needSave |= update(extensionProxyObject, XWikiRepositoryModel.PROP_PROXY_REPOSITORYURI,
        repository.getDescriptor().getURI().toString());
    // Remove versions that no longer exist
    Set<String> validVersions = new HashSet<String>();
    List<BaseObject> versionObjects = document.getXObjects(XWikiRepositoryModel.EXTENSIONVERSION_CLASSREFERENCE);
    if (versionObjects != null) {
        for (BaseObject versionObject : versionObjects) {
            if (versionObject != null) {
                String version = getValue(versionObject, XWikiRepositoryModel.PROP_VERSION_VERSION);
                if (StringUtils.isBlank(version) || (isVersionProxyingEnabled(document)
                    && !new DefaultVersion(version).equals(extension.getId().getVersion()))) {
                    // Empty version OR old versions should be proxied
                    document.removeXObject(versionObject);
                    needSave = true;
                } else {
                    if (!versions.containsKey(new DefaultVersion(version))) {
                        // The version does not exist on the remote repository
                        if (!isVersionValid(document, versionObject, xcontext)) {
                            // The version is invalid; remove it so it does not make the whole extension invalid
                            document.removeXObject(versionObject);
                            needSave = true;
                        } else {
                            // The version is valid, let's keep it
                            validVersions.add(version);
                        }
                    } else {
                        // This version exists on the remote repository
                        validVersions.add(version);
                    }
                }
            }
        }
    }
    List<BaseObject> dependencyObjects =
        document.getXObjects(XWikiRepositoryModel.EXTENSIONDEPENDENCY_CLASSREFERENCE);
    if (dependencyObjects != null) {
        for (BaseObject dependencyObject : dependencyObjects) {
            if (dependencyObject != null) {
                String version = getValue(dependencyObject, XWikiRepositoryModel.PROP_DEPENDENCY_EXTENSIONVERSION);
                if (!validVersions.contains(version)) {
                    // The version is invalid; remove it so it does not make the whole extension invalid
                    document.removeXObject(dependencyObject);
                    needSave = true;
                }
            }
        }
    }
    for (Map.Entry<Version, String> entry : versions.entrySet()) {
        Version version = entry.getKey();
        String id = entry.getValue();
        try {
            Extension versionExtension;
            if (version.equals(extension.getId().getVersion())) {
                versionExtension = extension;
            } else if (isVersionProxyingEnabled(document)) {
                continue;
            } else {
                versionExtension = repository.resolve(new ExtensionId(id, version));
            }
            // Update version-related information
            needSave |= updateExtensionVersion(document, versionExtension);
        } catch (Exception e) {
            this.logger.error("Failed to resolve extension with id [" + id + "] and version [" + version
                + "] on repository [" + repository + "]", e);
        }
    }
    if (needSave) {
        document.setAuthorReference(xcontext.getUserReference());
        if (document.isNew()) {
            document.setContentAuthorReference(xcontext.getUserReference());
            document.setCreatorReference(xcontext.getUserReference());
        }
        xcontext.getWiki().saveDocument(document,
            "Imported extension [" + extensionId + "] from repository [" + repository.getDescriptor() + "]", true,
            xcontext);
    }
    return document.getDocumentReference();
}
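A minimal usage sketch, assuming the Type parameter is the extension Version.Type used by getVersions(...) to filter which versions are importable, and that repositoryManager and repository are an injected RepositoryManager and an already-connected ExtensionRepository (the names and the extension id are illustrative assumptions):

// Hypothetical call: import the stable versions of an extension and get the page that describes it.
DocumentReference extensionDocument =
    repositoryManager.importExtension("org.xwiki.contrib:example-extension", repository, Version.Type.STABLE);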