Use of com.xpn.xwiki.store.XWikiHibernateStore in project xwiki-platform by xwiki.
The class R40000XWIKI6990DataMigration, method getLiquibaseChangeLog.
@Override
public String getLiquibaseChangeLog() throws DataMigrationException {
final XWikiHibernateBaseStore store = getStore();
this.configuration = store.getConfiguration();
final StringBuilder sb = new StringBuilder(12000);
final List<PersistentClass> classes = new ArrayList<PersistentClass>();
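// Detect the database product; the flags set here select the vendor-specific update strategy below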
detectDatabaseProducts(store);
if (this.logger.isDebugEnabled()) {
if (this.isOracle) {
this.logger.debug("Oracle database detected, proceeding to all updates manually with deferred constraints.");
}
if (this.isMySQL && !this.isMySQLMyISAM) {
this.logger.debug("MySQL innoDB database detected, proceeding to simplified updates with cascaded updates.");
}
if (this.isMySQLMyISAM) {
this.logger.debug("MySQL MyISAM database detected, proceeding to all updates manually without constraints.");
}
if (this.isMSSQL) {
this.logger.debug("Microsoft SQL Server database detected, proceeding to simplified updates with cascaded u" + "pdates. During data type changes, Primary Key constraints and indexes are temporarily dropped.");
}
}
// Build the list of classes to check for updates
classes.add(getClassMapping(BaseObject.class.getName()));
for (Class<?> klass : PROPERTY_CLASS) {
classes.add(getClassMapping(klass.getName()));
}
for (Class<?> klass : STATS_CLASSES) {
classes.add(getClassMapping(klass.getName()));
}
// Initialize the counter of Change Logs
this.logCount = 0;
// MyISAM does not enforce foreign key constraints, so they cannot prevent type changes; we skip all this processing for MySQL tables stored using the MyISAM engine.
if (!this.isMySQLMyISAM) {
for (PersistentClass klass : classes) {
this.fkTables.addAll(getForeignKeyTables(klass));
}
}
// Drop all FK constraints
for (Table table : this.fkTables) {
appendDropForeignKeyChangeLog(sb, table);
}
// Process internal classes
for (PersistentClass klass : classes) {
// StringListProperty shares its table with LargeStringProperty, so process that table only once
if (klass.getMappedClass() != StringListProperty.class) {
// Update key types
appendDataTypeChangeLogs(sb, klass);
}
}
// Process dynamic and custom mapping
final XWikiContext context = getXWikiContext();
try {
processCustomMappings((XWikiHibernateStore) store, new CustomMappingCallback() {
@Override
public void processCustomMapping(XWikiHibernateStore store, String name, String mapping, boolean hasDynamicMapping) throws XWikiException {
if (INTERNAL.equals(mapping) || hasDynamicMapping) {
PersistentClass klass = R40000XWIKI6990DataMigration.this.configuration.getClassMapping(name);
if (!R40000XWIKI6990DataMigration.this.isMySQLMyISAM) {
List<Table> tables = getForeignKeyTables(klass);
for (Table table : tables) {
if (!R40000XWIKI6990DataMigration.this.fkTables.contains(table)) {
// Drop FK constraints for custom mapped class
appendDropForeignKeyChangeLog(sb, table);
R40000XWIKI6990DataMigration.this.fkTables.add(table);
}
}
}
// Update key types for custom mapped class
appendDataTypeChangeLogs(sb, klass);
}
}
}, context);
} catch (XWikiException e) {
throw new DataMigrationException("Unable to process custom mapped classes during schema update", e);
}
// Add FK constraints back, activating cascaded updates
for (Table table : this.fkTables) {
appendAddForeignKeyChangeLog(sb, table);
}
// Oracle doesn't support cascaded updates, so we still need to manually update each table
if (this.isOracle) {
this.fkTables.clear();
}
logProgress("%d schema updates required.", this.logCount);
if (this.logger.isDebugEnabled()) {
this.logger.debug("About to execute this Liquibase XML: {}", sb.toString());
}
return sb.toString();
}
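The helpers called above (appendDropForeignKeyChangeLog, appendAddForeignKeyChangeLog, appendDataTypeChangeLogs) are not shown on this page. The following is a minimal, self-contained sketch of the pattern they follow: accumulate Liquibase changeSet XML in the shared StringBuilder and count each change in logCount. The table names, constraint names, changeSet ids and XML attributes here are illustrative assumptions, not the migration's actual output.

import java.util.Arrays;

public class ChangeLogSketch {
    private int logCount;

    // Hypothetical stand-in for appendDropForeignKeyChangeLog: emits one
    // Liquibase changeSet that drops a named FK constraint on a table.
    private void appendDropForeignKey(StringBuilder sb, String table, String fkName) {
        sb.append("<changeSet id=\"R40000-").append(++this.logCount)
            .append("\" author=\"xwiki\">\n")
            .append("  <dropForeignKeyConstraint baseTableName=\"").append(table)
            .append("\" constraintName=\"").append(fkName).append("\"/>\n")
            .append("</changeSet>\n");
    }

    public static void main(String[] args) {
        ChangeLogSketch sketch = new ChangeLogSketch();
        StringBuilder sb = new StringBuilder(1024);
        // Hypothetical table names; the real code derives them from the Hibernate mapping.
        for (String table : Arrays.asList("xwikiobjects", "xwikiproperties")) {
            sketch.appendDropForeignKey(sb, table, "FK_" + table.toUpperCase());
        }
        System.out.println(sb);
        System.out.printf("%d schema updates required.%n", sketch.logCount);
    }
}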
Use of com.xpn.xwiki.store.XWikiHibernateStore in project xwiki-platform by xwiki.
The class R40000XWIKI6990DataMigration, method hibernateMigrate.
@Override
public void hibernateMigrate() throws DataMigrationException, XWikiException {
final Map<Long, Long> docs = new HashMap<Long, Long>();
final List<String> customMappedClasses = new ArrayList<String>();
final Map<Long, Long> objs = new HashMap<Long, Long>();
final Queue<Map<Long, Long>> stats = new LinkedList<Map<Long, Long>>();
// Build the id conversion maps
getStore().executeRead(getXWikiContext(), new HibernateCallback<Object>() {
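// The helper methods below each fill one conversion map; doInHibernate runs them all in a single read transaction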
private void fillDocumentIdConversion(Session session, Map<Long, Long> map) {
String database = getXWikiContext().getWikiId();
@SuppressWarnings("unchecked") List<Object[]> results = session.createQuery("select doc.id, doc.space, doc.name, doc.defaultLanguage, doc.language from " + XWikiDocument.class.getName() + " as doc").list();
for (Object[] result : results) {
long oldId = (Long) result[0];
String space = (String) result[1];
String name = (String) result[2];
String defaultLanguage = (String) result[3];
String language = (String) result[4];
// Use a real document, since we need the language to be appended.
// TODO: Change this when the locale is integrated
XWikiDocument doc = new XWikiDocument(new DocumentReference(database, space, name));
doc.setDefaultLanguage(defaultLanguage);
doc.setLanguage(language);
long newId = doc.getId();
if (oldId != newId) {
map.put(oldId, newId);
}
}
logProgress("Retrieved %d document IDs to be converted.", map.size());
}
private void fillObjectIdConversion(Session session, Map<Long, Long> map) {
@SuppressWarnings("unchecked") List<Object[]> results = session.createQuery("select obj.id, obj.name, obj.className, obj.number from " + BaseObject.class.getName() + " as obj").list();
for (Object[] result : results) {
long oldId = (Long) result[0];
String docName = (String) result[1];
String className = (String) result[2];
Integer number = (Integer) result[3];
BaseObjectReference objRef = new BaseObjectReference(R40000XWIKI6990DataMigration.this.resolver.resolve(className), number, R40000XWIKI6990DataMigration.this.resolver.resolve(docName));
long newId = Util.getHash(R40000XWIKI6990DataMigration.this.serializer.serialize(objRef));
if (oldId != newId) {
map.put(oldId, newId);
}
}
logProgress("Retrieved %d object IDs to be converted.", map.size());
}
private void fillCustomMappingMap(XWikiHibernateStore store, XWikiContext context) throws XWikiException {
processCustomMappings(store, new CustomMappingCallback() {
@Override
public void processCustomMapping(XWikiHibernateStore store, String name, String mapping, boolean hasDynamicMapping) throws XWikiException {
if (INTERNAL.equals(mapping) || hasDynamicMapping) {
customMappedClasses.add(name);
}
}
}, context);
logProgress("Retrieved %d custom mapped classes to be processed.", customMappedClasses.size());
}
private void fillStatsConversionMap(Session session, Class<?> klass, Map<Long, Long> map) {
@SuppressWarnings("unchecked") List<Object[]> results = session.createQuery("select stats.id, stats.name, stats.number from " + klass.getName() + " as stats").list();
for (Object[] result : results) {
long oldId = (Long) result[0];
String statsName = (String) result[1];
Integer number = (Integer) result[2];
// Do not try to convert broken records, which would cause duplicate ids
if (statsName != null && !statsName.startsWith(".") && !statsName.endsWith(".")) {
long newId = R40000XWIKI6990DataMigration.this.statsIdComputer.getId(statsName, number);
if (oldId != newId) {
map.put(oldId, newId);
}
} else {
R40000XWIKI6990DataMigration.this.logger.debug("Skipping invalid statistical entry [{}] with name [{}]", oldId, statsName);
}
}
String klassName = klass.getName().substring(klass.getName().lastIndexOf('.') + 1);
logProgress("Retrieved %d %s statistics IDs to be converted.", map.size(), klassName.substring(0, klassName.length() - 5).toLowerCase());
}
@Override
public Object doInHibernate(Session session) throws XWikiException {
try {
fillDocumentIdConversion(session, docs);
fillObjectIdConversion(session, objs);
// Retrieve custom mapped classes
if (getStore() instanceof XWikiHibernateStore) {
fillCustomMappingMap((XWikiHibernateStore) getStore(), getXWikiContext());
}
// Retrieve statistics ID conversion
for (Class<?> statsClass : STATS_CLASSES) {
Map<Long, Long> map = new HashMap<Long, Long>();
fillStatsConversionMap(session, statsClass, map);
stats.add(map);
}
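// Detach everything loaded by the queries above; only the plain id maps are needed from here on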
session.clear();
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
return null;
}
});
// Cache the configuration and the dialect
this.configuration = getStore().getConfiguration();
this.dialect = this.configuration.buildSettings().getDialect();
// Check configuration for safe mode: true if the migration should use safe but slower non-bulk native updates
boolean useSafeUpdates = "1".equals(getXWikiContext().getWiki().Param("xwiki.store.migration." + this.getName() + ".safemode", "0"));
// Fall back to safe mode if the Hibernate dialect does not support temporary tables
useSafeUpdates = useSafeUpdates || !this.configuration.buildSettings().getDialect().supportsTemporaryTables();
// Proceed to document id conversion
if (!docs.isEmpty()) {
if (!useSafeUpdates) {
// Pairs of (table, key) for tables that need manual updates
final List<String[]> tableToProcess = new ArrayList<String[]>();
for (Class<?> docClass : DOC_CLASSES) {
tableToProcess.addAll(getAllTableToProcess(docClass.getName()));
}
for (Class<?> docClass : DOCLINK_CLASSES) {
tableToProcess.addAll(getAllTableToProcess(docClass.getName(), "docId"));
}
logProgress("Converting %d document IDs in %d tables...", docs.size(), tableToProcess.size());
final long[] times = new long[tableToProcess.size() + 1];
try {
getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
@Override
public void doBulkIdUpdate() {
times[this.timer++] += insertIdUpdates(docs);
for (String[] table : tableToProcess) {
times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
}
}
});
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
if (this.logger.isDebugEnabled()) {
int timer = 0;
this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
for (String[] table : tableToProcess) {
this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
}
}
} else {
final List<String[]> docsColl = new ArrayList<String[]>();
for (Class<?> docClass : DOC_CLASSES) {
docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
}
for (Class<?> docClass : DOCLINK_CLASSES) {
docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
}
logProgress("Converting %d document IDs in %d tables and %d collection tables...", docs.size(), DOC_CLASSES.length + DOCLINK_CLASSES.length, docsColl.size());
final long[] times = new long[DOC_CLASSES.length + DOCLINK_CLASSES.length + docsColl.size()];
convertDbId(docs, new AbstractIdConversionHibernateCallback() {
@Override
public void doSingleUpdate() {
for (String[] coll : docsColl) {
times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
}
for (Class<?> doclinkClass : DOCLINK_CLASSES) {
times[this.timer++] += executeIdUpdate(doclinkClass, DOCID);
}
times[this.timer++] += executeIdUpdate(XWikiLink.class, DOCID);
times[this.timer++] += executeIdUpdate(XWikiRCSNodeInfo.class, ID + '.' + DOCID);
times[this.timer++] += executeIdUpdate(XWikiDocument.class, ID);
}
});
if (this.logger.isDebugEnabled()) {
int timer = 0;
for (String[] coll : docsColl) {
this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
}
for (Class<?> doclinkClass : DOCLINK_CLASSES) {
this.logger.debug("Time elapsed for {} class: {} ms", doclinkClass.getName(), times[timer++] / 1000000);
}
this.logger.debug("Time elapsed for {} class: {} ms", XWikiRCSNodeInfo.class.getName(), times[timer++] / 1000000);
this.logger.debug("Time elapsed for {} class: {} ms", XWikiDocument.class.getName(), times[timer++] / 1000000);
}
}
logProgress("All document IDs has been converted successfully.");
} else {
logProgress("No document IDs to convert, skipping.");
}
// Proceed to object id conversion
if (!objs.isEmpty()) {
if (!useSafeUpdates) {
// Pairs of (table, key) for tables that need manual updates
final List<String[]> tableToProcess = new ArrayList<String[]>();
PersistentClass objklass = getClassMapping(BaseObject.class.getName());
tableToProcess.addAll(getCollectionProperties(objklass));
for (Class<?> propertyClass : PROPERTY_CLASS) {
tableToProcess.addAll(getAllTableToProcess(propertyClass.getName()));
}
for (String customClass : customMappedClasses) {
tableToProcess.addAll(getAllTableToProcess(customClass));
}
tableToProcess.add(new String[] { objklass.getTable().getName(), getKeyColumnName(objklass) });
logProgress("Converting %d object IDs in %d tables...", objs.size(), tableToProcess.size());
final long[] times = new long[tableToProcess.size() + 1];
try {
getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
@Override
public void doBulkIdUpdate() {
times[this.timer++] += insertIdUpdates(objs);
for (String[] table : tableToProcess) {
times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
}
}
});
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
if (this.logger.isDebugEnabled()) {
int timer = 0;
this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
for (String[] table : tableToProcess) {
this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
}
}
} else {
// Names of classes that need manual updates
final List<String> classToProcess = new ArrayList<String>();
// Names of custom classes that need manual updates
final List<String> customClassToProcess = new ArrayList<String>();
// Pairs of (table, key) for collection tables that need manual updates
final List<String[]> objsColl = new ArrayList<String[]>();
objsColl.addAll(getCollectionProperties(getClassMapping(BaseObject.class.getName())));
for (Class<?> propertyClass : PROPERTY_CLASS) {
String className = propertyClass.getName();
PersistentClass klass = getClassMapping(className);
// Add collection tables that will not be updated by cascaded updates
objsColl.addAll(getCollectionProperties(klass));
// Skip classes that will be updated by cascaded updates
if (!this.fkTables.contains(klass.getTable())) {
classToProcess.add(className);
}
}
for (String customClass : customMappedClasses) {
PersistentClass klass = getClassMapping(customClass);
// Add collection tables that will not be updated by cascaded updates
objsColl.addAll(getCollectionProperties(klass));
// Skip classes that will be updated by cascaded updates
if (!this.fkTables.contains(klass.getTable())) {
customClassToProcess.add(customClass);
}
}
logProgress("Converting %d object IDs in %d tables, %d custom mapped tables and %d collection tables...", objs.size(), classToProcess.size() + 1, customClassToProcess.size(), objsColl.size());
final long[] times = new long[classToProcess.size() + 1 + customClassToProcess.size() + objsColl.size()];
convertDbId(objs, new AbstractIdConversionHibernateCallback() {
@Override
public void doSingleUpdate() {
for (String[] coll : objsColl) {
times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
}
for (String customMappedClass : customClassToProcess) {
times[this.timer++] += executeIdUpdate(customMappedClass, ID);
}
for (String propertyClass : classToProcess) {
times[this.timer++] += executeIdUpdate(propertyClass, IDID);
}
times[this.timer++] += executeIdUpdate(BaseObject.class, ID);
}
});
if (this.logger.isDebugEnabled()) {
int timer = 0;
for (String[] coll : objsColl) {
this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
}
for (String customMappedClass : customClassToProcess) {
this.logger.debug("Time elapsed for {} custom table: {} ms", customMappedClass, times[timer++] / 1000000);
}
for (String propertyClass : classToProcess) {
this.logger.debug("Time elapsed for {} property table: {} ms", propertyClass, times[timer++] / 1000000);
}
this.logger.debug("Time elapsed for {} class: {} ms", BaseObject.class.getName(), times[timer++] / 1000000);
}
}
logProgress("All object IDs has been converted successfully.");
} else {
logProgress("No object IDs to convert, skipping.");
}
// Proceed to statistics id conversions
for (final Class<?> statsClass : STATS_CLASSES) {
Map<Long, Long> map = stats.poll();
String klassName = statsClass.getName().substring(statsClass.getName().lastIndexOf('.') + 1);
klassName = klassName.substring(0, klassName.length() - 5).toLowerCase();
if (!map.isEmpty()) {
if (!useSafeUpdates) {
final List<String[]> tableToProcess = new ArrayList<String[]>();
final Map<Long, Long> statids = map;
PersistentClass statklass = getClassMapping(statsClass.getName());
tableToProcess.addAll(getCollectionProperties(statklass));
tableToProcess.add(new String[] { statklass.getTable().getName(), getKeyColumnName(statklass) });
logProgress("Converting %d %s statistics IDs in %d tables...", map.size(), klassName, tableToProcess.size());
final long[] times = new long[tableToProcess.size() + 1];
try {
getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
@Override
public void doBulkIdUpdate() {
times[this.timer++] += insertIdUpdates(statids);
for (String[] table : tableToProcess) {
times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
}
}
});
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
if (this.logger.isDebugEnabled()) {
int timer = 0;
this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
for (String[] table : tableToProcess) {
this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
}
}
} else {
final List<String[]> statsColl = new ArrayList<String[]>();
statsColl.addAll(getCollectionProperties(getClassMapping(statsClass.getName())));
logProgress("Converting %d %s statistics IDs in 1 tables and %d collection tables...", map.size(), klassName, statsColl.size());
final long[] times = new long[statsColl.size() + 1];
convertDbId(map, new AbstractIdConversionHibernateCallback() {
@Override
public void doSingleUpdate() {
for (String[] coll : statsColl) {
times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
}
times[this.timer++] += executeIdUpdate(statsClass, ID);
}
});
if (this.logger.isDebugEnabled()) {
int timer = 0;
for (String[] coll : statsColl) {
this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
}
this.logger.debug("Time elapsed for {} class: {} ms", statsClass.getName(), times[timer++] / 1000000);
}
}
logProgress("All %s statistics IDs has been converted successfully.", klassName);
} else {
logProgress("No %s statistics IDs to convert, skipping.", klassName);
}
}
}
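The conversion maps built above pair each pre-migration id with its new hash-based id, keeping only entries that actually change. Below is a minimal, self-contained sketch of that pattern. The 64-bit MD5-prefix hash imitates what com.xpn.xwiki.util.Util.getHash and XWikiDocument.getId are based on, but both the exact production algorithm and the sample references are assumptions.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.HashMap;
import java.util.Map;

public class IdConversionSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical pre-migration ids keyed by serialized reference.
        Map<String, Long> oldIds = new HashMap<String, Long>();
        oldIds.put("xwiki:Main.WebHome", 42L);
        oldIds.put("xwiki:Sandbox.TestPage", 43L);

        // Old id -> new id, recorded only when the id actually changes,
        // mirroring the fill*IdConversion helpers above.
        Map<Long, Long> conversions = new HashMap<Long, Long>();
        for (Map.Entry<String, Long> entry : oldIds.entrySet()) {
            long newId = hash64(entry.getKey());
            if (entry.getValue() != newId) {
                conversions.put(entry.getValue(), newId);
            }
        }
        System.out.printf("Retrieved %d document IDs to be converted.%n", conversions.size());
    }

    // First 8 bytes of an MD5 digest folded into a signed long; an assumption
    // standing in for the real hash used by the migration.
    private static long hash64(String input) throws Exception {
        byte[] digest = MessageDigest.getInstance("MD5")
            .digest(input.getBytes(StandardCharsets.UTF_8));
        long hash = 0;
        for (int i = 0; i < 8; i++) {
            hash = (hash << 8) | (digest[i] & 0xff);
        }
        return hash;
    }
}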
Use of com.xpn.xwiki.store.XWikiHibernateStore in project xwiki-platform by xwiki.
The class DocumentStatsStoreItem, method storeInternal.
@Override
public void storeInternal(List<XWikiStatsStoreItem> stats) {
DocumentStatsStoreItem lastItem = (DocumentStatsStoreItem) stats.get(stats.size() - 1);
XWikiHibernateStore store = this.context.getWiki().getHibernateStore();
if (store == null) {
return;
}
DocumentStats documentStat = new DocumentStats(lastItem.name, lastItem.action, lastItem.periodDate, lastItem.periodType);
// Load old statistics object from database
try {
// TODO Fix use of deprecated call.
store.loadXWikiCollection(documentStat, this.context, true);
} catch (XWikiException e) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Failed to load document statistics object [{}]", getId(), e);
}
}
// Increment counters
documentStat.setIntValue("pageViews", documentStat.getPageViews() + stats.size());
for (XWikiStatsStoreItem statItem : stats) {
DocumentStatsStoreItem docStat = (DocumentStatsStoreItem) statItem;
if (docStat.isVisit) {
documentStat.incVisits();
}
}
// Re-save statistics object
try {
// TODO Fix use of deprecated call.
store.saveXWikiCollection(documentStat, this.context, true);
} catch (XWikiException e) {
LOGGER.error("Failed to save document statistics object [{}]", getId(), e);
}
}
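storeInternal folds a whole batch of queued items into one statistics row: page views grow by the batch size, while visits grow only for items flagged as visits. Here is a minimal sketch of that folding, with a hypothetical Item type standing in for DocumentStatsStoreItem.

import java.util.Arrays;
import java.util.List;

public class StatsFoldSketch {
    // Hypothetical stand-in for DocumentStatsStoreItem and its isVisit flag.
    private static class Item {
        private final boolean isVisit;

        Item(boolean isVisit) {
            this.isVisit = isVisit;
        }
    }

    public static void main(String[] args) {
        List<Item> batch = Arrays.asList(new Item(true), new Item(false), new Item(true));
        // Every queued item counts as one page view; only flagged items count as visits.
        int pageViews = batch.size();
        int visits = 0;
        for (Item item : batch) {
            if (item.isVisit) {
                visits++;
            }
        }
        System.out.printf("pageViews=%d visits=%d%n", pageViews, visits);
    }
}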
Use of com.xpn.xwiki.store.XWikiHibernateStore in project xwiki-platform by xwiki.
The class XWikiStatsReader, method getDocMonthStats.
// ////////////////////////////////////////////////////////////////////////////////////////
// Deprecated methods
// ////////////////////////////////////////////////////////////////////////////////////////
/**
* Gets monthly statistics on a document for a specific action.
*
* @param docname fully qualified document name.
* @param action can be "view", "edit", "save", etc.
* @param month the month.
* @param context the XWiki context.
* @return DocumentStats - statistics object.
* @deprecated use {@link #getDocumentStatistics(String, Scope, Period, Range, XWikiContext)} instead.
*/
@Deprecated
public DocumentStats getDocMonthStats(String docname, String action, Date month, XWikiContext context) {
XWikiHibernateStore store = context.getWiki().getHibernateStore();
DocumentStats object = new DocumentStats(docname, action, month, PeriodType.MONTH);
try {
// TODO Fix use of deprecated call.
store.loadXWikiCollection(object, context, true);
return object;
} catch (XWikiException e) {
e.printStackTrace();
return new DocumentStats();
}
}
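A hypothetical caller, shown only to illustrate the signature. It assumes the xwiki-platform classes on the classpath (package locations are assumptions based on the xwiki-platform layout) and an XWikiContext supplied by the surrounding component; newer code should prefer getDocumentStatistics as the Javadoc notes.

import java.util.Date;

import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.stats.impl.DocumentStats;
import com.xpn.xwiki.stats.impl.XWikiStatsReader;

public class MonthStatsExample {
    @SuppressWarnings("deprecation")
    public int viewsThisMonth(XWikiStatsReader reader, XWikiContext context) {
        // "view" is one of the actions listed in the Javadoc above.
        DocumentStats stats = reader.getDocMonthStats("Main.WebHome", "view", new Date(), context);
        return stats.getPageViews();
    }
}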
Use of com.xpn.xwiki.store.XWikiHibernateStore in project xwiki-platform by xwiki.
The class XWikiStatsReader, method getBackLinkStatistics.
/**
* Retrieves back-link statistics.
*
* @param domain the domain used for filtering the results.
* @param scope the scope of referred documents for which to retrieve statistics.
* @param period the period of time, including its start date but excluding its end date.
* @param range the sub-range to return from the entire result set. Use this parameter for pagination.
* @param context the XWiki context.
* @return a list of DocumentStats objects.
*/
public List<DocumentStats> getBackLinkStatistics(String domain, Scope scope, Period period, Range range, XWikiContext context) {
List<DocumentStats> documentStatsList;
List<Object> paramList = new ArrayList<Object>(4);
String nameFilter = getHqlNameFilterFromScope(scope, paramList);
String sortOrder = getHqlSortOrderFromRange(range);
XWikiHibernateStore store = context.getWiki().getHibernateStore();
try {
String query = MessageFormat.format("select name, sum(pageViews) from RefererStats" + " where ({0}) and referer like ? and ? <= period and period < ? group by name" + " order by sum(pageViews) {1}", nameFilter, sortOrder);
paramList.add(getHqlValidDomain(domain));
paramList.add(period.getStartCode());
paramList.add(period.getEndCode());
List<?> solist = store.search(query, range.getAbsoluteSize(), range.getAbsoluteStart(), paramList, context);
documentStatsList = getDocumentStatistics(solist, "refer");
if (range.getSize() < 0) {
Collections.reverse(documentStatsList);
}
} catch (XWikiException e) {
documentStatsList = Collections.emptyList();
}
return documentStatsList;
}
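The query above mixes two substitution mechanisms: MessageFormat fills in HQL fragments ({0} is the scope's name filter, {1} the sort order), while the '?' placeholders are bound later from paramList, whose order is name-filter parameters first, then domain, period start, and period end. A standalone sketch with hypothetical filter and parameter values:

import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;

public class BackLinkQuerySketch {
    public static void main(String[] args) {
        List<Object> paramList = new ArrayList<Object>();
        // Hypothetical name filter; the real one comes from getHqlNameFilterFromScope,
        // which also adds its own '?' bindings to paramList first.
        String nameFilter = "name like ?";
        paramList.add("Main.%");
        String query = MessageFormat.format("select name, sum(pageViews) from RefererStats"
            + " where ({0}) and referer like ? and ? <= period and period < ? group by name"
            + " order by sum(pageViews) {1}", nameFilter, "desc");
        paramList.add("%xwiki.org%"); // domain filter
        paramList.add(20240101); // period start code (inclusive)
        paramList.add(20240201); // period end code (exclusive)
        System.out.println(query);
        System.out.println(paramList);
    }
}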