Use of com.xpn.xwiki.doc.XWikiDocument in project xwiki-platform by xwiki: class ExportAction, method export.
private String export(String format, XWikiContext context) throws XWikiException, IOException {
// We currently use the PDF export infrastructure but we have to redesign the export code.
XWikiURLFactory urlFactory = new OfficeExporterURLFactory();
PdfExport exporter = new OfficeExporter();
// Check if the office exporter supports the specified format.
ExportType exportType = ((OfficeExporter) exporter).getExportType(format);
// Note 2: we don't use the office server for PDF exports since it doesn't work OOB. Instead we use FOP.
if ("pdf".equalsIgnoreCase(format)) {
// The export format is PDF or the office converter can't be used (either it doesn't support the specified
// format or the office server is not started).
urlFactory = new PdfURLFactory();
exporter = new PdfExportImpl();
exportType = ExportType.PDF;
} else if (exportType == null) {
context.put("message", "core.export.formatUnknown");
return "exception";
}
urlFactory.init(context);
context.setURLFactory(urlFactory);
handleRevision(context);
XWikiDocument doc = context.getDoc();
context.getResponse().setContentType(exportType.getMimeType());
// Compute the name of the export. Since it's going to be saved on the user's file system it needs to be a valid
// file name. Thus we use the "path" serializer but replace the "/" separator by "_" since we're not computing
// a directory hierarchy but a file name.
EntityReferenceSerializer<String> serializer = Utils.getComponent(EntityReferenceSerializer.TYPE_STRING, "path");
String filename = serializer.serialize(doc.getDocumentReference()).replaceAll("/", "_");
// Make sure we don't go over 255 chars since several filesystems don't support filenames longer than that!
filename = StringUtils.abbreviateMiddle(filename, "__", 255);
context.getResponse().addHeader("Content-disposition", String.format("inline; filename=%s.%s", filename, exportType.getExtension()));
exporter.export(doc, context.getResponse().getOutputStream(), exportType, context);
return null;
}
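The filename computation above is the part most worth calling out: the document reference is serialized with the "path" serializer, the "/" separators are replaced by "_", and the result is capped at 255 characters. Below is a minimal, self-contained sketch of that logic, assuming Apache Commons Lang 3 is on the classpath; the serialized value is a hypothetical example, not real serializer output.
import org.apache.commons.lang3.StringUtils;

public class ExportFilenameSketch {
    public static void main(String[] args) {
        // Hypothetical result of the "path" EntityReferenceSerializer for a nested page.
        String serialized = "xwiki/Main/Some Space/WebHome";
        // Replace the path separator: we want a single file name, not a directory hierarchy.
        String filename = serialized.replaceAll("/", "_");
        // Several filesystems reject file names longer than 255 characters, so abbreviate the middle.
        filename = StringUtils.abbreviateMiddle(filename, "__", 255);
        System.out.println(filename + ".pdf");
    }
}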
Use of com.xpn.xwiki.doc.XWikiDocument in project xwiki-platform by xwiki: class ExportURLFactory, method renderWithSkinAction.
private void renderWithSkinAction(String spaces, String name, String wikiId, String path, XWikiContext context) throws IOException, XWikiException {
// We're simulating a Skin Action below. However, we need to ensure that we set the right doc
// in the XWiki Context, since this is what XWikiAction does; if we don't, it causes issues
// because the current doc is put in the context instead of the skin. For example, we'd get
// "Main.WebHome" as the current doc instead of "Main.flamingo".
// See https://jira.xwiki.org/browse/XWIKI-10922 for details.
DocumentReference dummyDocumentReference = new DocumentReference(wikiId, this.legacySpaceResolver.resolve(spaces), name);
XWikiDocument dummyDocument = context.getWiki().getDocument(dummyDocumentReference, context);
Map<String, Object> backup = new HashMap<>();
XWikiDocument.backupContext(backup, context);
try {
dummyDocument.setAsContextDoc(context);
SKINACTION.render(path, context);
} finally {
XWikiDocument.restoreContext(backup, context);
}
}
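The important idiom here is the backup/swap/restore around the render: the caller's context is backed up, the dummy document is set as the context doc only for the duration of SKINACTION.render, and the backup is restored in a finally block. Below is a minimal, self-contained sketch of that pattern using a plain map; the names are illustrative and are not XWiki APIs.
import java.util.HashMap;
import java.util.Map;

public class ContextSwapSketch {
    public static void main(String[] args) {
        Map<String, Object> context = new HashMap<>();
        context.put("doc", "Main.WebHome");

        // Back up the caller's state before temporarily replacing the context document.
        Map<String, Object> backup = new HashMap<>(context);
        try {
            context.put("doc", "Main.flamingo");
            System.out.println("rendering with doc=" + context.get("doc"));
        } finally {
            // Always restore, even if rendering throws, so the caller never sees the dummy doc.
            context.clear();
            context.putAll(backup);
        }
        System.out.println("restored doc=" + context.get("doc"));
    }
}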
Use of com.xpn.xwiki.doc.XWikiDocument in project xwiki-platform by xwiki: class R40000XWIKI6990DataMigration, method hibernateMigrate.
@Override
public void hibernateMigrate() throws DataMigrationException, XWikiException {
final Map<Long, Long> docs = new HashMap<Long, Long>();
final List<String> customMappedClasses = new ArrayList<String>();
final Map<Long, Long> objs = new HashMap<Long, Long>();
final Queue<Map<Long, Long>> stats = new LinkedList<Map<Long, Long>>();
// Get ids conversion list
getStore().executeRead(getXWikiContext(), new HibernateCallback<Object>() {
private void fillDocumentIdConversion(Session session, Map<Long, Long> map) {
String database = getXWikiContext().getWikiId();
@SuppressWarnings("unchecked") List<Object[]> results = session.createQuery("select doc.id, doc.space, doc.name, doc.defaultLanguage, doc.language from " + XWikiDocument.class.getName() + " as doc").list();
for (Object[] result : results) {
long oldId = (Long) result[0];
String space = (String) result[1];
String name = (String) result[2];
String defaultLanguage = (String) result[3];
String language = (String) result[4];
// Use a real document, since we need the language to be appended.
// TODO: Change this when the locale is integrated
XWikiDocument doc = new XWikiDocument(new DocumentReference(database, space, name));
doc.setDefaultLanguage(defaultLanguage);
doc.setLanguage(language);
long newId = doc.getId();
if (oldId != newId) {
map.put(oldId, newId);
}
}
logProgress("Retrieved %d document IDs to be converted.", map.size());
}
private void fillObjectIdConversion(Session session, Map<Long, Long> map) {
@SuppressWarnings("unchecked") List<Object[]> results = session.createQuery("select obj.id, obj.name, obj.className, obj.number from " + BaseObject.class.getName() + " as obj").list();
for (Object[] result : results) {
long oldId = (Long) result[0];
String docName = (String) result[1];
String className = (String) result[2];
Integer number = (Integer) result[3];
BaseObjectReference objRef = new BaseObjectReference(R40000XWIKI6990DataMigration.this.resolver.resolve(className), number, R40000XWIKI6990DataMigration.this.resolver.resolve(docName));
long newId = Util.getHash(R40000XWIKI6990DataMigration.this.serializer.serialize(objRef));
if (oldId != newId) {
map.put(oldId, newId);
}
}
logProgress("Retrieved %d object IDs to be converted.", map.size());
}
private void fillCustomMappingMap(XWikiHibernateStore store, XWikiContext context) throws XWikiException {
processCustomMappings(store, new CustomMappingCallback() {
@Override
public void processCustomMapping(XWikiHibernateStore store, String name, String mapping, boolean hasDynamicMapping) throws XWikiException {
if (INTERNAL.equals(mapping) || hasDynamicMapping) {
customMappedClasses.add(name);
}
}
}, context);
logProgress("Retrieved %d custom mapped classes to be processed.", customMappedClasses.size());
}
private void fillStatsConversionMap(Session session, Class<?> klass, Map<Long, Long> map) {
@SuppressWarnings("unchecked") List<Object[]> results = session.createQuery("select stats.id, stats.name, stats.number from " + klass.getName() + " as stats").list();
for (Object[] result : results) {
long oldId = (Long) result[0];
String statsName = (String) result[1];
Integer number = (Integer) result[2];
// Do not try to convert broken records which would cause duplicated ids
if (statsName != null && !statsName.startsWith(".") && !statsName.endsWith(".")) {
long newId = R40000XWIKI6990DataMigration.this.statsIdComputer.getId(statsName, number);
if (oldId != newId) {
map.put(oldId, newId);
}
} else {
R40000XWIKI6990DataMigration.this.logger.debug("Skipping invalid statistical entry [{}] with name [{}]", oldId, statsName);
}
}
String klassName = klass.getName().substring(klass.getName().lastIndexOf('.') + 1);
logProgress("Retrieved %d %s statistics IDs to be converted.", map.size(), klassName.substring(0, klassName.length() - 5).toLowerCase());
}
@Override
public Object doInHibernate(Session session) throws XWikiException {
try {
fillDocumentIdConversion(session, docs);
fillObjectIdConversion(session, objs);
// Retrieve custom mapped classes
if (getStore() instanceof XWikiHibernateStore) {
fillCustomMappingMap((XWikiHibernateStore) getStore(), getXWikiContext());
}
// Retrieve statistics ID conversion
for (Class<?> statsClass : STATS_CLASSES) {
Map<Long, Long> map = new HashMap<Long, Long>();
fillStatsConversionMap(session, statsClass, map);
stats.add(map);
}
session.clear();
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
return null;
}
});
// Cache the configuration and the dialect
this.configuration = getStore().getConfiguration();
this.dialect = this.configuration.buildSettings().getDialect();
// Check configuration for safe mode
/* True if migration should use safe but slower non-bulk native updates. */
boolean useSafeUpdates = "1".equals(getXWikiContext().getWiki().Param("xwiki.store.migration." + this.getName() + ".safemode", "0"));
// Use safe mode if the database doesn't support temporary tables through Hibernate
useSafeUpdates = useSafeUpdates || !this.configuration.buildSettings().getDialect().supportsTemporaryTables();
// Proceed to document id conversion
if (!docs.isEmpty()) {
if (!useSafeUpdates) {
// Pair (table, key) for tables that need manual updates
final List<String[]> tableToProcess = new ArrayList<String[]>();
for (Class<?> docClass : DOC_CLASSES) {
tableToProcess.addAll(getAllTableToProcess(docClass.getName()));
}
for (Class<?> docClass : DOCLINK_CLASSES) {
tableToProcess.addAll(getAllTableToProcess(docClass.getName(), "docId"));
}
logProgress("Converting %d document IDs in %d tables...", docs.size(), tableToProcess.size());
final long[] times = new long[tableToProcess.size() + 1];
try {
getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
@Override
public void doBulkIdUpdate() {
times[this.timer++] += insertIdUpdates(docs);
for (String[] table : tableToProcess) {
times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
}
}
});
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
if (this.logger.isDebugEnabled()) {
int timer = 0;
this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
for (String[] table : tableToProcess) {
this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
}
}
} else {
final List<String[]> docsColl = new ArrayList<String[]>();
for (Class<?> docClass : DOC_CLASSES) {
docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
}
for (Class<?> docClass : DOCLINK_CLASSES) {
docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
}
logProgress("Converting %d document IDs in %d tables and %d collection tables...", docs.size(), DOC_CLASSES.length + DOCLINK_CLASSES.length, docsColl.size());
final long[] times = new long[DOC_CLASSES.length + DOCLINK_CLASSES.length + docsColl.size()];
convertDbId(docs, new AbstractIdConversionHibernateCallback() {
@Override
public void doSingleUpdate() {
for (String[] coll : docsColl) {
times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
}
for (Class<?> doclinkClass : DOCLINK_CLASSES) {
times[this.timer++] += executeIdUpdate(doclinkClass, DOCID);
}
times[this.timer++] += executeIdUpdate(XWikiLink.class, DOCID);
times[this.timer++] += executeIdUpdate(XWikiRCSNodeInfo.class, ID + '.' + DOCID);
times[this.timer++] += executeIdUpdate(XWikiDocument.class, ID);
}
});
if (this.logger.isDebugEnabled()) {
int timer = 0;
for (String[] coll : docsColl) {
this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
}
for (Class<?> doclinkClass : DOCLINK_CLASSES) {
this.logger.debug("Time elapsed for {} class: {} ms", doclinkClass.getName(), times[timer++] / 1000000);
}
this.logger.debug("Time elapsed for {} class: {} ms", XWikiRCSNodeInfo.class.getName(), times[timer++] / 1000000);
this.logger.debug("Time elapsed for {} class: {} ms", XWikiDocument.class.getName(), times[timer++] / 1000000);
}
}
logProgress("All document IDs has been converted successfully.");
} else {
logProgress("No document IDs to convert, skipping.");
}
// Proceed to object id conversion
if (!objs.isEmpty()) {
if (!useSafeUpdates) {
// Pair (table, key) for tables that need manual updates
final List<String[]> tableToProcess = new ArrayList<String[]>();
PersistentClass objklass = getClassMapping(BaseObject.class.getName());
tableToProcess.addAll(getCollectionProperties(objklass));
for (Class<?> propertyClass : PROPERTY_CLASS) {
tableToProcess.addAll(getAllTableToProcess(propertyClass.getName()));
}
for (String customClass : customMappedClasses) {
tableToProcess.addAll(getAllTableToProcess(customClass));
}
tableToProcess.add(new String[] { objklass.getTable().getName(), getKeyColumnName(objklass) });
logProgress("Converting %d object IDs in %d tables...", objs.size(), tableToProcess.size());
final long[] times = new long[tableToProcess.size() + 1];
try {
getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
@Override
public void doBulkIdUpdate() {
times[this.timer++] += insertIdUpdates(objs);
for (String[] table : tableToProcess) {
times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
}
}
});
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
if (this.logger.isDebugEnabled()) {
int timer = 0;
this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
for (String[] table : tableToProcess) {
this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
}
}
} else {
// Names of classes that need manual updates
final List<String> classToProcess = new ArrayList<String>();
// Names of custom classes that need manual updates
final List<String> customClassToProcess = new ArrayList<String>();
// Pair (table, key) for collection tables that need manual updates
final List<String[]> objsColl = new ArrayList<String[]>();
objsColl.addAll(getCollectionProperties(getClassMapping(BaseObject.class.getName())));
for (Class<?> propertyClass : PROPERTY_CLASS) {
String className = propertyClass.getName();
PersistentClass klass = getClassMapping(className);
// Add collection tables that will not be updated by cascaded updates
objsColl.addAll(getCollectionProperties(klass));
// Skip classes that will be updated by cascaded updates
if (!this.fkTables.contains(klass.getTable())) {
classToProcess.add(className);
}
}
for (String customClass : customMappedClasses) {
PersistentClass klass = getClassMapping(customClass);
// Add collection tables that will not be updated by cascaded updates
objsColl.addAll(getCollectionProperties(klass));
// Skip classes that will be updated by cascaded updates
if (!this.fkTables.contains(klass.getTable())) {
customClassToProcess.add(customClass);
}
}
logProgress("Converting %d object IDs in %d tables, %d custom mapped tables and %d collection tables...", objs.size(), classToProcess.size() + 1, customClassToProcess.size(), objsColl.size());
final long[] times = new long[classToProcess.size() + 1 + customClassToProcess.size() + objsColl.size()];
convertDbId(objs, new AbstractIdConversionHibernateCallback() {
@Override
public void doSingleUpdate() {
for (String[] coll : objsColl) {
times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
}
for (String customMappedClass : customClassToProcess) {
times[this.timer++] += executeIdUpdate(customMappedClass, ID);
}
for (String propertyClass : classToProcess) {
times[this.timer++] += executeIdUpdate(propertyClass, IDID);
}
times[this.timer++] += executeIdUpdate(BaseObject.class, ID);
}
});
if (this.logger.isDebugEnabled()) {
int timer = 0;
for (String[] coll : objsColl) {
this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
}
for (String customMappedClass : customClassToProcess) {
this.logger.debug("Time elapsed for {} custom table: {} ms", customMappedClass, times[timer++] / 1000000);
}
for (String propertyClass : classToProcess) {
this.logger.debug("Time elapsed for {} property table: {} ms", propertyClass, times[timer++] / 1000000);
}
this.logger.debug("Time elapsed for {} class: {} ms", BaseObject.class.getName(), times[timer++] / 1000000);
}
}
logProgress("All object IDs has been converted successfully.");
} else {
logProgress("No object IDs to convert, skipping.");
}
// Proceed to statistics id conversions
for (final Class<?> statsClass : STATS_CLASSES) {
Map<Long, Long> map = stats.poll();
String klassName = statsClass.getName().substring(statsClass.getName().lastIndexOf('.') + 1);
klassName = klassName.substring(0, klassName.length() - 5).toLowerCase();
if (!map.isEmpty()) {
if (!useSafeUpdates) {
final List<String[]> tableToProcess = new ArrayList<String[]>();
final Map<Long, Long> statids = map;
PersistentClass statklass = getClassMapping(statsClass.getName());
tableToProcess.addAll(getCollectionProperties(statklass));
tableToProcess.add(new String[] { statklass.getTable().getName(), getKeyColumnName(statklass) });
logProgress("Converting %d %s statistics IDs in %d tables...", map.size(), klassName, tableToProcess.size());
final long[] times = new long[tableToProcess.size() + 1];
try {
getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback() {
@Override
public void doBulkIdUpdate() {
times[this.timer++] += insertIdUpdates(statids);
for (String[] table : tableToProcess) {
times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
}
}
});
} catch (Exception e) {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
}
if (this.logger.isDebugEnabled()) {
int timer = 0;
this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
for (String[] table : tableToProcess) {
this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
}
}
} else {
final List<String[]> statsColl = new ArrayList<String[]>();
statsColl.addAll(getCollectionProperties(getClassMapping(statsClass.getName())));
logProgress("Converting %d %s statistics IDs in 1 tables and %d collection tables...", map.size(), klassName, statsColl.size());
final long[] times = new long[statsColl.size() + 1];
convertDbId(map, new AbstractIdConversionHibernateCallback() {
@Override
public void doSingleUpdate() {
for (String[] coll : statsColl) {
times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
}
times[this.timer++] += executeIdUpdate(statsClass, ID);
}
});
if (this.logger.isDebugEnabled()) {
int timer = 0;
for (String[] coll : statsColl) {
this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
}
this.logger.debug("Time elapsed for {} class: {} ms", statsClass.getName(), times[timer++] / 1000000);
}
}
logProgress("All %s statistics IDs has been converted successfully.", klassName);
} else {
logProgress("No %s statistics IDs to convert, skipping.", klassName);
}
}
}
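Across documents, objects, and statistics the migration follows the same pattern: recompute the id for every existing row, keep only the entries whose id actually changes, and skip the conversion pass entirely when the resulting map stays empty. Below is a minimal sketch of that pattern; computeNewId is a stand-in for doc.getId() or Util.getHash(...), not the real hash.
import java.util.HashMap;
import java.util.Map;

public class IdRemapSketch {
    // Stand-in for the real 64-bit hash used by the migration.
    static long computeNewId(String key) {
        return key.hashCode();
    }

    public static void main(String[] args) {
        // Hypothetical existing rows: old id -> serialized reference.
        Map<Long, String> existingRows = new HashMap<>();
        existingRows.put(1L, "xwiki:Main.WebHome");
        existingRows.put(2L, "xwiki:Sandbox.WebHome");

        Map<Long, Long> conversions = new HashMap<>();
        for (Map.Entry<Long, String> row : existingRows.entrySet()) {
            long newId = computeNewId(row.getValue());
            // Only record ids that actually change; identical ids need no update.
            if (row.getKey() != newId) {
                conversions.put(row.getKey(), newId);
            }
        }
        System.out.println(conversions.isEmpty() ? "No IDs to convert, skipping." : conversions);
    }
}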
Use of com.xpn.xwiki.doc.XWikiDocument in project xwiki-platform by xwiki: class ObjectAddAction, method action.
@Override
public boolean action(XWikiContext context) throws XWikiException {
// CSRF prevention
if (!csrfTokenCheck(context)) {
return false;
}
XWiki xwiki = context.getWiki();
XWikiResponse response = context.getResponse();
DocumentReference userReference = context.getUserReference();
XWikiDocument doc = context.getDoc();
ObjectAddForm oform = (ObjectAddForm) context.getForm();
String className = oform.getClassName();
EntityReference classReference = this.relativeResolver.resolve(className, EntityType.DOCUMENT);
BaseObject object = doc.newXObject(classReference, context);
BaseClass baseclass = object.getXClass(context);
// The request parameter names that correspond to object fields must NOT specify the object number because the
// object number is not known before the object is added. The following is a good parameter name:
// Space.Class_property. As a consequence we use only the class name to extract the object from the request.
Map<String, String[]> objmap = oform.getObject(className);
// We need to have a string in the map for each field for the object to be correctly created.
// Otherwise, queries using the missing properties will fail to return this object.
@SuppressWarnings("unchecked") Collection<PropertyClass> fields = baseclass.getFieldList();
for (PropertyClass property : fields) {
String name = property.getName();
if (objmap.get(name) == null) {
objmap.put(name, EMPTY_PROPERTY);
}
}
// Load the object properties that are defined in the request.
baseclass.fromMap(objmap, object);
doc.setAuthorReference(userReference);
if (doc.isNew()) {
doc.setCreatorReference(userReference);
}
xwiki.saveDocument(doc, localizePlainOrKey("core.comment.addObject"), true, context);
// If this is an ajax request, no need to redirect.
if (Utils.isAjaxRequest(context)) {
context.getResponse().setStatus(HttpServletResponse.SC_NO_CONTENT);
return false;
}
// forward to edit
String redirect = Utils.getRedirect("edit", "editor=object", "xcontinue", "xredirect");
// If the redirect URL contains the xobjectNumber parameter then inject the number of the added object as its
// value so that the target page knows which object was added.
redirect = XOBJECT_NUMBER_PARAMETER.matcher(redirect).replaceFirst("$1xobjectNumber=" + object.getNumber() + "$2");
sendRedirect(response, redirect);
return false;
}
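The last step rewrites the redirect URL so the object editor knows which object was just added: when the URL already contains an xobjectNumber parameter, its value is replaced by the new object's number while the surrounding query string is preserved via the capture groups. Below is a minimal, self-contained sketch; the pattern is an assumption for illustration and is not necessarily the actual XOBJECT_NUMBER_PARAMETER constant.
import java.util.regex.Pattern;

public class XObjectNumberSketch {
    public static void main(String[] args) {
        // Group 1 keeps the separator before the parameter, group 2 keeps whatever follows its value.
        Pattern xobjectNumberParameter = Pattern.compile("([?&])xobjectNumber=\\d*(&|$)");
        String redirect = "/xwiki/bin/edit/Main/WebHome?editor=object&xobjectNumber=&xcontinue=1";
        int addedObjectNumber = 3;
        redirect = xobjectNumberParameter.matcher(redirect)
            .replaceFirst("$1xobjectNumber=" + addedObjectNumber + "$2");
        // Prints .../WebHome?editor=object&xobjectNumber=3&xcontinue=1
        System.out.println(redirect);
    }
}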
Use of com.xpn.xwiki.doc.XWikiDocument in project xwiki-platform by xwiki: class ObjectRemoveAction, method action.
@Override
public boolean action(XWikiContext context) throws XWikiException {
// CSRF prevention
if (!csrfTokenCheck(context)) {
return false;
}
XWiki xwiki = context.getWiki();
XWikiResponse response = context.getResponse();
String username = context.getUser();
XWikiDocument doc = context.getDoc();
BaseObject obj = getObject(doc, context);
if (obj == null) {
return true;
}
doc.removeObject(obj);
doc.setAuthor(username);
xwiki.saveDocument(doc, localizePlainOrKey("core.comment.deleteObject"), true, context);
if (Utils.isAjaxRequest(context)) {
response.setStatus(HttpServletResponse.SC_NO_CONTENT);
response.setContentLength(0);
} else {
// forward to edit
String redirect = Utils.getRedirect("edit", context);
sendRedirect(response, redirect);
}
return false;
}
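Both object actions finish the same way: an AJAX caller just gets a 204 No Content response, while anyone else is redirected back to the object editor. Utils.isAjaxRequest is XWiki's own helper; a common convention, used below only as an assumption for illustration, is to look at the X-Requested-With request header.
public class AjaxBranchSketch {
    // Common convention for detecting XHR calls; not necessarily what Utils.isAjaxRequest checks.
    static boolean isAjaxRequest(String xRequestedWithHeader) {
        return "XMLHttpRequest".equalsIgnoreCase(xRequestedWithHeader);
    }

    public static void main(String[] args) {
        String header = "XMLHttpRequest"; // value typically sent by browser-side XHR libraries
        if (isAjaxRequest(header)) {
            System.out.println("Respond with 204 No Content, no redirect needed.");
        } else {
            System.out.println("Send a 302 redirect back to the object editor.");
        }
    }
}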