Use of org.wikipediacleaner.api.data.QueryResult in project wpcleaner by WPCleaner: class MediaWikiAPI, method updatePage.
/**
 * Update a page on Wikipedia.
 *
 * @param wikipedia Wikipedia.
 * @param page Page.
 * @param newContents New contents to use.
 * @param comment Comment.
 * @param bot True if the edit should be flagged as bot.
 * @param minor True if the modification should be tagged as minor.
 * @param automatic True if the modification is automatic.
 * @param forceWatch Force watching the page.
 * @return Result of the command.
 * @throws APIException Exception thrown by the API.
 */
@Override
public QueryResult updatePage(EnumWikipedia wikipedia, Page page, String newContents, String comment, boolean bot, boolean minor, boolean automatic, boolean forceWatch) throws APIException {
  // Defensive checks: null arguments are caller-side programming errors.
  if (page == null) {
    throw new APIException("Page is null");
  }
  if (newContents == null) {
    throw new APIException("Contents is null");
  }
  if (comment == null) {
    throw new APIException("Comment is null");
  }
  // An anonymous connection has none of the three credentials set;
  // any one of them being present is treated as "logged in".
  ConnectionInformation connection = wikipedia.getConnection();
  if ((connection.getLgToken() == null) && (connection.getLgUserId() == null) && (connection.getLgUserName() == null)) {
    throw new APIException("You must be logged in to update pages");
  }
  // Outer loop: at most two attempts; the second attempt only happens after
  // a recoverable error (e.g. BAD_TOKEN) on the first one.
  int attemptNumber = 0;
  QueryResult result = null;
  do {
    attemptNumber++;
    // Build the action=edit request parameters.
    Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
    // Server-side assertion that the session is still logged in.
    properties.put("assert", "user");
    if (page.getContentsTimestamp() != null) {
      // Base timestamp lets the server detect edit conflicts.
      properties.put("basetimestamp", page.getContentsTimestamp());
    }
    if (bot) {
      properties.put("bot", "");
    }
    if (minor) {
      properties.put("minor", "");
    }
    if (page.getStartTimestamp() != null) {
      properties.put("starttimestamp", page.getStartTimestamp());
    }
    properties.put("summary", comment);
    properties.put("text", newContents);
    properties.put("title", page.getTitle());
    // Only send the edit token when one is available.
    if (wikipedia.getConnection().getEditToken() != null) {
      properties.put("token", wikipedia.getConnection().getEditToken());
    }
    properties.put("watchlist", forceWatch ? "watch" : "nochange");
    // Lets the wiki configuration adjust the summary and edit tags
    // (e.g. mark automatic edits) before sending.
    CommentManager.manageComment(wikipedia.getConfiguration(), properties, "summary", "tags", automatic);
    // Presumably paces edits per user/namespace — TODO confirm semantics.
    checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
    try {
      // Inner loop: resubmit the same edit with the CAPTCHA answer until the
      // server stops asking for one (or the user refuses to answer).
      boolean hasCaptcha = false;
      do {
        hasCaptcha = false;
        try {
          result = constructEdit(getRoot(wikipedia, properties, 3), "/api/edit");
        } catch (CaptchaException e) {
          String captchaAnswer = getCaptchaAnswer(wikipedia, e);
          if (captchaAnswer != null) {
            properties.put("captchaid", e.getId());
            properties.put("captchaword", captchaAnswer);
            hasCaptcha = true;
          } else {
            // User gave no answer: abort the edit.
            throw new APIException("CAPTCHA", e);
          }
        }
      } while (hasCaptcha);
    } catch (APIException e) {
      if (e.getHttpStatus() == HttpStatus.SC_GATEWAY_TIMEOUT) {
        // A gateway timeout may still mean the edit went through server-side:
        // re-read the page and, if its contents already match what we sent,
        // report success instead of retrying (which could duplicate the edit).
        log.warn("Gateway timeout, waiting to see if modification has been taken into account");
        waitBeforeRetrying();
        Page tmpPage = page.replicatePage();
        retrieveContents(wikipedia, Collections.singletonList(tmpPage), false, false);
        String tmpContents = tmpPage.getContents();
        if ((tmpContents != null) && (tmpContents.equals(newContents))) {
          // NOTE(review): tmpPage.getPageId() is passed both as the first
          // argument and as the new revision id — confirm this is intended.
          return QueryResult.createCorrectQuery(tmpPage.getPageId(), tmpPage.getTitle(), page.getPageId(), tmpPage.getPageId());
        }
      }
      // Second failure: give up.
      if (attemptNumber > 1) {
        log.warn("Error updating page {}", page.getTitle());
        throw e;
      }
      EnumQueryResult queryResult = e.getQueryResult();
      if (queryResult == EnumQueryResult.BAD_TOKEN) {
        // Stale edit token: refresh tokens, then let the outer loop retry.
        waitBeforeRetrying();
        log.warn("Retrieving tokens after a BAD_TOKEN answer");
        retrieveTokens(wikipedia);
      } else if ((queryResult != null) && (!queryResult.shouldRetry())) {
        // Non-retryable error: fail immediately.
        log.warn("Error updating page {}", page.getTitle());
        throw e;
      }
    } catch (JDOMParseException e) {
      // Malformed XML response: not recoverable by retrying.
      log.error("Error updating page: {}", e.getMessage());
      throw new APIException("Error parsing XML", e);
    }
  } while (result == null);
  return result;
}
Use of org.wikipediacleaner.api.data.QueryResult in project wpcleaner by WPCleaner: class SendWorker, method construct.
/* (non-Javadoc)
 * Pushes the edited page to the wiki, then performs the requested follow-up
 * actions: record contributions, refresh the various warning templates on the
 * talk page, and mark Check Wiki errors as fixed.
 *
 * @return An APIException on failure, null on success (SwingWorker contract).
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
  setText(GT._T("Retrieving MediaWiki API"));
  API api = APIFactory.getAPI();

  // Updating page contents
  QueryResult queryResult = null;
  try {
    setText(GT._T("Updating page contents"));
    queryResult = api.updatePage(getWikipedia(), page, text, comment, bot, minor, false, forceWatch);
  } catch (APIException e) {
    return e;
  }

  // Take contributions into account
  if ((contributions != null) && (getWikipedia().getContributions() != null)) {
    getWikipedia().getContributions().increaseContributions(contributions);
  }

  // The warning updates below all work on the same analysis of the page:
  // compute it once instead of re-analyzing the text for every warning kind.
  if (params.updateDabWarning || params.updateISBNWarning || params.updateISSNWarning
      || params.updateDuplicateArgsWarning || params.updateUnknownParameterWarning) {
    PageAnalysis pageAnalysis = page.getAnalysis(text, true);

    // Updating disambiguation warning
    if (params.updateDabWarning) {
      try {
        UpdateDabWarningTools dabWarningTools = new UpdateDabWarningTools(getWikipedia(), this, params.createDabWarning, false);
        dabWarningTools.updateWarning(pageAnalysis, queryResult.getPageNewRevId(), null, null, null, null, null);
      } catch (APIException e) {
        return e;
      }
    }

    // Updating ISBN warning
    if (params.updateISBNWarning) {
      try {
        UpdateISBNWarningTools isbnWarningTools = new UpdateISBNWarningTools(getWikipedia(), this, params.createISBNWarning, false);
        isbnWarningTools.updateWarning(pageAnalysis, queryResult.getPageNewRevId(), null, null, null, null, null);
      } catch (APIException e) {
        return e;
      }
    }

    // Updating ISSN warning
    if (params.updateISSNWarning) {
      try {
        UpdateISSNWarningTools issnWarningTools = new UpdateISSNWarningTools(getWikipedia(), this, params.createISSNWarning, false);
        issnWarningTools.updateWarning(pageAnalysis, queryResult.getPageNewRevId(), null, null, null, null, null);
      } catch (APIException e) {
        return e;
      }
    }

    // Updating duplicate arguments warning
    if (params.updateDuplicateArgsWarning) {
      try {
        UpdateDuplicateArgsWarningTools duplicateArgsWarningTools = new UpdateDuplicateArgsWarningTools(getWikipedia(), this, params.createDuplicateArgsWarning, false);
        duplicateArgsWarningTools.updateWarning(pageAnalysis, queryResult.getPageNewRevId(), null, null, null, null, null);
      } catch (APIException e) {
        return e;
      }
    }

    // Updating unknown parameter warning
    if (params.updateUnknownParameterWarning) {
      try {
        UpdateUnknownParameterWarningTools unknownParameterWarningTools = new UpdateUnknownParameterWarningTools(getWikipedia(), this, params.createUnknownParameterWarning, false);
        unknownParameterWarningTools.updateWarning(pageAnalysis, queryResult.getPageNewRevId(), null, null, null, null, null);
      } catch (APIException e) {
        return e;
      }
    }
  }

  // Mark errors fixed (bot-only errors are never marked from here)
  if (errorsFixed != null) {
    for (AlgorithmError.Progress error : errorsFixed) {
      CheckErrorAlgorithm algorithm = error.algorithm;
      if ((algorithm.getPriority() != CWConfigurationError.PRIORITY_BOT_ONLY) && (error.full)) {
        OnePageWindow.markPageAsFixed(algorithm.getErrorNumberString(), page);
      }
    }
  }
  return null;
}
Use of org.wikipediacleaner.api.data.QueryResult in project wpcleaner by WPCleaner: class MediaWikiAPI, method updateSection.
/**
 * Update a section or create a new section in a page.
 *
 * @param wikipedia Wikipedia.
 * @param page Page.
 * @param title Title of the new section, also used as the edit summary.
 * @param section Section ("new" for a new section).
 * @param contents Contents.
 * @param bot True if the edit should be flagged as bot.
 * @param minor True if the modification should be tagged as minor.
 * @param automatic True if the modification is automatic.
 * @param forceWatch Force watching the page.
 * @return Result of the command.
 * @throws APIException Exception thrown by the API.
 */
private QueryResult updateSection(EnumWikipedia wikipedia, Page page, String title, String section, String contents, boolean bot, boolean minor, boolean automatic, boolean forceWatch) throws APIException {
  // Defensive checks: null arguments are caller-side programming errors.
  if (page == null) {
    throw new APIException("Page is null");
  }
  if (title == null) {
    throw new APIException("Title is null");
  }
  if (contents == null) {
    throw new APIException("Contents is null");
  }
  // An anonymous connection has none of the three credentials set;
  // any one of them being present is treated as "logged in".
  ConnectionInformation connection = wikipedia.getConnection();
  if ((connection.getLgToken() == null) && (connection.getLgUserId() == null) && (connection.getLgUserName() == null)) {
    throw new APIException("You must be logged in to update pages");
  }
  // Outer loop: at most two attempts; the second attempt only happens after
  // a recoverable error (e.g. BAD_TOKEN) on the first one.
  int attemptNumber = 0;
  QueryResult result = null;
  do {
    attemptNumber++;
    // Build the action=edit request parameters.
    Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
    // Server-side assertion that the session is still logged in.
    properties.put("assert", "user");
    if (page.getContentsTimestamp() != null) {
      // Base timestamp lets the server detect edit conflicts.
      properties.put("basetimestamp", page.getContentsTimestamp());
    }
    if (bot) {
      properties.put("bot", "");
    }
    if (minor) {
      properties.put("minor", "");
    }
    properties.put("section", section);
    properties.put("sectiontitle", title);
    String startTimestamp = page.getStartTimestamp();
    if ((startTimestamp != null) && !startTimestamp.isEmpty()) {
      properties.put("starttimestamp", startTimestamp);
    }
    // The section title doubles as the edit summary.
    properties.put("summary", title);
    properties.put("text", contents);
    properties.put("title", page.getTitle());
    // Only send the edit token when one is available, consistent with
    // updatePage() (the original unconditionally put a possibly-null token).
    if (wikipedia.getConnection().getEditToken() != null) {
      properties.put("token", wikipedia.getConnection().getEditToken());
    }
    properties.put("watchlist", forceWatch ? "watch" : "nochange");
    // Lets the wiki configuration adjust the summary and edit tags.
    CommentManager.manageComment(wikipedia.getConfiguration(), properties, "summary", "tags", automatic);
    checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
    try {
      // Inner loop: resubmit the same edit with the CAPTCHA answer until the
      // server stops asking for one (or the user refuses to answer).
      boolean hasCaptcha = false;
      do {
        hasCaptcha = false;
        try {
          // NOTE(review): updatePage() allows 3 low-level retries here while
          // this method allows 1 — confirm whether that difference is intended.
          result = constructEdit(getRoot(wikipedia, properties, 1), "/api/edit");
        } catch (CaptchaException e) {
          String captchaAnswer = getCaptchaAnswer(wikipedia, e);
          if (captchaAnswer != null) {
            properties.put("captchaid", e.getId());
            properties.put("captchaword", captchaAnswer);
            hasCaptcha = true;
          } else {
            // User gave no answer: abort the edit.
            throw new APIException("CAPTCHA", e);
          }
        }
      } while (hasCaptcha);
    } catch (APIException e) {
      // Second failure: give up (now logged, consistent with updatePage()).
      if (attemptNumber > 1) {
        log.warn("Error updating page {}", page.getTitle());
        throw e;
      }
      EnumQueryResult queryResult = e.getQueryResult();
      if (queryResult == EnumQueryResult.BAD_TOKEN) {
        // Stale edit token: refresh tokens, then let the outer loop retry.
        waitBeforeRetrying();
        log.warn("Retrieving tokens after a BAD_TOKEN answer");
        retrieveTokens(wikipedia);
      } else if ((queryResult != null) && (!queryResult.shouldRetry())) {
        // Non-retryable error: fail immediately instead of resubmitting the
        // same doomed edit (consistent with updatePage()).
        log.warn("Error updating page {}", page.getTitle());
        throw e;
      }
    } catch (JDOMParseException e) {
      // Malformed XML response: not recoverable by retrying.
      // Parameterized logging, consistent with updatePage().
      log.error("Error updating page: {}", e.getMessage());
      throw new APIException("Error parsing XML", e);
    }
  } while (result == null);
  return result;
}
Aggregations