Use of org.wikipediacleaner.api.CaptchaException in project wpcleaner by WPCleaner.
In class MediaWikiAPI, method constructEdit.
/**
 * @param root Root element of the MediaWiki answer.
 * @param query XPath query leading to the edit result in the answer.
 * @return Result of the query.
 * @throws APIException Exception thrown by the API.
 * @throws CaptchaException CAPTCHA requested by MediaWiki.
 */
private QueryResult constructEdit(Element root, String query) throws APIException, CaptchaException {
  XPathExpression<Element> xpa = XPathFactory.instance().compile(query, Filters.element());
  Element node = xpa.evaluateFirst(root);
  if (node != null) {
    String result = node.getAttributeValue("result");
    if ("Success".equalsIgnoreCase(result)) {
      Integer pageId = null;
      try {
        pageId = Integer.valueOf(node.getAttributeValue("pageid"));
      } catch (NumberFormatException e) {
        // Ignore a missing or non-numeric "pageid" attribute
      }
      Integer pageOldRevId = null;
      try {
        pageOldRevId = Integer.valueOf(node.getAttributeValue("oldrevid"));
      } catch (NumberFormatException e) {
        // Ignore a missing or non-numeric "oldrevid" attribute
      }
      Integer pageNewRevId = null;
      try {
        pageNewRevId = Integer.valueOf(node.getAttributeValue("newrevid"));
      } catch (NumberFormatException e) {
        // Ignore a missing or non-numeric "newrevid" attribute
      }
      return QueryResult.createCorrectQuery(pageId, node.getAttributeValue("title"), pageOldRevId, pageNewRevId);
    } else if ("Failure".equalsIgnoreCase(result)) {
      XPathExpression<Element> xpaCaptcha = XPathFactory.instance().compile("./captcha", Filters.element());
      Element captcha = xpaCaptcha.evaluateFirst(node);
      if (captcha != null) {
        CaptchaException exception = new CaptchaException("Captcha", captcha.getAttributeValue("type"));
        exception.setMime(captcha.getAttributeValue("mime"));
        exception.setId(captcha.getAttributeValue("id"));
        exception.setURL(captcha.getAttributeValue("url"));
        throw exception;
      }
      String spamBlacklist = node.getAttributeValue("spamblacklist");
      if (spamBlacklist != null) {
        throw new APIException(GT._T("URL {0} is blacklisted", spamBlacklist));
      }
      throw new APIException(xmlOutputter.outputString(node));
    }
    return QueryResult.createErrorQuery(result, node.getAttributeValue("details"), node.getAttributeValue("wait"));
  }
  return QueryResult.createErrorQuery(null, null, null);
}
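For reference, the snippet below is a minimal standalone sketch, not taken from wpcleaner, of the two answer shapes that constructEdit distinguishes: a Success edit element carrying pageid, title, oldrevid and newrevid, and a Failure element carrying a captcha child that becomes a CaptchaException. The class name, XML samples and attribute values are invented for illustration; only JDOM2 calls already used above are assumed.

// Illustrative only: rough shapes of the MediaWiki edit answers that
// constructEdit() inspects. All attribute values are placeholders.
import java.io.StringReader;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;

public class EditAnswerShapes {

  // Success: these attributes are turned into a QueryResult by constructEdit().
  static final String SUCCESS =
      "<api><edit result=\"Success\" pageid=\"12345\" title=\"Sandbox\""
      + " oldrevid=\"100\" newrevid=\"101\"/></api>";

  // Failure with a CAPTCHA challenge: the <captcha> child becomes a
  // CaptchaException carrying type, mime, id and url.
  static final String CAPTCHA_FAILURE =
      "<api><edit result=\"Failure\">"
      + "<captcha type=\"image\" mime=\"image/png\" id=\"987\" url=\"/captcha.png\"/>"
      + "</edit></api>";

  public static void main(String[] args) throws Exception {
    SAXBuilder builder = new SAXBuilder();
    XPathExpression<Element> xpa =
        XPathFactory.instance().compile("/api/edit", Filters.element());
    for (String xml : new String[] { SUCCESS, CAPTCHA_FAILURE }) {
      Document doc = builder.build(new StringReader(xml));
      Element edit = xpa.evaluateFirst(doc);
      System.out.println("result=" + edit.getAttributeValue("result"));
    }
  }
}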
Use of org.wikipediacleaner.api.CaptchaException in project wpcleaner by WPCleaner.
In class MediaWikiAPI, method updatePage.
/**
* Update a page on Wikipedia.
*
* @param wikipedia Wikipedia.
* @param page Page.
* @param newContents New contents to use.
* @param comment Comment.
* @param bot True if the edit should be flagged as bot.
* @param minor True if the modification should be tagged as minor.
* @param automatic True if the modification is automatic.
* @param forceWatch Force watching the page.
* @return Result of the command.
* @throws APIException Exception thrown by the API.
*/
@Override
public QueryResult updatePage(EnumWikipedia wikipedia, Page page, String newContents, String comment, boolean bot, boolean minor, boolean automatic, boolean forceWatch) throws APIException {
  if (page == null) {
    throw new APIException("Page is null");
  }
  if (newContents == null) {
    throw new APIException("Contents is null");
  }
  if (comment == null) {
    throw new APIException("Comment is null");
  }
  ConnectionInformation connection = wikipedia.getConnection();
  if ((connection.getLgToken() == null) && (connection.getLgUserId() == null) && (connection.getLgUserName() == null)) {
    throw new APIException("You must be logged in to update pages");
  }
  int attemptNumber = 0;
  QueryResult result = null;
  do {
    attemptNumber++;
    Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
    properties.put("assert", "user");
    if (page.getContentsTimestamp() != null) {
      properties.put("basetimestamp", page.getContentsTimestamp());
    }
    if (bot) {
      properties.put("bot", "");
    }
    if (minor) {
      properties.put("minor", "");
    }
    if (page.getStartTimestamp() != null) {
      properties.put("starttimestamp", page.getStartTimestamp());
    }
    properties.put("summary", comment);
    properties.put("text", newContents);
    properties.put("title", page.getTitle());
    if (wikipedia.getConnection().getEditToken() != null) {
      properties.put("token", wikipedia.getConnection().getEditToken());
    }
    properties.put("watchlist", forceWatch ? "watch" : "nochange");
    CommentManager.manageComment(wikipedia.getConfiguration(), properties, "summary", "tags", automatic);
    checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
    try {
      boolean hasCaptcha = false;
      do {
        hasCaptcha = false;
        try {
          result = constructEdit(getRoot(wikipedia, properties, 3), "/api/edit");
        } catch (CaptchaException e) {
          String captchaAnswer = getCaptchaAnswer(wikipedia, e);
          if (captchaAnswer != null) {
            properties.put("captchaid", e.getId());
            properties.put("captchaword", captchaAnswer);
            hasCaptcha = true;
          } else {
            throw new APIException("CAPTCHA", e);
          }
        }
      } while (hasCaptcha);
    } catch (APIException e) {
      if (e.getHttpStatus() == HttpStatus.SC_GATEWAY_TIMEOUT) {
        log.warn("Gateway timeout, waiting to see if modification has been taken into account");
        waitBeforeRetrying();
        Page tmpPage = page.replicatePage();
        retrieveContents(wikipedia, Collections.singletonList(tmpPage), false, false);
        String tmpContents = tmpPage.getContents();
        if ((tmpContents != null) && (tmpContents.equals(newContents))) {
          return QueryResult.createCorrectQuery(tmpPage.getPageId(), tmpPage.getTitle(), page.getPageId(), tmpPage.getPageId());
        }
      }
      if (attemptNumber > 1) {
        log.warn("Error updating page {}", page.getTitle());
        throw e;
      }
      EnumQueryResult queryResult = e.getQueryResult();
      if (queryResult == EnumQueryResult.BAD_TOKEN) {
        waitBeforeRetrying();
        log.warn("Retrieving tokens after a BAD_TOKEN answer");
        retrieveTokens(wikipedia);
      } else if ((queryResult != null) && (!queryResult.shouldRetry())) {
        log.warn("Error updating page {}", page.getTitle());
        throw e;
      }
    } catch (JDOMParseException e) {
      log.error("Error updating page: {}", e.getMessage());
      throw new APIException("Error parsing XML", e);
    }
  } while (result == null);
  return result;
}
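As a usage sketch (not code from the project), the call below shows how updatePage might be driven once a Page has been loaded with its contents and timestamps; the retry, BAD_TOKEN and CAPTCHA handling all happen inside the method as shown above. The helper name and argument values are assumptions for illustration.

// Hypothetical caller: push fixed wikitext back as an automatic, minor,
// non-bot edit that leaves the watchlist unchanged.
QueryResult saveFixedText(MediaWikiAPI api, EnumWikipedia wiki, Page page, String fixedText) throws APIException {
  // Arguments: wiki, page, new contents, comment, bot, minor, automatic, forceWatch
  return api.updatePage(wiki, page, fixedText, "Fixing spelling", false, true, true, false);
}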
Use of org.wikipediacleaner.api.CaptchaException in project wpcleaner by WPCleaner.
In class MediaWikiAPI, method updateSection.
/**
 * Update a section or create a new section in a page.
 *
 * @param wikipedia Wikipedia.
 * @param page Page.
 * @param title Title of the new section.
 * @param section Section ("new" for a new section).
 * @param contents Contents.
 * @param bot True if the edit should be flagged as bot.
 * @param minor True if the modification should be tagged as minor.
 * @param automatic True if the modification is automatic.
 * @param forceWatch Force watching the page.
 * @return Result of the command.
 * @throws APIException Exception thrown by the API.
 */
private QueryResult updateSection(EnumWikipedia wikipedia, Page page, String title, String section, String contents, boolean bot, boolean minor, boolean automatic, boolean forceWatch) throws APIException {
  if (page == null) {
    throw new APIException("Page is null");
  }
  if (title == null) {
    throw new APIException("Title is null");
  }
  if (contents == null) {
    throw new APIException("Contents is null");
  }
  ConnectionInformation connection = wikipedia.getConnection();
  if ((connection.getLgToken() == null) && (connection.getLgUserId() == null) && (connection.getLgUserName() == null)) {
    throw new APIException("You must be logged in to update pages");
  }
  int attemptNumber = 0;
  QueryResult result = null;
  do {
    attemptNumber++;
    Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
    properties.put("assert", "user");
    if (page.getContentsTimestamp() != null) {
      properties.put("basetimestamp", page.getContentsTimestamp());
    }
    if (bot) {
      properties.put("bot", "");
    }
    if (minor) {
      properties.put("minor", "");
    }
    properties.put("section", section);
    properties.put("sectiontitle", title);
    String startTimestamp = page.getStartTimestamp();
    if ((startTimestamp != null) && !startTimestamp.isEmpty()) {
      properties.put("starttimestamp", startTimestamp);
    }
    String comment = title;
    properties.put("summary", comment);
    properties.put("text", contents);
    properties.put("title", page.getTitle());
    properties.put("token", wikipedia.getConnection().getEditToken());
    properties.put("watchlist", forceWatch ? "watch" : "nochange");
    CommentManager.manageComment(wikipedia.getConfiguration(), properties, "summary", "tags", automatic);
    checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
    try {
      boolean hasCaptcha = false;
      do {
        hasCaptcha = false;
        try {
          result = constructEdit(getRoot(wikipedia, properties, 1), "/api/edit");
        } catch (CaptchaException e) {
          String captchaAnswer = getCaptchaAnswer(wikipedia, e);
          if (captchaAnswer != null) {
            properties.put("captchaid", e.getId());
            properties.put("captchaword", captchaAnswer);
            hasCaptcha = true;
          } else {
            throw new APIException("CAPTCHA", e);
          }
        }
      } while (hasCaptcha);
    } catch (APIException e) {
      if (attemptNumber > 1) {
        throw e;
      }
      if (e.getQueryResult() == EnumQueryResult.BAD_TOKEN) {
        waitBeforeRetrying();
        log.warn("Retrieving tokens after a BAD_TOKEN answer");
        retrieveTokens(wikipedia);
      }
    } catch (JDOMParseException e) {
      log.error("Error updating page: {}", e.getMessage());
      throw new APIException("Error parsing XML", e);
    }
  } while (result == null);
  return result;
}
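To illustrate the "new" section case handled above, here is a hedged sketch of how this private helper could be invoked from inside MediaWikiAPI to append a section; the wrapper name is hypothetical. Note that updateSection reuses the section title as the edit summary (comment = title above).

// Hypothetical wrapper inside MediaWikiAPI: append a new section to a page.
// Passing "new" as the section asks the MediaWiki edit API to add a section
// with the given title; the title also serves as the edit summary.
QueryResult appendNewSection(EnumWikipedia wiki, Page page, String sectionTitle, String sectionText) throws APIException {
  // Arguments: wiki, page, title, section, contents, bot, minor, automatic, forceWatch
  return updateSection(wiki, page, sectionTitle, "new", sectionText, false, false, false, false);
}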