Search in sources:

Example 1 with ConnectionInformation

Use of org.wikipediacleaner.api.constants.ConnectionInformation in project wpcleaner by WPCleaner.

In the class MediaWikiAPI, method updatePage.

/**
 * Update a page on Wikipedia.
 *
 * @param wikipedia Wikipedia.
 * @param page Page.
 * @param newContents New contents to use.
 * @param comment Comment.
 * @param bot True if the edit should be flagged as bot.
 * @param minor True if the modification should be tagged as minor.
 * @param automatic True if the modification is automatic.
 * @param forceWatch Force watching the page.
 * @return Result of the command.
 * @throws APIException Exception thrown by the API.
 */
@Override
public QueryResult updatePage(EnumWikipedia wikipedia, Page page, String newContents, String comment, boolean bot, boolean minor, boolean automatic, boolean forceWatch) throws APIException {
    if (page == null) {
        throw new APIException("Page is null");
    }
    if (newContents == null) {
        throw new APIException("Contents is null");
    }
    if (comment == null) {
        throw new APIException("Comment is null");
    }
    ConnectionInformation connection = wikipedia.getConnection();
    if ((connection.getLgToken() == null) && (connection.getLgUserId() == null) && (connection.getLgUserName() == null)) {
        throw new APIException("You must be logged in to update pages");
    }
    int attemptNumber = 0;
    QueryResult result = null;
    do {
        attemptNumber++;
        Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
        properties.put("assert", "user");
        if (page.getContentsTimestamp() != null) {
            properties.put("basetimestamp", page.getContentsTimestamp());
        }
        if (bot) {
            properties.put("bot", "");
        }
        if (minor) {
            properties.put("minor", "");
        }
        if (page.getStartTimestamp() != null) {
            properties.put("starttimestamp", page.getStartTimestamp());
        }
        properties.put("summary", comment);
        properties.put("text", newContents);
        properties.put("title", page.getTitle());
        if (wikipedia.getConnection().getEditToken() != null) {
            properties.put("token", wikipedia.getConnection().getEditToken());
        }
        properties.put("watchlist", forceWatch ? "watch" : "nochange");
        CommentManager.manageComment(wikipedia.getConfiguration(), properties, "summary", "tags", automatic);
        checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
        try {
            boolean hasCaptcha = false;
            do {
                hasCaptcha = false;
                try {
                    result = constructEdit(getRoot(wikipedia, properties, 3), "/api/edit");
                } catch (CaptchaException e) {
                    String captchaAnswer = getCaptchaAnswer(wikipedia, e);
                    if (captchaAnswer != null) {
                        properties.put("captchaid", e.getId());
                        properties.put("captchaword", captchaAnswer);
                        hasCaptcha = true;
                    } else {
                        throw new APIException("CAPTCHA", e);
                    }
                }
            } while (hasCaptcha);
        } catch (APIException e) {
            if (e.getHttpStatus() == HttpStatus.SC_GATEWAY_TIMEOUT) {
                log.warn("Gateway timeout, waiting to see if modification has been taken into account");
                waitBeforeRetrying();
                Page tmpPage = page.replicatePage();
                retrieveContents(wikipedia, Collections.singletonList(tmpPage), false, false);
                String tmpContents = tmpPage.getContents();
                if ((tmpContents != null) && (tmpContents.equals(newContents))) {
                    return QueryResult.createCorrectQuery(tmpPage.getPageId(), tmpPage.getTitle(), page.getPageId(), tmpPage.getPageId());
                }
            }
            if (attemptNumber > 1) {
                log.warn("Error updating page {}", page.getTitle());
                throw e;
            }
            EnumQueryResult queryResult = e.getQueryResult();
            if (queryResult == EnumQueryResult.BAD_TOKEN) {
                waitBeforeRetrying();
                log.warn("Retrieving tokens after a BAD_TOKEN answer");
                retrieveTokens(wikipedia);
            } else if ((queryResult != null) && (!queryResult.shouldRetry())) {
                log.warn("Error updating page {}", page.getTitle());
                throw e;
            }
        } catch (JDOMParseException e) {
            log.error("Error updating page: {}", e.getMessage());
            throw new APIException("Error parsing XML", e);
        }
    } while (result == null);
    return result;
}
Also used: JDOMParseException (org.jdom2.input.JDOMParseException), ConnectionInformation (org.wikipediacleaner.api.constants.ConnectionInformation), EnumQueryResult (org.wikipediacleaner.api.constants.EnumQueryResult), QueryResult (org.wikipediacleaner.api.data.QueryResult), APIException (org.wikipediacleaner.api.APIException), Page (org.wikipediacleaner.api.data.Page), EnumQueryPage (org.wikipediacleaner.api.constants.EnumQueryPage), CaptchaException (org.wikipediacleaner.api.CaptchaException)
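
For context, here is a minimal caller sketch for updatePage. It assumes that the org.wikipediacleaner.api.API interface declares updatePage with the signature shown above (the @Override annotation suggests so) and that the Page instance was retrieved beforehand, so its contents and start timestamps are already set; the edit summary and flag values are illustrative only.

import org.wikipediacleaner.api.API;
import org.wikipediacleaner.api.APIException;
import org.wikipediacleaner.api.constants.EnumWikipedia;
import org.wikipediacleaner.api.data.Page;
import org.wikipediacleaner.api.data.QueryResult;

public class UpdatePageSketch {

    /**
     * Pushes replacement wikitext to a page that was retrieved earlier,
     * so basetimestamp/starttimestamp are available for conflict detection.
     */
    static QueryResult pushEdit(API api, EnumWikipedia wiki, Page page, String newText) throws APIException {
        return api.updatePage(
            wiki,           // wiki the user is logged in to
            page,           // page with title and timestamps already filled in
            newText,        // full replacement wikitext
            "Fix typo",     // edit summary (illustrative)
            false,          // bot: do not flag as a bot edit
            true,           // minor: flag as a minor edit
            false,          // automatic: the edit was reviewed by the user
            false);         // forceWatch: leave the watchlist unchanged
    }
}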

Example 2 with ConnectionInformation

Use of org.wikipediacleaner.api.constants.ConnectionInformation in project wpcleaner by WPCleaner.

In the class BasicApiResult, method createHttpMethod.

// ==========================================================================
// HTTP management
// ==========================================================================
/**
 * Create an HttpMethod.
 *
 * @param properties Properties to drive the API.
 * @return HttpMethod.
 */
protected HttpMethod createHttpMethod(Map<String, String> properties) {
    if (shouldSendIdentification()) {
        ConnectionInformation connection = wiki.getConnection();
        if (connection.getLgToken() != null) {
            properties.put(ApiLoginRequest.PROPERTY_TOKEN, connection.getLgToken());
        }
        if (connection.getLgUserName() != null) {
            properties.put(ApiLoginRequest.PROPERTY_USER_NAME, connection.getLgUserName());
        }
        if (connection.getLgUserId() != null) {
            properties.put(ApiLoginRequest.PROPERTY_USER_ID, connection.getLgUserId());
        }
    }
    boolean getMethod = canUseGetMethod(properties);
    Configuration config = Configuration.getConfiguration();
    boolean useHttps = !config.getBoolean(null, ConfigurationValueBoolean.FORCE_HTTP_API);
    return Hc3HttpUtils.createHttpMethod(getWiki().getSettings().getApiURL(useHttps), properties, getMethod);
}
Also used: ConnectionInformation (org.wikipediacleaner.api.constants.ConnectionInformation), Configuration (org.wikipediacleaner.utils.Configuration)
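
To make the identification step explicit, here is a standalone sketch of what createHttpMethod does with ConnectionInformation before the HTTP method is built: it copies the login token, user name and user id into the request properties. The string keys used below are hypothetical stand-ins for the ApiLoginRequest.PROPERTY_* constants referenced above.

import java.util.HashMap;
import java.util.Map;

import org.wikipediacleaner.api.constants.ConnectionInformation;

public class IdentificationSketch {

    /**
     * Returns a copy of the request properties with the login identification
     * held by ConnectionInformation added, mirroring the logic above.
     */
    static Map<String, String> withIdentification(ConnectionInformation connection, Map<String, String> properties) {
        Map<String, String> result = new HashMap<>(properties);
        if (connection.getLgToken() != null) {
            // "lgtoken" stands in for ApiLoginRequest.PROPERTY_TOKEN
            result.put("lgtoken", connection.getLgToken());
        }
        if (connection.getLgUserName() != null) {
            // "lgusername" stands in for ApiLoginRequest.PROPERTY_USER_NAME
            result.put("lgusername", connection.getLgUserName());
        }
        if (connection.getLgUserId() != null) {
            // "lguserid" stands in for ApiLoginRequest.PROPERTY_USER_ID
            result.put("lguserid", connection.getLgUserId());
        }
        return result;
    }
}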

Example 3 with ConnectionInformation

Use of org.wikipediacleaner.api.constants.ConnectionInformation in project wpcleaner by WPCleaner.

In the class MediaWikiAPI, method updateSection.

/**
 * Update a section or create a new section in a page.
 *
 * @param wikipedia Wikipedia.
 * @param page Page.
 * @param title Title of the new section.
 * @param section Section ("new" for a new section).
 * @param contents Contents.
 * @param bot True if the edit should be flagged as bot.
 * @param minor True if the modification should be tagged as minor.
 * @param automatic True if the modification is automatic.
 * @param forceWatch Force watching the page.
 * @return Result of the command.
 * @throws APIException Exception thrown by the API.
 */
private QueryResult updateSection(EnumWikipedia wikipedia, Page page, String title, String section, String contents, boolean bot, boolean minor, boolean automatic, boolean forceWatch) throws APIException {
    if (page == null) {
        throw new APIException("Page is null");
    }
    if (title == null) {
        throw new APIException("Title is null");
    }
    if (contents == null) {
        throw new APIException("Contents is null");
    }
    ConnectionInformation connection = wikipedia.getConnection();
    if ((connection.getLgToken() == null) && (connection.getLgUserId() == null) && (connection.getLgUserName() == null)) {
        throw new APIException("You must be logged in to update pages");
    }
    int attemptNumber = 0;
    QueryResult result = null;
    do {
        attemptNumber++;
        Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
        properties.put("assert", "user");
        if (page.getContentsTimestamp() != null) {
            properties.put("basetimestamp", page.getContentsTimestamp());
        }
        if (bot) {
            properties.put("bot", "");
        }
        if (minor) {
            properties.put("minor", "");
        }
        properties.put("section", section);
        properties.put("sectiontitle", title);
        String startTimestamp = page.getStartTimestamp();
        if ((startTimestamp != null) && !startTimestamp.isEmpty()) {
            properties.put("starttimestamp", startTimestamp);
        }
        String comment = title;
        properties.put("summary", comment);
        properties.put("text", contents);
        properties.put("title", page.getTitle());
        properties.put("token", wikipedia.getConnection().getEditToken());
        properties.put("watchlist", forceWatch ? "watch" : "nochange");
        CommentManager.manageComment(wikipedia.getConfiguration(), properties, "summary", "tags", automatic);
        checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
        try {
            boolean hasCaptcha = false;
            do {
                hasCaptcha = false;
                try {
                    result = constructEdit(getRoot(wikipedia, properties, 1), "/api/edit");
                } catch (CaptchaException e) {
                    String captchaAnswer = getCaptchaAnswer(wikipedia, e);
                    if (captchaAnswer != null) {
                        properties.put("captchaid", e.getId());
                        properties.put("captchaword", captchaAnswer);
                        hasCaptcha = true;
                    } else {
                        throw new APIException("CAPTCHA", e);
                    }
                }
            } while (hasCaptcha);
        } catch (APIException e) {
            if (attemptNumber > 1) {
                throw e;
            }
            if (e.getQueryResult() == EnumQueryResult.BAD_TOKEN) {
                waitBeforeRetrying();
                log.warn("Retrieving tokens after a BAD_TOKEN answer");
                retrieveTokens(wikipedia);
            }
        } catch (JDOMParseException e) {
            log.error("Error updating page: {}", e.getMessage());
            throw new APIException("Error parsing XML", e);
        }
    } while (result == null);
    return result;
}
Also used: JDOMParseException (org.jdom2.input.JDOMParseException), ConnectionInformation (org.wikipediacleaner.api.constants.ConnectionInformation), EnumQueryResult (org.wikipediacleaner.api.constants.EnumQueryResult), QueryResult (org.wikipediacleaner.api.data.QueryResult), APIException (org.wikipediacleaner.api.APIException), CaptchaException (org.wikipediacleaner.api.CaptchaException)
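
Since updateSection is private, it cannot be called directly from outside MediaWikiAPI; the sketch below only illustrates the edit parameters it assembles when appending a new section (section "new") rather than rewriting an existing section index. The property names are taken from the method body above; the helper itself is hypothetical.

import java.util.HashMap;
import java.util.Map;

public class NewSectionParametersSketch {

    /**
     * Builds the MediaWiki edit parameters that updateSection assembles
     * for a new section, for illustration only.
     */
    static Map<String, String> newSectionProperties(String pageTitle, String sectionTitle, String wikitext, String editToken) {
        Map<String, String> properties = new HashMap<>();
        properties.put("section", "new");           // "new" appends a section to the page
        properties.put("sectiontitle", sectionTitle);
        properties.put("summary", sectionTitle);    // the section title doubles as the edit summary
        properties.put("text", wikitext);
        properties.put("title", pageTitle);
        properties.put("token", editToken);         // edit token taken from ConnectionInformation
        properties.put("watchlist", "nochange");
        return properties;
    }
}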

Example 4 with ConnectionInformation

Use of org.wikipediacleaner.api.constants.ConnectionInformation in project wpcleaner by WPCleaner.

In the class ApiLoginRequest, method login.

/**
 * Login.
 *
 * @param username User name.
 * @param password Password.
 * @return Login result.
 * @throws APIException Exception thrown by the API.
 */
public LoginResult login(String username, String password) throws APIException {
    Map<String, String> properties = getProperties(ACTION_LOGIN, result.getFormat());
    properties.put(PROPERTY_NAME, username);
    properties.put(PROPERTY_PASSWORD, password);
    ConnectionInformation connection = getWiki().getConnection();
    if ((connection != null) && (connection.getLoginToken() != null)) {
        properties.put(PROPERTY_TOKEN, connection.getLoginToken());
    }
    LoginResult loginResult = result.executeLogin(properties);
    return loginResult;
}
Also used: ConnectionInformation (org.wikipediacleaner.api.constants.ConnectionInformation), LoginResult (org.wikipediacleaner.api.data.LoginResult)
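
A minimal caller sketch for login follows. It assumes the request object was constructed elsewhere (its constructor is not part of the example), that ApiLoginRequest lives under org.wikipediacleaner.api.request, and that LoginResult exposes an isLoginSuccessful() accessor; all three are assumptions, not confirmed by the example.

import org.wikipediacleaner.api.APIException;
import org.wikipediacleaner.api.data.LoginResult;
import org.wikipediacleaner.api.request.ApiLoginRequest;

public class LoginSketch {

    /**
     * Performs the login call and reports whether it succeeded.
     */
    static boolean tryLogin(ApiLoginRequest request, String userName, String password) throws APIException {
        // login(username, password) also sends the login token held by ConnectionInformation, if any
        LoginResult loginResult = request.login(userName, password);
        return (loginResult != null) && loginResult.isLoginSuccessful();
    }
}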

Aggregations

ConnectionInformation (org.wikipediacleaner.api.constants.ConnectionInformation): 4 usages
JDOMParseException (org.jdom2.input.JDOMParseException): 2 usages
APIException (org.wikipediacleaner.api.APIException): 2 usages
CaptchaException (org.wikipediacleaner.api.CaptchaException): 2 usages
EnumQueryResult (org.wikipediacleaner.api.constants.EnumQueryResult): 2 usages
QueryResult (org.wikipediacleaner.api.data.QueryResult): 2 usages
EnumQueryPage (org.wikipediacleaner.api.constants.EnumQueryPage): 1 usage
LoginResult (org.wikipediacleaner.api.data.LoginResult): 1 usage
Page (org.wikipediacleaner.api.data.Page): 1 usage
Configuration (org.wikipediacleaner.utils.Configuration): 1 usage