Search in sources:

Example 1 with MediaWiki

Use of org.wikipediacleaner.api.MediaWiki in the project wpcleaner by WPCleaner.

From the class UpdateWarningTools, method updateWarning.

// ==========================================================================
// Warning management
// ==========================================================================
/**
 * Update warning for a list of pages.
 *
 * <p>Retrieves the information needed for each page, loads the talk pages
 * (and optional "To do" sub-pages), then updates the warning on each page
 * and records the outcome in the statistics.
 *
 * @param pages List of pages.
 * @param creators For each page title, user who has created the page.
 * @param modifiers For each page title, users who have modified the page.
 * @param stats Statistics (may be null; then no statistics are recorded).
 * @throws APIException Exception thrown by the API.
 */
public void updateWarning(List<Page> pages, Map<String, String> creators, Map<String, List<String>> modifiers, Stats stats) throws APIException {
    if ((pages == null) || (pages.isEmpty())) {
        return;
    }
    // Retrieve information in the pages; abort if it could not be retrieved
    if (!retrievePageInformation(pages)) {
        return;
    }
    // Deal with non encyclopedic pages
    manageNonEncyclopedicPages(pages);
    // Load talk pages and "To do" sub pages
    Map<Page, Page> mapTalkPages = new HashMap<>();
    Map<Page, Page> mapTodoSubpages = new HashMap<>();
    // Configuration value is loop-invariant: look it up once instead of per page
    String todoSubpageAttr = configuration.getString(WPCConfigurationString.TODO_SUBPAGE);
    for (Page page : pages) {
        Page talkPage = page.getTalkPage();
        mapTalkPages.put(page, talkPage);
        if (todoSubpageAttr != null) {
            mapTodoSubpages.put(page, talkPage.getSubPage(todoSubpageAttr));
        }
    }
    if (canUpdateWarning()) {
        MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
        if (section0) {
            // Only section 0 of each talk page is needed
            mw.retrieveSectionContents(wiki, mapTalkPages.values(), 0, false);
        } else {
            mw.retrieveContents(wiki, mapTalkPages.values(), false, false, false, false);
        }
        mw.retrieveContents(wiki, mapTodoSubpages.values(), true, false, false, false);
        if (mw.shouldStop()) {
            return;
        }
    }
    // Update warning on each page and record statistics
    for (Page page : pages) {
        PageAnalysis pageAnalysis = page.getAnalysis(page.getContents(), true);
        boolean updated = updateWarning(pageAnalysis, page.getRevisionId(), mapTalkPages.get(page), mapTodoSubpages.get(page), (creators != null) ? creators.get(page.getTitle()) : null, (modifiers != null) ? modifiers.get(page.getTitle()) : null, stats);
        if (stats != null) {
            stats.addAnalyzedPage(page);
            if (updated) {
                stats.addUpdatedPage(page);
            }
        }
    }
}
Also used : HashMap(java.util.HashMap) PageAnalysis(org.wikipediacleaner.api.data.analysis.PageAnalysis) Page(org.wikipediacleaner.api.data.Page) WPCConfigurationString(org.wikipediacleaner.api.configuration.WPCConfigurationString) ConfigurationValueString(org.wikipediacleaner.utils.ConfigurationValueString) MediaWiki(org.wikipediacleaner.api.MediaWiki)

Example 2 with MediaWiki

Use of org.wikipediacleaner.api.MediaWiki in the project wpcleaner by WPCleaner.

From the class UpdateDabWarningTools, method retrievePageInformation.

/**
 * Retrieve information in the pages to construct the warning.
 *
 * <p>Runs up to three phases, each skipped when its data is already
 * available: (1) retrieve the outgoing links of each page, (2) determine
 * which of those links point to disambiguation pages (using the
 * dabPages/nonDabPages caches or the wiki's preloaded list), and
 * (3) retrieve the contents of pages that have at least one
 * disambiguation link.
 *
 * @param pages List of pages.
 * @return True if information was retrieved.
 * @throws APIException Exception thrown by the API.
 */
@Override
protected boolean retrievePageInformation(List<Page> pages) throws APIException {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
    // Retrieving links in each page
    if (!linksAvailable) {
        for (Page page : pages) {
            // Restricted to links in the main namespace.
            // NOTE(review): the meaning of the two trailing boolean flags is not
            // visible here — confirm against MediaWiki.retrieveAllLinks.
            mw.retrieveAllLinks(wiki, page, Namespace.MAIN, null, false, false);
        }
        // block(true) presumably waits for the queued retrievals to finish — TODO confirm.
        mw.block(true);
        if (shouldStop()) {
            return false;
        }
    }
    // Retrieving disambiguation information in each page
    boolean hasDisambiguationLink = false;
    if (!dabInformationAvailable) {
        if (!wiki.isDisambiguationPagesLoaded()) {
            // Full disambiguation list not loaded: answer from the
            // dabPages/nonDabPages caches, query the API for the rest.
            List<Page> tmpPages = new ArrayList<>();
            for (Page page : pages) {
                for (int numLink = 0; numLink < page.getLinks().size(); numLink++) {
                    Page link = page.getLinks().get(numLink);
                    if (dabPages.containsKey(link.getTitle())) {
                        // Known disambiguation page: substitute the cached object.
                        page.getLinks().set(numLink, dabPages.get(link.getTitle()));
                        hasDisambiguationLink = true;
                    } else if (nonDabPages.containsKey(link.getTitle())) {
                        // Known non-disambiguation page: substitute the cached object.
                        page.getLinks().set(numLink, nonDabPages.get(link.getTitle()));
                    } else {
                        // Unknown: collect for one batched API request below.
                        tmpPages.add(link);
                    }
                }
            }
            if (!tmpPages.isEmpty()) {
                mw.retrieveDisambiguationInformation(wiki, tmpPages, null, false, false, true);
            }
            // Populate the caches with the freshly retrieved answers.
            // isDisambiguationPage() returns a Boolean; a null (unknown) answer
            // is cached in nonDabPages here.
            for (Page page : tmpPages) {
                if (Boolean.TRUE.equals(page.isDisambiguationPage())) {
                    dabPages.put(page.getTitle(), page);
                    hasDisambiguationLink = true;
                } else {
                    nonDabPages.put(page.getTitle(), page);
                }
            }
        } else {
            // The complete set of disambiguation pages is already loaded:
            // flag each link locally, no API call needed.
            // NOTE(review): this branch iterates page.getRedirects().getLinks()
            // while the branch above iterates page.getLinks() — confirm the two
            // views are intentionally different.
            for (Page page : pages) {
                List<Page> links = page.getRedirects().getLinks();
                for (int numLink = 0; numLink < links.size(); numLink++) {
                    Page link = links.get(numLink);
                    if (Boolean.TRUE.equals(wiki.isDisambiguationPage(link))) {
                        link.setDisambiguationPage(Boolean.TRUE);
                        hasDisambiguationLink = true;
                    } else {
                        link.setDisambiguationPage(Boolean.FALSE);
                    }
                }
            }
        }
        if (shouldStop()) {
            return false;
        }
    }
    // Retrieving page contents, only for pages that actually contain
    // at least one disambiguation link
    if (hasDisambiguationLink && !getContentsAvailable()) {
        List<Page> tmpPages = new ArrayList<>();
        for (Page page : pages) {
            boolean toAdd = false;
            for (Page link : page.getLinks()) {
                if (Boolean.TRUE.equals(link.isDisambiguationPage())) {
                    toAdd = true;
                }
            }
            if (toAdd) {
                tmpPages.add(page);
            }
        }
        if (!tmpPages.isEmpty()) {
            mw.retrieveContents(wiki, tmpPages, true, false, false, false);
        }
    }
    return true;
}
Also used : ArrayList(java.util.ArrayList) Page(org.wikipediacleaner.api.data.Page) MediaWiki(org.wikipediacleaner.api.MediaWiki)

Example 3 with MediaWiki

Use of org.wikipediacleaner.api.MediaWiki in the project wpcleaner by WPCleaner.

From the class UpdateISBNWarningTools, method retrievePageInformation.

/**
 * Retrieve information in the pages to construct the warning.
 *
 * <p>Only the page contents are required; they are fetched in a single
 * batch unless already available.
 *
 * @param pages List of pages.
 * @return True if information was retrieved.
 * @throws APIException Exception thrown by the API.
 */
@Override
protected boolean retrievePageInformation(List<Page> pages) throws APIException {
    if (getContentsAvailable()) {
        // Contents already retrieved: nothing to do.
        return true;
    }
    // Batch-retrieve the contents of every page.
    MediaWiki.getMediaWikiAccess(worker).retrieveContents(wiki, pages, true, false, false, true);
    return true;
}
Also used : MediaWiki(org.wikipediacleaner.api.MediaWiki)

Example 4 with MediaWiki

Use of org.wikipediacleaner.api.MediaWiki in the project wpcleaner by WPCleaner.

From the class UpdateUnknownParameterWarningTools, method retrievePageInformation.

/**
 * Retrieve information in the pages to construct the warning.
 *
 * <p>The warning only needs the page contents, which are fetched in one
 * batch when they are not already cached.
 *
 * @param pages List of pages.
 * @return True if information was retrieved.
 * @throws APIException Exception thrown by the API.
 */
@Override
protected boolean retrievePageInformation(List<Page> pages) throws APIException {
    boolean contentsMissing = !getContentsAvailable();
    if (contentsMissing) {
        // Fetch the contents of all pages through the MediaWiki access layer.
        MediaWiki mediaWiki = MediaWiki.getMediaWikiAccess(worker);
        mediaWiki.retrieveContents(wiki, pages, true, false, false, true);
    }
    return true;
}
Also used : MediaWiki(org.wikipediacleaner.api.MediaWiki)

Example 5 with MediaWiki

Use of org.wikipediacleaner.api.MediaWiki in the project wpcleaner by WPCleaner.

From the class FullAnalysisWorker, method construct.

/**
 * Background task: retrieve everything needed for a full analysis of the
 * page (contents, links, disambiguation information, templates, similar
 * pages), then run the error-detection algorithms on it.
 *
 * @return The APIException on failure, null on success (the SwingWorker
 *         pattern reports errors through the returned object).
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
    try {
        MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
        final API api = APIFactory.getAPI();
        EnumWikipedia wiki = getWikipedia();
        // Retrieve the page contents and its links to the main namespace
        mw.retrieveContents(wiki, page, false, false, false, true, false);
        api.retrieveLinks(wiki, page, Namespace.MAIN, knownPages, true, true);
        // Retrieve disambiguation information if not already retrieved
        // (isDisambiguationPage() == null means "not yet known")
        List<Page> links = new ArrayList<>();
        for (Page link : page.getLinks()) {
            if (link.isDisambiguationPage() == null) {
                links.add(link);
            }
        }
        if (!links.isEmpty()) {
            mw.retrieveDisambiguationInformation(wiki, links, knownPages, true, false, true);
        }
        // Retrieve more information on disambiguation pages
        for (Page link : page.getLinks()) {
            if (Boolean.TRUE.equals(link.isDisambiguationPage())) {
                // Walk the link and every page in its redirect chain
                Iterator<Page> itLink = link.getRedirects().getIteratorWithPage();
                while (itLink.hasNext()) {
                    Page link2 = itLink.next();
                    if (!link2.getRedirects().isRedirect()) {
                        mw.retrieveAllLinks(wiki, link2, null, knownPages, false, false);
                    }
                    // NOTE(review): the condition tests link (template / contents)
                    // but the contents retrieved are link2's — confirm this
                    // mismatch is intentional.
                    if (link.hasWiktionaryTemplate() && (link.getContents() == null)) {
                        mw.retrieveContents(wiki, link2, false, false, false, true, false);
                    }
                }
            }
        }
        // Algorithm 508 ("missing template") needs the page's templates
        if (CheckErrorAlgorithms.isAlgorithmActive(wiki, 508)) {
            mw.retrieveAllTemplates(wiki, page, false);
        }
        // block(true) presumably waits for all queued retrievals to finish — TODO confirm
        mw.block(true);
        if (Boolean.FALSE.equals(page.isExisting())) {
            // Page does not exist: suggest similar page titles instead
            mw.retrieveSimilarPages(wiki, page);
        }
        setText("Analyzing data");
        PageAnalysis analysis = page.getAnalysis(page.getContents(), true);
        AlgorithmError.analyzeErrors(algorithms, analysis, false);
    } catch (APIException e) {
        // SwingWorker convention: hand the exception back as the result
        return e;
    }
    return null;
}
Also used : APIException(org.wikipediacleaner.api.APIException) ArrayList(java.util.ArrayList) EnumWikipedia(org.wikipediacleaner.api.constants.EnumWikipedia) PageAnalysis(org.wikipediacleaner.api.data.analysis.PageAnalysis) API(org.wikipediacleaner.api.API) Page(org.wikipediacleaner.api.data.Page) MediaWiki(org.wikipediacleaner.api.MediaWiki)

Aggregations

MediaWiki (org.wikipediacleaner.api.MediaWiki)20 Page (org.wikipediacleaner.api.data.Page)12 APIException (org.wikipediacleaner.api.APIException)9 ArrayList (java.util.ArrayList)6 EnumQueryPage (org.wikipediacleaner.api.constants.EnumQueryPage)6 API (org.wikipediacleaner.api.API)4 EnumWikipedia (org.wikipediacleaner.api.constants.EnumWikipedia)4 WPCConfigurationString (org.wikipediacleaner.api.configuration.WPCConfigurationString)3 PageAnalysis (org.wikipediacleaner.api.data.analysis.PageAnalysis)3 HashMap (java.util.HashMap)1 Properties (java.util.Properties)1 CheckErrorPage (org.wikipediacleaner.api.check.CheckErrorPage)1 CheckWiki (org.wikipediacleaner.api.check.CheckWiki)1 CheckErrorAlgorithm (org.wikipediacleaner.api.check.algorithm.CheckErrorAlgorithm)1 CWConfigurationError (org.wikipediacleaner.api.configuration.CWConfigurationError)1 WPCConfiguration (org.wikipediacleaner.api.configuration.WPCConfiguration)1 PageElementInternalLink (org.wikipediacleaner.api.data.PageElementInternalLink)1 Configuration (org.wikipediacleaner.utils.Configuration)1 ConfigurationValueString (org.wikipediacleaner.utils.ConfigurationValueString)1