Use of org.wikipediacleaner.api.MediaWiki in project wpcleaner by WPCleaner: class UpdateWarningTools, method updateWarning.
// ==========================================================================
// Warning management
// ==========================================================================

/**
 * Update warning for a list of pages.
 * 
 * @param pages List of pages.
 * @param creators For each page title, user who has created the page.
 * @param modifiers For each page title, users who have modified the page.
 * @param stats Statistics.
 * @throws APIException Exception thrown by the API.
 */
public void updateWarning(
    List<Page> pages,
    Map<String, String> creators,
    Map<String, List<String>> modifiers,
    Stats stats) throws APIException {
  if ((pages == null) || (pages.isEmpty())) {
    return;
  }

  // Retrieve information in the pages
  if (!retrievePageInformation(pages)) {
    return;
  }

  // Deal with non encyclopedic pages
  manageNonEncyclopedicPages(pages);

  // Load talk pages and "To do" sub pages
  Map<Page, Page> mapTalkPages = new HashMap<>();
  Map<Page, Page> mapTodoSubpages = new HashMap<>();
  for (Page page : pages) {
    Page talkPage = page.getTalkPage();
    mapTalkPages.put(page, talkPage);
    String todoSubpageAttr = configuration.getString(WPCConfigurationString.TODO_SUBPAGE);
    if (todoSubpageAttr != null) {
      Page todoSubpage = talkPage.getSubPage(todoSubpageAttr);
      mapTodoSubpages.put(page, todoSubpage);
    }
  }
  if (canUpdateWarning()) {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
    if (section0) {
      mw.retrieveSectionContents(wiki, mapTalkPages.values(), 0, false);
    } else {
      mw.retrieveContents(wiki, mapTalkPages.values(), false, false, false, false);
    }
    mw.retrieveContents(wiki, mapTodoSubpages.values(), true, false, false, false);
    if (mw.shouldStop()) {
      return;
    }
  }

  // Update warning
  for (Page page : pages) {
    PageAnalysis pageAnalysis = page.getAnalysis(page.getContents(), true);
    boolean updated = updateWarning(
        pageAnalysis, page.getRevisionId(),
        mapTalkPages.get(page), mapTodoSubpages.get(page),
        (creators != null) ? creators.get(page.getTitle()) : null,
        (modifiers != null) ? modifiers.get(page.getTitle()) : null,
        stats);
    if (updated) {
      // log.debug("Page " + page.getTitle() + " has been updated.");
    }
    if (stats != null) {
      stats.addAnalyzedPage(page);
      if (updated) {
        stats.addUpdatedPage(page);
      }
    }
  }
  return;
}
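For context, here is a minimal call-site sketch. It assumes an already-constructed UpdateWarningTools instance (the class is typically used through its subclasses, such as UpdateDabWarningTools) and an existing Stats object; the helper class, user names, and loop contents below are illustrative assumptions, not code from wpcleaner, and imports for UpdateWarningTools and Stats are omitted because their packages are not shown in the snippet.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.wikipediacleaner.api.APIException;
import org.wikipediacleaner.api.data.Page;

// Hypothetical helper, not part of wpcleaner.
final class WarningUpdateExample {

  static void runUpdate(
      UpdateWarningTools tools, // constructed elsewhere for the target wiki
      List<Page> pages,
      Stats stats) throws APIException {
    // Build the per-title maps the method expects; the user names here
    // are placeholders.
    Map<String, String> creators = new HashMap<>();
    Map<String, List<String>> modifiers = new HashMap<>();
    for (Page page : pages) {
      creators.put(page.getTitle(), "ExampleCreator");
      modifiers.put(page.getTitle(), Arrays.asList("UserA", "UserB"));
    }
    // Both maps (and stats) may also be null; updateWarning guards
    // against that, as the snippet above shows.
    tools.updateWarning(pages, creators, modifiers, stats);
  }
}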
Use of org.wikipediacleaner.api.MediaWiki in project wpcleaner by WPCleaner: class UpdateDabWarningTools, method retrievePageInformation.
/**
 * Retrieve information in the pages to construct the warning.
 * 
 * @param pages List of pages.
 * @return True if information was retrieved.
 * @throws APIException Exception thrown by the API.
 */
@Override
protected boolean retrievePageInformation(List<Page> pages) throws APIException {
  MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);

  // Retrieving links in each page
  if (!linksAvailable) {
    for (Page page : pages) {
      mw.retrieveAllLinks(wiki, page, Namespace.MAIN, null, false, false);
    }
    mw.block(true);
    if (shouldStop()) {
      return false;
    }
  }

  // Retrieving disambiguation information in each page
  boolean hasDisambiguationLink = false;
  if (!dabInformationAvailable) {
    if (!wiki.isDisambiguationPagesLoaded()) {
      List<Page> tmpPages = new ArrayList<>();
      for (Page page : pages) {
        for (int numLink = 0; numLink < page.getLinks().size(); numLink++) {
          Page link = page.getLinks().get(numLink);
          if (dabPages.containsKey(link.getTitle())) {
            page.getLinks().set(numLink, dabPages.get(link.getTitle()));
            hasDisambiguationLink = true;
          } else if (nonDabPages.containsKey(link.getTitle())) {
            page.getLinks().set(numLink, nonDabPages.get(link.getTitle()));
          } else {
            tmpPages.add(link);
          }
        }
      }
      if (!tmpPages.isEmpty()) {
        mw.retrieveDisambiguationInformation(wiki, tmpPages, null, false, false, true);
      }
      for (Page page : tmpPages) {
        if (Boolean.TRUE.equals(page.isDisambiguationPage())) {
          dabPages.put(page.getTitle(), page);
          hasDisambiguationLink = true;
        } else {
          nonDabPages.put(page.getTitle(), page);
        }
      }
    } else {
      for (Page page : pages) {
        List<Page> links = page.getRedirects().getLinks();
        for (int numLink = 0; numLink < links.size(); numLink++) {
          Page link = links.get(numLink);
          if (Boolean.TRUE.equals(wiki.isDisambiguationPage(link))) {
            link.setDisambiguationPage(Boolean.TRUE);
            hasDisambiguationLink = true;
          } else {
            link.setDisambiguationPage(Boolean.FALSE);
          }
        }
      }
    }
    if (shouldStop()) {
      return false;
    }
  }

  // Retrieving page contents
  if (hasDisambiguationLink && !getContentsAvailable()) {
    List<Page> tmpPages = new ArrayList<>();
    for (Page page : pages) {
      boolean toAdd = false;
      for (Page link : page.getLinks()) {
        if (Boolean.TRUE.equals(link.isDisambiguationPage())) {
          toAdd = true;
        }
      }
      if (toAdd) {
        tmpPages.add(page);
      }
    }
    if (!tmpPages.isEmpty()) {
      mw.retrieveContents(wiki, tmpPages, true, false, false, false);
    }
  }
  return true;
}
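The dabPages/nonDabPages pair above is a memoization of per-title disambiguation lookups: a title is queried through the API at most once, then answered from the cache. A standalone sketch of the same pattern, with the wiki lookup simulated and all names illustrative, looks like this:

import java.util.HashMap;
import java.util.Map;

// Standalone illustration of the caching pattern; not wpcleaner code.
final class DabCacheSketch {

  private final Map<String, Boolean> cache = new HashMap<>();

  // Simulated expensive lookup (in wpcleaner this is a MediaWiki API call).
  private boolean queryWiki(String title) {
    return title.endsWith("(disambiguation)");
  }

  boolean isDisambiguation(String title) {
    // computeIfAbsent performs the expensive lookup only on a cache miss.
    return cache.computeIfAbsent(title, this::queryWiki);
  }

  public static void main(String[] args) {
    DabCacheSketch sketch = new DabCacheSketch();
    System.out.println(sketch.isDisambiguation("Mercury (disambiguation)")); // queried
    System.out.println(sketch.isDisambiguation("Mercury (disambiguation)")); // cached
  }
}

wpcleaner keeps two maps of Page objects rather than one Boolean map because it substitutes the cached Page back into each page's link list; the sketch collapses that detail for brevity.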
Use of org.wikipediacleaner.api.MediaWiki in project wpcleaner by WPCleaner: class UpdateISBNWarningTools, method retrievePageInformation.
/**
 * Retrieve information in the pages to construct the warning.
 * 
 * @param pages List of pages.
 * @return True if information was retrieved.
 * @throws APIException Exception thrown by the API.
 */
@Override
protected boolean retrievePageInformation(List<Page> pages) throws APIException {
  // Retrieving page contents
  if (!getContentsAvailable()) {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
    mw.retrieveContents(wiki, pages, true, false, false, true);
  }
  return true;
}
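Across these classes, retrievePageInformation is the hook of a template method: UpdateWarningTools.updateWarning (first snippet) calls it before doing any work, and each subclass prefetches only what its particular warning needs. A minimal standalone sketch of that structure follows; the class and method names are illustrative, not wpcleaner's.

import java.util.List;

// Standalone illustration of the template-method structure; not wpcleaner code.
abstract class WarningToolsSketch {

  // Template method: shared driver, as in UpdateWarningTools.updateWarning.
  final void update(List<String> pages) {
    if (!retrieveInfo(pages)) {
      return; // prefetch failed or was interrupted
    }
    for (String page : pages) {
      System.out.println("Updating warning on " + page);
    }
  }

  // Hook: each subclass prefetches only the data its warning needs.
  protected abstract boolean retrieveInfo(List<String> pages);
}

final class IsbnSketch extends WarningToolsSketch {
  @Override
  protected boolean retrieveInfo(List<String> pages) {
    System.out.println("Fetching contents of " + pages.size() + " pages");
    return true;
  }
}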
Use of org.wikipediacleaner.api.MediaWiki in project wpcleaner by WPCleaner: class UpdateUnknownParameterWarningTools, method retrievePageInformation.
/**
 * Retrieve information in the pages to construct the warning.
 * 
 * @param pages List of pages.
 * @return True if information was retrieved.
 * @throws APIException Exception thrown by the API.
 */
@Override
protected boolean retrievePageInformation(List<Page> pages) throws APIException {
  // Retrieving page contents
  if (!getContentsAvailable()) {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
    mw.retrieveContents(wiki, pages, true, false, false, true);
  }
  return true;
}
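UpdateISBNWarningTools and UpdateUnknownParameterWarningTools share this override verbatim, down to the boolean arguments of retrieveContents. One possible refactoring, shown as a sketch rather than anything present in wpcleaner, would hoist the shared body into a protected helper on the common base class; it reuses only calls that already appear in the snippets above.

// Hypothetical helper in the shared base class; not wpcleaner code.
protected boolean retrieveContentsIfNeeded(List<Page> pages) throws APIException {
  if (!getContentsAvailable()) {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(worker);
    mw.retrieveContents(wiki, pages, true, false, false, true);
  }
  return true;
}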
Use of org.wikipediacleaner.api.MediaWiki in project wpcleaner by WPCleaner: class FullAnalysisWorker, method construct.
/* (non-Javadoc)
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
  try {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
    final API api = APIFactory.getAPI();
    EnumWikipedia wiki = getWikipedia();
    mw.retrieveContents(wiki, page, false, false, false, true, false);
    api.retrieveLinks(wiki, page, Namespace.MAIN, knownPages, true, true);

    // Retrieve disambiguation information if not already retrieved
    List<Page> links = new ArrayList<>();
    for (Page link : page.getLinks()) {
      if (link.isDisambiguationPage() == null) {
        links.add(link);
      }
    }
    if (!links.isEmpty()) {
      mw.retrieveDisambiguationInformation(wiki, links, knownPages, true, false, true);
    }

    // Retrieve more information on disambiguation pages
    for (Page link : page.getLinks()) {
      if (Boolean.TRUE.equals(link.isDisambiguationPage())) {
        Iterator<Page> itLink = link.getRedirects().getIteratorWithPage();
        while (itLink.hasNext()) {
          Page link2 = itLink.next();
          if (!link2.getRedirects().isRedirect()) {
            mw.retrieveAllLinks(wiki, link2, null, knownPages, false, false);
          }
          if (link.hasWiktionaryTemplate() && (link.getContents() == null)) {
            mw.retrieveContents(wiki, link2, false, false, false, true, false);
          }
        }
      }
    }
    if (CheckErrorAlgorithms.isAlgorithmActive(wiki, 508)) {
      mw.retrieveAllTemplates(wiki, page, false);
    }
    mw.block(true);
    if (Boolean.FALSE.equals(page.isExisting())) {
      mw.retrieveSimilarPages(wiki, page);
    }
    setText("Analyzing data");
    PageAnalysis analysis = page.getAnalysis(page.getContents(), true);
    AlgorithmError.analyzeErrors(algorithms, analysis, false);
  } catch (APIException e) {
    return e;
  }
  return null;
}
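Note the error-handling convention in construct(): an APIException is returned rather than thrown, so the Swing worker's completion callback can inspect the result object after the background thread finishes. A minimal standalone sketch of the same convention, with illustrative names and a simulated failure, looks like this:

// Standalone illustration of the "return the exception" convention;
// not wpcleaner code.
final class ConstructConvention {

  static Object construct(Runnable task) {
    try {
      task.run();
      return null; // null signals success
    } catch (RuntimeException e) {
      return e;    // caller inspects the returned object
    }
  }

  public static void main(String[] args) {
    Object result = construct(() -> {
      throw new IllegalStateException("simulated API failure");
    });
    if (result instanceof Exception) {
      System.out.println("Worker failed: " + result);
    }
  }
}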