Use of org.wikipediacleaner.api.APIException in the wpcleaner project (by WPCleaner): class NewSectionWorker, method construct.
/* (non-Javadoc)
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
  try {
    // Get hold of the MediaWiki API endpoint.
    setText(GT._T("Retrieving MediaWiki API"));
    final API mediaWikiApi = APIFactory.getAPI();

    // Append the new section (comment) to the target page.
    setText(GT._T("Adding comment"));
    mediaWikiApi.addNewSection(getWikipedia(), page, section, text, true, true, false, forceWatch);
    return null;
  } catch (APIException e) {
    // Worker convention: failures are reported by returning the exception.
    return e;
  }
}
Use of org.wikipediacleaner.api.APIException in the wpcleaner project (by WPCleaner): class RandomPageWorker, method construct.
/* (non-Javadoc)
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
  try {
    // Get hold of the MediaWiki API endpoint.
    setText(GT._T("Retrieving MediaWiki API"));
    final API mediaWikiApi = APIFactory.getAPI();

    // Ask the wiki for a single random page and keep its title
    // (empty string when nothing came back).
    setText(GT._T("Getting random page"));
    final List<Page> randomPages = mediaWikiApi.getRandomPages(getWikipedia(), 1, false);
    title = randomPages.isEmpty() ? "" : randomPages.get(0).getTitle();
  } catch (APIException e) {
    // Worker convention: failures are reported by returning the exception.
    return e;
  }
  return null;
}
Use of org.wikipediacleaner.api.APIException in the wpcleaner project (by WPCleaner): class RetrieveContentWorker, method construct.
/* (non-Javadoc)
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
  try {
    final MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
    if (page == null) {
      // No single page configured: fetch the contents of the whole list.
      mw.retrieveContents(getWikipedia(), pages, true, true, false, true);
    } else {
      mw.retrieveContents(getWikipedia(), page, true, false, true, false, true);
    }
    // NOTE(review): unlike the sibling workers this message is not wrapped in
    // GT._T(), so it is not internationalized — confirm whether intentional.
    setText("Analyzing data");
  } catch (APIException e) {
    // Worker convention: failures are reported by returning the exception.
    return e;
  }
  return null;
}
Use of org.wikipediacleaner.api.APIException in the wpcleaner project (by WPCleaner): class PageListWorker, method construct.
/* (non-Javadoc)
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
  try {
    final List<Page> result = new ArrayList<>();
    boolean needDisambiguationInfo = true;

    // Build the page list according to the requested mode.
    switch (mode) {
    case ALL_DAB_PAGES:
      // Every disambiguation page: status is already known, skip the extra query.
      constructAllDab(result);
      needDisambiguationInfo = false;
      break;
    case BACKLINKS:
      // Pages linking back to a given page.
      constructBackLinks(result);
      break;
    case CATEGORY_MEMBERS:
      // All members of a category.
      constructCategoryMembers(result);
      break;
    case CATEGORY_MEMBERS_ARTICLES:
      // Article members of a category.
      constructCategoryMembersArticles(result);
      break;
    case DAB_WATCH:
      // Pages with disambiguation links requiring attention.
      constructDabWatch(result);
      break;
    case EMBEDDED_IN:
      // Pages embedding a template.
      constructEmbeddedIn(result);
      break;
    case INTERNAL_LINKS_ALL:
      // All internal links in a page.
      constructInternalLinks(result, false, true);
      break;
    case INTERNAL_LINKS_MAIN:
      // Internal links in a page (main namespace).
      constructInternalLinks(result, false, false);
      break;
    case INTERNAL_LINKS_TALKPAGES_CONVERTED:
      // Internal links with talk pages converted.
      constructInternalLinks(result, true, false);
      break;
    case LINTER_CATEGORY:
      // Pages in a Linter category.
      constructLinterCategory(result);
      break;
    case MISSING_TEMPLATES:
      // Pages with missing templates.
      constructMissingTemplates(result);
      break;
    case PROTECTED_TITLES:
      // Protected titles with backlinks.
      constructProtectedTitles(result);
      break;
    case QUERY_PAGE:
      // A special list.
      constructQueryPage(result);
      break;
    case SEARCH_TITLES:
      // Pages similar to a title.
      constructSearchTitles(result);
      break;
    case WATCH_LIST:
      // Pages in the watch list.
      constructWatchList(result);
      break;
    default:
      result.addAll(constructInternalPageList());
      break;
    }

    if (needDisambiguationInfo) {
      // Only query the pages whose disambiguation status is still unknown.
      MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
      final List<Page> unknownStatus = new ArrayList<>();
      for (Page candidate : result) {
        if (candidate.isDisambiguationPage() == null) {
          unknownStatus.add(candidate);
        }
      }
      if (!unknownStatus.isEmpty()) {
        mw.retrieveDisambiguationInformation(getWikipedia(), unknownStatus, null, false, true, true);
      }
    }

    // Bail out without publishing results if the worker was cancelled.
    if (!shouldContinue()) {
      return null;
    }
    pageList.addAll(result);
  } catch (APIException e) {
    // Worker convention: failures are reported by returning the exception.
    return e;
  }
  return null;
}
Use of org.wikipediacleaner.api.APIException in the wpcleaner project (by WPCleaner): class AutomaticFixingWorker, method construct.
/**
 * Run the automatic fixing task in the background.
 *
 * @return Count of modified pages, or the {@link APIException} on failure.
 * @see org.wikipediacleaner.gui.swing.basic.BasicWorker#construct()
 */
@Override
public Object construct() {
  try {
    // Work on lightweight copies so the original Page objects are untouched.
    final Page[] workPages = new Page[pages.length];
    for (int i = 0; i < pages.length; i++) {
      workPages[i] = DataManager.createSimplePage(
          getWikipedia(), pages[i].getTitle(), pages[i].getPageId(), null, null);
    }

    // Apply every replacement and count how many pages were modified.
    MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
    Integer count = Integer.valueOf(mw.replaceText(
        workPages, replacements, getWikipedia(), comment, report,
        automaticCW, forceCW, save, true, true, pauseAfterEachEdit, botFix, parent));

    // Optionally show the modification report when something changed.
    if (showDescription && (count > 0)) {
      InformationWindow.createInformationWindow(
          GT.__("The following modifications have been done ({0} page):", "The following modifications have been done ({0} pages):", count, count.toString()),
          report.getReport(getWikipedia()), true, getWikipedia());
    }
    return count;
  } catch (APIException e) {
    // Worker convention: failures are reported by returning the exception.
    return e;
  }
}
Aggregations