Use of org.wikipediacleaner.api.APIException in project wpcleaner by WPCleaner.
Class UpdateWarningWorker, method retrieveCheckWikiPages.
/**
 * Retrieve pages for a given error number.
 *
 * @param errorNumber Error number.
 * @param pages Map of (title, page) to complete.
 * @param tools Update warning tools if the pages should be added as articles.
 */
protected void retrieveCheckWikiPages(
    int errorNumber, Map<String, Page> pages, UpdateWarningTools tools) {
  CheckWiki cw = APIFactory.getCheckWiki();
  EnumWikipedia wiki = getWikipedia();
  CheckErrorAlgorithm algorithm = CheckErrorAlgorithms.getAlgorithm(wiki, errorNumber);
  List<AlgorithmError> errors = new ArrayList<>();
  try {
    cw.retrievePages(algorithm, 100000, wiki, errors);
    for (AlgorithmError error : errors) {
      for (int pageNum = 0; pageNum < error.getPageCount(); pageNum++) {
        Page page = error.getPage(pageNum);
        addPage(page, pages);
        if (tools != null) {
          tools.addArticle(page.getTitle());
        }
      }
    }
  } catch (APIException e) {
    // Swallow the error: the map keeps the pages retrieved so far.
  }
}
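Note how the APIException is deliberately swallowed: a failed Check Wiki request only leaves the map of pages incomplete. The following self-contained sketch illustrates the same swallow-and-continue pattern; ApiCallException and fetchTitle are hypothetical stand-ins for the wpcleaner types, not the real API.

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in for org.wikipediacleaner.api.APIException.
class ApiCallException extends Exception {
  ApiCallException(String message) {
    super(message);
  }
}

class SwallowAndContinue {

  // Pretend remote call that fails partway through (hypothetical helper).
  static String fetchTitle(int pageNum) throws ApiCallException {
    if (pageNum == 2) {
      throw new ApiCallException("network error on page " + pageNum);
    }
    return "Page " + pageNum;
  }

  // Same shape as retrieveCheckWikiPages: on failure, keep whatever
  // was collected so far instead of propagating the exception.
  static void collectPages(Map<String, Integer> pages) {
    try {
      for (int pageNum = 0; pageNum < 5; pageNum++) {
        pages.put(fetchTitle(pageNum), pageNum);
      }
    } catch (ApiCallException e) {
      // Swallow: the map simply stays partially filled.
    }
  }

  public static void main(String[] args) {
    Map<String, Integer> pages = new HashMap<>();
    collectPages(pages);
    System.out.println(pages); // Only pages 0 and 1 were collected.
  }
}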
Use of org.wikipediacleaner.api.APIException in project wpcleaner by WPCleaner.
Class TranslateWorker, method construct.
/**
 * @return Translated text.
 * @see org.wikipediacleaner.gui.swing.basic.BasicWorker#construct()
 */
@Override
public Object construct() {
  String text = initialText;
  try {
    Configuration config = Configuration.getConfiguration();
    text = translateInternalLinks(
        text,
        config.getBoolean(null, ConfigurationValueBoolean.TRANSLATION_INTERNAL_LINK_TEXT),
        config.getBoolean(null, ConfigurationValueBoolean.TRANSLATION_INTERLANGUAGE));
    text = translateCategories(
        text,
        config.getBoolean(null, ConfigurationValueBoolean.TRANSLATION_CATEGORY));
    text = translateTemplates(
        text,
        config.getBoolean(null, ConfigurationValueBoolean.TRANSLATION_TEMPLATE_NAME),
        config.getBoolean(null, ConfigurationValueBoolean.TRANSLATION_TEMPLATE_NO_PARAM));
  } catch (APIException e) {
    // Abort the translation: a null result signals failure to the caller.
    return null;
  }
  return text;
}
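TranslateWorker maps any APIException to a null result, leaving the caller to treat null as a failed translation. A minimal sketch of this null-on-failure convention, with a hypothetical ServiceException and translate helper standing in for the actual translation methods:

class NullOnFailure {

  // Hypothetical checked exception in place of APIException.
  static class ServiceException extends Exception {
  }

  // Pretend translation step that may fail (hypothetical helper).
  static String translate(String text) throws ServiceException {
    if (text.isEmpty()) {
      throw new ServiceException();
    }
    return "[[translated:" + text + "]]";
  }

  // Mirrors TranslateWorker.construct(): any failure collapses to null.
  static String construct(String initialText) {
    try {
      return translate(initialText);
    } catch (ServiceException e) {
      return null;
    }
  }

  public static void main(String[] args) {
    System.out.println(construct("Paris")); // [[translated:Paris]]
    System.out.println(construct(""));      // null: caller must handle the failure
  }
}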
Use of org.wikipediacleaner.api.APIException in project wpcleaner by WPCleaner.
Class UpdateISSNWarningWorker, method displayResult.
/**
 * Display results.
 *
 * @param stats Statistics.
 * @param startTime Start time.
 * @param errors Errors found.
 */
private void displayResult(Stats stats, long startTime, Map<String, List<String>> errors) {
  if (useList) {
    return;
  }

  // Errors
  if (errors != null) {

    // Configuration
    EnumWikipedia wiki = getWikipedia();

    // Compute synthesis
    StringBuilder buffer = new StringBuilder();
    List<String> keys = new ArrayList<>(errors.keySet());
    Collections.sort(keys);
    for (String key : keys) {
      List<String> values = errors.get(key);
      buffer.append("* ");
      if (values != null) {
        buffer.append(values.size());
        buffer.append(" x ");
      }
      buffer.append("ISSN ");
      buffer.append(key);
      buffer.append(" : ");
      if (values != null) {
        Collections.sort(values);
        int valueNum = 0;
        while (valueNum < values.size()) {
          if (valueNum > 0) {
            buffer.append(", ");
          }
          String value = values.get(valueNum);
          int begin = valueNum;
          while ((valueNum < values.size()) && (values.get(valueNum).equals(value))) {
            valueNum++;
          }
          if (valueNum > begin + 1) {
            buffer.append(valueNum - begin);
            buffer.append(" x ");
          }
          buffer.append("[[");
          buffer.append(value);
          buffer.append("]]");
        }
      }
      buffer.append("\n");
    }

    // Update synthesis on dedicated page
    WPCConfiguration config = wiki.getConfiguration();
    String pageName = config.getString(WPCConfigurationString.ISSN_ERRORS_PAGE);
    boolean saved = false;
    if ((pageName != null) && (pageName.trim().length() > 0)) {
      boolean updatePage = false;
      if (simulation && (getWindow() != null)) {
        int answer = Utilities.displayYesNoWarning(
            getWindow().getParentComponent(),
            GT._T("Do you want to update {0}?", pageName));
        if (answer == JOptionPane.YES_OPTION) {
          updatePage = true;
        }
      } else {
        updatePage = true;
      }
      if (updatePage) {
        try {
          Page page = DataManager.createSimplePage(wiki, pageName, null, null, null);
          API api = APIFactory.getAPI();
          api.retrieveContents(wiki, Collections.singletonList(page), false, false);
          String contents = page.getContents();
          if (contents != null) {
            // Locate the area between the BOT BEGIN and BOT END comments
            int begin = -1;
            int end = -1;
            for (ContentsComment comment : page.getAnalysis(contents, true).comments().getAll()) {
              String value = comment.getComment().trim();
              if ("BOT BEGIN".equals(value)) {
                if (begin < 0) {
                  begin = comment.getEndIndex();
                }
              } else if ("BOT END".equals(value)) {
                end = comment.getBeginIndex();
              }
            }
            if ((begin >= 0) && (end > begin)) {
              // Replace the delimited area with the new synthesis
              StringBuilder newText = new StringBuilder();
              newText.append(contents.substring(0, begin));
              newText.append("\n");
              newText.append(buffer.toString());
              newText.append(contents.substring(end));
              api.updatePage(
                  wiki, page, newText.toString(),
                  config.getString(WPCConfigurationString.ISSN_ERRORS_PAGE_COMMENT),
                  false, true, true, false);
              saved = true;
            }
          }
        } catch (APIException e) {
          // Ignore the failure: the synthesis will be shown in a window instead.
        }
      }
    }

    // Display synthesis
    if (!saved && (getWindow() != null)) {
      InformationWindow.createInformationWindow("ISSN", buffer.toString(), false, getWikipedia());
    }
  }

  // Statistics
  displayStats(stats, startTime);
}
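The core technique here is splicing new text between BOT BEGIN and BOT END comments, while the APIException catch makes a failed save fall back to showing the synthesis in a window. Below is a simplified, self-contained illustration of the marker splice; it uses plain indexOf on literal <!-- BOT BEGIN --> / <!-- BOT END --> strings, whereas the real code locates the comments through wpcleaner's page analysis:

class BotMarkerSplice {

  // Replace everything between the two markers with the new synthesis.
  static String replaceBetweenMarkers(String contents, String synthesis) {
    String beginMarker = "<!-- BOT BEGIN -->";
    String endMarker = "<!-- BOT END -->";
    int begin = contents.indexOf(beginMarker);
    int end = contents.indexOf(endMarker);
    if ((begin < 0) || (end < begin)) {
      return contents; // Markers missing or malformed: leave the page untouched.
    }
    begin += beginMarker.length();
    return contents.substring(0, begin) + "\n" + synthesis + contents.substring(end);
  }

  public static void main(String[] args) {
    String page = "Header\n<!-- BOT BEGIN -->\nold list\n<!-- BOT END -->\nFooter";
    String synthesis = "* 3 x ISSN 1234-5678 : [[Some article]]\n";
    System.out.println(replaceBetweenMarkers(page, synthesis));
  }
}

Keeping the text outside the markers intact means any hand-written parts of the page survive each bot update; only the delimited area is regenerated.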
Use of org.wikipediacleaner.api.APIException in project wpcleaner by WPCleaner.
Class CheckWikiProjectWorker, method construct.
/* (non-Javadoc)
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {

  // Retrieving errors
  boolean errorLoaded = false;
  APIException exception = null;
  if (selectedAlgorithms != null) {
    for (final CheckErrorAlgorithm algorithm : selectedAlgorithms) {
      try {
        if ((algorithm != null) &&
            (algorithm.isAvailable()) &&
            (algorithm.getPriority() != CWConfigurationError.PRIORITY_BOT_ONLY)) {

          // Retrieving list of pages for the error number
          setText(
              GT._T("Checking for errors n°{0}", Integer.toString(algorithm.getErrorNumber())) +
              " - " + algorithm.getShortDescriptionReplaced());
          APIFactory.getCheckWiki().retrievePages(algorithm, errorLimit, getWikipedia(), errors);
          errorLoaded = true;
        }
      } catch (APIException e) {
        // Remember the last error, but keep trying the remaining algorithms.
        exception = e;
      }
    }
  }
  // Report the exception only if no error list could be loaded at all.
  if (!errorLoaded && (exception != null)) {
    return exception;
  }

  // Sorting errors by priority
  setText(GT._T("Sorting errors by priority"));
  Collections.sort(errors, new AlgorithmErrorComparator());
  return null;
}
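This worker remembers only the last APIException and reports it only when no list of pages could be retrieved at all; a single successful algorithm is enough to continue. A compact sketch of that fail-only-if-everything-failed pattern, with a hypothetical FetchException and fetch helper standing in for the Check Wiki calls:

import java.util.ArrayList;
import java.util.List;

class FailOnlyIfAllFailed {

  // Hypothetical checked exception in place of APIException.
  static class FetchException extends Exception {
  }

  // Pretend retrieval that fails for even error numbers (hypothetical helper).
  static List<String> fetch(int errorNumber) throws FetchException {
    if (errorNumber % 2 == 0) {
      throw new FetchException();
    }
    List<String> result = new ArrayList<>();
    result.add("error " + errorNumber);
    return result;
  }

  static Object construct(int[] errorNumbers) {
    boolean loaded = false;
    FetchException lastException = null;
    List<String> errors = new ArrayList<>();
    for (int errorNumber : errorNumbers) {
      try {
        errors.addAll(fetch(errorNumber));
        loaded = true; // At least one retrieval succeeded.
      } catch (FetchException e) {
        lastException = e; // Remember the failure, keep trying the rest.
      }
    }
    // Report the exception only when nothing could be loaded.
    if (!loaded && (lastException != null)) {
      return lastException;
    }
    return errors;
  }

  public static void main(String[] args) {
    System.out.println(construct(new int[] { 1, 2, 3 })); // [error 1, error 3]
    System.out.println(construct(new int[] { 2, 4 }));    // the FetchException instance
  }
}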
Use of org.wikipediacleaner.api.APIException in project wpcleaner by WPCleaner.
Class FullAnalysisWorker, method construct.
/* (non-Javadoc)
 * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
 */
@Override
public Object construct() {
  try {
    MediaWiki mw = MediaWiki.getMediaWikiAccess(this);
    final API api = APIFactory.getAPI();
    EnumWikipedia wiki = getWikipedia();
    mw.retrieveContents(wiki, page, false, false, false, true, false);
    api.retrieveLinks(wiki, page, Namespace.MAIN, knownPages, true, true);

    // Retrieve disambiguation information if not already retrieved
    List<Page> links = new ArrayList<>();
    for (Page link : page.getLinks()) {
      if (link.isDisambiguationPage() == null) {
        links.add(link);
      }
    }
    if (!links.isEmpty()) {
      mw.retrieveDisambiguationInformation(wiki, links, knownPages, true, false, true);
    }

    // Retrieve more information on disambiguation pages
    for (Page link : page.getLinks()) {
      if (Boolean.TRUE.equals(link.isDisambiguationPage())) {
        Iterator<Page> itLink = link.getRedirects().getIteratorWithPage();
        while (itLink.hasNext()) {
          Page link2 = itLink.next();
          if (!link2.getRedirects().isRedirect()) {
            mw.retrieveAllLinks(wiki, link2, null, knownPages, false, false);
          }
          if (link.hasWiktionaryTemplate() && (link.getContents() == null)) {
            mw.retrieveContents(wiki, link2, false, false, false, true, false);
          }
        }
      }
    }
    if (CheckErrorAlgorithms.isAlgorithmActive(wiki, 508)) {
      mw.retrieveAllTemplates(wiki, page, false);
    }
    mw.block(true);
    if (Boolean.FALSE.equals(page.isExisting())) {
      mw.retrieveSimilarPages(wiki, page);
    }
    setText("Analyzing data");
    PageAnalysis analysis = page.getAnalysis(page.getContents(), true);
    AlgorithmError.analyzeErrors(algorithms, analysis, false);
  } catch (APIException e) {
    // Return the exception as the result so the caller can report it.
    return e;
  }
  return null;
}
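Like CheckWikiProjectWorker, this construct() never throws: it returns the APIException as the worker's result so the caller can decide how to report it. A self-contained sketch of that exception-as-return-value convention, using stand-in types rather than the real worker framework:

class ExceptionAsResult {

  // Hypothetical stand-in for APIException.
  static class ApiCallException extends Exception {
    ApiCallException(String message) {
      super(message);
    }
  }

  // Mirrors FullAnalysisWorker.construct(): null on success, the exception on failure.
  static Object construct(boolean fail) {
    try {
      if (fail) {
        throw new ApiCallException("MediaWiki API unreachable");
      }
      return null; // Success.
    } catch (ApiCallException e) {
      return e;
    }
  }

  public static void main(String[] args) {
    Object result = construct(true);
    if (result instanceof ApiCallException) {
      // The caller, not construct(), decides how to surface the failure.
      System.out.println("Analysis failed: " + ((ApiCallException) result).getMessage());
    } else {
      System.out.println("Analysis completed");
    }
  }
}

Returning the exception keeps construct() compatible with a framework that only sees an Object result, at the cost of an instanceof check on the caller's side.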