Use of org.wikipediacleaner.api.data.Page in project wpcleaner by WPCleaner.
The class MediaWiki, method replaceText.
/**
 * Replace text in a list of pages.
 * 
 * @param pages List of pages.
 * @param replacements Text replacements to apply.
 *        Key: additional comment used for the modification.
 *        Value: list of text replacements.
 * @param wiki Wiki.
 * @param comment Comment used for the modification.
 * @param report (Out) Report of changes made.
 * @param automaticCW List of CW fixes that should be done.
 * @param forceCW List of CW fixes that should be done even if no automatic replacement was done.
 * @param save True if modifications should be saved.
 * @param updateDabWarning True to update disambiguation warnings.
 * @param minor True if the modification should be tagged as minor.
 * @param pauseAfterEachEdit True to pause after each edit.
 * @param botFix True to apply bot fixes.
 * @param parent Parent window.
 * @return Count of modified pages.
 * @throws APIException Exception thrown by the API.
 */
public int replaceText(
    Page[] pages, Map<String, List<AutomaticFixing>> replacements,
    EnumWikipedia wiki, String comment,
    ModificationReport report,
    Collection<CheckErrorAlgorithm> automaticCW, Collection<CheckErrorAlgorithm> forceCW,
    boolean save, boolean updateDabWarning, boolean minor,
    boolean pauseAfterEachEdit, boolean botFix, Component parent) throws APIException {
  if ((pages == null) || (replacements == null) || (replacements.size() == 0)) {
    return 0;
  }

  // Initialize page loading
  Configuration config = Configuration.getConfiguration();
  int nThreads = Math.max(config.getInt(null, ConfigurationValueInteger.INTERROG_THREAD), 1);
  int currentPage = 0;
  while ((currentPage < nThreads) && (currentPage < pages.length)) {
    // TODO: withRedirects=false ?
    retrieveContents(wiki, pages[currentPage], false, true, false, true, false);
    // To release memory
    pages[currentPage] = null;
    currentPage++;
  }

  // Analyze pages
  UpdateDabWarningTools dabWarnings = new UpdateDabWarningTools(wiki, null, false, false);
  int count = 0;
  final API api = APIFactory.getAPI();
  StringBuilder details = new StringBuilder();
  StringBuilder fullComment = new StringBuilder();
  ModificationReport.Modification modification = null;
  boolean stopRequested = false;
  while (hasRemainingTask() && !shouldStop() && !stopRequested) {
    Object result = getNextResult();
    if (currentPage < pages.length) {
      // TODO: withRedirects=false ?
      retrieveContents(wiki, pages[currentPage], false, true, false, true, false);
      // To release memory
      pages[currentPage] = null;
      currentPage++;
    }
    if ((result != null) && (result instanceof Page)) {
      List<String> replacementsDone = new ArrayList<>();
      Page page = (Page) result;
      String oldContents = page.getContents();
      if (oldContents != null) {
        String newContents = oldContents;
        details.setLength(0);
        fullComment.setLength(0);
        if (report != null) {
          modification = new ModificationReport.Modification(page.getTitle());
        }

        // Apply automatic fixing
        for (Entry<String, List<AutomaticFixing>> replacement : replacements.entrySet()) {
          replacementsDone.clear();
          String tmpContents = AutomaticFixing.apply(
              replacement.getValue(), newContents, replacementsDone);
          if (!newContents.equals(tmpContents)) {
            newContents = tmpContents;
            // Update description
            if (modification != null) {
              for (String replacementDone : replacementsDone) {
                modification.addModification(replacementDone);
              }
            }
            // Memorize replacement
            if ((replacement.getKey() != null) && (replacement.getKey().length() > 0)) {
              if (details.length() > 0) {
                details.append(", ");
              }
              details.append(replacement.getKey());
            }
          }
        }
        fullComment.append(wiki.createUpdatePageComment(comment, details.toString()));

        // Apply automatic CW fixing if needed
        if (automaticCW != null) {
          // Apply fixing
          List<AlgorithmError.Progress> usedAlgorithms = new ArrayList<>();
          String tmpContents = AutomaticFormatter.tidyArticle(
              page, newContents, automaticCW, botFix, usedAlgorithms);
          // Decide if modifications should be kept
          boolean shouldKeep = (!oldContents.equals(newContents));
          if (forceCW != null) {
            for (AlgorithmError.Progress progress : usedAlgorithms) {
              if (forceCW.contains(progress.algorithm)) {
                shouldKeep = true;
              }
            }
          }
          // Keep modifications
          if (shouldKeep) {
            newContents = tmpContents;
            if (!usedAlgorithms.isEmpty()) {
              fullComment.append(" / ");
              fullComment.append(wiki.getCWConfiguration().getComment(usedAlgorithms));
              if (modification != null) {
                for (AlgorithmError.Progress progress : usedAlgorithms) {
                  CheckErrorAlgorithm algorithm = progress.algorithm;
                  modification.addModification(algorithm.getShortDescriptionReplaced());
                }
              }
            }
          }
        }

        // Page contents have been modified
        if (!oldContents.equals(newContents)) {
          if (report != null) {
            report.addModification(modification);
          }
          // Save page
          setText(GT._T("Updating page {0}", page.getTitle()));
          count++;
          if (save && !stopRequested) {
            try {
              api.updatePage(
                  wiki, page, newContents, fullComment.toString(),
                  true, minor, false, false);
              if (updateDabWarning) {
                List<Page> tmpList = new ArrayList<>(1);
                tmpList.add(page);
                dabWarnings.updateWarning(tmpList, null, null, null);
              }
              if (pauseAfterEachEdit) {
                int answer = Utilities.displayYesNoAllWarning(
                    parent,
                    GT._T("The page {0} has been modified.", page.getTitle()) + "\n" +
                    GT._T("Do you want to continue?"));
                switch (answer) {
                case JOptionPane.YES_OPTION:
                  break;
                case Utilities.YES_ALL_OPTION:
                  pauseAfterEachEdit = false;
                  break;
                default:
                  stopRequested = true;
                }
              }
            } catch (APIException e) {
              EnumQueryResult error = e.getQueryResult();
              if (report != null) {
                report.addError(new ModificationReport.Error(page.getTitle(), error));
              }
              if (EnumQueryResult.PROTECTED_PAGE.equals(error)) {
                System.err.println("Page " + page.getTitle() + " is protected.");
              } else {
                throw e;
              }
            }
          }
        }
      }
    }
  }
  block(true);
  return count;
}
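
The heart of replaceText is the summary-building loop: a replacement group contributes its key to the edit comment only when applying it actually changed the text. A minimal, dependency-free sketch of that pattern follows; String.replace stands in for AutomaticFixing.apply, whose real behavior is richer.

import java.util.LinkedHashMap;
import java.util.Map;

// Dependency-free sketch of the summary-building loop in replaceText():
// each replacement group contributes its key to the edit summary only if
// applying it actually changed the text.
public class CommentBuildingSketch {
  public static void main(String[] args) {
    Map<String, String[]> groups = new LinkedHashMap<>();
    groups.put("spelling", new String[] { "recieve", "receive" });
    groups.put("dashes", new String[] { "--", "\u2013" });

    String contents = "You will recieve the parcel -- eventually.";
    StringBuilder details = new StringBuilder();
    for (Map.Entry<String, String[]> group : groups.entrySet()) {
      // String.replace is a stand-in for AutomaticFixing.apply()
      String tmp = contents.replace(group.getValue()[0], group.getValue()[1]);
      if (!contents.equals(tmp)) {
        contents = tmp;
        if (details.length() > 0) {
          details.append(", ");
        }
        details.append(group.getKey());
      }
    }
    System.out.println(contents); // fixed text
    System.out.println(details);  // "spelling, dashes"
  }
}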
Use of org.wikipediacleaner.api.data.Page in project wpcleaner by WPCleaner.
The class Bot, method executeFixListCheckWiki.
/**
 * Execute an action of type FixListCheckWiki.
 * 
 * @param actionConfig Parameters of the action.
 * @return Worker configured for the action.
 */
private BasicWorker executeFixListCheckWiki(Action actionConfig) {
  Page page = null;
  if (actionConfig.actionArgs.length > 0) {
    page = DataManager.createSimplePage(wiki, actionConfig.actionArgs[0], null, null, null);
  }
  List<CheckErrorAlgorithm> algorithms = new ArrayList<>();
  List<CheckErrorAlgorithm> allAlgorithms = new ArrayList<>();
  if (actionConfig.actionArgs.length > 1) {
    extractAlgorithms(algorithms, allAlgorithms, actionConfig.actionArgs, 1);
  }
  AutomaticListCWWorker worker = new AutomaticListCWWorker(
      wiki, null, page, algorithms, allAlgorithms,
      namespaces, null, true, false);
  worker.setRange(rangeBegin, rangeEnd);
  return worker;
}
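
The visible contract of the arguments is that actionArgs[0] names a page and the remaining arguments select algorithms. A hedged, dependency-free sketch of that layout; plain integer parsing is an assumption standing in for extractAlgorithms, whose actual syntax may be richer.

import java.util.ArrayList;
import java.util.List;

// Sketch of the expected argument layout: args[0] is the page, args[1..]
// select Check Wiki algorithms. Integer.parseInt is only a stand-in for
// extractAlgorithms(); the real parser may accept a richer syntax.
public class FixListArgsSketch {
  public static void main(String[] args) {
    String[] actionArgs = { "Project:Sandbox", "2", "16", "90" };
    String pageTitle = (actionArgs.length > 0) ? actionArgs[0] : null;
    List<Integer> algorithmNumbers = new ArrayList<>();
    for (int i = 1; i < actionArgs.length; i++) {
      algorithmNumbers.add(Integer.parseInt(actionArgs[i]));
    }
    System.out.println(pageTitle + " -> " + algorithmNumbers);
  }
}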
Use of org.wikipediacleaner.api.data.Page in project wpcleaner by WPCleaner.
The class AlgorithmError, method addCheckErrorPages.
/**
 * @param errors Errors list.
 * @param wikipedia Wikipedia.
 * @param errorNumber Error number.
 * @param pages List of pages in error.
 */
public static void addCheckErrorPages(
    List<AlgorithmError> errors, EnumWikipedia wikipedia,
    int errorNumber, List<Page> pages) {
  // Analyze properties to find information about the error number
  if (!CheckErrorAlgorithms.isAlgorithmActive(wikipedia, errorNumber)) {
    return;
  }
  // Check that the list of pages in error is not empty
  if ((pages == null) || (pages.isEmpty())) {
    return;
  }
  // Create error
  AlgorithmError error = new AlgorithmError(wikipedia, errorNumber);
  for (Page page : pages) {
    error.addPage(page.getTitle(), page.getPageId());
  }
  // Add / Replace error (iterate backwards so removals don't shift indices)
  for (int i = errors.size(); i > 0; i--) {
    if (errors.get(i - 1).getErrorNumber() == errorNumber) {
      errors.remove(i - 1);
    }
  }
  errors.add(error);
}
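
The final loop is a replace-by-key pattern: iterate backwards so a removal never shifts an index that has not been visited yet, then append the fresh entry. The same idea in isolation (requires Java 16+ for records; Error here is a stand-in, not the real AlgorithmError):

import java.util.ArrayList;
import java.util.List;

// Stand-alone version of the backwards remove-then-append pattern above.
public class ReplaceByNumberSketch {
  record Error(int number, String label) {}

  public static void main(String[] args) {
    List<Error> errors = new ArrayList<>(List.of(
        new Error(16, "old"), new Error(90, "keep")));
    Error replacement = new Error(16, "new");
    // Backwards iteration: removing element i-1 never shifts the
    // elements still to be checked.
    for (int i = errors.size(); i > 0; i--) {
      if (errors.get(i - 1).number() == replacement.number()) {
        errors.remove(i - 1);
      }
    }
    errors.add(replacement);
    System.out.println(errors); // error 16 replaced, error 90 kept
  }
}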
Use of org.wikipediacleaner.api.data.Page in project wpcleaner by WPCleaner.
The class DisambiguationWindow, method afterFinishedReloadWorker.
/**
 * Callback called at the end of the Reload Worker.
 */
@Override
protected void afterFinishedReloadWorker() {
  super.afterFinishedReloadWorker();
  Configuration config = Configuration.getConfiguration();
  Page page = getPage();
  backlinksProperties = config.getSubProperties(
      getWikipedia(), Configuration.PROPERTIES_BACKLINKS, page.getTitle());
  listCellRenderer.setPageProperties(backlinksProperties);
  popupListenerLinks.setPage(page);
  popupListenerLinks.setBackLinksProperties(backlinksProperties);
  List<Page> links = page.getAllLinksToPage();
  if (config.getBoolean(null, ConfigurationValueBoolean.IGNORE_DAB_USER_NS)) {
    links = new ArrayList<>(links);
    for (int i = links.size(); i > 0; i--) {
      if (links.get(i - 1).isInUserNamespace()) {
        links.remove(i - 1);
      }
    }
  }
  modelLinks.setElements(links);
  Integer countMain = page.getBacklinksCountInMainNamespace();
  Integer countTotal = page.getBacklinksCount();
  linkCount.setText(
      ((countMain != null) ? countMain.toString() : "?") +
      " / " +
      ((countTotal != null) ? countTotal.toString() : "?"));

  // Construct list of known pages
  knownPages.clear();
  if (getPage() != null) {
    knownPages = new ArrayList<>(1);
    knownPages.add(getPage());
    for (Page backLink : getPage().getAllLinksToPage()) {
      PageRedirect redirects = backLink.getRedirects();
      if ((redirects != null) && (redirects.isRedirect()) &&
          (Page.areSameTitle(getPage().getTitle(), redirects.getTitle()))) {
        knownPages.add(backLink);
      }
    }
  }

  // Select next links
  actionSelectNextLinks();
}
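
The user-namespace filter above copies the list before mutating it (the list returned by getAllLinksToPage may be shared with the Page object) and then removes entries backwards. On Java 8+, removeIf expresses the same filter more directly; a sketch assuming the defensive copy is still wanted:

// Equivalent filter using removeIf (Java 8+); the copy is kept because the
// list returned by getAllLinksToPage() may be shared with the Page object.
List<Page> links = new ArrayList<>(page.getAllLinksToPage());
links.removeIf(Page::isInUserNamespace);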
Use of org.wikipediacleaner.api.data.Page in project wpcleaner by WPCleaner.
The class DisambiguationWindow, method createSortMenu.
/**
 * @return Sort menu.
 */
private JMenu createSortMenu() {
  JMenu menu = Utilities.createJMenu(GT._T("Sort"));
  List<CompositeComparator<Page>> comparators = PageComparator.getComparators();
  for (CompositeComparator<Page> comparator : comparators) {
    JMenuItem menuItem = Utilities.createJMenuItem(comparator.getName(), true);
    menuItem.addActionListener(new SetComparatorAction(modelLinks, comparator));
    menu.add(menuItem);
  }
  return menu;
}
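
The pattern here, one menu item per comparator with an action that re-sorts the model, carries over to plain Swing and java.util.Comparator. A dependency-free sketch with a string list standing in for modelLinks (needs a GUI environment to run):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.swing.JMenu;
import javax.swing.JMenuItem;

// One menu item per comparator; selecting an item re-sorts the model.
public class SortMenuSketch {
  public static JMenu createSortMenu(List<String> model) {
    Map<String, Comparator<String>> comparators = new LinkedHashMap<>();
    comparators.put("Alphabetical", Comparator.naturalOrder());
    comparators.put("Case-insensitive", String.CASE_INSENSITIVE_ORDER);
    JMenu menu = new JMenu("Sort");
    comparators.forEach((name, comparator) -> {
      JMenuItem item = new JMenuItem(name);
      item.addActionListener(e -> model.sort(comparator));
      menu.add(item);
    });
    return menu;
  }

  public static void main(String[] args) {
    List<String> model = new ArrayList<>(List.of("b", "A", "c"));
    JMenu menu = createSortMenu(model);
    menu.getItem(1).doClick(); // case-insensitive sort
    System.out.println(model); // [A, b, c]
  }
}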