Usage of org.wikipediacleaner.api.APIException in project wpcleaner (WPCleaner): the markAsFixed method of the CheckWiki class.
/**
 * Mark a page as fixed on the Check Wiki server.
 *
 * @param page Page that has been fixed.
 * @param errorNumber Error number (numeric string identifying the Check Wiki error).
 * @return True if the page has been marked as fixed.
 */
public boolean markAsFixed(Page page, String errorNumber) {
  // Check Wiki only handles a subset of namespaces.
  if (!Namespace.isHandledByCW(page.getNamespace())) {
    return false;
  }
  try {
    int error = Integer.parseInt(errorNumber);
    notifyPageFixed(page, error);
    if (error > CheckErrorAlgorithm.MAX_ERROR_NUMBER_WITH_LIST) {
      // Errors above this threshold have no server-side list to update.
      return true;
    }
    EnumWikipedia wiki = page.getWikipedia();
    // Check Wiki project codes use '_' where wiki codes use '-'.
    String code = wiki.getSettings().getCodeCheckWiki().replace("-", "_");
    Map<String, String> properties = new HashMap<>();
    properties.put("id", Integer.toString(error));
    properties.put("project", code);
    // WMF Labs
    properties.put("title", page.getTitle());
    if (!useBotList) {
      properties.put("view", "only");
      labs.sendPost(rootPath + "cgi-bin/checkwiki.cgi", properties, null);
    } else {
      properties.put("action", "mark");
      labs.sendPost(rootPath + "cgi-bin/checkwiki_bots.cgi", properties, null);
    }
  } catch (NumberFormatException | APIException e) {
    // Invalid error number or server communication failure: report as not done.
    return false;
  }
  return true;
}
Usage of org.wikipediacleaner.api.APIException in project wpcleaner (WPCleaner): the recentChanges method of the MonitorRCWindow class.
/**
 * Callback to be notified about recent changes.
 *
 * Processes a batch of recent changes: updates disambiguation warnings on
 * monitored pages, expires stale monitoring entries, tracks "interesting"
 * changes, and creates disambiguation warnings on new pages once their
 * activity has settled.
 *
 * @param newRC List of recent changes.
 * @param currentTime Current time.
 * @see org.wikipediacleaner.api.RecentChangesListener#recentChanges(java.util.List, java.util.Date)
 */
@Override
public void recentChanges(List<RecentChange> newRC, Date currentTime) {
// Retrieve configuration (delays are configured in minutes, converted to ms)
WPCConfiguration config = getWikipedia().getConfiguration();
long delayForNew = config.getLong(WPCConfigurationLong.RC_NEW_ARTICLE_WITH_DAB_DELAY) * 60 * 1000;
long delayMonitoring = config.getLong(WPCConfigurationLong.RC_KEEP_MONITORING_DELAY) * 60 * 1000;
// Add new recent changes to the list
modelRC.addRecentChanges(newRC);
// Remove old changes: keep only changes younger than delayForNew
List<RecentChange> filteredNewRC = new ArrayList<>();
for (RecentChange rc : newRC) {
if (currentTime.getTime() < rc.getTimestamp().getTime() + delayForNew) {
filteredNewRC.add(rc);
}
}
// Check if an update has been made on a monitored page:
// refresh its disambiguation warning and reset its monitoring timestamp
for (RecentChange rc : filteredNewRC) {
if (monitoredPages.containsKey(rc.getTitle())) {
Page page = DataManager.createSimplePage(getWikipedia(), rc.getTitle(), null, null, null);
try {
updateDabWarning.updateWarning(Collections.singletonList(page), null, null, null);
} catch (APIException e) {
// Nothing to do (best effort: keep monitoring even if the update failed)
}
monitoredPages.put(rc.getTitle(), Long.valueOf(currentTime.getTime()));
}
}
// Check monitored pages for expired delay (stop monitoring quiet pages)
Iterator<Entry<String, Long>> itPages = monitoredPages.entrySet().iterator();
while (itPages.hasNext()) {
Entry<String, Long> entry = itPages.next();
if (currentTime.getTime() > entry.getValue().longValue() + delayMonitoring) {
itPages.remove();
}
}
// Update list of interesting recent changes:
// new pages become interesting; edits stay interesting only if the page
// already is; a deletion removes the page from the interesting list
for (RecentChange rc : filteredNewRC) {
if (isInterestingNamespace(rc)) {
if (RecentChange.TYPE_NEW.equals(rc.getType())) {
if (rc.isNew()) {
modelRCInteresting.addRecentChange(rc);
}
} else if (RecentChange.TYPE_EDIT.equals(rc.getType())) {
if (modelRCInteresting.containsRecentChange(rc.getTitle())) {
modelRCInteresting.addRecentChange(rc);
}
} else if (RecentChange.TYPE_LOG.equals(rc.getType())) {
if (RecentChange.LOG_TYPE_DELETE.equals(rc.getLogType()) && RecentChange.LOG_ACTION_DELETE_DELETE.equals(rc.getLogAction())) {
modelRCInteresting.removeRecentChanges(rc.getTitle());
}
}
}
}
// Check if interesting recent changes are old enough:
// for each title, gather who created and who modified the page
List<RecentChange> interestingRC = modelRCInteresting.getRecentChanges();
List<Page> pages = new ArrayList<>();
Map<String, String> creators = new HashMap<>();
Map<String, List<String>> modifiers = new HashMap<>();
while (!interestingRC.isEmpty()) {
// Retrieve synthetic information about recent changes for one title
// (extractRecentChanges is expected to consume entries from interestingRC)
List<RecentChange> listRC = extractRecentChanges(interestingRC);
String title = listRC.get(0).getTitle();
String creator = null;
List<String> pageModifiers = new ArrayList<>();
boolean oldEnough = true;
boolean redirect = false;
// Iterate from oldest to newest change for this title
for (int rcNum = listRC.size(); rcNum > 0; rcNum--) {
RecentChange rc = listRC.get(rcNum - 1);
if (currentTime.getTime() <= rc.getTimestamp().getTime() + delayForNew) {
oldEnough = false;
}
String user = rc.getUser();
redirect = rc.isRedirect();
if (rc.isNew()) {
creator = user;
} else {
// Collect distinct non-bot modifiers other than the creator
if (!rc.isBot()) {
if ((creator == null) || (!creator.equals(user))) {
if (!pageModifiers.contains(user)) {
pageModifiers.add(user);
}
}
}
}
}
if (oldEnough) {
modelRCInteresting.removeRecentChanges(title);
if (!redirect) {
Page page = DataManager.createSimplePage(getWikipedia(), title, null, null, null);
pages.add(page);
creators.put(title, creator);
modifiers.put(title, pageModifiers);
}
}
}
// Update disambiguation warnings on settled pages, then start monitoring
// every page whose warning was actually updated
if (!pages.isEmpty()) {
try {
Stats stats = new Stats();
createDabWarning.updateWarning(pages, creators, modifiers, stats);
List<Page> updatedPages = stats.getUpdatedPages();
if (updatedPages != null) {
for (Page page : updatedPages) {
monitoredPages.put(page.getTitle(), Long.valueOf(currentTime.getTime()));
}
}
} catch (APIException e) {
// Nothing to do (best effort: warnings will be retried on later changes)
}
}
}
Usage of org.wikipediacleaner.api.APIException in project wpcleaner (WPCleaner): the actionRandomArticles method of the MainWindow class.
/**
 * Ask the user for a number of random pages and open them in a page list.
 *
 * @param redirects True if redirects are requested.
 */
private void actionRandomArticles(boolean redirects) {
  final int maxPages = 50;
  int count = 0;
  // Loop until the user provides a value in [1, maxPages] or cancels.
  while ((count < 1) || (count > maxPages)) {
    String answer = askForValue(GT._T("How many pages do you want?"), "20", null);
    if (answer == null) {
      // User cancelled the dialog.
      return;
    }
    try {
      count = Integer.parseInt(answer);
    } catch (NumberFormatException e) {
      return;
    }
    if ((count < 1) || (count > maxPages)) {
      // Accepted range is [1, maxPages]: report the true lower bound.
      displayWarning(GT._T("The number of pages must be between {0} and {1}", new Object[] { Integer.valueOf(1), Integer.valueOf(maxPages) }));
    }
  }
  API api = APIFactory.getAPI();
  try {
    List<String> pageNames = new ArrayList<>(count);
    while (pageNames.size() < count) {
      List<Page> pages = api.getRandomPages(getWikipedia(), count - pageNames.size(), redirects);
      if (pages.isEmpty()) {
        // Guard against an endless loop if the API returns no pages.
        break;
      }
      for (int i = 0; i < pages.size(); i++) {
        pageNames.add(pages.get(i).getTitle());
      }
    }
    Collections.sort(pageNames);
    new PageListWorker(getWikipedia(), this, null, pageNames, PageListWorker.Mode.DIRECT, false, GT._T("Random pages")).start();
  } catch (APIException e) {
    displayError(e);
    return;
  }
}
Usage of org.wikipediacleaner.api.APIException in project wpcleaner (WPCleaner): the checkArticles method of the ActionCheckArticle class.
/**
* @param pages List of pages.
* @param wiki Wiki.
*/
/**
 * Run the article checker over a list of pages and display the resulting report.
 * Aborts silently (no report shown) if the API fails on any page.
 *
 * @param pages List of pages.
 * @param wiki Wiki.
 */
private static void checkArticles(List<Page> pages, EnumWikipedia wiki) {
  CheckArticleTools tools = new CheckArticleTools(wiki);
  for (Page article : pages) {
    try {
      tools.checkArticle(article, null);
    } catch (APIException e) {
      // Abort the whole analysis on the first API failure.
      return;
    }
  }
  String report = tools.getReport();
  InformationWindow.createInformationWindow(GT._T("Analysis"), report, false, wiki);
}
Usage of org.wikipediacleaner.api.APIException in project wpcleaner (WPCleaner): the checkForError method of the RestApiJsonResult class.
/**
 * Check for errors reported by the API.
 *
 * @param root Document root.
 * @throws APIException Exception thrown by the API when an error is reported.
 */
protected void checkForError(JsonNode root) throws APIException {
  if (root == null) {
    return;
  }
  // Check for errors.
  // Note: JsonNode.path() never returns null (it returns a MissingNode),
  // so isMissingNode() is the correct presence test.
  JsonNode error = root.path("error");
  if (!error.isMissingNode()) {
    String code = error.path("code").asText("?");
    String info = error.path("info").asText("?");
    String text = "Error reported: " + code + " - " + info;
    log.warn(text);
    throw new APIException(text, code);
  }
  // Check for warnings.
  JsonNode warnings = root.path("warnings");
  if (!warnings.isMissingNode()) {
    log.warn("Warning reported: ");
    // asText() returns "" (never null) for missing nodes,
    // so test for emptiness instead of null to avoid logging blank lines.
    String query = warnings.path("query").asText();
    if (!query.isEmpty()) {
      log.warn(query);
    }
    String info = warnings.path("info").asText();
    if (!info.isEmpty()) {
      log.warn(info);
    }
  }
}
Aggregations