Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory in project BiglyBT by BiglySoftware.
From the class WebEngine, the method getWebPageContentSupport:
private pageDetails getWebPageContentSupport(Proxy proxy, String proxy_host, String searchURL, SearchParameter[] searchParameters, Map<String, String> searchContext, String headers, boolean only_if_modified) throws SearchException {
try {
TorrentUtils.setTLSDescription("Search: " + getName());
if (requiresLogin()) {
throw new SearchLoginException("login required");
}
boolean vuze_file = searchURL.toLowerCase().startsWith("vuze:");
if (!vuze_file) {
String[] from_strs = new String[searchParameters.length];
String[] to_strs = new String[searchParameters.length];
for (int i = 0; i < searchParameters.length; i++) {
SearchParameter parameter = searchParameters[i];
from_strs[i] = "%" + parameter.getMatchPattern();
to_strs[i] = URLEncoder.encode(parameter.getValue(), "UTF-8");
}
searchURL = GeneralUtils.replaceAll(searchURL, from_strs, to_strs);
Iterator<Map.Entry<String, String>> it = searchContext.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, String> entry = it.next();
String key = entry.getKey();
if (supportsContext(key)) {
if (searchURL.indexOf('?') == -1) {
searchURL += "?";
} else {
searchURL += "&";
}
String value = entry.getValue();
searchURL += key + "=" + URLEncoder.encode(value, "UTF-8");
}
}
}
// System.out.println(searchURL);
// hack to support POST by encoding into URL
// http://xxxx/index.php?main=search&azmethod=post_basic:SearchString1=%s&SearchString=&search=Search
ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
URL initial_url;
ResourceDownloader initial_url_rd;
int post_pos = searchURL.indexOf("azmethod=");
if (post_pos > 0) {
String post_params = searchURL.substring(post_pos + 9);
searchURL = searchURL.substring(0, post_pos - 1);
debugLog("search_url: " + searchURL + ", post=" + post_params);
initial_url = new URL(searchURL);
int sep = post_params.indexOf(':');
String type = post_params.substring(0, sep);
if (!type.equals("post_basic")) {
throw (new SearchException("Only basic type supported"));
}
post_params = post_params.substring(sep + 1);
if (proxy == null) {
initial_url_rd = rdf.create(initial_url, post_params);
} else {
initial_url_rd = rdf.create(initial_url, post_params, proxy);
}
initial_url_rd.setProperty("URL_Content-Type", "application/x-www-form-urlencoded");
} else {
debugLog("search_url: " + searchURL);
initial_url = new URL(searchURL);
if (proxy == null) {
initial_url_rd = rdf.create(initial_url);
} else {
initial_url_rd = rdf.create(initial_url, proxy);
}
}
if (proxy_host != null) {
initial_url_rd.setProperty("URL_HOST", proxy_host);
}
setHeaders(initial_url_rd, headers);
if (needsAuth && local_cookies != null) {
initial_url_rd.setProperty("URL_Cookie", local_cookies);
} else if (fullCookies != null && fullCookies.length() > 0) {
initial_url_rd.setProperty("URL_Cookie", fullCookies);
}
if (only_if_modified) {
String last_modified = getLocalString(LD_LAST_MODIFIED);
String etag = getLocalString(LD_ETAG);
if (last_modified != null) {
initial_url_rd.setProperty("URL_If-Modified-Since", last_modified);
}
if (etag != null) {
initial_url_rd.setProperty("URL_If-None-Match", etag);
}
}
InputStream is = null;
try {
String content_charset = "UTF-8";
ResourceDownloader mr_rd = null;
if (initial_url.getProtocol().equalsIgnoreCase("file")) {
// handle file://c:/ - map to file:/c:/
String str = initial_url.toExternalForm();
if (initial_url.getAuthority() != null) {
str = str.replaceFirst("://", ":/");
}
int pos = str.indexOf('?');
if (pos != -1) {
str = str.substring(0, pos);
}
is = new FileInputStream(new File(new URL(str).toURI()));
} else {
if (proxy == null) {
initial_url_rd.setProperty("URL_Connect_Timeout", 10 * 1000);
initial_url_rd.setProperty("URL_Read_Timeout", 10 * 1000);
}
mr_rd = rdf.getMetaRefreshDownloader(initial_url_rd);
try {
is = mr_rd.download();
} catch (ResourceDownloaderException e) {
Long response = (Long) mr_rd.getProperty("URL_HTTP_Response");
if (response != null && response.longValue() == 304) {
return (new pageDetails(initial_url, initial_url, ""));
} else {
throw (e);
}
}
if (needsAuth) {
List cookies_list = (List) mr_rd.getProperty("URL_Set-Cookie");
List cookies_set = new ArrayList();
if (cookies_list != null) {
for (int i = 0; i < cookies_list.size(); i++) {
String[] cookies = ((String) cookies_list.get(i)).split(";");
for (int j = 0; j < cookies.length; j++) {
String cookie = cookies[j].trim();
if (cookie.indexOf('=') != -1) {
cookies_set.add(cookie);
}
}
}
}
// well, not much we can do with the cookies anyway as in general the ones
// set are the ones missing/expired, not the existing ones. That is, we can't
// deduce anything from the fact that a required cookie is not 'set' here
// the most we could do is catch a server that explicitly deleted invalid
// cookies by expiring it, but I doubt this is a common practice.
// Also note the complexity of cookie syntax
// Set-Cookie: old standard using expires=, new using MaxAge
// Set-Cookie2:
// Maybe use http://jcookie.sourceforge.net/ if needed
}
if (only_if_modified) {
String last_modified = extractProperty(mr_rd.getProperty("URL_Last-Modified"));
String etag = extractProperty(mr_rd.getProperty("URL_ETag"));
if (last_modified != null) {
setLocalString(LD_LAST_MODIFIED, last_modified);
}
if (etag != null) {
setLocalString(LD_ETAG, etag);
}
}
List cts = (List) mr_rd.getProperty("URL_Content-Type");
if (cts != null && cts.size() > 0) {
String content_type = (String) cts.get(0);
int pos = content_type.toLowerCase().indexOf("charset");
if (pos != -1) {
content_type = content_type.substring(pos + 1);
pos = content_type.indexOf('=');
if (pos != -1) {
content_type = content_type.substring(pos + 1).trim();
pos = content_type.indexOf(';');
if (pos != -1) {
content_type = content_type.substring(0, pos).trim();
}
if (content_type.startsWith("\"")) {
content_type = content_type.substring(1).trim();
}
if (content_type.endsWith("\"")) {
content_type = content_type.substring(0, content_type.length() - 1).trim();
}
try {
if (Charset.isSupported(content_type)) {
debugLog("charset: " + content_type);
content_charset = content_type;
}
} catch (Throwable e) {
try {
// handle lowercase 'utf-8' for example
content_type = content_type.toUpperCase();
if (Charset.isSupported(content_type)) {
debugLog("charset: " + content_type);
content_charset = content_type;
}
} catch (Throwable f) {
log("Content type '" + content_type + "' not supported", f);
}
}
}
}
}
}
ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
byte[] buffer = new byte[8192];
while (true) {
int len = is.read(buffer);
if (len <= 0) {
break;
}
baos.write(buffer, 0, len);
}
byte[] data = baos.toByteArray();
if (vuze_file) {
try {
VuzeFileHandler vfh = VuzeFileHandler.getSingleton();
VuzeFile vf = vfh.loadVuzeFile(data);
vfh.handleFiles(new VuzeFile[] { vf }, VuzeFileComponent.COMP_TYPE_NONE);
} catch (Throwable e) {
Debug.out(e);
}
return (new pageDetails(initial_url, initial_url, null));
}
String page = null;
String content = new String(data, 0, Math.min(data.length, 2048), content_charset);
String lc_content = content.toLowerCase();
{
// first look for xml charset
// e.g. <?xml version="1.0" encoding="windows-1251" ?>
int pos1 = lc_content.indexOf("<?xml");
if (pos1 != -1) {
int pos2 = lc_content.indexOf("?>");
if (pos2 != -1) {
int pos3 = lc_content.indexOf("encoding", pos1);
if (pos3 != -1) {
pos3 = lc_content.indexOf("\"", pos3);
}
if (pos3 > pos1 && pos3 < pos2) {
pos3++;
int pos4 = lc_content.indexOf("\"", pos3);
if (pos4 > pos3 && pos4 < pos2) {
String encoding = content.substring(pos3, pos4).trim();
try {
if (Charset.isSupported(encoding)) {
debugLog("charset from xml tag: " + encoding);
content_charset = encoding;
// some feeds have crap at the start which makes pos2 mismatch for the above '?' - adjust if necessary
int data_start = pos2;
int max_skip = 64;
while (data[data_start] != '?' && max_skip-- > 0) {
data_start++;
}
page = content.substring(0, pos3) + "utf-8" + content.substring(pos4, pos2) + new String(data, data_start, data.length - data_start, content_charset);
}
} catch (Throwable e) {
log("Content type '" + encoding + "' not supported", e);
}
}
}
}
}
}
if (page == null) {
// next look for http-equiv charset
// e.g. <meta http-equiv="Content-Type" content="text/html; charset=windows-1251" />
int pos = 0;
while (true) {
int pos1 = lc_content.indexOf("http-equiv", pos);
if (pos1 != -1) {
int pos2 = lc_content.indexOf(">", pos1);
if (pos2 != -1) {
int pos3 = lc_content.indexOf("charset", pos1);
if (pos3 != -1 && pos3 < pos2) {
pos3 = lc_content.indexOf("=", pos3);
if (pos3 != -1) {
pos3++;
int pos4 = lc_content.indexOf("\"", pos3);
if (pos4 != -1) {
int pos5 = lc_content.indexOf(";", pos3);
if (pos5 != -1 && pos5 < pos4) {
pos4 = pos5;
}
String encoding = content.substring(pos3, pos4).trim();
try {
if (Charset.isSupported(encoding)) {
debugLog("charset from http-equiv : " + encoding);
content_charset = encoding;
// some feeds have crap at the start which makes pos2 mismatch for the above '?' - adjust if necessary
int data_start = pos2;
int max_skip = 64;
while (data[data_start] != '?' && max_skip-- > 0) {
data_start++;
}
page = content.substring(0, pos3) + "utf-8" + content.substring(pos4, pos2) + new String(data, data_start, data.length - data_start, content_charset);
}
} catch (Throwable e) {
log("Content type '" + encoding + "' not supported", e);
}
break;
}
}
}
pos = pos2;
} else {
break;
}
} else {
break;
}
}
}
if (page == null) {
page = new String(data, content_charset);
}
debugLog("page:");
debugLog(page);
try {
Matcher m = baseTagPattern.matcher(page);
if (m.find()) {
basePage = m.group(1);
debugLog("base_page: " + basePage);
}
} catch (Exception e) {
// No BASE tag in the page
}
URL final_url = initial_url;
if (mr_rd != null) {
URL x = (URL) mr_rd.getProperty("URL_URL");
if (x != null) {
final_url = x;
}
}
return (new pageDetails(initial_url, final_url, page));
} finally {
if (is != null) {
is.close();
}
}
} catch (SearchException e) {
throw (e);
} catch (Throwable e) {
// e.printStackTrace();
debugLog("Failed to load page: " + Debug.getNestedExceptionMessageAndStack(e));
throw (new SearchException("Failed to load page", e));
} finally {
TorrentUtils.setTLSDescription(null);
}
}
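getWebPageContentSupport layers URL templating, POST emulation, cookie and cache headers, and charset sniffing on top of a single factory-driven download. Stripped of those concerns, the ResourceDownloaderFactory flow it relies on boils down to the sketch below; fetchPage is an illustrative helper, and the import locations are assumed from the package named in the heading.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory;

public class PageFetchSketch {

    // Illustrative helper: fetch a page body as bytes, following meta-refresh redirects.
    public static byte[] fetchPage(URL url) throws ResourceDownloaderException, IOException {
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();

        ResourceDownloader rd = rdf.create(url);

        // Same timeouts the search code applies when no proxy is in use.
        rd.setProperty("URL_Connect_Timeout", 10 * 1000);
        rd.setProperty("URL_Read_Timeout", 10 * 1000);

        // Wrap with the meta-refresh-aware downloader, as getWebPageContentSupport does.
        ResourceDownloader mr_rd = rdf.getMetaRefreshDownloader(rd);

        try (InputStream is = mr_rd.download()) {
            // Read to end of stream, mirroring the loop in the method above.
            ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
            byte[] buffer = new byte[8192];
            int len;
            while ((len = is.read(buffer)) > 0) {
                baos.write(buffer, 0, len);
            }
            return baos.toByteArray();
        }
    }
}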
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory in project BiglyBT by BiglySoftware.
From the class PlatformMessenger, the method downloadURLSupport:
private static Object[] downloadURLSupport(Proxy proxy, String proxy_host, URL url, String postData) throws Throwable {
ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
ResourceDownloader rd;
if (proxy == null) {
rd = rdf.create(url, postData);
} else {
rd = rdf.create(url, postData, proxy);
}
if (proxy_host != null) {
rd.setProperty("URL_HOST", proxy_host);
}
rd.setProperty("URL_Connection", "Keep-Alive");
rd = rdf.getRetryDownloader(rd, 3);
// We could report percentage to listeners, but there's no need to atm
// rd.addListener(new ResourceDownloaderListener() {
//
// public void reportPercentComplete(ResourceDownloader downloader,
// int percentage) {
// }
//
// public void reportActivity(ResourceDownloader downloader, String activity) {
// }
//
// public void failed(ResourceDownloader downloader,
// ResourceDownloaderException e) {
// }
//
// public boolean completed(ResourceDownloader downloader, InputStream data) {
// return true;
// }
// });
InputStream is = rd.download();
byte[] data;
try {
int length = is.available();
data = new byte[length];
is.read(data);
} finally {
is.close();
}
String s = new String(data, "UTF8");
Map mapAllReplies = JSONUtils.decodeJSON(s);
List listReplies = MapUtils.getMapList(mapAllReplies, "replies", null);
return (new Object[] { s, listReplies });
}
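downloadURLSupport sizes its read buffer from is.available(), which assumes the whole reply is already buffered; the WebEngine example above instead loops to end of stream. A hedged sketch of the same keep-alive POST with the three-attempt retry wrapper, reading the reply fully, could look like this (postAndRead is an illustrative helper):

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.URL;

import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory;

public class MessengerPostSketch {

    // Illustrative helper: POST form data and return the reply as a UTF-8 string.
    public static String postAndRead(URL url, String postData) throws Throwable {
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();

        ResourceDownloader rd = rdf.create(url, postData);
        rd.setProperty("URL_Connection", "Keep-Alive");

        // Same three-attempt retry wrapper used by downloadURLSupport.
        rd = rdf.getRetryDownloader(rd, 3);

        try (InputStream is = rd.download()) {
            // Read to end of stream rather than trusting is.available().
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            byte[] buffer = new byte[4096];
            int len;
            while ((len = is.read(buffer)) > 0) {
                baos.write(buffer, 0, len);
            }
            return new String(baos.toByteArray(), "UTF-8");
        }
    }
}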
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory in project BiglyBT by BiglySoftware.
From the class SubscriptionSchedulerImpl, the method download:
@Override
public void download(final Subscription subs, final SubscriptionResult original_result) {
String download_link = original_result.getDownloadLink();
if (download_link == null) {
log(subs.getName() + ": can't download " + original_result.getID() + " as no direct download link available");
return;
}
final String key = subs.getID() + ":" + original_result.getID();
final String dl = download_link;
synchronized (active_result_downloaders) {
if (active_result_downloaders.contains(key)) {
return;
}
log(subs.getName() + ": queued result for download - " + original_result.getID() + "/" + download_link);
active_result_downloaders.add(key);
result_downloader.run(new AERunnable() {
@Override
public void runSupport() {
// need to fix up to the latest history due to the lazy nature of things :(
SubscriptionResult result = subs.getHistory().getResult(original_result.getID());
boolean success = false;
try {
if (result == null) {
log(subs.getName() + ": result has been deleted - " + original_result.getID());
success = true;
} else if (result.getRead()) {
log(subs.getName() + ": result already marked as read, skipping - " + result.getID());
success = true;
} else {
boolean retry = true;
boolean use_ref = subs.getHistory().getDownloadWithReferer();
boolean tried_ref_switch = false;
while (retry) {
retry = false;
try {
TorrentUtils.setTLSDescription("Subscription: " + subs.getName());
URL original_url = new URL(dl);
PluginProxy plugin_proxy = null;
if (dl.startsWith("tor:")) {
String target_resource = dl.substring(4);
original_url = new URL(target_resource);
Map<String, Object> options = new HashMap<>();
options.put(AEProxyFactory.PO_PEER_NETWORKS, new String[] { AENetworkClassifier.AT_TOR });
plugin_proxy = AEProxyFactory.getPluginProxy("Subscription result download of '" + target_resource + "'", original_url, options, true);
if (plugin_proxy == null) {
throw (new Exception("No Tor plugin proxy available for '" + dl + "'"));
}
}
URL current_url = plugin_proxy == null ? original_url : plugin_proxy.getURL();
Torrent torrent = null;
try {
while (true) {
try {
ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
ResourceDownloader url_rd = rdf.create(current_url, plugin_proxy == null ? null : plugin_proxy.getProxy());
if (plugin_proxy != null) {
url_rd.setProperty("URL_HOST", plugin_proxy.getURLHostRewrite() + (current_url.getPort() == -1 ? "" : (":" + current_url.getPort())));
}
String referer = use_ref ? subs.getReferer() : null;
UrlUtils.setBrowserHeaders(url_rd, referer);
Engine engine = subs.getEngine();
if (engine instanceof WebEngine) {
WebEngine we = (WebEngine) engine;
if (we.isNeedsAuth()) {
String cookies = we.getCookies();
if (cookies != null && cookies.length() > 0) {
url_rd.setProperty("URL_Cookie", cookies);
}
}
}
ResourceDownloader mr_rd = rdf.getMetaRefreshDownloader(url_rd);
InputStream is = mr_rd.download();
torrent = new TorrentImpl(TOTorrentFactory.deserialiseFromBEncodedInputStream(is));
break;
} catch (Throwable e) {
if (plugin_proxy == null) {
plugin_proxy = AEProxyFactory.getPluginProxy("Subscription result download", original_url);
if (plugin_proxy != null) {
current_url = plugin_proxy.getURL();
continue;
}
}
throw (e);
}
}
} finally {
if (plugin_proxy != null) {
plugin_proxy.setOK(torrent != null);
}
}
byte[] hash = torrent.getHash();
// PlatformTorrentUtils.setContentTitle(torrent, torr );
DownloadManager dm = PluginInitializer.getDefaultInterface().getDownloadManager();
Download download;
// if we're assigning a tag/networks then we need to add it stopped in case the tag has any pre-start actions (e.g. set initial save location)
// this is because the assignments are done in SubscriptionManagerImpl on the download(willbe)added event
boolean stop_override = subs.getTagID() >= 0 || subs.getHistory().getDownloadNetworks() != null;
boolean auto_start = manager.shouldAutoStart(torrent);
manager.addPrepareTrigger(hash, new Subscription[] { subs }, new SubscriptionResult[] { result });
try {
if (auto_start && !stop_override) {
download = dm.addDownload(torrent);
} else {
download = dm.addDownloadStopped(torrent, null, null);
}
} finally {
manager.removePrepareTrigger(hash);
}
log(subs.getName() + ": added download " + download.getName() + ": auto-start=" + auto_start);
// maybe remove this as should be actioned in the trigger?
manager.prepareDownload(download, new Subscription[] { subs }, new SubscriptionResult[] { result });
subs.addAssociation(hash);
if (auto_start && stop_override) {
download.restart();
}
result.setRead(true);
success = true;
if (tried_ref_switch) {
subs.getHistory().setDownloadWithReferer(use_ref);
}
} catch (Throwable e) {
log(subs.getName() + ": Failed to download result " + dl, e);
if (e instanceof TOTorrentException && !tried_ref_switch) {
use_ref = !use_ref;
tried_ref_switch = true;
retry = true;
log(subs.getName() + ": Retrying " + (use_ref ? "with referer" : "without referer"));
}
} finally {
TorrentUtils.setTLSDescription(null);
}
}
}
} finally {
try {
if (!success) {
if (dl.startsWith("azplug:") || dl.startsWith("chat:")) {
// whatever the outcome these have been handled async
result.setRead(true);
} else {
int rad = manager.getAutoDownloadMarkReadAfterDays();
if (rad > 0) {
long rad_millis = rad * 24 * 60 * 60 * 1000L;
long time_found = result.getTimeFound();
if (time_found > 0 && time_found + rad_millis < SystemTime.getCurrentTime()) {
log(subs.getName() + ": result expired, marking as read - " + result.getID());
result.setRead(true);
}
}
}
}
} catch (Throwable e) {
Debug.out(e);
}
synchronized (active_result_downloaders) {
active_result_downloaders.remove(key);
}
calculateSchedule();
}
}
});
}
}
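The inner loop of download() first connects directly and, on failure, retries through a plugin proxy while restoring the logical host via the URL_HOST property. Reduced to the factory calls it makes, the pattern is roughly the sketch below; downloadResult is illustrative, the java.net.Proxy and host_rewrite parameters stand in for the PluginProxy plumbing above, and the com.biglybt.core.util.UrlUtils import path is assumed.

import java.io.InputStream;
import java.net.Proxy;
import java.net.URL;

import com.biglybt.core.util.UrlUtils;
import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory;

public class ResultDownloadSketch {

    // Illustrative helper: fetch a subscription result, optionally through a proxy whose
    // URL has been rewritten (host_rewrite carries the original host, as URL_HOST does above).
    public static InputStream downloadResult(URL url, Proxy proxy, String host_rewrite,
                                             String referer, String cookies) throws Throwable {
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();

        // create( URL, Proxy ) tolerates a null proxy, as the loop above relies on.
        ResourceDownloader url_rd = rdf.create(url, proxy);

        if (host_rewrite != null) {
            url_rd.setProperty("URL_HOST", host_rewrite);
        }

        // Browser-like headers plus an optional referer, as in the loop above.
        UrlUtils.setBrowserHeaders(url_rd, referer);

        if (cookies != null && cookies.length() > 0) {
            url_rd.setProperty("URL_Cookie", cookies);
        }

        // Follow meta-refresh redirects before handing back the stream.
        return rdf.getMetaRefreshDownloader(url_rd).download();
    }
}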
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory in project BiglyBT by BiglySoftware.
From the class MagnetPlugin, the method doSecondaryLookup:
protected void doSecondaryLookup(final MagnetPluginProgressListener listener, final Object[] result, byte[] hash, Set<String> networks_enabled, String args) {
if (listener != null) {
listener.reportActivity(getMessageText("report.secondarylookup", null));
}
PluginProxy plugin_proxy = null;
try {
URL original_sl_url = new URL(SECONDARY_LOOKUP + "magnetLookup?hash=" + Base32.encode(hash) + (args.length() == 0 ? "" : ("&args=" + UrlUtils.encode(args))));
URL sl_url = original_sl_url;
Proxy proxy = null;
if (!networks_enabled.contains(AENetworkClassifier.AT_PUBLIC)) {
plugin_proxy = AEProxyFactory.getPluginProxy("secondary magnet lookup", sl_url);
if (plugin_proxy == null) {
throw (new NoRouteToHostException("plugin proxy unavailable"));
} else {
proxy = plugin_proxy.getProxy();
sl_url = plugin_proxy.getURL();
}
}
ResourceDownloaderFactory rdf = plugin_interface.getUtilities().getResourceDownloaderFactory();
ResourceDownloader rd;
if (proxy == null) {
rd = rdf.create(sl_url);
} else {
rd = rdf.create(sl_url, proxy);
rd.setProperty("URL_HOST", original_sl_url.getHost());
}
final PluginProxy f_pp = plugin_proxy;
rd.addListener(new ResourceDownloaderAdapter() {
@Override
public boolean completed(ResourceDownloader downloader, InputStream data) {
try {
if (listener != null) {
listener.reportActivity(getMessageText("report.secondarylookup.ok", null));
}
synchronized (result) {
result[0] = data;
}
return (true);
} finally {
complete();
}
}
@Override
public void failed(ResourceDownloader downloader, ResourceDownloaderException e) {
try {
synchronized (result) {
result[0] = e;
}
if (listener != null) {
listener.reportActivity(getMessageText("report.secondarylookup.fail"));
}
} finally {
complete();
}
}
private void complete() {
if (f_pp != null) {
// outcome doesn't really indicate whether the result was wholesome
f_pp.setOK(true);
}
}
});
rd.asyncDownload();
} catch (Throwable e) {
if (plugin_proxy != null) {
// tidy up, no indication of proxy badness here so say its ok
plugin_proxy.setOK(true);
}
if (listener != null) {
listener.reportActivity(getMessageText("report.secondarylookup.fail", Debug.getNestedExceptionMessage(e)));
}
}
}
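doSecondaryLookup is the one example here that downloads asynchronously, delivering either the stream or the failure to a ResourceDownloaderAdapter. A minimal sketch of that shape, minus the proxy handling, might look like this (lookupAsync and the Consumer callbacks are illustrative):

import java.io.InputStream;
import java.net.URL;
import java.util.function.Consumer;

import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderAdapter;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory;

public class AsyncLookupSketch {

    // Illustrative helper: start a non-blocking download and report the outcome via callbacks.
    public static void lookupAsync(URL url, Consumer<InputStream> onData,
                                   Consumer<Throwable> onError) throws Throwable {
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();

        ResourceDownloader rd = rdf.create(url);

        rd.addListener(new ResourceDownloaderAdapter() {
            @Override
            public boolean completed(ResourceDownloader downloader, InputStream data) {
                onData.accept(data);
                return true;   // return true, as the listeners above do
            }

            @Override
            public void failed(ResourceDownloader downloader, ResourceDownloaderException e) {
                onError.accept(e);
            }
        });

        // Returns immediately; the listener above receives the result.
        rd.asyncDownload();
    }
}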
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory in project BiglyBT by BiglySoftware.
From the class PlatformManagerUpdateChecker, the method checkForUpdate:
@Override
public void checkForUpdate(final UpdateChecker checker) {
try {
SFPluginDetails sf_details = SFPluginDetailsLoaderFactory.getSingleton().getPluginDetails(plugin_interface.getPluginID());
String current_version = plugin_interface.getPluginVersion();
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, "PlatformManager:Win32 update check starts: current = " + current_version));
boolean current_az_is_cvs = Constants.isCVSVersion();
String sf_plugin_version = sf_details.getVersion();
String sf_comp_version = sf_plugin_version;
if (current_az_is_cvs) {
String sf_cvs_version = sf_details.getCVSVersion();
if (sf_cvs_version.length() > 0) {
// sf cvs version ALWAYS entry in _CVS
sf_plugin_version = sf_cvs_version;
sf_comp_version = sf_plugin_version.substring(0, sf_plugin_version.length() - 4);
}
}
String target_version = null;
if (sf_comp_version.length() == 0 || !Character.isDigit(sf_comp_version.charAt(0))) {
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, "PlatformManager:Win32 no valid version to check against (" + sf_comp_version + ")"));
} else if (Constants.compareVersions(current_version, sf_comp_version) < 0) {
target_version = sf_comp_version;
}
checker.reportProgress("Win32: current = " + current_version + ", latest = " + sf_comp_version);
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, "PlatformManager:Win32 update required = " + (target_version != null)));
if (target_version != null) {
String target_download = sf_details.getDownloadURL();
if (current_az_is_cvs) {
String sf_cvs_version = sf_details.getCVSVersion();
if (sf_cvs_version.length() > 0) {
target_download = sf_details.getCVSDownloadURL();
}
}
ResourceDownloaderFactory rdf = ResourceDownloaderFactoryImpl.getSingleton();
ResourceDownloader direct_rdl = rdf.create(new URL(target_download));
String torrent_download = Constants.URL_PLUGINS_TORRENT_BASE;
int slash_pos = target_download.lastIndexOf("/");
if (slash_pos == -1) {
torrent_download += target_download;
} else {
torrent_download += target_download.substring(slash_pos + 1);
}
torrent_download += ".torrent";
if (I2PHelpers.isI2PInstalled()) {
torrent_download += "?i2p=1";
}
ResourceDownloader torrent_rdl = rdf.create(new URL(torrent_download));
torrent_rdl = rdf.getSuffixBasedDownloader(torrent_rdl);
// create an alternate downloader with torrent attempt first
ResourceDownloader alternate_rdl = rdf.getAlternateDownloader(new ResourceDownloader[] { torrent_rdl, direct_rdl });
// get size here so it is cached
rdf.getTimeoutDownloader(rdf.getRetryDownloader(alternate_rdl, RD_SIZE_RETRIES), RD_SIZE_TIMEOUT).getSize();
List update_desc = new ArrayList();
List desc_lines = HTMLUtils.convertHTMLToText("", sf_details.getDescription());
update_desc.addAll(desc_lines);
List comment_lines = HTMLUtils.convertHTMLToText(" ", sf_details.getComment());
update_desc.addAll(comment_lines);
String[] update_d = new String[update_desc.size()];
update_desc.toArray(update_d);
final Update update = checker.addUpdate(UPDATE_NAME, update_d, current_version, target_version, alternate_rdl, Update.RESTART_REQUIRED_YES);
update.setDescriptionURL(sf_details.getInfoURL());
alternate_rdl.addListener(new ResourceDownloaderAdapter() {
@Override
public boolean completed(final ResourceDownloader downloader, InputStream data) {
installUpdate(checker, update, downloader, data);
return (true);
}
@Override
public void failed(ResourceDownloader downloader, ResourceDownloaderException e) {
// Debug.out( downloader.getName() + " failed", e );
update.complete(false);
}
});
}
} catch (Throwable e) {
Debug.printStackTrace(e);
checker.reportProgress("Failed to load plugin details for the platform manager: " + Debug.getNestedExceptionMessage(e));
checker.failed();
} finally {
checker.completed();
}
}
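checkForUpdate composes several wrapper downloaders from the factory: a suffix-based downloader for the torrent URL, an alternate downloader that tries the torrent before the direct fetch, and a timeout-plus-retry wrapper used only to prime the size. A condensed sketch follows; buildUpdateDownloader and the literal retry/timeout values are illustrative (the original uses its RD_SIZE_RETRIES and RD_SIZE_TIMEOUT constants), and StaticUtilities.getResourceDownloaderFactory() stands in for the internal ResourceDownloaderFactoryImpl singleton used above.

import java.net.URL;

import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory;

public class UpdateDownloaderSketch {

    // Illustrative helper: prefer the torrent transport, fall back to the direct URL,
    // and prime the size with a bounded retry/timeout wrapper.
    public static ResourceDownloader buildUpdateDownloader(URL direct_url, URL torrent_url) throws Throwable {
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();

        ResourceDownloader direct_rdl  = rdf.create(direct_url);
        ResourceDownloader torrent_rdl = rdf.getSuffixBasedDownloader(rdf.create(torrent_url));

        // Torrent attempt first, direct download as the fallback.
        ResourceDownloader alternate_rdl =
            rdf.getAlternateDownloader(new ResourceDownloader[] { torrent_rdl, direct_rdl });

        // Fetch the size up front so it is cached, as the update checker does.
        rdf.getTimeoutDownloader(rdf.getRetryDownloader(alternate_rdl, 3), 10 * 1000).getSize();

        return alternate_rdl;
    }
}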