Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloader in project BiglyBT by BiglySoftware.
The class MetaSearchImpl, method updateEngine.
protected boolean updateEngine(EngineImpl engine) {
    String update_url = engine.getUpdateURL();
    int pos = update_url.indexOf('?');
    if (pos == -1) {
        update_url += "?";
    } else {
        update_url += "&";
    }
    update_url += "az_template_uid=" + engine.getUID() + "&az_template_version=" + engine.getVersion() + "&az_version=" + Constants.AZUREUS_VERSION + "&az_locale=" + MessageText.getCurrentLocale().toString() + "&az_rand=" + RandomUtils.nextAbsoluteLong();
    log("Engine " + engine.getName() + ": auto-update check via " + update_url);
    try {
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
        ResourceDownloader url_rd = rdf.create(new URL(update_url));
        ResourceDownloader rd = rdf.getMetaRefreshDownloader(url_rd);
        InputStream is = rd.download();
        try {
            Map<String, Object> map = BDecoder.decode(new BufferedInputStream(is));
            log(" update check reply: " + map);
            // the reply is either a "response" map, meaning "no update" (possibly carrying a
            // changed update-check interval), or a Vuze file containing the updated template
            Map<String, Object> response = (Map<String, Object>) map.get("response");
            if (response != null) {
                Long update_secs = (Long) response.get("update_url_check_secs");
                if (update_secs == null) {
                    engine.setLocalUpdateCheckSecs(0);
                } else {
                    int check_secs = update_secs.intValue();
                    if (check_secs < MIN_UPDATE_CHECK_SECS) {
                        log(" update check secs too small, min is " + MIN_UPDATE_CHECK_SECS);
                        check_secs = MIN_UPDATE_CHECK_SECS;
                    }
                    engine.setLocalUpdateCheckSecs(check_secs);
                }
                return (true);
            } else {
                VuzeFile vf = VuzeFileHandler.getSingleton().loadVuzeFile(map);
                if (vf == null) {
                    log(" failed to decode vuze file");
                    return (false);
                }
                Engine[] updated_engines = manager.loadFromVuzeFile(vf);
                if (updated_engines.length > 0) {
                    String existing_uid = engine.getUID();
                    boolean found = false;
                    String engine_str = "";
                    for (int i = 0; i < updated_engines.length; i++) {
                        Engine updated_engine = updated_engines[i];
                        engine_str += (i == 0 ? "" : ",") + updated_engine.getName() + ": uid=" + updated_engine.getUID() + ",version=" + updated_engine.getVersion();
                        if (updated_engine.getUID().equals(existing_uid)) {
                            found = true;
                        }
                    }
                    if (!found) {
                        log(" existing engine not found in updated set, deleting");
                        engine.delete();
                    }
                    log(" update complete: new engines=" + engine_str);
                } else {
                    log(" no engines found in vuze file");
                }
                return (true);
            }
        } finally {
            is.close();
        }
    } catch (Throwable e) {
        log(" update check failed", e);
        return (false);
    }
}
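Distilled from the method above, a minimal sketch of the core pattern it relies on: create a plain URL downloader, wrap it in a meta-refresh-aware downloader, then consume the stream. It uses only calls visible in the code; the method name and error handling are illustrative.

private Map<String, Object> fetchBEncodedMap(String url_str) throws Exception {
    // illustrative helper, not BiglyBT code
    ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
    // plain downloader for the URL
    ResourceDownloader url_rd = rdf.create(new URL(url_str));
    // wrapper that transparently follows HTML meta-refresh redirects
    ResourceDownloader rd = rdf.getMetaRefreshDownloader(url_rd);
    InputStream is = rd.download();
    try {
        // the reply is bencoded, as in updateEngine above
        return BDecoder.decode(new BufferedInputStream(is));
    } finally {
        is.close();
    }
}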
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloader in project BiglyBT by BiglySoftware.
The class WebEngine, method getWebPageContentSupport.
private pageDetails getWebPageContentSupport(Proxy proxy, String proxy_host, String searchURL, SearchParameter[] searchParameters, Map<String, String> searchContext, String headers, boolean only_if_modified) throws SearchException {
    try {
        TorrentUtils.setTLSDescription("Search: " + getName());
        if (requiresLogin()) {
            throw new SearchLoginException("login required");
        }
        boolean vuze_file = searchURL.toLowerCase().startsWith("vuze:");
        if (!vuze_file) {
            String[] from_strs = new String[searchParameters.length];
            String[] to_strs = new String[searchParameters.length];
            for (int i = 0; i < searchParameters.length; i++) {
                SearchParameter parameter = searchParameters[i];
                from_strs[i] = "%" + parameter.getMatchPattern();
                to_strs[i] = URLEncoder.encode(parameter.getValue(), "UTF-8");
            }
            searchURL = GeneralUtils.replaceAll(searchURL, from_strs, to_strs);
            Iterator<Map.Entry<String, String>> it = searchContext.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry<String, String> entry = it.next();
                String key = entry.getKey();
                if (supportsContext(key)) {
                    if (searchURL.indexOf('?') == -1) {
                        searchURL += "?";
                    } else {
                        searchURL += "&";
                    }
                    String value = entry.getValue();
                    searchURL += key + "=" + URLEncoder.encode(value, "UTF-8");
                }
            }
        }
        // System.out.println(searchURL);
        // hack to support POST by encoding the parameters into the URL, e.g.
        // http://xxxx/index.php?main=search&azmethod=post_basic:SearchString1=%s&SearchString=&search=Search
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
        URL initial_url;
        ResourceDownloader initial_url_rd;
        int post_pos = searchURL.indexOf("azmethod=");
        if (post_pos > 0) {
            String post_params = searchURL.substring(post_pos + 9);
            searchURL = searchURL.substring(0, post_pos - 1);
            debugLog("search_url: " + searchURL + ", post=" + post_params);
            initial_url = new URL(searchURL);
            int sep = post_params.indexOf(':');
            String type = post_params.substring(0, sep);
            if (!type.equals("post_basic")) {
                throw (new SearchException("Only basic type supported"));
            }
            post_params = post_params.substring(sep + 1);
            if (proxy == null) {
                initial_url_rd = rdf.create(initial_url, post_params);
            } else {
                initial_url_rd = rdf.create(initial_url, post_params, proxy);
            }
            initial_url_rd.setProperty("URL_Content-Type", "application/x-www-form-urlencoded");
        } else {
            debugLog("search_url: " + searchURL);
            initial_url = new URL(searchURL);
            if (proxy == null) {
                initial_url_rd = rdf.create(initial_url);
            } else {
                initial_url_rd = rdf.create(initial_url, proxy);
            }
        }
        if (proxy_host != null) {
            initial_url_rd.setProperty("URL_HOST", proxy_host);
        }
        setHeaders(initial_url_rd, headers);
        if (needsAuth && local_cookies != null) {
            initial_url_rd.setProperty("URL_Cookie", local_cookies);
        } else if (fullCookies != null && fullCookies.length() > 0) {
            initial_url_rd.setProperty("URL_Cookie", fullCookies);
        }
        if (only_if_modified) {
            String last_modified = getLocalString(LD_LAST_MODIFIED);
            String etag = getLocalString(LD_ETAG);
            if (last_modified != null) {
                initial_url_rd.setProperty("URL_If-Modified-Since", last_modified);
            }
            if (etag != null) {
                initial_url_rd.setProperty("URL_If-None-Match", etag);
            }
        }
        InputStream is = null;
        try {
            String content_charset = "UTF-8";
            ResourceDownloader mr_rd = null;
            if (initial_url.getProtocol().equalsIgnoreCase("file")) {
                // handle file://c:/ - map to file:/c:/
                String str = initial_url.toExternalForm();
                if (initial_url.getAuthority() != null) {
                    str = str.replaceFirst("://", ":/");
                }
                int pos = str.indexOf('?');
                if (pos != -1) {
                    str = str.substring(0, pos);
                }
                is = new FileInputStream(new File(new URL(str).toURI()));
            } else {
                if (proxy == null) {
                    initial_url_rd.setProperty("URL_Connect_Timeout", 10 * 1000);
                    initial_url_rd.setProperty("URL_Read_Timeout", 10 * 1000);
                }
                mr_rd = rdf.getMetaRefreshDownloader(initial_url_rd);
                try {
                    is = mr_rd.download();
                } catch (ResourceDownloaderException e) {
                    Long response = (Long) mr_rd.getProperty("URL_HTTP_Response");
                    if (response != null && response.longValue() == 304) {
                        // not modified
                        return (new pageDetails(initial_url, initial_url, ""));
                    } else {
                        throw (e);
                    }
                }
                if (needsAuth) {
                    List cookies_list = (List) mr_rd.getProperty("URL_Set-Cookie");
                    List cookies_set = new ArrayList();
                    if (cookies_list != null) {
                        for (int i = 0; i < cookies_list.size(); i++) {
                            String[] cookies = ((String) cookies_list.get(i)).split(";");
                            for (int j = 0; j < cookies.length; j++) {
                                String cookie = cookies[j].trim();
                                if (cookie.indexOf('=') != -1) {
                                    cookies_set.add(cookie);
                                }
                            }
                        }
                    }
                    // well, not much we can do with the cookies anyway, as in general the ones
                    // set are the ones missing/expired, not the existing ones. That is, we can't
                    // deduce anything from the fact that a required cookie is not 'set' here.
                    // The most we could do is catch a server that explicitly deleted invalid
                    // cookies by expiring them, but I doubt this is a common practice.
                    // Also note the complexity of cookie syntax:
                    //   Set-Cookie: old standard using expires=, new using Max-Age
                    //   Set-Cookie2:
                    // Maybe use http://jcookie.sourceforge.net/ if needed
                }
                if (only_if_modified) {
                    String last_modified = extractProperty(mr_rd.getProperty("URL_Last-Modified"));
                    String etag = extractProperty(mr_rd.getProperty("URL_ETag"));
                    if (last_modified != null) {
                        setLocalString(LD_LAST_MODIFIED, last_modified);
                    }
                    if (etag != null) {
                        setLocalString(LD_ETAG, etag);
                    }
                }
                List cts = (List) mr_rd.getProperty("URL_Content-Type");
                if (cts != null && cts.size() > 0) {
                    String content_type = (String) cts.get(0);
                    int pos = content_type.toLowerCase().indexOf("charset");
                    if (pos != -1) {
                        content_type = content_type.substring(pos + 1);
                        pos = content_type.indexOf('=');
                        if (pos != -1) {
                            content_type = content_type.substring(pos + 1).trim();
                            pos = content_type.indexOf(';');
                            if (pos != -1) {
                                content_type = content_type.substring(0, pos).trim();
                            }
                            if (content_type.startsWith("\"")) {
                                content_type = content_type.substring(1).trim();
                            }
                            if (content_type.endsWith("\"")) {
                                content_type = content_type.substring(0, content_type.length() - 1).trim();
                            }
                            try {
                                if (Charset.isSupported(content_type)) {
                                    debugLog("charset: " + content_type);
                                    content_charset = content_type;
                                }
                            } catch (Throwable e) {
                                try {
                                    // handle lowercase 'utf-8' for example
                                    content_type = content_type.toUpperCase();
                                    if (Charset.isSupported(content_type)) {
                                        debugLog("charset: " + content_type);
                                        content_charset = content_type;
                                    }
                                } catch (Throwable f) {
                                    log("Content type '" + content_type + "' not supported", f);
                                }
                            }
                        }
                    }
                }
            }
            ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
            byte[] buffer = new byte[8192];
            while (true) {
                int len = is.read(buffer);
                if (len <= 0) {
                    break;
                }
                baos.write(buffer, 0, len);
            }
            byte[] data = baos.toByteArray();
            if (vuze_file) {
                try {
                    VuzeFileHandler vfh = VuzeFileHandler.getSingleton();
                    VuzeFile vf = vfh.loadVuzeFile(data);
                    vfh.handleFiles(new VuzeFile[] { vf }, VuzeFileComponent.COMP_TYPE_NONE);
                } catch (Throwable e) {
                    Debug.out(e);
                }
                return (new pageDetails(initial_url, initial_url, null));
            }
            String page = null;
            String content = new String(data, 0, Math.min(data.length, 2048), content_charset);
            String lc_content = content.toLowerCase();
            {
                // first look for an xml charset declaration,
                // e.g. <?xml version="1.0" encoding="windows-1251" ?>
                int pos1 = lc_content.indexOf("<?xml");
                if (pos1 != -1) {
                    int pos2 = lc_content.indexOf("?>");
                    if (pos2 != -1) {
                        int pos3 = lc_content.indexOf("encoding", pos1);
                        if (pos3 != -1) {
                            pos3 = lc_content.indexOf("\"", pos3);
                        }
                        if (pos3 > pos1 && pos3 < pos2) {
                            pos3++;
                            int pos4 = lc_content.indexOf("\"", pos3);
                            if (pos4 > pos3 && pos4 < pos2) {
                                String encoding = content.substring(pos3, pos4).trim();
                                try {
                                    if (Charset.isSupported(encoding)) {
                                        debugLog("charset from xml tag: " + encoding);
                                        content_charset = encoding;
                                        // some feeds have crap at the start which makes pos2 mismatch for the above '?' - adjust if necessary
                                        int data_start = pos2;
                                        int max_skip = 64;
                                        while (data[data_start] != '?' && max_skip-- > 0) {
                                            data_start++;
                                        }
                                        page = content.substring(0, pos3) + "utf-8" + content.substring(pos4, pos2) + new String(data, data_start, data.length - data_start, content_charset);
                                    }
                                } catch (Throwable e) {
                                    log("Content type '" + encoding + "' not supported", e);
                                }
                            }
                        }
                    }
                }
            }
            if (page == null) {
                // next look for an http-equiv charset,
                // e.g. <meta http-equiv="Content-Type" content="text/html; charset=windows-1251" />
                int pos = 0;
                while (true) {
                    int pos1 = lc_content.indexOf("http-equiv", pos);
                    if (pos1 != -1) {
                        int pos2 = lc_content.indexOf(">", pos1);
                        if (pos2 != -1) {
                            int pos3 = lc_content.indexOf("charset", pos1);
                            if (pos3 != -1 && pos3 < pos2) {
                                pos3 = lc_content.indexOf("=", pos3);
                                if (pos3 != -1) {
                                    pos3++;
                                    int pos4 = lc_content.indexOf("\"", pos3);
                                    if (pos4 != -1) {
                                        int pos5 = lc_content.indexOf(";", pos3);
                                        if (pos5 != -1 && pos5 < pos4) {
                                            pos4 = pos5;
                                        }
                                        String encoding = content.substring(pos3, pos4).trim();
                                        try {
                                            if (Charset.isSupported(encoding)) {
                                                debugLog("charset from http-equiv : " + encoding);
                                                content_charset = encoding;
                                                // some feeds have crap at the start which makes pos2 mismatch for the above '?' - adjust if necessary
                                                int data_start = pos2;
                                                int max_skip = 64;
                                                while (data[data_start] != '?' && max_skip-- > 0) {
                                                    data_start++;
                                                }
                                                page = content.substring(0, pos3) + "utf-8" + content.substring(pos4, pos2) + new String(data, data_start, data.length - data_start, content_charset);
                                            }
                                        } catch (Throwable e) {
                                            log("Content type '" + encoding + "' not supported", e);
                                        }
                                        break;
                                    }
                                }
                            }
                            pos = pos2;
                        } else {
                            break;
                        }
                    } else {
                        break;
                    }
                }
            }
            if (page == null) {
                page = new String(data, content_charset);
            }
            debugLog("page:");
            debugLog(page);
            try {
                Matcher m = baseTagPattern.matcher(page);
                if (m.find()) {
                    basePage = m.group(1);
                    debugLog("base_page: " + basePage);
                }
            } catch (Exception e) {
                // no BASE tag in the page
            }
            URL final_url = initial_url;
            if (mr_rd != null) {
                URL x = (URL) mr_rd.getProperty("URL_URL");
                if (x != null) {
                    final_url = x;
                }
            }
            return (new pageDetails(initial_url, final_url, page));
        } finally {
            if (is != null) {
                is.close();
            }
        }
    } catch (SearchException e) {
        throw (e);
    } catch (Throwable e) {
        // e.printStackTrace();
        debugLog("Failed to load page: " + Debug.getNestedExceptionMessageAndStack(e));
        throw (new SearchException("Failed to load page", e));
    } finally {
        TorrentUtils.setTLSDescription(null);
    }
}
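The conditional-GET handling is easy to miss inside the larger method. A stripped-down sketch of just that part, using only the property keys the method itself uses; the method name and the way the cached validators are supplied are placeholders.

private InputStream downloadIfModified(ResourceDownloader rd, String last_modified, String etag) throws ResourceDownloaderException {
    // illustrative helper, not BiglyBT code: ask the server to skip the body if nothing changed
    if (last_modified != null) {
        rd.setProperty("URL_If-Modified-Since", last_modified);
    }
    if (etag != null) {
        rd.setProperty("URL_If-None-Match", etag);
    }
    try {
        return rd.download();
    } catch (ResourceDownloaderException e) {
        // a 304 surfaces as a download failure; detect it via the response-code property
        Long response = (Long) rd.getProperty("URL_HTTP_Response");
        if (response != null && response.longValue() == 304) {
            return null; // not modified, caller keeps its cached copy
        }
        throw e;
    }
}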
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloader in project BiglyBT by BiglySoftware.
The class IpFilterAutoLoaderImpl, method loadOtherFilters.
protected void loadOtherFilters(boolean allowAsyncDownloading, boolean loadOldWhileAsyncDownloading) {
    int p2bVersion = -1;
    try {
        class_mon.enter();
        List new_ipRanges = new ArrayList(1024);
        InputStream fin = null;
        BufferedInputStream bin = null;
        boolean isURL = false;
        try {
            // open the file
            String file = COConfigurationManager.getStringParameter(CFG_AUTOLOAD_FILE);
            Logger.log(new LogEvent(LOGID, "IP Filter file: " + file));
            File filtersFile = new File(file);
            if (filtersFile.exists()) {
                isURL = false;
            } else {
                if (!UrlUtils.isURL(file)) {
                    return;
                }
                isURL = true;
                filtersFile = FileUtil.getUserFile("ipfilter.dl");
                if (filtersFile.exists()) {
                    if (allowAsyncDownloading) {
                        Logger.log(new LogEvent(LOGID, "Downloading " + file + " async"));
                        downloadFiltersAsync(new URL(file));
                        if (!loadOldWhileAsyncDownloading) {
                            return;
                        }
                    }
                } else {
                    // no old download exists, download synchronously now
                    Logger.log(new LogEvent(LOGID, "sync Downloading " + file));
                    try {
                        ResourceDownloader rd = ResourceDownloaderFactoryImpl.getSingleton().create(new URL(file));
                        fin = rd.download();
                        FileUtil.copyFile(fin, filtersFile);
                        setNextAutoDownload(true);
                    } catch (ResourceDownloaderException e) {
                        return;
                    }
                }
            }
            fin = new FileInputStream(filtersFile);
            bin = new BufferedInputStream(fin, 16384);
            // extract a (g)zip'd file and open that instead
            byte[] headerBytes = new byte[2];
            bin.mark(3);
            bin.read(headerBytes, 0, 2);
            bin.reset();
            if (headerBytes[1] == (byte) 0x8b && headerBytes[0] == 0x1f) {
                // gzip magic number 0x1f 0x8b
                GZIPInputStream gzip = new GZIPInputStream(bin);
                filtersFile = FileUtil.getUserFile("ipfilter.ext");
                FileUtil.copyFile(gzip, filtersFile);
                fin = new FileInputStream(filtersFile);
                bin = new BufferedInputStream(fin, 16384);
            } else if (headerBytes[0] == 0x50 && headerBytes[1] == 0x4b) {
                // zip magic number 'PK'
                ZipInputStream zip = new ZipInputStream(bin);
                ZipEntry zipEntry = zip.getNextEntry();
                // skip small files
                while (zipEntry != null && zipEntry.getSize() < 1024 * 1024) {
                    zipEntry = zip.getNextEntry();
                }
                if (zipEntry == null) {
                    return;
                }
                filtersFile = FileUtil.getUserFile("ipfilter.ext");
                FileUtil.copyFile(zip, filtersFile);
                fin = new FileInputStream(filtersFile);
                bin = new BufferedInputStream(fin, 16384);
            }
            bin.mark(8);
            p2bVersion = getP2BFileVersion(bin);
            if (p2bVersion < 1 || p2bVersion > 3) {
                bin.reset();
                loadDATFilters(bin);
                return;
            }
            byte[] descBytes = new byte[255];
            byte[] ipBytes = new byte[4];
            String encoding = p2bVersion == 1 ? "ISO-8859-1" : "UTF-8";
            if (p2bVersion == 1 || p2bVersion == 2) {
                while (true) {
                    String description = readString(bin, descBytes, encoding);
                    int read = bin.read(ipBytes);
                    if (read < 4) {
                        break;
                    }
                    int startIp = ByteFormatter.byteArrayToInt(ipBytes);
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        break;
                    }
                    int endIp = ByteFormatter.byteArrayToInt(ipBytes);
                    IpRangeImpl ipRange = new IpRangeImpl(description, startIp, endIp, true);
                    ipRange.setAddedToRangeList(true);
                    new_ipRanges.add(ipRange);
                }
            } else {
                // version 3
                int read = bin.read(ipBytes);
                if (read < 4) {
                    return;
                }
                int numDescs = ByteFormatter.byteArrayToInt(ipBytes);
                String[] descs = new String[numDescs];
                for (int i = 0; i < numDescs; i++) {
                    descs[i] = readString(bin, descBytes, encoding);
                }
                read = bin.read(ipBytes);
                if (read < 4) {
                    return;
                }
                int numRanges = ByteFormatter.byteArrayToInt(ipBytes);
                for (int i = 0; i < numRanges; i++) {
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        return;
                    }
                    int descIdx = ByteFormatter.byteArrayToInt(ipBytes);
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        return;
                    }
                    int startIp = ByteFormatter.byteArrayToInt(ipBytes);
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        return;
                    }
                    int endIp = ByteFormatter.byteArrayToInt(ipBytes);
                    String description = descIdx < descs.length && descIdx >= 0 ? descs[descIdx] : "";
                    IpRangeImpl ipRange = new IpRangeImpl(description, startIp, endIp, true);
                    ipRange.setAddedToRangeList(true);
                    new_ipRanges.add(ipRange);
                }
            }
        } catch (IOException e) {
            Debug.out(e);
        } finally {
            if (bin != null) {
                try {
                    bin.close();
                } catch (Throwable e) {
                }
            }
            if (fin != null) {
                try {
                    fin.close();
                } catch (Throwable e) {
                }
            }
            Iterator it = new_ipRanges.iterator();
            while (it.hasNext()) {
                ((IpRange) it.next()).checkValid();
            }
            ipFilter.markAsUpToDate();
            if (!isURL) {
                setFileReloadTimer();
            }
        }
    } finally {
        class_mon.exit();
    }
}
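The magic-number sniffing above generalises. A small self-contained sketch of the mark/reset technique, assuming the usual java.io and java.util.zip imports; the helper name is a placeholder, and the zip branch omits the entry-size filtering the real code performs.

private static InputStream maybeDecompress(BufferedInputStream bin) throws IOException {
    // illustrative helper, not BiglyBT code
    byte[] header = new byte[2];
    bin.mark(3);                        // remember the stream position
    int read = bin.read(header, 0, 2);
    bin.reset();                        // rewind so the real parser sees the full stream
    if (read == 2 && header[0] == 0x1f && header[1] == (byte) 0x8b) {
        return new GZIPInputStream(bin);      // gzip magic 1f 8b
    }
    if (read == 2 && header[0] == 0x50 && header[1] == 0x4b) {
        ZipInputStream zip = new ZipInputStream(bin);  // zip magic 'PK'
        zip.getNextEntry();                            // position at the first entry
        return zip;
    }
    return bin;                         // not compressed, pass through unchanged
}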
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloader in project BiglyBT by BiglySoftware.
The class IpFilterAutoLoaderImpl, method downloadFiltersAsync.
/**
 * @param url
 *
 * @since 3.0.1.5
 */
void downloadFiltersAsync(URL url) {
    ResourceDownloader rd = ResourceDownloaderFactoryImpl.getSingleton().create(url);
    // old dl exists, load old one while new one downloads async
    rd.addListener(new ResourceDownloaderAdapter() {
        // @see com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderAdapter#reportPercentComplete(com.biglybt.pif.utils.resourcedownloader.ResourceDownloader, int)
        @Override
        public void reportPercentComplete(ResourceDownloader downloader, int percentage) {
        }

        @Override
        public boolean completed(ResourceDownloader downloader, InputStream data) {
            try {
                setNextAutoDownload(true);
                Logger.log(new LogEvent(LOGID, "downloaded..waiting"));
                // since this is a different thread, we can use class_mon as
                // a cheap semaphore to wait until previous load completes
                class_mon.enter();
                Logger.log(new LogEvent(LOGID, "downloaded.. copying"));
                try {
                    FileUtil.copyFile(data, FileUtil.getUserFile("ipfilter.dl"));
                    AEThread thread = new AEThread("reload ipfilters", true) {
                        @Override
                        public void runSupport() {
                            try {
                                UIFunctions uif = UIFunctionsManager.getUIFunctions();
                                if (uif != null) {
                                    uif.setStatusText("reloading.filters");
                                }
                                ipFilter.reload(false);
                                if (uif != null) {
                                    uif.setStatusText(null);
                                }
                            } catch (Exception e) {
                                Debug.out(e);
                            }
                        }
                    };
                    thread.setPriority(Thread.NORM_PRIORITY - 1);
                    thread.start();
                } catch (Exception e) {
                    Debug.out(e);
                }
            } finally {
                class_mon.exit();
            }
            return true;
        }
    });
    rd.asyncDownload();
}
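For comparison, the asynchronous pattern boiled down to its essentials: attach a listener, then call asyncDownload(), which returns immediately and delivers results via callbacks. This is a sketch rather than BiglyBT code; processDownloadedData and the surrounding context are placeholders.

ResourceDownloader rd = ResourceDownloaderFactoryImpl.getSingleton().create(url);
rd.addListener(new ResourceDownloaderAdapter() {
    @Override
    public boolean completed(ResourceDownloader downloader, InputStream data) {
        // runs on a downloader thread once the payload is available
        processDownloadedData(data); // placeholder for application logic
        return true;
    }

    @Override
    public void failed(ResourceDownloader downloader, ResourceDownloaderException e) {
        Debug.out(e); // download errors arrive here instead of as thrown exceptions
    }
});
rd.asyncDownload(); // returns immediately; results arrive via the listener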
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloader in project BiglyBT by BiglySoftware.
The class TorrentUtils, method download.
public static TOTorrent download(URL url, long timeout) throws IOException {
    try {
        PluginProxy plugin_proxy = null;
        try {
            if (AENetworkClassifier.categoriseAddress(url.getHost()) != AENetworkClassifier.AT_PUBLIC) {
                plugin_proxy = AEProxyFactory.getPluginProxy("torrent download", url);
            }
            ResourceDownloader rd;
            if (plugin_proxy == null) {
                rd = new ResourceDownloaderFactoryImpl().create(url);
            } else {
                rd = new ResourceDownloaderFactoryImpl().create(plugin_proxy.getURL(), plugin_proxy.getProxy());
                rd.setProperty("URL_HOST", url.getHost());
            }
            if (timeout > 0) {
                rd.setProperty("URL_Connect_Timeout", timeout);
                rd.setProperty("URL_Read_Timeout", timeout);
            }
            byte[] bytes = FileUtil.readInputStreamAsByteArray(rd.download(), BDecoder.MAX_BYTE_ARRAY_SIZE);
            return (TOTorrentFactory.deserialiseFromBEncodedByteArray(bytes));
        } finally {
            if (plugin_proxy != null) {
                plugin_proxy.setOK(true);
            }
        }
    } catch (IOException e) {
        throw (e);
    } catch (Throwable e) {
        throw (new IOException(Debug.getNestedExceptionMessage(e)));
    }
}
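A hypothetical call site, to show the shape of the API; the URL and timeout values are illustrative only.

try {
    // 30-second connect/read timeout; example URL is a placeholder
    TOTorrent torrent = TorrentUtils.download(new URL("http://example.com/file.torrent"), 30 * 1000);
    // use the torrent ...
} catch (IOException e) {
    Debug.out(e);
}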