Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException in project BiglyBT by BiglySoftware.
The class ResourceDownloaderTimeoutImpl, method asyncGetSize: the size lookup runs on one thread while a second thread cancels it with a "getSize timeout" ResourceDownloaderException once timeout_millis elapses.
public void asyncGetSize() {
    try {
        this_mon.enter();
        if (!cancelled) {
            current_downloader = delegate.getClone(this);
            Thread size_thread = new AEThread("ResourceDownloader:size getter") {
                @Override
                public void runSupport() {
                    try {
                        long res = current_downloader.getSize();
                        result = new Long(res);
                        setProperties(current_downloader);
                        done_sem.release();
                    } catch (ResourceDownloaderException e) {
                        failed(current_downloader, e);
                    }
                }
            };
            size_thread.setDaemon(true);
            size_thread.start();
            Thread t = new AEThread("ResourceDownloaderTimeout") {
                @Override
                public void runSupport() {
                    try {
                        Thread.sleep(timeout_millis);
                        cancel(new ResourceDownloaderException(ResourceDownloaderTimeoutImpl.this, "getSize timeout"));
                    } catch (Throwable e) {
                        Debug.printStackTrace(e);
                    }
                }
            };
            t.setDaemon(true);
            t.start();
        }
    } finally {
        this_mon.exit();
    }
}
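For context, here is a caller-side sketch (not taken from the BiglyBT source) of how a timeout-wrapped downloader like the one above is typically consumed: the timeout surfaces to the caller as the ResourceDownloaderException raised by cancel(). The factory method name getTimeoutDownloader and the URL are assumptions, not confirmed by the snippet.

// Hypothetical usage sketch; getTimeoutDownloader and the URL are assumptions.
import java.net.URL;
import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderFactory;

public class SizeWithTimeoutSketch {
    public static void main(String[] args) throws Exception {
        ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
        ResourceDownloader plain = rdf.create(new URL("https://example.com/file.torrent"));
        // Wrap with a 30-second timeout (assumed factory method); on expiry the wrapper
        // cancels the lookup and getSize() fails with "getSize timeout".
        ResourceDownloader timed = rdf.getTimeoutDownloader(plain, 30 * 1000);
        try {
            long size = timed.getSize();
            System.out.println("remote size: " + size);
        } catch (ResourceDownloaderException e) {
            System.err.println("size lookup failed or timed out: " + e.getMessage());
        }
    }
}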
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException in project BiglyBT by BiglySoftware.
The class ResourceDownloaderTorrentImpl, method getSizeSupport: downloads and deserialises the torrent on first use, rejects non-simple torrents, and wraps any TOTorrentException in a ResourceDownloaderException.
protected long getSizeSupport() throws ResourceDownloaderException {
    try {
        if (torrent_holder[0] == null) {
            ResourceDownloader x = delegate.getClone(this);
            addReportListener(x);
            InputStream is = x.download();
            try {
                torrent_holder[0] = TOTorrentFactory.deserialiseFromBEncodedInputStream(is);
            } finally {
                try {
                    is.close();
                } catch (IOException e) {
                }
            }
            if (!torrent_holder[0].isSimpleTorrent()) {
                throw (new ResourceDownloaderException(this, "Only simple torrents supported"));
            }
        }
        try {
            String file_str = new String(torrent_holder[0].getName());
            int pos = file_str.lastIndexOf(".");
            String file_type;
            if (pos != -1) {
                file_type = file_str.substring(pos + 1);
            } else {
                file_type = null;
            }
            setProperty(ResourceDownloader.PR_STRING_CONTENT_TYPE, HTTPUtils.guessContentTypeFromFileType(file_type));
        } catch (Throwable e) {
            Debug.printStackTrace(e);
        }
        return (torrent_holder[0].getSize());
    } catch (TOTorrentException e) {
        throw (new ResourceDownloaderException(this, "Torrent deserialisation failed", e));
    }
}
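A minimal caller-side sketch (illustrative, not from BiglyBT; the URL is a placeholder) showing how the wrapped exception thrown above reaches a client, with the original low-level failure typically available via getCause(). Only the download()/ResourceDownloaderException API visible in these snippets is used.

// Hypothetical caller of a torrent-backed ResourceDownloader.
import java.io.InputStream;
import java.net.URL;
import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException;

public class TorrentDownloadSketch {
    public static void main(String[] args) throws Exception {
        ResourceDownloader rd = StaticUtilities.getResourceDownloaderFactory().create(
                new URL("https://example.com/sample.torrent")); // placeholder URL
        try (InputStream is = rd.download()) {
            System.out.println("download started, first byte: " + is.read());
        } catch (ResourceDownloaderException e) {
            // getSizeSupport-style wrappers pass the low-level failure in as the cause.
            Throwable cause = e.getCause();
            System.err.println("download failed: " + e.getMessage()
                    + (cause != null ? " (cause: " + cause + ")" : ""));
        }
    }
}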
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException in project BiglyBT by BiglySoftware.
The class WebEngine, method getWebPageContentSupport: builds the search URL, downloads the page, and catches ResourceDownloaderException to turn an HTTP 304 into an empty-page result while rethrowing real failures.
private pageDetails getWebPageContentSupport(Proxy proxy, String proxy_host, String searchURL, SearchParameter[] searchParameters, Map<String, String> searchContext, String headers, boolean only_if_modified) throws SearchException {
try {
TorrentUtils.setTLSDescription("Search: " + getName());
if (requiresLogin()) {
throw new SearchLoginException("login required");
}
boolean vuze_file = searchURL.toLowerCase().startsWith("vuze:");
if (!vuze_file) {
String[] from_strs = new String[searchParameters.length];
String[] to_strs = new String[searchParameters.length];
for (int i = 0; i < searchParameters.length; i++) {
SearchParameter parameter = searchParameters[i];
from_strs[i] = "%" + parameter.getMatchPattern();
to_strs[i] = URLEncoder.encode(parameter.getValue(), "UTF-8");
}
searchURL = GeneralUtils.replaceAll(searchURL, from_strs, to_strs);
Iterator<Map.Entry<String, String>> it = searchContext.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, String> entry = it.next();
String key = entry.getKey();
if (supportsContext(key)) {
if (searchURL.indexOf('?') == -1) {
searchURL += "?";
} else {
searchURL += "&";
}
String value = entry.getValue();
searchURL += key + "=" + URLEncoder.encode(value, "UTF-8");
}
}
}
// System.out.println(searchURL);
// hack to support POST by encoding into URL
// http://xxxx/index.php?main=search&azmethod=post_basic:SearchString1=%s&SearchString=&search=Search
ResourceDownloaderFactory rdf = StaticUtilities.getResourceDownloaderFactory();
URL initial_url;
ResourceDownloader initial_url_rd;
int post_pos = searchURL.indexOf("azmethod=");
if (post_pos > 0) {
String post_params = searchURL.substring(post_pos + 9);
searchURL = searchURL.substring(0, post_pos - 1);
debugLog("search_url: " + searchURL + ", post=" + post_params);
initial_url = new URL(searchURL);
int sep = post_params.indexOf(':');
String type = post_params.substring(0, sep);
if (!type.equals("post_basic")) {
throw (new SearchException("Only basic type supported"));
}
post_params = post_params.substring(sep + 1);
if (proxy == null) {
initial_url_rd = rdf.create(initial_url, post_params);
} else {
initial_url_rd = rdf.create(initial_url, post_params, proxy);
}
initial_url_rd.setProperty("URL_Content-Type", "application/x-www-form-urlencoded");
} else {
debugLog("search_url: " + searchURL);
initial_url = new URL(searchURL);
if (proxy == null) {
initial_url_rd = rdf.create(initial_url);
} else {
initial_url_rd = rdf.create(initial_url, proxy);
}
}
if (proxy_host != null) {
initial_url_rd.setProperty("URL_HOST", proxy_host);
}
setHeaders(initial_url_rd, headers);
if (needsAuth && local_cookies != null) {
initial_url_rd.setProperty("URL_Cookie", local_cookies);
} else if (fullCookies != null && fullCookies.length() > 0) {
initial_url_rd.setProperty("URL_Cookie", fullCookies);
}
if (only_if_modified) {
String last_modified = getLocalString(LD_LAST_MODIFIED);
String etag = getLocalString(LD_ETAG);
if (last_modified != null) {
initial_url_rd.setProperty("URL_If-Modified-Since", last_modified);
}
if (etag != null) {
initial_url_rd.setProperty("URL_If-None-Match", etag);
}
}
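        // Note: the URL_-prefixed properties set above correspond to the If-Modified-Since /
        // If-None-Match request headers; a resulting 304 shows up further down as a failed
        // download whose "URL_HTTP_Response" property is 304, and is mapped to an empty page.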
        InputStream is = null;
        try {
            String content_charset = "UTF-8";
            ResourceDownloader mr_rd = null;
            if (initial_url.getProtocol().equalsIgnoreCase("file")) {
                // handle file://c:/ - map to file:/c:/
                String str = initial_url.toExternalForm();
                if (initial_url.getAuthority() != null) {
                    str = str.replaceFirst("://", ":/");
                }
                int pos = str.indexOf('?');
                if (pos != -1) {
                    str = str.substring(0, pos);
                }
                is = new FileInputStream(new File(new URL(str).toURI()));
            } else {
                if (proxy == null) {
                    initial_url_rd.setProperty("URL_Connect_Timeout", 10 * 1000);
                    initial_url_rd.setProperty("URL_Read_Timeout", 10 * 1000);
                }
                mr_rd = rdf.getMetaRefreshDownloader(initial_url_rd);
                try {
                    is = mr_rd.download();
                } catch (ResourceDownloaderException e) {
                    Long response = (Long) mr_rd.getProperty("URL_HTTP_Response");
                    if (response != null && response.longValue() == 304) {
                        return (new pageDetails(initial_url, initial_url, ""));
                    } else {
                        throw (e);
                    }
                }
                if (needsAuth) {
                    List cookies_list = (List) mr_rd.getProperty("URL_Set-Cookie");
                    List cookies_set = new ArrayList();
                    if (cookies_list != null) {
                        for (int i = 0; i < cookies_list.size(); i++) {
                            String[] cookies = ((String) cookies_list.get(i)).split(";");
                            for (int j = 0; j < cookies.length; j++) {
                                String cookie = cookies[j].trim();
                                if (cookie.indexOf('=') != -1) {
                                    cookies_set.add(cookie);
                                }
                            }
                        }
                    }
                    // well, not much we can do with the cookies anyway as in general the ones
                    // set are the ones missing/expired, not the existing ones. That is, we can't
                    // deduce anything from the fact that a required cookie is not 'set' here
                    // the most we could do is catch a server that explicitly deleted invalid
                    // cookies by expiring it, but I doubt this is a common practice.
                    // Also note the complexity of cookie syntax
                    // Set-Cookie: old standard using expires=, new using MaxAge
                    // Set-Cookie2:
                    // Maybe use http://jcookie.sourceforge.net/ if needed
                }
                if (only_if_modified) {
                    String last_modified = extractProperty(mr_rd.getProperty("URL_Last-Modified"));
                    String etag = extractProperty(mr_rd.getProperty("URL_ETag"));
                    if (last_modified != null) {
                        setLocalString(LD_LAST_MODIFIED, last_modified);
                    }
                    if (etag != null) {
                        setLocalString(LD_ETAG, etag);
                    }
                }
                List cts = (List) mr_rd.getProperty("URL_Content-Type");
                if (cts != null && cts.size() > 0) {
                    String content_type = (String) cts.get(0);
                    int pos = content_type.toLowerCase().indexOf("charset");
                    if (pos != -1) {
                        content_type = content_type.substring(pos + 1);
                        pos = content_type.indexOf('=');
                        if (pos != -1) {
                            content_type = content_type.substring(pos + 1).trim();
                            pos = content_type.indexOf(';');
                            if (pos != -1) {
                                content_type = content_type.substring(0, pos).trim();
                            }
                            if (content_type.startsWith("\"")) {
                                content_type = content_type.substring(1).trim();
                            }
                            if (content_type.endsWith("\"")) {
                                content_type = content_type.substring(0, content_type.length() - 1).trim();
                            }
                            try {
                                if (Charset.isSupported(content_type)) {
                                    debugLog("charset: " + content_type);
                                    content_charset = content_type;
                                }
                            } catch (Throwable e) {
                                try {
                                    // handle lowercase 'utf-8' for example
                                    content_type = content_type.toUpperCase();
                                    if (Charset.isSupported(content_type)) {
                                        debugLog("charset: " + content_type);
                                        content_charset = content_type;
                                    }
                                } catch (Throwable f) {
                                    log("Content type '" + content_type + "' not supported", f);
                                }
                            }
                        }
                    }
                }
            }
            ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
            byte[] buffer = new byte[8192];
            while (true) {
                int len = is.read(buffer);
                if (len <= 0) {
                    break;
                }
                baos.write(buffer, 0, len);
            }
            byte[] data = baos.toByteArray();
            if (vuze_file) {
                try {
                    VuzeFileHandler vfh = VuzeFileHandler.getSingleton();
                    VuzeFile vf = vfh.loadVuzeFile(data);
                    vfh.handleFiles(new VuzeFile[] { vf }, VuzeFileComponent.COMP_TYPE_NONE);
                } catch (Throwable e) {
                    Debug.out(e);
                }
                return (new pageDetails(initial_url, initial_url, null));
            }
            String page = null;
            String content = new String(data, 0, Math.min(data.length, 2048), content_charset);
            String lc_content = content.toLowerCase();
            {
                // first look for xml charset
                // e.g. <?xml version="1.0" encoding="windows-1251" ?>
                int pos1 = lc_content.indexOf("<?xml");
                if (pos1 != -1) {
                    int pos2 = lc_content.indexOf("?>");
                    if (pos2 != -1) {
                        int pos3 = lc_content.indexOf("encoding", pos1);
                        if (pos3 != -1) {
                            pos3 = lc_content.indexOf("\"", pos3);
                        }
                        if (pos3 > pos1 && pos3 < pos2) {
                            pos3++;
                            int pos4 = lc_content.indexOf("\"", pos3);
                            if (pos4 > pos3 && pos4 < pos2) {
                                String encoding = content.substring(pos3, pos4).trim();
                                try {
                                    if (Charset.isSupported(encoding)) {
                                        debugLog("charset from xml tag: " + encoding);
                                        content_charset = encoding;
                                        // some feeds have crap at the start which makes pos2 mismatch for the above '?' - adjust if necessary
                                        int data_start = pos2;
                                        int max_skip = 64;
                                        while (data[data_start] != '?' && max_skip-- > 0) {
                                            data_start++;
                                        }
                                        page = content.substring(0, pos3) + "utf-8" + content.substring(pos4, pos2) + new String(data, data_start, data.length - data_start, content_charset);
                                    }
                                } catch (Throwable e) {
                                    log("Content type '" + encoding + "' not supported", e);
                                }
                            }
                        }
                    }
                }
            }
            if (page == null) {
                // next look for http-equiv charset
                // e.g. <meta http-equiv="Content-Type" content="text/html; charset=windows-1251" />
                int pos = 0;
                while (true) {
                    int pos1 = lc_content.indexOf("http-equiv", pos);
                    if (pos1 != -1) {
                        int pos2 = lc_content.indexOf(">", pos1);
                        if (pos2 != -1) {
                            int pos3 = lc_content.indexOf("charset", pos1);
                            if (pos3 != -1 && pos3 < pos2) {
                                pos3 = lc_content.indexOf("=", pos3);
                                if (pos3 != -1) {
                                    pos3++;
                                    int pos4 = lc_content.indexOf("\"", pos3);
                                    if (pos4 != -1) {
                                        int pos5 = lc_content.indexOf(";", pos3);
                                        if (pos5 != -1 && pos5 < pos4) {
                                            pos4 = pos5;
                                        }
                                        String encoding = content.substring(pos3, pos4).trim();
                                        try {
                                            if (Charset.isSupported(encoding)) {
                                                debugLog("charset from http-equiv : " + encoding);
                                                content_charset = encoding;
                                                // some feeds have crap at the start which makes pos2 mismatch for the above '?' - adjust if necessary
                                                int data_start = pos2;
                                                int max_skip = 64;
                                                while (data[data_start] != '?' && max_skip-- > 0) {
                                                    data_start++;
                                                }
                                                page = content.substring(0, pos3) + "utf-8" + content.substring(pos4, pos2) + new String(data, data_start, data.length - data_start, content_charset);
                                            }
                                        } catch (Throwable e) {
                                            log("Content type '" + encoding + "' not supported", e);
                                        }
                                        break;
                                    }
                                }
                            }
                            pos = pos2;
                        } else {
                            break;
                        }
                    } else {
                        break;
                    }
                }
            }
            if (page == null) {
                page = new String(data, content_charset);
            }
            debugLog("page:");
            debugLog(page);
            try {
                Matcher m = baseTagPattern.matcher(page);
                if (m.find()) {
                    basePage = m.group(1);
                    debugLog("base_page: " + basePage);
                }
            } catch (Exception e) {
                // No BASE tag in the page
            }
            URL final_url = initial_url;
            if (mr_rd != null) {
                URL x = (URL) mr_rd.getProperty("URL_URL");
                if (x != null) {
                    final_url = x;
                }
            }
            return (new pageDetails(initial_url, final_url, page));
        } finally {
            if (is != null) {
                is.close();
            }
        }
    } catch (SearchException e) {
        throw (e);
    } catch (Throwable e) {
        // e.printStackTrace();
        debugLog("Failed to load page: " + Debug.getNestedExceptionMessageAndStack(e));
        throw (new SearchException("Failed to load page", e));
    } finally {
        TorrentUtils.setTLSDescription(null);
    }
}
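The 304 handling above suggests the general conditional-GET pattern with this API. Below is a hedged sketch (placeholder URL and date; illustrative, not from BiglyBT) that reuses only the property names visible in the method: URL_If-Modified-Since and URL_HTTP_Response.

// Hypothetical conditional-GET sketch; URL and date are placeholders.
import java.io.InputStream;
import java.net.URL;
import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException;

public class ConditionalGetSketch {
    public static void main(String[] args) throws Exception {
        ResourceDownloader rd = StaticUtilities.getResourceDownloaderFactory().create(
                new URL("https://example.com/feed.xml"));
        rd.setProperty("URL_If-Modified-Since", "Sat, 01 Jan 2022 00:00:00 GMT");
        try (InputStream is = rd.download()) {
            System.out.println("content changed, first byte: " + is.read());
        } catch (ResourceDownloaderException e) {
            // As in getWebPageContentSupport, a 304 surfaces as a failed download;
            // check the response code before treating it as a real error.
            Long response = (Long) rd.getProperty("URL_HTTP_Response");
            if (response != null && response.longValue() == 304) {
                System.out.println("not modified, keep the cached copy");
            } else {
                throw e;
            }
        }
    }
}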
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException in project BiglyBT by BiglySoftware.
The class IpFilterAutoLoaderImpl, method loadOtherFilters: locates or downloads the auto-load filter file (synchronously when no cached copy exists), bails out if that download throws a ResourceDownloaderException, and then parses the DAT or P2B contents.
protected void loadOtherFilters(boolean allowAsyncDownloading, boolean loadOldWhileAsyncDownloading) {
    int p2bVersion = -1;
    try {
        class_mon.enter();
        List new_ipRanges = new ArrayList(1024);
        InputStream fin = null;
        BufferedInputStream bin = null;
        boolean isURL = false;
        try {
            // open the file
            String file = COConfigurationManager.getStringParameter(CFG_AUTOLOAD_FILE);
            Logger.log(new LogEvent(LOGID, "IP Filter file: " + file));
            File filtersFile = new File(file);
            if (filtersFile.exists()) {
                isURL = false;
            } else {
                if (!UrlUtils.isURL(file)) {
                    return;
                }
                isURL = true;
                filtersFile = FileUtil.getUserFile("ipfilter.dl");
                if (filtersFile.exists()) {
                    if (allowAsyncDownloading) {
                        Logger.log(new LogEvent(LOGID, "Downloading " + file + " async"));
                        downloadFiltersAsync(new URL(file));
                        if (!loadOldWhileAsyncDownloading) {
                            return;
                        }
                    }
                } else {
                    // no old dl, download sync now
                    Logger.log(new LogEvent(LOGID, "sync Downloading " + file));
                    try {
                        ResourceDownloader rd = ResourceDownloaderFactoryImpl.getSingleton().create(new URL(file));
                        fin = rd.download();
                        FileUtil.copyFile(fin, filtersFile);
                        setNextAutoDownload(true);
                    } catch (ResourceDownloaderException e) {
                        return;
                    }
                }
            }
            fin = new FileInputStream(filtersFile);
            bin = new BufferedInputStream(fin, 16384);
            // extract (g)zip'd file and open that
            byte[] headerBytes = new byte[2];
            bin.mark(3);
            bin.read(headerBytes, 0, 2);
            bin.reset();
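            // Sniff the compression format from the two magic bytes just read:
            // 0x1f 0x8b marks a gzip stream, 0x50 0x4b ("PK") marks a zip archive.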
            if (headerBytes[1] == (byte) 0x8b && headerBytes[0] == 0x1f) {
                GZIPInputStream gzip = new GZIPInputStream(bin);
                filtersFile = FileUtil.getUserFile("ipfilter.ext");
                FileUtil.copyFile(gzip, filtersFile);
                fin = new FileInputStream(filtersFile);
                bin = new BufferedInputStream(fin, 16384);
            } else if (headerBytes[0] == 0x50 && headerBytes[1] == 0x4b) {
                ZipInputStream zip = new ZipInputStream(bin);
                ZipEntry zipEntry = zip.getNextEntry();
                // Skip small files
                while (zipEntry != null && zipEntry.getSize() < 1024 * 1024) {
                    zipEntry = zip.getNextEntry();
                }
                if (zipEntry == null) {
                    return;
                }
                filtersFile = FileUtil.getUserFile("ipfilter.ext");
                FileUtil.copyFile(zip, filtersFile);
                fin = new FileInputStream(filtersFile);
                bin = new BufferedInputStream(fin, 16384);
            }
            bin.mark(8);
            p2bVersion = getP2BFileVersion(bin);
            if (p2bVersion < 1 || p2bVersion > 3) {
                bin.reset();
                loadDATFilters(bin);
                return;
            }
            byte[] descBytes = new byte[255];
            byte[] ipBytes = new byte[4];
            String encoding = p2bVersion == 1 ? "ISO-8859-1" : "UTF-8";
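            // P2B v1/v2 records are (description string, 4-byte start IP, 4-byte end IP);
            // v3 stores a description table first, then (description index, start IP, end IP)
            // triples, as parsed below.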
            if (p2bVersion == 1 || p2bVersion == 2) {
                while (true) {
                    String description = readString(bin, descBytes, encoding);
                    int read = bin.read(ipBytes);
                    if (read < 4) {
                        break;
                    }
                    int startIp = ByteFormatter.byteArrayToInt(ipBytes);
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        break;
                    }
                    int endIp = ByteFormatter.byteArrayToInt(ipBytes);
                    IpRangeImpl ipRange = new IpRangeImpl(description, startIp, endIp, true);
                    ipRange.setAddedToRangeList(true);
                    new_ipRanges.add(ipRange);
                }
            } else {
                // version 3
                int read = bin.read(ipBytes);
                if (read < 4) {
                    return;
                }
                int numDescs = ByteFormatter.byteArrayToInt(ipBytes);
                String[] descs = new String[numDescs];
                for (int i = 0; i < numDescs; i++) {
                    descs[i] = readString(bin, descBytes, encoding);
                }
                read = bin.read(ipBytes);
                if (read < 4) {
                    return;
                }
                int numRanges = ByteFormatter.byteArrayToInt(ipBytes);
                for (int i = 0; i < numRanges; i++) {
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        return;
                    }
                    int descIdx = ByteFormatter.byteArrayToInt(ipBytes);
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        return;
                    }
                    int startIp = ByteFormatter.byteArrayToInt(ipBytes);
                    read = bin.read(ipBytes);
                    if (read < 4) {
                        return;
                    }
                    int endIp = ByteFormatter.byteArrayToInt(ipBytes);
                    String description = descIdx < descs.length && descIdx >= 0 ? descs[descIdx] : "";
                    IpRangeImpl ipRange = new IpRangeImpl(description, startIp, endIp, true);
                    ipRange.setAddedToRangeList(true);
                    new_ipRanges.add(ipRange);
                }
            }
        } catch (IOException e) {
            Debug.out(e);
        } finally {
            if (bin != null) {
                try {
                    bin.close();
                } catch (Throwable e) {
                }
            }
            if (fin != null) {
                try {
                    fin.close();
                } catch (Throwable e) {
                }
            }
            Iterator it = new_ipRanges.iterator();
            while (it.hasNext()) {
                ((IpRange) it.next()).checkValid();
            }
            ipFilter.markAsUpToDate();
            if (!isURL) {
                setFileReloadTimer();
            }
        }
    } finally {
        class_mon.exit();
    }
}
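A condensed, hedged sketch of the synchronous "download or keep the old copy" step above, using only the plugin-level factory shown elsewhere on this page; the target path handling and buffer size are illustrative, not the loader's actual implementation.

// Hypothetical helper mirroring the sync-download branch of loadOtherFilters.
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import com.biglybt.pif.utils.StaticUtilities;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloader;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException;

public class FilterRefreshSketch {
    // Returns true when a fresh copy was written; false when the download failed
    // and any existing file should be kept, mirroring the policy in loadOtherFilters.
    static boolean refreshFilterFile(URL source, File target) throws IOException {
        ResourceDownloader rd = StaticUtilities.getResourceDownloaderFactory().create(source);
        try (InputStream in = rd.download();
             OutputStream out = new FileOutputStream(target)) {
            byte[] buffer = new byte[16384];
            int len;
            while ((len = in.read(buffer)) > 0) {
                out.write(buffer, 0, len);
            }
            return true;
        } catch (ResourceDownloaderException e) {
            return false;
        }
    }
}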
Use of com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException in project BiglyBT by BiglySoftware.
The class MagnetPlugin, method getSecondaryLookupResult: drains a shared one-slot result holder and either returns the re-encoded torrent or rethrows the stored ResourceDownloaderException.
protected byte[] getSecondaryLookupResult(final Object[] result) throws ResourceDownloaderException {
    if (result == null) {
        return (null);
    }
    Object x;
    synchronized (result) {
        x = result[0];
        result[0] = null;
    }
    if (x instanceof InputStream) {
        InputStream is = (InputStream) x;
        try {
            TOTorrent t = TOTorrentFactory.deserialiseFromBEncodedInputStream(is);
            TorrentUtils.setPeerCacheValid(t);
            return (BEncoder.encode(t.serialiseToMap()));
        } catch (Throwable e) {
        }
    } else if (x instanceof ResourceDownloaderException) {
        throw ((ResourceDownloaderException) x);
    }
    return (null);
}
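The method above is the consumer half of a simple handoff: a lookup callback stores either an InputStream or a ResourceDownloaderException in a shared one-slot array, and the reader drains the slot and rethrows any stored failure. A minimal self-contained sketch of that pattern (illustrative only; the payload bytes are placeholders):

// Hypothetical illustration of the one-slot result handoff used by getSecondaryLookupResult.
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import com.biglybt.pif.utils.resourcedownloader.ResourceDownloaderException;

public class ResultHandoffSketch {
    public static void main(String[] args) {
        final Object[] result = new Object[1];

        // Producer side (normally a downloader listener): record either the data
        // stream or the ResourceDownloaderException it was handed.
        synchronized (result) {
            result[0] = new ByteArrayInputStream(new byte[0]); // placeholder payload
        }

        // Consumer side: mirrors getSecondaryLookupResult.
        Object x;
        synchronized (result) {
            x = result[0];
            result[0] = null;
        }
        if (x instanceof InputStream) {
            System.out.println("got torrent data stream");
        } else if (x instanceof ResourceDownloaderException) {
            System.err.println("lookup failed: " + ((ResourceDownloaderException) x).getMessage());
        }
    }
}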