Use of com.biglybt.core.logging.LogEvent in project BiglyBT by BiglySoftware.
The class TrackerStatus, method runScrapesSupport.
protected void runScrapesSupport(ArrayList<TRTrackerScraperResponseImpl> allResponses, boolean force) {
try {
if (Logger.isEnabled()) {
Logger.log(new LogEvent(LOGID, "TrackerStatus: scraping '" + scrapeURL + "', for " + allResponses.size() + " of " + hashes.size() + " hashes" + ", single_hash_scrapes: " + (bSingleHashScrapes ? "Y" : "N")));
}
boolean original_bSingleHashScrapes = bSingleHashScrapes;
boolean disable_all_scrapes = !COConfigurationManager.getBooleanParameter("Tracker Client Scrape Enable");
byte[] scrape_reply = null;
List<HashWrapper> hashesInQuery = new ArrayList<>(allResponses.size());
List<TRTrackerScraperResponseImpl> responsesInQuery = new ArrayList<>(allResponses.size());
List<HashWrapper> hashesForUDP = new ArrayList<>();
List<TRTrackerScraperResponseImpl> responsesForUDP = new ArrayList<>();
List<TRTrackerScraperResponseImpl> activeResponses = responsesInQuery;
try {
// if URL already includes a query component then just append our
// params
HashWrapper one_of_the_hashes = null;
// TRTrackerScraperResponseImpl one_of_the_responses = null;
char first_separator = scrapeURL.indexOf('?') == -1 ? '?' : '&';
String info_hash = "";
String flags = "";
for (TRTrackerScraperResponseImpl response : allResponses) {
HashWrapper hash = response.getHash();
if (Logger.isEnabled())
Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, "TrackerStatus: " + scrapeURL + ": scraping, single_hash_scrapes = " + bSingleHashScrapes));
if (!scraper.isNetworkEnabled(hash, tracker_url)) {
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "networkdisabled"));
scraper.scrapeReceived(response);
} else if ((!force) && (disable_all_scrapes || !scraper.isTorrentScrapable(hash))) {
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "disabled"));
scraper.scrapeReceived(response);
} else {
hashesInQuery.add(hash);
responsesInQuery.add(response);
response.setStatus(TRTrackerScraperResponse.ST_SCRAPING, MessageText.getString(SS + "scraping"));
// technically we haven't received a scrape yet, but we need
// to notify listeners (the ones that display status)
scraper.scrapeReceived(response);
// the client-id stuff RELIES on info_hash being the FIRST
// parameter we add to the URL, so don't change it!
// (see the URL-construction sketch after this method)
info_hash += ((one_of_the_hashes != null) ? '&' : first_separator) + "info_hash=";
info_hash += URLEncoder.encode(new String(hash.getBytes(), Constants.BYTE_ENCODING), Constants.BYTE_ENCODING).replaceAll("\\+", "%20");
Object[] extensions = scraper.getExtensions(hash);
if (extensions != null) {
if (extensions[0] != null) {
info_hash += (String) extensions[0];
}
flags += (Character) extensions[1];
} else {
flags += TRTrackerScraperClientResolver.FL_NONE;
}
one_of_the_hashes = hash;
if (hashesForUDP.size() < 70) {
hashesForUDP.add(hash);
responsesForUDP.add(response);
}
}
}
if (one_of_the_hashes == null) {
return;
}
String request = scrapeURL + info_hash;
if (az_tracker) {
String port_details = TRTrackerUtils.getPortsForURL();
request += port_details;
request += "&azsf=" + flags + "&azver=" + TRTrackerAnnouncer.AZ_TRACKER_VERSION_CURRENT;
}
URL reqUrl = new URL(request);
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, "Accessing scrape interface using url : " + reqUrl));
ByteArrayOutputStream message = new ByteArrayOutputStream();
long scrapeStartTime = SystemTime.getCurrentTime();
URL redirect_url = null;
String protocol = reqUrl.getProtocol();
URL udpScrapeURL = null;
boolean auto_probe = false;
if (protocol.equalsIgnoreCase("udp")) {
if (udpScrapeEnabled) {
udpScrapeURL = reqUrl;
} else {
throw (new IOException("UDP Tracker protocol disabled"));
}
} else if (protocol.equalsIgnoreCase("http") && !az_tracker && scrapeCount % autoUDPscrapeEvery == 0 && udpProbeEnabled && udpScrapeEnabled) {
String tracker_network = AENetworkClassifier.categoriseAddress(reqUrl.getHost());
if (tracker_network == AENetworkClassifier.AT_PUBLIC) {
udpScrapeURL = new URL(reqUrl.toString().replaceFirst("^http", "udp"));
auto_probe = true;
}
}
if (udpScrapeURL == null) {
if (!az_tracker && !tcpScrapeEnabled) {
String tracker_network = AENetworkClassifier.categoriseAddress(reqUrl.getHost());
if (tracker_network == AENetworkClassifier.AT_PUBLIC) {
throw (new IOException("HTTP Tracker protocol disabled"));
}
}
}
try {
// set context in case authentication dialog is required
TorrentUtils.setTLSTorrentHash(one_of_the_hashes);
if (udpScrapeURL != null) {
activeResponses = responsesForUDP;
boolean success = scrapeUDP(reqUrl, message, hashesForUDP, !auto_probe);
if ((!success || message.size() == 0) && !protocol.equalsIgnoreCase("udp")) {
// automatic UDP probe failed, use HTTP again
udpScrapeURL = null;
message.reset();
if (autoUDPscrapeEvery < 16)
autoUDPscrapeEvery <<= 1;
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_INFORMATION, "redirection of http scrape [" + scrapeURL + "] to udp failed, will retry in " + autoUDPscrapeEvery + " scrapes"));
} else if (success && !protocol.equalsIgnoreCase("udp")) {
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_INFORMATION, "redirection of http scrape [" + scrapeURL + "] to udp successful"));
autoUDPscrapeEvery = 1;
TRTrackerUtils.setUDPProbeResult(reqUrl, true);
}
}
scrapeCount++;
if (udpScrapeURL == null) {
activeResponses = responsesInQuery;
redirect_url = scrapeHTTP(hashesInQuery, reqUrl, message);
}
} finally {
TorrentUtils.setTLSTorrentHash(null);
}
scrape_reply = message.toByteArray();
Map map = BDecoder.decode(scrape_reply);
boolean this_is_az_tracker = map.get("aztracker") != null;
if (az_tracker != this_is_az_tracker) {
az_tracker = this_is_az_tracker;
TRTrackerUtils.setAZTracker(tracker_url, az_tracker);
}
Map mapFiles = (Map) map.get("files");
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, "Response from scrape interface " + scrapeURL + ": " + ((mapFiles == null) ? "null" : "" + mapFiles.size()) + " returned"));
int iMinRequestInterval = 0;
if (map != null) {
/* "The spec":
* files
* infohash
* complete
* incomplete
* downloaded
* name
* flags
* min_request_interval
* failure reason
*/
Map mapFlags = (Map) map.get("flags");
if (mapFlags != null) {
Long longScrapeValue = (Long) mapFlags.get("min_request_interval");
if (longScrapeValue != null)
iMinRequestInterval = longScrapeValue.intValue();
// Tracker owners want this log entry
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, "Received min_request_interval of " + iMinRequestInterval));
}
}
if (mapFiles == null || mapFiles.size() == 0) {
if (bSingleHashScrapes && map.containsKey("complete") && map.containsKey("incomplete")) {
int complete = MapUtils.getMapInt(map, "complete", 0);
int incomplete = MapUtils.getMapInt(map, "incomplete", 0);
TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) activeResponses.get(0);
response.setPeers(incomplete);
response.setSeeds(complete);
int minRequestInterval = MapUtils.getMapInt(map, "interval", FAULTY_SCRAPE_RETRY_INTERVAL);
int scrapeInterval = TRTrackerScraperResponseImpl.calcScrapeIntervalSecs(minRequestInterval, complete);
long nextScrapeTime = SystemTime.getCurrentTime() + (scrapeInterval * 1000);
response.setNextScrapeStartTime(nextScrapeTime);
response.setStatus(TRTrackerScraperResponse.ST_ONLINE, "Tracker returned Announce from scrape call");
response.setScrapeStartTime(scrapeStartTime);
scraper.scrapeReceived(response);
return;
}
// custom extension here to handle "failure reason" returned for
// scrapes
byte[] failure_reason_bytes = map == null ? null : (byte[]) map.get("failure reason");
if (failure_reason_bytes != null) {
long nextScrapeTime = SystemTime.getCurrentTime() + ((iMinRequestInterval == 0) ? FAULTY_SCRAPE_RETRY_INTERVAL : iMinRequestInterval * 1000);
for (TRTrackerScraperResponseImpl response : activeResponses) {
response.setNextScrapeStartTime(nextScrapeTime);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + new String(failure_reason_bytes, Constants.DEFAULT_ENCODING));
// notify listeners
scraper.scrapeReceived(response);
}
} else {
if (activeResponses.size() > 1) {
// multi were requested, 0 returned. Therefore, multi not
// supported
bSingleHashScrapes = true;
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, scrapeURL + " doesn't properly support " + "multi-hash scrapes"));
for (TRTrackerScraperResponseImpl response : activeResponses) {
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + MessageText.getString(SSErr + "invalid"));
// notify listeners
scraper.scrapeReceived(response);
}
} else {
long nextScrapeTime = SystemTime.getCurrentTime() + ((iMinRequestInterval == 0) ? NOHASH_RETRY_INTERVAL : iMinRequestInterval * 1000);
// 1 was requested, 0 returned. Therefore, hash not found.
TRTrackerScraperResponseImpl response = (TRTrackerScraperResponseImpl) activeResponses.get(0);
response.setNextScrapeStartTime(nextScrapeTime);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + MessageText.getString(SSErr + "nohash"));
// notify listeners
scraper.scrapeReceived(response);
}
}
return;
}
/*
* If we requested multiple hashes but only one was returned, revert
* to single-hash scrapes, but continue on to process the one hash that
* was returned (it may be a random one from the list)
*/
if (!bSingleHashScrapes && activeResponses.size() > 1 && mapFiles.size() == 1) {
bSingleHashScrapes = true;
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, scrapeURL + " only returned " + mapFiles.size() + " hash scrape(s), but we asked for " + activeResponses.size()));
}
for (TRTrackerScraperResponseImpl response : activeResponses) {
// LGLogger.log( "decoding response #" +i+ ": " +
// ByteFormatter.nicePrint( response.getHash(), true ) );
// retrieve the scrape data for the relevant infohash
Map scrapeMap = (Map) mapFiles.get(new String(response.getHash().getBytes(), Constants.BYTE_ENCODING));
if (scrapeMap == null) {
// some trackers that return only 1 hash return a random one!
if (activeResponses.size() == 1 || mapFiles.size() != 1) {
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + NOHASH_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + MessageText.getString(SSErr + "nohash"));
// notify listeners
scraper.scrapeReceived(response);
} else if (scraper.isTorrentScrapable(response.getHash())) {
// This tracker doesn't support multiple hash requests.
// revert status to what it was
response.revertStatus();
if (response.getStatus() == TRTrackerScraperResponse.ST_SCRAPING) {
// System.out.println("Hash " +
// ByteFormatter.nicePrint(response.getHash(), true) + "
// mysteriously reverted to ST_SCRAPING!");
// response.setStatus(TRTrackerScraperResponse.ST_ONLINE, "");
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + MessageText.getString(SSErr + "invalid"));
} else {
// force single-hash scrapes here
bSingleHashScrapes = true;
if (original_bSingleHashScrapes) {
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
}
}
// notify listeners
scraper.scrapeReceived(response);
// if this was the first scrape request in the list, TrackerChecker
// will attempt to scrape again because we didn't reset the
// next scrape start time. But the next time, bSingleHashScrapes
// will be true and only 1 hash will be requested, so there
// will be no infinite looping
}
// System.out.println("scrape: hash missing from reply");
} else {
// retrieve values
Long l_seeds = (Long) scrapeMap.get("complete");
Long l_peers = (Long) scrapeMap.get("incomplete");
Long l_comp = (Long) scrapeMap.get("downloaded");
// expected but deal with missing as some trackers omit :(
int seeds = l_seeds == null ? 0 : l_seeds.intValue();
// expected but deal with missing
int peers = l_peers == null ? 0 : l_peers.intValue();
// optional
int completed = l_comp == null ? -1 : l_comp.intValue();
// make sure we don't use invalid replies
if (seeds < 0 || peers < 0 || completed < -1) {
if (Logger.isEnabled()) {
HashWrapper hash = response.getHash();
Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, "Invalid scrape response from '" + reqUrl + "': map = " + scrapeMap));
}
// manager will run scrapes for each individual hash.
if (activeResponses.size() > 1 && bSingleHashScrapes) {
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + MessageText.getString(SSErr + "invalid"));
scraper.scrapeReceived(response);
continue;
}
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + MessageText.getString(SSErr + "invalid") + " " + (seeds < 0 ? MessageText.getString("MyTorrentsView.seeds") + " == " + seeds + ". " : "") + (peers < 0 ? MessageText.getString("MyTorrentsView.peers") + " == " + peers + ". " : "") + (completed < 0 ? MessageText.getString("MyTorrentsView.completed") + " == " + completed + ". " : ""));
scraper.scrapeReceived(response);
continue;
}
int scrapeInterval = TRTrackerScraperResponseImpl.calcScrapeIntervalSecs(iMinRequestInterval, seeds);
long nextScrapeTime = SystemTime.getCurrentTime() + (scrapeInterval * 1000);
response.setNextScrapeStartTime(nextScrapeTime);
// create the response
response.setScrapeStartTime(scrapeStartTime);
response.setSeeds(seeds);
response.setPeers(peers);
response.setCompleted(completed);
response.setStatus(TRTrackerScraperResponse.ST_ONLINE, MessageText.getString(SS + "ok"));
// notify listeners
scraper.scrapeReceived(response);
try {
if (activeResponses.size() == 1 && redirect_url != null) {
// we only deal with redirects for single URLs - if the tracker wants to
// redirect one of a group it has to force single-hash scrapes anyway
String redirect_str = redirect_url.toString();
int s_pos = redirect_str.indexOf("/scrape");
if (s_pos != -1) {
URL new_url = new URL(redirect_str.substring(0, s_pos) + "/announce" + redirect_str.substring(s_pos + 7));
if (scraper.redirectTrackerUrl(response.getHash(), tracker_url, new_url)) {
removeHash(response.getHash());
}
}
}
} catch (Throwable e) {
Debug.printStackTrace(e);
}
}
}
// for responses
} catch (NoClassDefFoundError ignoreSSL) {
// javax/net/ssl/SSLSocket
for (TRTrackerScraperResponseImpl response : activeResponses) {
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + ignoreSSL.getMessage());
// notify listeners
scraper.scrapeReceived(response);
}
} catch (FileNotFoundException e) {
for (TRTrackerScraperResponseImpl response : activeResponses) {
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + MessageText.getString("DownloadManager.error.filenotfound"));
// notify listeners
scraper.scrapeReceived(response);
}
} catch (SocketException e) {
setAllError(activeResponses, e);
} catch (SocketTimeoutException e) {
setAllError(activeResponses, e);
} catch (UnknownHostException e) {
setAllError(activeResponses, e);
} catch (PRUDPPacketHandlerException e) {
setAllError(activeResponses, e);
} catch (BEncodingException e) {
setAllError(activeResponses, e);
} catch (Exception e) {
// for Apache we can get error 414 - URL too long. The simplest
// solution is to fall back to single-hash scraping
String error_message = e.getMessage();
if (error_message != null) {
if (error_message.contains(" 500 ") || error_message.contains(" 400 ") || error_message.contains(" 403 ") || error_message.contains(" 404 ") || error_message.contains(" 501 ")) {
// various errors that have a 99% chance of happening on
// any other scrape request
setAllError(activeResponses, e);
return;
}
if (error_message.contains("414") && !bSingleHashScrapes) {
bSingleHashScrapes = true;
// Skip setting up the response. We want to scrape again
return;
}
}
String msg = Debug.getNestedExceptionMessage(e);
if (scrape_reply != null) {
String trace_data;
if (scrape_reply.length <= 150) {
trace_data = new String(scrape_reply);
} else {
trace_data = new String(scrape_reply, 0, 150) + "...";
}
msg += " [" + trace_data + "]";
}
for (TRTrackerScraperResponseImpl response : activeResponses) {
if (Logger.isEnabled()) {
HashWrapper hash = response.getHash();
Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, LogEvent.LT_ERROR, "Error from scrape interface " + scrapeURL + " : " + msg + " (" + e.getClass() + ")"));
}
response.setNextScrapeStartTime(SystemTime.getCurrentTime() + FAULTY_SCRAPE_RETRY_INTERVAL);
response.setStatus(TRTrackerScraperResponse.ST_ERROR, MessageText.getString(SS + "error") + msg);
// notify listeners
scraper.scrapeReceived(response);
}
}
} catch (Throwable t) {
Debug.out("runScrapesSupport failed", t);
} finally {
numActiveScrapes.decrementAndGet();
}
}
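The comments above stress that info_hash must be the FIRST parameter appended to the scrape URL, byte-encoded and with '+' rewritten to '%20'. As a minimal standalone sketch of that construction (assuming Constants.BYTE_ENCODING is ISO-8859-1, which the byte-for-byte encode round-trip implies; buildScrapeUrl is a hypothetical helper, not BiglyBT API):

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class ScrapeUrlSketch {

    // Appends one info_hash parameter per hash, as runScrapesSupport does:
    // the first separator depends on whether the URL already has a query
    // component, and URLEncoder's '+' for spaces is rewritten to '%20'.
    public static String buildScrapeUrl(String scrapeURL, byte[][] hashes)
            throws UnsupportedEncodingException {
        char first_separator = scrapeURL.indexOf('?') == -1 ? '?' : '&';
        StringBuilder result = new StringBuilder(scrapeURL);
        boolean first = true;
        for (byte[] hash : hashes) {
            result.append(first ? first_separator : '&').append("info_hash=");
            result.append(URLEncoder.encode(new String(hash, "ISO-8859-1"), "ISO-8859-1").replaceAll("\\+", "%20"));
            first = false;
        }
        return result.toString();
    }
}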
Use of com.biglybt.core.logging.LogEvent in project BiglyBT by BiglySoftware.
The class TrackerStatus, method scrapeUDP.
protected boolean scrapeUDP(URL reqUrl, ByteArrayOutputStream message, List hashes, boolean do_auth_test) throws Exception {
Map rootMap = new HashMap();
Map files = new ByteEncodedKeyHashMap();
rootMap.put("files", files);
/*
* reduce network traffic by only scraping UDP when the torrent isn't
* running, since UDP protocol version 2 includes scrape data in the
* announce response
*/
/* removed implementation for the time being
for (Iterator it = hashes.iterator(); it.hasNext();)
{
HashWrapper hash = (HashWrapper) it.next();
if (PRUDPPacketTracker.VERSION == 2 && scraper.isTorrentDownloading(hash))
{
if (Logger.isEnabled())
Logger.log(new LogEvent(TorrentUtils.getDownloadManager(hash), LOGID, LogEvent.LT_WARNING, "Scrape of " + reqUrl + " skipped as torrent running and " + "therefore scrape data available in " + "announce replies"));
// easiest approach here is to brew up a response that looks like the current one
Map file = new HashMap();
byte[] resp_hash = hash.getBytes();
// System.out.println("got hash:" + ByteFormatter.nicePrint( resp_hash, true ));
files.put(new String(resp_hash, Constants.BYTE_ENCODING), file);
file.put("complete", new Long(current_response.getSeeds()));
file.put("downloaded", new Long(-1)); // unknown
file.put("incomplete", new Long(current_response.getPeers()));
byte[] data = BEncoder.encode(rootMap);
message.write(data);
return true;
}
}
*/
reqUrl = TRTrackerUtils.adjustURLForHosting(reqUrl);
PasswordAuthentication auth = null;
boolean auth_ok = false;
try {
if (do_auth_test && UrlUtils.queryHasParameter(reqUrl.getQuery(), "auth", false)) {
auth = SESecurityManager.getPasswordAuthentication("UDP Tracker", reqUrl);
}
int port = UDPNetworkManager.getSingleton().getUDPNonDataListeningPortNumber();
PRUDPPacketHandler handler = PRUDPPacketHandlerFactory.getHandler(port);
InetSocketAddress destination = new InetSocketAddress(reqUrl.getHost(), reqUrl.getPort() == -1 ? 80 : reqUrl.getPort());
handler = handler.openSession(destination);
try {
String failure_reason = null;
for (int retry_loop = 0; retry_loop < PRUDPPacketTracker.DEFAULT_RETRY_COUNT; retry_loop++) {
try {
PRUDPPacket connect_request = new PRUDPPacketRequestConnect();
PRUDPPacket reply = handler.sendAndReceive(auth, connect_request, destination);
if (reply.getAction() == PRUDPPacketTracker.ACT_REPLY_CONNECT) {
PRUDPPacketReplyConnect connect_reply = (PRUDPPacketReplyConnect) reply;
long my_connection = connect_reply.getConnectionId();
PRUDPPacketRequestScrape scrape_request = new PRUDPPacketRequestScrape(my_connection, hashes);
reply = handler.sendAndReceive(auth, scrape_request, destination);
if (reply.getAction() == PRUDPPacketTracker.ACT_REPLY_SCRAPE) {
auth_ok = true;
if (PRUDPPacketTracker.VERSION == 1) {
PRUDPPacketReplyScrape scrape_reply = (PRUDPPacketReplyScrape) reply;
/*
int interval = scrape_reply.getInterval();
if ( interval != 0 ){
map.put( "interval", new Long(interval ));
}
*/
byte[][] reply_hashes = scrape_reply.getHashes();
int[] complete = scrape_reply.getComplete();
int[] downloaded = scrape_reply.getDownloaded();
int[] incomplete = scrape_reply.getIncomplete();
for (int i = 0; i < reply_hashes.length; i++) {
Map file = new HashMap();
byte[] resp_hash = reply_hashes[i];
// System.out.println("got hash:" + ByteFormatter.nicePrint( resp_hash, true ));
files.put(new String(resp_hash, Constants.BYTE_ENCODING), file);
file.put("complete", new Long(complete[i]));
file.put("downloaded", new Long(downloaded[i]));
file.put("incomplete", new Long(incomplete[i]));
}
byte[] data = BEncoder.encode(rootMap);
message.write(data);
return true;
} else {
PRUDPPacketReplyScrape2 scrape_reply = (PRUDPPacketReplyScrape2) reply;
/*
int interval = scrape_reply.getInterval();
if ( interval != 0 ){
map.put( "interval", new Long(interval ));
}
*/
int[] complete = scrape_reply.getComplete();
int[] downloaded = scrape_reply.getDownloaded();
int[] incomplete = scrape_reply.getIncomplete();
int i = 0;
for (Iterator it = hashes.iterator(); it.hasNext() && i < complete.length; i++) {
HashWrapper hash = (HashWrapper) it.next();
Map file = new HashMap();
file.put("complete", new Long(complete[i]));
file.put("downloaded", new Long(downloaded[i]));
file.put("incomplete", new Long(incomplete[i]));
files.put(new String(hash.getBytes(), Constants.BYTE_ENCODING), file);
}
// System.out.println("got hash:" + ByteFormatter.nicePrint( resp_hash, true ));
byte[] data = BEncoder.encode(rootMap);
message.write(data);
return true;
}
} else {
failure_reason = ((PRUDPPacketReplyError) reply).getMessage();
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_ERROR, "Response from scrape interface " + reqUrl + " : " + failure_reason));
break;
}
} else {
failure_reason = ((PRUDPPacketReplyError) reply).getMessage();
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_ERROR, "Response from scrape interface " + reqUrl + " : " + ((PRUDPPacketReplyError) reply).getMessage()));
break;
}
} catch (PRUDPPacketHandlerException e) {
if (e.getMessage() == null || !e.getMessage().contains("timed out")) {
throw (e);
}
failure_reason = "Timeout";
}
}
if (failure_reason != null) {
rootMap.put("failure reason", failure_reason.getBytes());
rootMap.remove("files");
byte[] data = BEncoder.encode(rootMap);
message.write(data);
}
} finally {
handler.closeSession();
}
return false;
} finally {
if (auth != null) {
SESecurityManager.setPasswordAuthenticationOutcome(TRTrackerBTAnnouncerImpl.UDP_REALM, reqUrl, auth_ok);
}
}
}
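Note that scrapeUDP deliberately re-encodes the UDP reply into the same bencoded shape an HTTP tracker returns: a "files" map keyed by the raw hash bytes, each entry holding "complete", "downloaded" and "incomplete" counts, so runScrapesSupport can parse both transports identically. A hedged sketch of reading such a body back, using only the BDecoder.decode call and map keys seen above (the wrapper class and printing are illustrative):

import java.util.Map;
import com.biglybt.core.util.BDecoder;

public class ScrapeReplySketch {

    // Walks a decoded scrape body; key names match those produced above.
    public static void dump(byte[] scrape_reply) throws Exception {
        Map root = BDecoder.decode(scrape_reply);
        Map files = (Map) root.get("files");
        if (files == null) {
            byte[] reason = (byte[]) root.get("failure reason");
            System.out.println("scrape failed: " + (reason == null ? "unknown" : new String(reason)));
            return;
        }
        for (Object o : files.entrySet()) {
            Map.Entry entry = (Map.Entry) o;
            Map stats = (Map) entry.getValue();
            System.out.println("seeds=" + stats.get("complete") + " peers=" + stats.get("incomplete") + " downloaded=" + stats.get("downloaded"));
        }
    }
}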
Use of com.biglybt.core.logging.LogEvent in project BiglyBT by BiglySoftware.
The class TRTrackerDHTAnnouncerImpl, method setAnnounceResult.
@Override
public void setAnnounceResult(DownloadAnnounceResult result) {
last_update_time = SystemTime.getCurrentTime();
TRTrackerAnnouncerResponseImpl response;
if (result.getResponseType() == DownloadAnnounceResult.RT_ERROR) {
tracker_status_str = MessageText.getString("PeerManager.status.error");
String reason = result.getError();
if (reason != null) {
tracker_status_str += " (" + reason + ")";
}
response = new TRTrackerAnnouncerResponseImpl(result.getURL(), torrent_hash, TRTrackerAnnouncerResponse.ST_OFFLINE, result.getTimeToWait(), reason);
} else {
DownloadAnnounceResultPeer[] ext_peers = result.getPeers();
List<TRTrackerAnnouncerResponsePeerImpl> peers_list = new ArrayList<>(ext_peers.length);
for (int i = 0; i < ext_peers.length; i++) {
DownloadAnnounceResultPeer ext_peer = ext_peers[i];
if (ext_peer == null) {
continue;
}
if (Logger.isEnabled()) {
Logger.log(new LogEvent(torrent, LOGID, "EXTERNAL PEER DHT: ip=" + ext_peer.getAddress() + ",port=" + ext_peer.getPort() + ",prot=" + ext_peer.getProtocol()));
}
int http_port = 0;
byte az_version = TRTrackerAnnouncer.AZ_TRACKER_VERSION_1;
peers_list.add(new TRTrackerAnnouncerResponsePeerImpl(ext_peer.getSource(), ext_peer.getPeerID(), ext_peer.getAddress(), ext_peer.getPort(), ext_peer.getUDPPort(), http_port, ext_peer.getProtocol(), az_version, (short) 0));
}
TRTrackerAnnouncerResponsePeerImpl[] peers = peers_list.toArray(new TRTrackerAnnouncerResponsePeerImpl[peers_list.size()]);
helper.addToTrackerCache(peers);
tracker_status_str = MessageText.getString("PeerManager.status.ok");
response = new TRTrackerAnnouncerResponseImpl(result.getURL(), torrent_hash, TRTrackerAnnouncerResponse.ST_ONLINE, result.getTimeToWait(), peers);
}
last_response = response;
TRTrackerAnnouncerResponsePeer[] peers = response.getPeers();
if (peers == null || peers.length < 5) {
TRTrackerAnnouncerResponsePeer[] cached_peers = helper.getPeersFromCache(100);
if (cached_peers.length > 0) {
Set<TRTrackerAnnouncerResponsePeer> new_peers = new TreeSet<>(new Comparator<TRTrackerAnnouncerResponsePeer>() {
@Override
public int compare(TRTrackerAnnouncerResponsePeer o1, TRTrackerAnnouncerResponsePeer o2) {
return (o1.compareTo(o2));
}
});
if (peers != null) {
new_peers.addAll(Arrays.asList(peers));
}
new_peers.addAll(Arrays.asList(cached_peers));
response.setPeers(new_peers.toArray(new TRTrackerAnnouncerResponsePeer[new_peers.size()]));
}
}
helper.informResponse(this, response);
}
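The cache top-up at the end (triggered when fewer than 5 peers come back) merges announce peers with up to 100 cached peers through a TreeSet, so duplicates collapse under the peers' own compareTo ordering. The same merge-and-dedupe pattern in miniature, with plain strings standing in for peer objects:

import java.util.Arrays;
import java.util.Set;
import java.util.TreeSet;

public class PeerMergeSketch {

    public static void main(String[] args) {
        String[] announced = { "10.0.0.1:6881", "10.0.0.2:6881" };
        String[] cached = { "10.0.0.2:6881", "10.0.0.3:6881" };
        // TreeSet dedupes on compareTo, just as the announce and cached
        // peers are deduped before response.setPeers(...) above.
        Set<String> merged = new TreeSet<>();
        merged.addAll(Arrays.asList(announced));
        merged.addAll(Arrays.asList(cached));
        // prints [10.0.0.1:6881, 10.0.0.2:6881, 10.0.0.3:6881]
        System.out.println(merged);
    }
}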
Use of com.biglybt.core.logging.LogEvent in project BiglyBT by BiglySoftware.
The class NonDaemonTaskRunner, method waitUntilIdleSupport.
protected void waitUntilIdleSupport() {
AESemaphore sem;
try {
tasks_mon.enter();
if (Logger.isEnabled()) {
String str = "";
for (taskWrapper t : tasks) {
str += (str.isEmpty() ? "" : ",") + t.getName();
}
Logger.log(new LogEvent(LogIDs.CORE, "Non-daemon wait for idle: thread=" + current_thread + ", tasks=" + str));
}
if (current_thread == null) {
return;
}
sem = new AESemaphore("NDTR::idleWaiter");
wait_until_idle_list.add(sem);
} finally {
tasks_mon.exit();
}
while (true) {
if (sem.reserve(2500)) {
break;
}
if (Logger.isEnabled()) {
try {
tasks_mon.enter();
String str = "";
for (taskWrapper t : tasks) {
str += (str.isEmpty() ? "" : ",") + t.getName();
}
Logger.log(new LogEvent(LogIDs.CORE, "Non-daemon wait for idle 2: thread=" + current_thread + ", tasks=" + str));
for (int i = 0; i < wait_until_idle_list.size(); i++) {
AESemaphore pending = (AESemaphore) wait_until_idle_list.get(i);
if (pending != sem) {
Logger.log(new LogEvent(LogIDs.CORE, "Waiting for " + pending.getName() + " to complete"));
}
}
} finally {
tasks_mon.exit();
}
}
}
}
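The loop above is a periodic-poll pattern: reserve the semaphore with a 2500 ms timeout and, on each timeout, log which tasks are still outstanding before trying again. A generic equivalent using java.util.concurrent (tryAcquire approximates AESemaphore.reserve(timeout); the logging is reduced to a placeholder):

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

public class IdleWaitSketch {

    // Blocks until 'idle' is released, reporting progress every 2.5s,
    // mirroring the sem.reserve(2500) loop in waitUntilIdleSupport.
    public static void waitUntilIdle(Semaphore idle) throws InterruptedException {
        while (true) {
            if (idle.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                break;
            }
            // the original dumps the outstanding task names and other waiters here
            System.out.println("Non-daemon wait for idle: still busy, retrying");
        }
    }
}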
Use of com.biglybt.core.logging.LogEvent in project BiglyBT by BiglySoftware.
The class PluginInitializer, method loadPluginFromDir.
private List loadPluginFromDir(File directory, boolean bSkipAlreadyLoaded, boolean loading_for_startup, // initialise setting is used if loading_for_startup isn't
boolean initialise) throws PluginException {
List loaded_pis = new ArrayList();
ClassLoader plugin_class_loader = root_class_loader;
if (!directory.isDirectory()) {
return (loaded_pis);
}
String pluginName = directory.getName();
File[] pluginContents = directory.listFiles();
if (pluginContents == null || pluginContents.length == 0) {
return (loaded_pis);
}
// first sanity check - dir must include either a plugin.properties or
// at least one .jar file
boolean looks_like_plugin = false;
for (int i = 0; i < pluginContents.length; i++) {
String name = pluginContents[i].getName().toLowerCase();
if (name.endsWith(".jar") || name.equals("plugin.properties")) {
looks_like_plugin = true;
break;
}
}
if (!looks_like_plugin) {
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, LogEvent.LT_WARNING, "Plugin directory '" + directory + "' has no plugin.properties " + "or .jar files, skipping"));
return (loaded_pis);
}
// take only the highest version numbers of jars that look versioned
String[] plugin_version = { null };
String[] plugin_id = { null };
pluginContents = PluginLauncherImpl.getHighestJarVersions(pluginContents, plugin_version, plugin_id, true);
for (int i = 0; i < pluginContents.length; i++) {
File jar_file = pluginContents[i];
if (pluginContents.length > 1) {
String name = jar_file.getName();
if (name.startsWith("i18nPlugin_")) {
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, "renaming '" + name + "' to conform with versioning system"));
jar_file.renameTo(new File(jar_file.getParent(), "i18nAZ_0.1.jar"));
continue;
}
}
plugin_class_loader = PluginLauncherImpl.addFileToClassPath(root_class_loader, plugin_class_loader, jar_file);
}
String plugin_class_string = null;
try {
Properties props = new Properties();
File properties_file = new File(directory.toString() + File.separator + "plugin.properties");
try {
if (properties_file.exists()) {
FileInputStream fis = null;
try {
fis = new FileInputStream(properties_file);
props.load(fis);
} finally {
if (fis != null) {
fis.close();
}
}
} else {
if (plugin_class_loader instanceof URLClassLoader) {
URLClassLoader current = (URLClassLoader) plugin_class_loader;
URL url = current.findResource("plugin.properties");
if (url != null) {
URLConnection connection = url.openConnection();
InputStream is = connection.getInputStream();
props.load(is);
} else {
throw (new Exception("failed to load plugin.properties from jars"));
}
} else {
throw (new Exception("failed to load plugin.properties from dir or jars"));
}
}
} catch (Throwable e) {
Debug.printStackTrace(e);
String msg = "Can't read 'plugin.properties' for plugin '" + pluginName + "': file may be missing";
Logger.log(new LogAlert(LogAlert.UNREPEATABLE, LogAlert.AT_ERROR, msg));
System.out.println(msg);
throw (new PluginException(msg, e));
}
checkJDKVersion(pluginName, props, true);
checkCoreAppVersion(pluginName, props, true);
plugin_class_string = (String) props.get("plugin.class");
if (plugin_class_string == null) {
plugin_class_string = (String) props.get("plugin.classes");
if (plugin_class_string == null) {
// set so we don't bork later with an NPE
plugin_class_string = "";
}
}
String plugin_name_string = (String) props.get("plugin.name");
if (plugin_name_string == null) {
plugin_name_string = (String) props.get("plugin.names");
}
int pos1 = 0;
int pos2 = 0;
while (true) {
int p1 = plugin_class_string.indexOf(";", pos1);
String plugin_class;
if (p1 == -1) {
plugin_class = plugin_class_string.substring(pos1).trim();
} else {
plugin_class = plugin_class_string.substring(pos1, p1).trim();
pos1 = p1 + 1;
}
PluginInterfaceImpl existing_pi = getPluginFromClass(plugin_class);
if (existing_pi != null) {
if (bSkipAlreadyLoaded) {
break;
}
// allow user dir entries to override app dir entries without warning
File this_parent = directory.getParentFile();
File existing_parent = null;
if (existing_pi.getInitializerKey() instanceof File) {
existing_parent = ((File) existing_pi.getInitializerKey()).getParentFile();
}
if (this_parent.equals(FileUtil.getApplicationFile("plugins")) && existing_parent != null && existing_parent.equals(FileUtil.getUserFile("plugins"))) {
if (Logger.isEnabled())
Logger.log(new LogEvent(LOGID, "Plugin '" + plugin_name_string + "/" + plugin_class + ": shared version overridden by user-specific one"));
return (new ArrayList());
} else {
Logger.log(new LogAlert(LogAlert.UNREPEATABLE, LogAlert.AT_WARNING, "Error loading '" + plugin_name_string + "', plugin class '" + plugin_class + "' is already loaded"));
}
} else {
String plugin_name = null;
if (plugin_name_string != null) {
int p2 = plugin_name_string.indexOf(";", pos2);
if (p2 == -1) {
plugin_name = plugin_name_string.substring(pos2).trim();
} else {
plugin_name = plugin_name_string.substring(pos2, p2).trim();
pos2 = p2 + 1;
}
}
Properties new_props = (Properties) props.clone();
new_props.put("plugin.class", plugin_class);
if (plugin_name != null) {
new_props.put("plugin.name", plugin_name);
}
// System.out.println( "loading plugin '" + plugin_class + "' using cl " + classLoader);
// if the plugin load fails we still need to generate a plugin entry
// as this drives the upgrade process
Throwable load_failure = null;
String pid = plugin_id[0] == null ? directory.getName() : plugin_id[0];
List<File> verified_files = null;
Plugin plugin = null;
if (vc_disabled_plugins.contains(pid)) {
log("Plugin '" + pid + "' has been administratively disabled");
} else {
try {
String cl_key = "plugin.cl.ext." + pid;
String str = COConfigurationManager.getStringParameter(cl_key, null);
if (str != null && str.length() > 0) {
COConfigurationManager.removeParameter(cl_key);
plugin_class_loader = PluginLauncherImpl.extendClassLoader(root_class_loader, plugin_class_loader, new URL(str));
}
} catch (Throwable e) {
}
if (pid.endsWith("_v")) {
verified_files = new ArrayList<>();
// re-verify jar files
log("Re-verifying " + pid);
for (int i = 0; i < pluginContents.length; i++) {
File jar_file = pluginContents[i];
if (jar_file.getName().endsWith(".jar")) {
try {
log(" verifying " + jar_file);
AEVerifier.verifyData(jar_file);
verified_files.add(jar_file);
log(" OK");
} catch (Throwable e) {
String msg = "Error loading plugin '" + pluginName + "' / '" + plugin_class_string + "'";
Logger.log(new LogAlert(LogAlert.UNREPEATABLE, msg, e));
plugin = new FailedPlugin(plugin_name, directory.getAbsolutePath());
}
}
}
}
if (plugin == null) {
plugin = PluginLauncherImpl.getPreloadedPlugin(plugin_class);
if (plugin == null) {
try {
try {
Class<Plugin> c = (Class<Plugin>) PlatformManagerFactory.getPlatformManager().loadClass(plugin_class_loader, plugin_class);
// Class c = plugin_class_loader.loadClass(plugin_class);
plugin = c.newInstance();
try {
if (plugin_class_loader instanceof URLClassLoader) {
URL[] urls = ((URLClassLoader) plugin_class_loader).getURLs();
for (URL u : urls) {
String path = u.getPath();
if (path.endsWith(".jar")) {
int s1 = path.lastIndexOf('/');
int s2 = path.lastIndexOf('\\');
path = path.substring(Math.max(s1, s2) + 1);
s2 = path.indexOf('_');
if (s2 > 0) {
path = path.substring(0, s2);
path = path.replaceAll("-", "");
String cl = "plugin.preinit." + pid + ".PI" + path;
try {
Class pic = plugin_class_loader.loadClass(cl);
if (pic != null) {
pic.newInstance();
}
} catch (Throwable e) {
}
}
}
}
}
} catch (Throwable e) {
}
} catch (PlatformManagerException e) {
throw (e.getCause());
}
} catch (java.lang.UnsupportedClassVersionError e) {
plugin = new FailedPlugin(plugin_name, directory.getAbsolutePath());
// shorten stack trace
load_failure = new UnsupportedClassVersionError(e.getMessage());
} catch (Throwable e) {
if (e instanceof ClassNotFoundException && props.getProperty("plugin.install_if_missing", "no").equalsIgnoreCase("yes")) {
// don't report the failure
} else {
load_failure = e;
}
plugin = new FailedPlugin(plugin_name, directory.getAbsolutePath());
}
} else {
plugin_class_loader = plugin.getClass().getClassLoader();
}
}
MessageText.integratePluginMessages((String) props.get("plugin.langfile"), plugin_class_loader);
PluginInterfaceImpl plugin_interface = new PluginInterfaceImpl(plugin, this, directory, plugin_class_loader, verified_files, // key for config values
directory.getName(), new_props, directory.getAbsolutePath(), pid, plugin_version[0]);
boolean bEnabled = (loading_for_startup) ? plugin_interface.getPluginState().isLoadedAtStartup() : initialise;
plugin_interface.getPluginState().setDisabled(!bEnabled);
try {
Method load_method = plugin.getClass().getMethod("load", new Class[] { PluginInterface.class });
load_method.invoke(plugin, new Object[] { plugin_interface });
} catch (NoSuchMethodException e) {
} catch (Throwable e) {
load_failure = e;
}
loaded_pis.add(plugin_interface);
if (load_failure != null) {
plugin_interface.setAsFailed();
if (!pid.equals(UpdaterUpdateChecker.getPluginID())) {
String msg = MessageText.getString("plugin.init.load.failed", new String[] { plugin_name == null ? pluginName : plugin_name, directory.getAbsolutePath() });
LogAlert la;
if (load_failure instanceof UnsupportedClassVersionError) {
la = new LogAlert(LogAlert.UNREPEATABLE, LogAlert.AT_ERROR, msg + ".\n\n" + MessageText.getString("plugin.install.class_version_error"));
} else if (load_failure instanceof ClassNotFoundException) {
la = new LogAlert(LogAlert.UNREPEATABLE, LogAlert.AT_ERROR, msg + ".\n\n" + MessageText.getString("plugin.init.load.failed.classmissing") + "\n\n", load_failure);
} else {
la = new LogAlert(LogAlert.UNREPEATABLE, msg, load_failure);
}
Logger.log(la);
System.out.println(msg + ": " + load_failure);
}
}
}
}
if (p1 == -1) {
break;
}
}
return (loaded_pis);
} catch (Throwable e) {
if (e instanceof PluginException) {
throw ((PluginException) e);
}
Debug.printStackTrace(e);
String msg = "Error loading plugin '" + pluginName + "' / '" + plugin_class_string + "'";
Logger.log(new LogAlert(LogAlert.UNREPEATABLE, msg, e));
System.out.println(msg + ": " + e);
throw (new PluginException(msg, e));
}
}
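For context, the loader above expects a plugin.properties either in the plugin directory or inside one of its jars, and reads at least the keys shown below; plugin.classes and plugin.names take semicolon-separated lists for multi-plugin bundles. A hypothetical example (values are illustrative, not a real plugin):

# keys read by loadPluginFromDir via props.get(...)
plugin.class=com.example.myplugin.MyPlugin
plugin.name=My Example Plugin
# optional: language bundle passed to MessageText.integratePluginMessages
plugin.langfile=com.example.myplugin.internat.Messages
# optional: when yes, a ClassNotFoundException is not reported as a load failure
plugin.install_if_missing=no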