Use of com.biglybt.core.tracker.server.TRTrackerServerPeer in project BiglyBT by BiglySoftware.
The class TRTrackerServerProcessor, method processTrackerRequest:
protected TRTrackerServerTorrentImpl processTrackerRequest(TRTrackerServerImpl _server, String request,
Map[] root_out, // output
TRTrackerServerPeerImpl[] peer_out, // output
int _request_type, byte[][] hashes, String link, String scrape_flags, HashWrapper peer_id, boolean no_peer_id, byte compact_mode, String key, String event, boolean stop_to_queue, int port, int udp_port, int http_port, String real_ip_address, String original_client_ip_address, long downloaded, long uploaded, long left, int num_want, byte crypto_level, byte az_ver, int up_speed, DHTNetworkPosition network_position) throws TRTrackerServerException {
server = _server;
request_type = _request_type;
if (!server.isReady()) {
throw (new TRTrackerServerException("Tracker initialising, please wait"));
}
start = SystemTime.getHighPrecisionCounter();
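// note: the reference comparison below is intentional - the caller passes the identical String instance for both addresses unless the client supplied an explicit 'ip=' override (see the parameter loop in processRequest below)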
boolean ip_override = real_ip_address != original_client_ip_address;
boolean loopback = TRTrackerUtils.isLoopback(real_ip_address);
if (loopback) {
// any override is purely for routing purposes for loopback connections and we don't
// want to apply the ip-override precedence rules against us
ip_override = false;
}
// translate any 127.0.0.1 local addresses back to the tracker address. Note this
// fixes up .i2p and onion addresses back to their real values when needed
String client_ip_address = TRTrackerUtils.adjustHostFromHosting(original_client_ip_address);
if (client_ip_address != original_client_ip_address) {
if (Logger.isEnabled()) {
Logger.log(new LogEvent(LogIDs.TRACKER, " address adjusted: original=" + original_client_ip_address + ", real=" + real_ip_address + ", adjusted=" + client_ip_address + ", loopback=" + loopback));
}
}
if (!TRTrackerServerImpl.getAllNetworksSupported()) {
String network = AENetworkClassifier.categoriseAddress(client_ip_address);
String[] permitted_networks = TRTrackerServerImpl.getPermittedNetworks();
boolean ok = false;
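// note: the == comparison below relies on categoriseAddress() and getPermittedNetworks() both returning the interned AENetworkClassifier network constants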
for (int i = 0; i < permitted_networks.length; i++) {
if (network == permitted_networks[i]) {
ok = true;
break;
}
}
if (!ok) {
throw (new TRTrackerServerException("Network '" + network + "' not supported"));
}
}
TRTrackerServerTorrentImpl torrent = null;
if (request_type != TRTrackerServerRequest.RT_FULL_SCRAPE) {
if (request_type == TRTrackerServerRequest.RT_ANNOUNCE) {
if (hashes == null || hashes.length == 0) {
throw (new TRTrackerServerException("Hash missing from request "));
}
if (hashes.length != 1) {
throw (new TRTrackerServerException("Too many hashes for announce"));
}
byte[] hash = hashes[0];
torrent = server.getTorrent(hash);
if (torrent == null) {
if (!COConfigurationManager.getBooleanParameter("Tracker Public Enable")) {
throw (new TRTrackerServerException("Torrent unauthorised"));
} else {
try {
torrent = (TRTrackerServerTorrentImpl) server.permit(real_ip_address, hash, false);
} catch (Throwable e) {
throw (new TRTrackerServerException("Torrent unauthorised", e));
}
}
}
if (peer_id == null) {
throw (new TRTrackerServerException("peer_id missing from request"));
}
boolean queue_it = stop_to_queue;
if (queue_it) {
Set biased = server.getBiasedPeers();
if (biased != null && biased.contains(real_ip_address)) {
// biased peers get to queue whatever
} else {
if (loopback || ip_override) {
queue_it = false;
}
}
}
long interval;
long min_interval;
if (queue_it) {
// when queued we use the scrape timeouts as it is scrape operations that
// will keep the entry alive from this point on
interval = server.getScrapeRetryInterval(torrent);
min_interval = server.getMinScrapeRetryInterval();
} else {
interval = server.getAnnounceRetryInterval(torrent);
min_interval = server.getMinAnnounceRetryInterval();
if (left == 0) {
long mult = server.getSeedAnnounceIntervalMultiplier();
interval *= mult;
min_interval *= mult;
}
}
TRTrackerServerPeerImpl peer = torrent.peerContact(request, event, peer_id, port, udp_port, http_port, crypto_level, az_ver, real_ip_address, client_ip_address, ip_override, loopback, key, uploaded, downloaded, left, interval, up_speed, network_position);
if (queue_it) {
torrent.peerQueued(client_ip_address, port, udp_port, http_port, crypto_level, az_ver, interval, left == 0);
}
HashMap pre_map = new HashMap();
TRTrackerServerPeer pre_process_peer = peer;
if (pre_process_peer == null) {
// can be null for stop events received without a previous start
pre_process_peer = new lightweightPeer(client_ip_address, port, peer_id);
}
server.preProcess(pre_process_peer, torrent, request_type, request, pre_map);
// set num_want to 0 for stopped events as no point in returning peers
boolean stopped = event != null && event.equalsIgnoreCase("stopped");
root_out[0] = torrent.exportAnnounceToMap(client_ip_address, pre_map, peer, left > 0, stopped ? 0 : num_want, interval, min_interval, no_peer_id, compact_mode, crypto_level, network_position);
peer_out[0] = peer;
} else if (request_type == TRTrackerServerRequest.RT_QUERY) {
if (link == null) {
if (hashes == null || hashes.length == 0) {
throw (new TRTrackerServerException("Hash missing from request "));
}
if (hashes.length != 1) {
throw (new TRTrackerServerException("Too many hashes for query"));
}
byte[] hash = hashes[0];
torrent = server.getTorrent(hash);
} else {
torrent = server.getTorrent(link);
}
if (torrent == null) {
throw (new TRTrackerServerException("Torrent unauthorised"));
}
long interval = server.getAnnounceRetryInterval(torrent);
root_out[0] = torrent.exportAnnounceToMap(client_ip_address, new HashMap(), null, true, num_want, interval, server.getMinAnnounceRetryInterval(), true, compact_mode, crypto_level, network_position);
} else {
if (hashes == null || hashes.length == 0) {
throw (new TRTrackerServerException("Hash missing from request "));
}
boolean local_scrape = client_ip_address.equals("127.0.0.1");
long max_interval = server.getMinScrapeRetryInterval();
Map root = new HashMap();
root_out[0] = root;
Map files = new ByteEncodedKeyHashMap();
root.put("files", files);
char[] scrape_chars = scrape_flags == null ? null : scrape_flags.toCharArray();
if (scrape_chars != null && scrape_chars.length != hashes.length) {
scrape_chars = null;
}
for (int i = 0; i < hashes.length; i++) {
byte[] hash = hashes[i];
String str_hash;
try {
str_hash = new String(hash, Constants.BYTE_ENCODING);
if (i > 0 && files.get(str_hash) != null) {
continue;
}
} catch (UnsupportedEncodingException e) {
continue;
}
torrent = server.getTorrent(hash);
if (torrent == null) {
if (!COConfigurationManager.getBooleanParameter("Tracker Public Enable")) {
continue;
} else {
try {
torrent = (TRTrackerServerTorrentImpl) server.permit(real_ip_address, hash, false);
} catch (Throwable e) {
continue;
}
}
}
long interval = server.getScrapeRetryInterval(torrent);
if (interval > max_interval) {
max_interval = interval;
}
if (scrape_chars != null && (QUEUE_TEST || !(loopback || ip_override))) {
if (scrape_chars[i] == 'Q') {
torrent.peerQueued(client_ip_address, port, udp_port, http_port, crypto_level, az_ver, (int) interval, true);
}
}
if (torrent.getRedirects() != null) {
if (hashes.length > 1) {
continue;
}
}
server.preProcess(new lightweightPeer(client_ip_address, port, peer_id), torrent, request_type, request, null);
// we don't cache local scrapes because doing so causes the hosting of
// torrents to retrieve old values initially. Not a fatal error, but not
// the best behaviour as the (local) seed isn't initially visible.
Map hash_entry = torrent.exportScrapeToMap(request, client_ip_address, !local_scrape);
// System.out.println( "tracker - encoding: " + ByteFormatter.nicePrint(torrent_hash) + " -> " + ByteFormatter.nicePrint( str_hash.getBytes( Constants.BYTE_ENCODING )));
files.put(str_hash, hash_entry);
}
if (hashes.length > 1) {
// no specific torrent
torrent = null;
}
// System.out.println( "scrape: hashes = " + hashes.length + ", files = " + files.size() + ", tim = " + max_interval );
addScrapeInterval(max_interval, root);
}
} else {
if (!TRTrackerServerImpl.isFullScrapeEnabled()) {
throw (new TRTrackerServerException("Full scrape disabled"));
}
Map files = new ByteEncodedKeyHashMap();
TRTrackerServerTorrentImpl[] torrents = server.getTorrents();
for (int i = 0; i < torrents.length; i++) {
TRTrackerServerTorrentImpl this_torrent = torrents[i];
if (this_torrent.getRedirects() != null) {
continue;
}
server.preProcess(new lightweightPeer(client_ip_address, port, peer_id), this_torrent, request_type, request, null);
byte[] torrent_hash = this_torrent.getHash().getHash();
try {
String str_hash = new String(torrent_hash, Constants.BYTE_ENCODING);
// System.out.println( "tracker - encoding: " + ByteFormatter.nicePrint(torrent_hash) + " -> " + ByteFormatter.nicePrint( str_hash.getBytes( Constants.BYTE_ENCODING )));
Map hash_entry = this_torrent.exportScrapeToMap(request, client_ip_address, true);
files.put(str_hash, hash_entry);
} catch (UnsupportedEncodingException e) {
throw (new TRTrackerServerException("Encoding error", e));
}
}
Map root = new HashMap();
root_out[0] = root;
addScrapeInterval(null, root);
root.put("files", files);
}
return (torrent);
}
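For reference, a minimal caller sketch of the output-parameter pattern used above, assuming hypothetical announce inputs (hash, peer_id, key, event, the port numbers and transfer statistics) are already in scope; the real invocation appears in processRequest below. The announce response map and the resolved peer come back through single-element arrays rather than through the return value.
Map[] root_out = new Map[1];
TRTrackerServerPeerImpl[] peer_out = new TRTrackerServerPeerImpl[1];
// sketch: announce for a single info-hash; link and scrape_flags only apply to query/scrape requests, so they are passed as null
TRTrackerServerTorrentImpl announced_torrent = processTrackerRequest(server, request, root_out, peer_out, TRTrackerServerRequest.RT_ANNOUNCE, new byte[][] { hash }, null, null, peer_id, false, TRTrackerServerTorrentImpl.COMPACT_MODE_NONE, key, event, false, tcp_port, udp_port, http_port, real_ip_address, client_ip_address, downloaded, uploaded, left, num_want, TRTrackerServerPeer.CRYPTO_NONE, (byte) az_ver, up_speed, null);
Map announce_response = root_out[0]; // bencodable announce reply
TRTrackerServerPeerImpl announced_peer = peer_out[0]; // may be null, e.g. a 'stopped' event with no prior 'started'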
Use of com.biglybt.core.tracker.server.TRTrackerServerPeer in project BiglyBT by BiglySoftware.
The class TRTrackerServerProcessorTCP, method processRequest:
protected boolean processRequest(String input_header, String lowercase_input_header, String url_path, InetSocketAddress local_address, InetSocketAddress remote_address, boolean announce_and_scrape_only, boolean keep_alive, InputStream is, OutputStream os, AsyncController async) throws IOException {
String str = url_path;
int request_type = TRTrackerServerRequest.RT_UNKNOWN;
boolean compact_enabled = server.isCompactEnabled();
try {
Map root = null;
TRTrackerServerTorrentImpl specific_torrent = null;
boolean gzip_reply = false;
boolean xml_output = false;
try {
List<String> banned = TRTrackerServerImpl.banned_clients;
if (!banned.isEmpty()) {
int ua_pos = lowercase_input_header.indexOf("user-agent");
if (ua_pos != -1) {
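// the value follows the 'user-agent' header name; strip the ':' separator and surrounding whitespace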
String user_agent = lowercase_input_header.substring(ua_pos + 10, lowercase_input_header.indexOf("\n", ua_pos)).trim().substring(1).trim();
for (String b : banned) {
if (user_agent.contains(b)) {
throw (new Exception(MSG_CLIENT_NOT_SUPPORTED));
}
}
}
}
if (str.startsWith("/announce?")) {
request_type = TRTrackerServerRequest.RT_ANNOUNCE;
str = str.substring(10);
} else if (str.startsWith("/scrape?")) {
request_type = TRTrackerServerRequest.RT_SCRAPE;
str = str.substring(8);
} else if (str.equals("/scrape")) {
request_type = TRTrackerServerRequest.RT_FULL_SCRAPE;
str = "";
} else if (str.startsWith("/query?")) {
request_type = TRTrackerServerRequest.RT_QUERY;
str = str.substring(7);
} else {
String redirect = TRTrackerServerImpl.redirect_on_not_found;
if (announce_and_scrape_only) {
if (redirect.length() == 0) {
throw (new Exception("Tracker only supports announce and scrape functions"));
}
} else {
setTaskState("external request");
disable_timeouts = true;
// check non-tracker authentication
String user = doAuthentication(remote_address, url_path, input_header, os, false);
if (user == null) {
return (false);
}
boolean[] ka = new boolean[] { keep_alive };
if (handleExternalRequest(local_address, remote_address, user, str, input_header, is, os, async, ka)) {
return (ka[0]);
}
}
if (redirect.length() > 0) {
os.write(("HTTP/1.1 301 Moved Permanently" + NL + "Location: " + redirect + NL + "Connection: close" + NL + "Content-Length: 0" + NL + NL).getBytes());
} else {
os.write(("HTTP/1.1 404 Not Found" + NL + "Connection: close" + NL + "Content-Length: 0" + NL + NL).getBytes());
}
os.flush();
// throw( new Exception( "Unsupported Request Type"));
return (false);
}
if (doAuthentication(remote_address, url_path, input_header, os, true) == null) {
return (false);
}
int enc_pos = lowercase_input_header.indexOf("accept-encoding:");
if (enc_pos != -1) {
int e_pos = input_header.indexOf(NL, enc_pos);
if (e_pos != -1) {
if (enc_pos > 0) {
char c = lowercase_input_header.charAt(enc_pos - 1);
if (c != FF && c != ' ') {
enc_pos = -1;
}
}
if (enc_pos != -1) {
String accept_encoding = lowercase_input_header.substring(enc_pos + 16, e_pos);
gzip_reply = HTTPUtils.canGZIP(accept_encoding);
}
}
}
setTaskState("decoding announce/scrape");
int pos = 0;
byte[] hash = null;
List hash_list = null;
String link = null;
HashWrapper peer_id = null;
int tcp_port = 0;
String event = null;
long uploaded = 0;
long downloaded = 0;
long left = 0;
int num_want = -1;
boolean no_peer_id = false;
byte compact_mode = TRTrackerServerTorrentImpl.COMPACT_MODE_NONE;
String key = null;
byte crypto_level = TRTrackerServerPeer.CRYPTO_NONE;
int crypto_port = 0;
int udp_port = 0;
int http_port = 0;
int az_ver = 0;
boolean stop_to_queue = false;
String scrape_flags = null;
int up_speed = 0;
boolean hide = false;
DHTNetworkPosition network_position = null;
String real_ip_address = AddressUtils.getHostAddress(remote_address);
String client_ip_address = real_ip_address;
while (pos < str.length()) {
int p1 = str.indexOf('&', pos);
String token;
if (p1 == -1) {
token = str.substring(pos);
} else {
token = str.substring(pos, p1);
pos = p1 + 1;
}
int p2 = token.indexOf('=');
if (p2 == -1) {
throw (new Exception("format invalid"));
}
String lhs = token.substring(0, p2).toLowerCase();
String rhs = URLDecoder.decode(token.substring(p2 + 1), Constants.BYTE_ENCODING);
if (lhs.equals("info_hash")) {
byte[] b = rhs.getBytes(Constants.BYTE_ENCODING);
if (hash == null) {
hash = b;
} else {
if (hash_list == null) {
hash_list = new ArrayList();
hash_list.add(hash);
}
hash_list.add(b);
}
} else if (lhs.equals("peer_id")) {
peer_id = new HashWrapper(rhs.getBytes(Constants.BYTE_ENCODING));
} else if (lhs.equals("no_peer_id")) {
no_peer_id = rhs.equals("1");
} else if (lhs.equals("compact")) {
if (compact_enabled) {
if (rhs.equals("1") && compact_mode == TRTrackerServerTorrentImpl.COMPACT_MODE_NONE) {
compact_mode = TRTrackerServerTorrentImpl.COMPACT_MODE_NORMAL;
}
}
} else if (lhs.equals("key")) {
if (server.isKeyEnabled()) {
key = rhs;
}
} else if (lhs.equals("port")) {
tcp_port = Integer.parseInt(rhs);
} else if (lhs.equals("event")) {
event = rhs;
} else if (lhs.equals("ip")) {
if (!HostNameToIPResolver.isNonDNSName(rhs)) {
for (int i = 0; i < rhs.length(); i++) {
char c = rhs.charAt(i);
if (c != '.' && c != ':' && !Character.isDigit(c)) {
throw (new Exception("IP override address must be resolved by the client"));
}
}
try {
rhs = HostNameToIPResolver.syncResolve(rhs).getHostAddress();
} catch (UnknownHostException e) {
throw (new Exception("IP override address must be resolved by the client"));
}
} else if (AENetworkClassifier.categoriseAddress(client_ip_address) == AENetworkClassifier.AT_I2P) {
// ignore ip override as it is probably a full destination whereas the real originator is the .b32 equivalent
} else {
client_ip_address = rhs;
}
} else if (lhs.equals("uploaded")) {
uploaded = Long.parseLong(rhs);
} else if (lhs.equals("downloaded")) {
downloaded = Long.parseLong(rhs);
} else if (lhs.equals("left")) {
left = Long.parseLong(rhs);
} else if (lhs.equals("numwant")) {
num_want = Integer.parseInt(rhs);
} else if (lhs.equals("azudp")) {
udp_port = Integer.parseInt(rhs);
if (compact_enabled) {
compact_mode = TRTrackerServerTorrentImpl.COMPACT_MODE_AZ;
}
} else if (lhs.equals("azhttp")) {
http_port = Integer.parseInt(rhs);
} else if (lhs.equals("azver")) {
az_ver = Integer.parseInt(rhs);
} else if (lhs.equals("supportcrypto")) {
if (crypto_level == TRTrackerServerPeer.CRYPTO_NONE) {
crypto_level = TRTrackerServerPeer.CRYPTO_SUPPORTED;
}
} else if (lhs.equals("requirecrypto")) {
crypto_level = TRTrackerServerPeer.CRYPTO_REQUIRED;
} else if (lhs.equals("cryptoport")) {
crypto_port = Integer.parseInt(rhs);
} else if (lhs.equals("azq")) {
stop_to_queue = true;
} else if (lhs.equals("azsf")) {
scrape_flags = rhs;
} else if (lhs.equals("link")) {
link = rhs;
} else if (lhs.equals("outform")) {
if (rhs.equals("xml")) {
xml_output = true;
}
} else if (lhs.equals("hide")) {
hide = Integer.parseInt(rhs) == 1;
} else if (TRTrackerServerImpl.supportsExtensions()) {
if (lhs.equals("aznp")) {
try {
network_position = DHTNetworkPositionManager.deserialisePosition(remote_address.getAddress(), Base32.decode(rhs));
} catch (Throwable e) {
}
} else if (lhs.equals("azup")) {
up_speed = Integer.parseInt(rhs);
}
}
if (p1 == -1) {
break;
}
}
if (hide) {
tcp_port = 0;
crypto_port = 0;
http_port = 0;
udp_port = 0;
}
if (crypto_level == TRTrackerServerPeer.CRYPTO_REQUIRED) {
if (crypto_port != 0) {
tcp_port = crypto_port;
}
}
byte[][] hashes = null;
if (hash_list != null) {
hashes = new byte[hash_list.size()][];
hash_list.toArray(hashes);
} else if (hash != null) {
hashes = new byte[][] { hash };
}
if (compact_enabled) {
if (xml_output) {
compact_mode = TRTrackerServerTorrentImpl.COMPACT_MODE_XML;
} else if (az_ver >= 2) {
compact_mode = TRTrackerServerTorrentImpl.COMPACT_MODE_AZ_2;
}
}
Map[] root_out = new Map[1];
TRTrackerServerPeerImpl[] peer_out = new TRTrackerServerPeerImpl[1];
specific_torrent = processTrackerRequest(server, str, root_out, peer_out, request_type, hashes, link, scrape_flags, peer_id, no_peer_id, compact_mode, key, event, stop_to_queue, tcp_port & 0xffff, udp_port & 0xffff, http_port & 0xffff, real_ip_address, client_ip_address, downloaded, uploaded, left, num_want, crypto_level, (byte) az_ver, up_speed, network_position);
root = root_out[0];
if (request_type == TRTrackerServerRequest.RT_SCRAPE) {
if (lowercase_input_header.contains(lc_azureus_name)) {
root.put("aztracker", new Long(1));
}
}
if (root.get("_data") == null) {
TRTrackerServerPeer post_process_peer = peer_out[0];
if (post_process_peer == null) {
post_process_peer = new lightweightPeer(client_ip_address, tcp_port, peer_id);
}
server.postProcess(post_process_peer, specific_torrent, request_type, str, root);
}
} catch (Exception e) {
String warning_message = null;
Map error_entries = null;
if (e instanceof TRTrackerServerException) {
TRTrackerServerException tr_excep = (TRTrackerServerException) e;
int reason = tr_excep.getResponseCode();
error_entries = tr_excep.getErrorEntries();
if (reason != -1) {
String resp = "HTTP/1.1 " + reason + " " + tr_excep.getResponseText() + NL;
Map headers = tr_excep.getResponseHeaders();
Iterator it = headers.entrySet().iterator();
while (it.hasNext()) {
Map.Entry entry = (Map.Entry) it.next();
String key = (String) entry.getKey();
String value = (String) entry.getValue();
if (key.equalsIgnoreCase("connection")) {
if (!value.equalsIgnoreCase("close")) {
Debug.out("Ignoring 'Connection' header");
continue;
}
}
resp += key + ": " + value + NL;
}
resp += "Connection: close" + NL;
byte[] payload = null;
if (error_entries != null) {
payload = BEncoder.encode(error_entries);
resp += "Content-Length: " + payload.length + NL;
} else {
resp += "Content-Length: 0" + NL;
}
resp += NL;
os.write(resp.getBytes());
if (payload != null) {
os.write(payload);
}
os.flush();
return (false);
}
if (tr_excep.isUserMessage()) {
warning_message = tr_excep.getMessage();
}
} else if (e instanceof NullPointerException) {
e.printStackTrace();
}
String message = e.getMessage();
if (message == null || message.length() == 0) {
// e.printStackTrace();
message = e.toString();
}
root = new HashMap();
root.put("failure reason", message);
if (warning_message != null) {
root.put("warning message", warning_message);
}
if (error_entries != null) {
root.putAll(error_entries);
}
}
setTaskState("writing response");
byte[] data;
byte[] header_start;
if (xml_output) {
StringBuilder xml = new StringBuilder("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
xml.append("<RESULT>");
if (specific_torrent != null) {
xml.append("<BTIH>");
xml.append(ByteFormatter.encodeString(specific_torrent.getHash().getBytes()));
xml.append("</BTIH>");
xml.append(BEncoder.encodeToXML(root, true));
}
xml.append("</RESULT>");
data = xml.toString().getBytes("UTF-8");
header_start = HTTP_RESPONSE_XML_START;
} else {
// cache both plain and gzip encoded data for possible reuse
data = (byte[]) root.get("_data");
if (data == null) {
data = BEncoder.encode(root);
if (data.length > 1000000) {
File dump = new File("bdecoder.dump");
synchronized (TRTrackerServerProcessorTCP.class) {
try {
Debug.out("Output is too large, saving diagnostics to " + dump.toString());
PrintWriter pw = new PrintWriter(new FileWriter(dump));
BDecoder.print(pw, root);
pw.close();
} catch (Throwable e) {
}
}
}
root.put("_data", data);
}
header_start = HTTP_RESPONSE_START;
}
if (gzip_reply) {
byte[] gzip_data = (byte[]) root.get("_gzipdata");
if (gzip_data == null) {
ByteArrayOutputStream tos = new ByteArrayOutputStream(data.length);
GZIPOutputStream gos = new GZIPOutputStream(tos);
gos.write(data);
gos.close();
gzip_data = tos.toByteArray();
root.put("_gzipdata", gzip_data);
}
data = gzip_data;
}
// System.out.println( "TRTrackerServerProcessor::reply: sending " + new String(data));
// write the response
setTaskState("writing header");
os.write(header_start);
byte[] length_bytes = String.valueOf(data.length).getBytes();
os.write(length_bytes);
int header_len = header_start.length + length_bytes.length;
setTaskState("writing content");
if (gzip_reply) {
os.write(HTTP_RESPONSE_END_GZIP);
header_len += HTTP_RESPONSE_END_GZIP.length;
} else {
os.write(HTTP_RESPONSE_END_NOGZIP);
header_len += HTTP_RESPONSE_END_NOGZIP.length;
}
os.write(data);
server.updateStats(request_type, specific_torrent, input_header.length(), header_len + data.length);
} finally {
setTaskState("final os flush");
os.flush();
}
return (false);
}
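As a usage note, the parameter loop above splits the raw query string on '&' and '=' and URL-decodes each value with Constants.BYTE_ENCODING, so a request can be assembled symmetrically on the client side. A minimal sketch follows; buildAnnounceQuery is a hypothetical helper showing only a subset of the recognised parameters, and it assumes Constants.BYTE_ENCODING is a single-byte encoding that round-trips arbitrary hash bytes.
// sketch only: builds the kind of query string that processRequest strips from "/announce?" and parses
static String buildAnnounceQuery(byte[] info_hash, byte[] peer_id, int port, long left) throws java.io.UnsupportedEncodingException {
String enc = Constants.BYTE_ENCODING; // same encoding the parser passes to URLDecoder.decode
return "info_hash=" + java.net.URLEncoder.encode(new String(info_hash, enc), enc) + "&peer_id=" + java.net.URLEncoder.encode(new String(peer_id, enc), enc) + "&port=" + port + "&uploaded=0&downloaded=0&left=" + left + "&event=started&numwant=50&compact=1";
}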