Use of com.biglybt.core.torrent.TOTorrentFileHashTree in the BiglyBT project by BiglySoftware: class PEPeerControlHashHandlerImpl, method receivedOrRejectedHashes.
/**
 * Handles an incoming hashes reply (hashes != null) or a hash-reject (hashes == null)
 * for a previously issued hash request: feeds any received hashes into the file's hash
 * tree, removes the matching outstanding request from the per-peer / active / per-piece
 * bookkeeping, and notifies the request's listeners of success or failure.
 */
private void receivedOrRejectedHashes(PEPeerTransport peer, byte[] root_hash, int base_layer, int index, int length, int proof_layers, // null if rejected
byte[][] hashes) {
try {
TOTorrentFileHashTree hash_tree = file_map.get(root_hash);
if (hash_tree == null) {
// unknown root hash - nothing to do
return;
}
if (hashes != null) {
// a reject delivers nothing to the tree
hash_tree.receivedHashes(root_hash, base_layer, index, length, proof_layers, hashes);
}
List<HashListener> to_notify = null;
synchronized (peer_requests) {
List<PeerHashRequest> outstanding = peer_requests.get(peer);
if (outstanding != null) {
PeerHashRequest matched = null;
// locate the outstanding request this reply corresponds to
for (Iterator<PeerHashRequest> req_it = outstanding.iterator(); req_it.hasNext();) {
PeerHashRequest candidate = req_it.next();
HashRequest hr = candidate.getRequest();
boolean same_request = Arrays.equals(root_hash, hr.getRootHash()) && base_layer == hr.getBaseLayer() && index == hr.getOffset() && length == hr.getLength() && proof_layers == hr.getProofLayers();
if (same_request) {
matched = candidate;
req_it.remove();
break;
}
}
if (matched != null) {
if (outstanding.isEmpty()) {
// last outstanding request for this peer - drop the map entry
peer_requests.remove(peer);
}
if (!active_requests.remove(matched)) {
Debug.out("entry not found");
}
removeFromPieceRequests(matched);
matched.setComplete();
to_notify = matched.getListeners();
}
}
}
// notify outside the lock; hashes != null means success
if (to_notify != null) {
for (HashListener listener : to_notify) {
try {
listener.complete(hashes != null);
} catch (Throwable e) {
Debug.out(e);
}
}
}
} catch (Throwable e) {
Debug.out(e);
}
}
Use of com.biglybt.core.torrent.TOTorrentFileHashTree in the BiglyBT project by BiglySoftware: class PEPeerControlHashHandlerImpl, method receivedHashRequest.
/**
 * Services a hash request received from a remote peer. The request is accounted
 * against the peer's rate-limiting stats and then run as a (possibly deferred)
 * task; if the tree cannot be found or the request is not accepted, the receiver
 * is failed with a null result.
 */
@Override
public void receivedHashRequest(PEPeerTransport peer, HashesReceiver receiver, byte[] root_hash, int base_layer, int index, int length, int proof_layers) {
// lazily attach the per-peer stats object used for rate limiting
PeerStats stats = (PeerStats) peer.getUserData(KEY_PEER_STATS);
if (stats == null) {
stats = new PeerStats(peer);
peer.setUserData(KEY_PEER_STATS, stats);
}
try {
TOTorrentFileHashTree tree = file_map.get(root_hash);
if (tree != null) {
// estimate the data bytes this hash request covers: base layer 0 is the
// 16KiB leaf layer, anything else is assumed to be the pieces layer
int related_bytes = base_layer == 0 ? 16 * 1024 * length : piece_length * length;
stats.hashesRequested(related_bytes);
// runTask applies the rate limiting before the request is serviced
stats.runTask(() -> {
// when accepted the tree informs the receiver of the result itself;
// otherwise fail the receiver immediately
if (!tree.requestHashes(this, new HashesReceiverImpl(peer, receiver), root_hash, base_layer, index, length, proof_layers)) {
receiver.receiveResult(null);
}
});
return;
}
} catch (Throwable e) {
Debug.out(e);
}
// unknown root hash or error -> reject the request
receiver.receiveResult(null);
}
Use of com.biglybt.core.torrent.TOTorrentFileHashTree in the BiglyBT project by BiglySoftware: class PEPeerControlHashHandlerImpl, method request.
/**
 * Issues a piece-hash request for the given piece to the given peer, coalescing with
 * any existing request that already covers the piece.
 *
 * @param peer                 peer to send the hash request to
 * @param piece_number         absolute piece number the hashes are needed for
 * @param listener_maybe_null  optional listener informed when the request completes
 * @return the (new or existing) request covering the piece, or null if the file has no
 *         hash tree, the tree produced no request, or too many requests are active
 */
private PeerHashRequest request(PEPeerTransport peer, int piece_number, HashListener listener_maybe_null) {
TOTorrentFileHashTree.HashRequest hash_req;
TOTorrentFile file = disk_manager.getPieceList(piece_number).get(0).getFile().getTorrentFile();
TOTorrentFileHashTree tree = file.getHashTree();
if (tree == null) {
return (null);
}
PeerHashRequest peer_request;
synchronized (peer_requests) {
// coalesce: an in-flight request already covering this piece serves this caller too
if (piece_requests != null && piece_requests[piece_number] != null) {
if (listener_maybe_null != null) {
// add listener to any entry, doesn't matter which
piece_requests[piece_number][0].addListener(listener_maybe_null);
}
return (piece_requests[piece_number][0]);
}
hash_req = tree.requestPieceHash(piece_number, peer.getAvailable());
if (hash_req == null) {
return (null);
}
// sanity cap on total outstanding requests
if (active_requests.size() > 2048) {
Debug.out("Too many active hash requests");
return (null);
}
peer_request = new PeerHashRequest(peer, file, hash_req, listener_maybe_null);
active_requests.add(peer_request);
// track the request against the peer it was sent to
List<PeerHashRequest> peer_reqs = peer_requests.get(peer);
if (peer_reqs == null) {
peer_reqs = new ArrayList<>();
peer_requests.put(peer, peer_reqs);
}
peer_reqs.add(peer_request);
// lazily allocate the per-piece index of covering requests
if (piece_requests == null) {
piece_requests = new PeerHashRequest[torrent.getNumberOfPieces()][];
}
// the request's offset is file-relative; convert to absolute piece numbers
int offset = hash_req.getOffset() + file.getFirstPieceNumber();
int length = hash_req.getLength();
PeerHashRequest[] pr = new PeerHashRequest[] { peer_request };
int pos = offset;
int end = offset + length;
// register this request against every piece it covers
while (pos < end && pos < piece_requests.length) {
PeerHashRequest[] existing = piece_requests[pos];
if (existing == null) {
// usual case
piece_requests[pos] = pr;
} else {
// rare: another request already covers this piece - append to its entry
PeerHashRequest[] temp = Arrays.copyOf(existing, existing.length + 1);
temp[existing.length] = peer_request;
piece_requests[pos] = temp;
}
pos++;
}
}
// send outside the lock
peer.sendHashRequest(hash_req);
return (peer_request);
}
Use of com.biglybt.core.torrent.TOTorrentFileHashTree in the BiglyBT project by BiglySoftware: class PEPeerControlHashHandlerImpl, method getPieceTree.
/**
 * Supplies the per-piece hash layers for a completed piece to the given receiver,
 * building them from on-disk data if not already cached. The receiver is informed
 * exactly once: synchronously for the fail/cache-hit/coalesced cases, otherwise
 * asynchronously once the disk read and hashing complete. A null result signals failure.
 */
public void getPieceTree(PieceTreeReceiver receiver, TOTorrentFileHashTree tree, int piece_offset) {
TOTorrentFile file = tree.getFile();
// piece_offset is relative to the file's first piece
int piece_number = file.getFirstPieceNumber() + piece_offset;
// the tree can only be built from verified, fully-downloaded piece data
if (!disk_manager.isDone(piece_number)) {
receiver.receivePieceTree(piece_offset, null);
return;
}
byte[][] existing;
synchronized (piece_tree_cache) {
existing = piece_tree_cache.get(piece_number);
}
if (existing != null) {
// cache hit - touch the activity timestamp so the cache isn't expired
last_piece_tree_request = SystemTime.getMonotonousTime();
receiver.receivePieceTree(piece_offset, existing);
return;
}
PieceTreeRequest piece_tree_request;
synchronized (piece_tree_requests) {
piece_tree_request = piece_tree_requests.get(piece_number);
if (piece_tree_request != null) {
// a build for this piece is already in flight - coalesce onto it
piece_tree_request.addListener(receiver);
return;
} else {
piece_tree_request = new PieceTreeRequest(piece_offset, piece_number, receiver);
piece_tree_requests.put(piece_number, piece_tree_request);
}
}
// effectively-final copy for use in the anonymous listener / lambda below
PieceTreeRequest f_piece_tree_request = piece_tree_request;
// System.out.println( "building hash tree for " + piece_number );
boolean went_async = false;
try {
byte[] piece_hash = torrent.getPieces()[piece_number];
int piece_size = disk_manager.getPieceLength(piece_number);
PEPeerTransport peer = ((HashesReceiverImpl) receiver.getHashesReceiver()).getPeer();
PeerStats stats = (PeerStats) peer.getUserData(KEY_PEER_STATS);
// account the disk read against the requesting peer's limits
stats.pieceTreeRequest(piece_size);
disk_manager.enqueueReadRequest(disk_manager.createReadRequest(piece_number, 0, piece_size), new DiskManagerReadRequestListener() {
public void readCompleted(DiskManagerReadRequest request, DirectByteBuffer data) {
boolean async_hashing = false;
try {
ByteBuffer byte_buffer = data.getBuffer(DirectByteBuffer.SS_OTHER);
DMPieceList pieceList = disk_manager.getPieceList(piece_number);
DMPieceMapEntry piece_entry = pieceList.get(0);
// NOTE(review): a 2-entry piece list appears to mean the first file ends inside
// this piece; limit hashing to that file's bytes - confirm against DMPieceList docs
if (pieceList.size() == 2) {
int v2_piece_length = piece_entry.getLength();
if (v2_piece_length < piece_length) {
// hasher will pad appropriately
byte_buffer.limit(byte_buffer.position() + v2_piece_length);
}
}
ConcurrentHasher hasher = ConcurrentHasher.getSingleton();
// hash version 2 (SHA-256, per the SHA256.DIGEST_LENGTH use below); the
// lambda fires when hashing completes
hasher.addRequest(byte_buffer, 2, piece_size, file.getLength(), (completed_request) -> {
byte[][] hashes = null;
try {
// only hand out the tree if the computed root matches the expected piece hash
if (Arrays.equals(completed_request.getResult(), piece_hash)) {
List<List<byte[]>> tree = completed_request.getHashTree();
if (tree != null) {
// flatten each layer's hashes into one contiguous byte[]; layers are
// stored in reverse of the hasher's iteration order (layer_index counts down)
hashes = new byte[tree.size()][];
int layer_index = hashes.length - 1;
for (List<byte[]> entry : tree) {
byte[] layer = new byte[entry.size() * SHA256.DIGEST_LENGTH];
hashes[layer_index--] = layer;
int layer_pos = 0;
for (byte[] hash : entry) {
System.arraycopy(hash, 0, layer, layer_pos, SHA256.DIGEST_LENGTH);
layer_pos += SHA256.DIGEST_LENGTH;
}
}
last_piece_tree_request = SystemTime.getMonotonousTime();
synchronized (piece_tree_cache) {
piece_tree_cache.put(piece_number, hashes);
}
}
}
} finally {
// always release the buffer and complete the request (hashes == null on mismatch/failure)
data.returnToPool();
f_piece_tree_request.complete(hashes);
}
}, false);
async_hashing = true;
} finally {
if (!async_hashing) {
// hashing never got queued - clean up and fail the request
data.returnToPool();
f_piece_tree_request.complete(null);
}
}
}
public void readFailed(DiskManagerReadRequest request, Throwable cause) {
f_piece_tree_request.complete(null);
}
public int getPriority() {
// low priority - hash-tree serving must not delay normal piece reads
return (-1);
}
public void requestExecuted(long bytes) {
}
});
went_async = true;
} catch (Throwable e) {
Debug.out(e);
} finally {
if (!went_async) {
// synchronous failure path - fail the request so listeners aren't left hanging
piece_tree_request.complete(null);
}
}
}
Use of com.biglybt.core.torrent.TOTorrentFileHashTree in the BiglyBT project by BiglySoftware: class PEPeerControlHashHandlerImpl, method update.
/**
 * Periodic maintenance pass. Responsibilities, in order:
 * saves torrent state once the download completes (if piece hashes were received);
 * clears the piece-tree cache after 60s without requests;
 * expires PieceTreeRequests older than 30s (checked every 30s);
 * times out peer hash requests after 10s, or re-queues them after 5s when the
 * peer is no longer transferring;
 * and, for files whose hash trees are still incomplete, issues new piece-hash
 * requests (at most 10 in flight) against peers that have the needed pieces.
 */
@Override
public void update() {
long now = SystemTime.getMonotonousTime();
// persist received hashes once the download is complete
if (!save_done_on_complete && disk_manager.getRemaining() == 0 && piece_hashes_received.get() > 0) {
save_done_on_complete = true;
peer_manager.getAdapter().saveTorrentState();
}
// drop the piece-tree cache after a minute of inactivity (-1 disables until next use)
if (last_piece_tree_request > 0 && now - last_piece_tree_request > 60 * 1000) {
last_piece_tree_request = -1;
synchronized (piece_tree_cache) {
piece_tree_cache.clear();
}
}
// every 30s: expire piece-tree build requests older than 30s
if (now - last_update >= 30 * 1000) {
List<PieceTreeRequest> expired = new ArrayList<>();
synchronized (piece_tree_requests) {
Iterator<PieceTreeRequest> it = piece_tree_requests.values().iterator();
while (it.hasNext()) {
PieceTreeRequest req = it.next();
if (now - req.getCreateTime() > 30 * 1000) {
it.remove();
expired.add(req);
}
}
}
// complete (fail) outside the lock
for (PieceTreeRequest req : expired) {
Debug.out("PieceTreeRequest expired, derp");
req.complete(null);
}
}
List<PeerHashRequest> expired = new ArrayList<>();
List<PeerHashRequest> retry = new ArrayList<>();
synchronized (peer_requests) {
{
Iterator<PeerHashRequest> request_it = active_requests.iterator();
while (request_it.hasNext()) {
PeerHashRequest peer_request = request_it.next();
boolean remove = false;
long age = now - peer_request.getCreateTime();
if (age > 10 * 1000) {
// hard timeout - give up on this request
expired.add(peer_request);
remove = true;
} else if (age > 5 * 1000) {
// soft timeout - retry elsewhere if the peer has stopped transferring
if (peer_request.getPeer().getPeerState() != PEPeer.TRANSFERING) {
retry.add(peer_request);
remove = true;
}
}
if (remove) {
request_it.remove();
// unhook the request from the per-peer and per-piece indices
PEPeerTransport peer = peer_request.getPeer();
List<PeerHashRequest> peer_reqs = peer_requests.get(peer);
if (peer_reqs == null) {
Debug.out("entry not found");
} else {
peer_reqs.remove(peer_request);
if (peer_reqs.isEmpty()) {
peer_requests.remove(peer);
}
}
removeFromPieceRequests(peer_request);
peer_request.setComplete();
} else {
// active_requests is ordered by age, so the first young entry ends the scan
break;
}
}
}
if (incomplete_trees.isEmpty()) {
// all trees complete - make sure state gets saved even pre-completion
if (!save_done_on_complete && piece_hashes_received.get() > 0) {
save_done_on_complete = true;
peer_manager.getAdapter().saveTorrentState();
}
} else {
byte[][] pieces = null;
int[] peer_availability = null;
boolean has_seeds = false;
// purge completed requests from the incomplete-tree tracking map
Iterator<PeerHashRequest> request_it = incomplete_tree_reqs.values().iterator();
while (request_it.hasNext()) {
PeerHashRequest peer_request = request_it.next();
if (peer_request.isComplete()) {
request_it.remove();
}
}
Iterator<TOTorrentFileHashTree> tree_it = incomplete_trees.iterator();
while (tree_it.hasNext()) {
// cap concurrent tree-completion requests
if (incomplete_tree_reqs.size() >= 10) {
break;
}
TOTorrentFileHashTree tree = tree_it.next();
PeerHashRequest peer_request = incomplete_tree_reqs.get(tree);
if (peer_request == null) {
if (tree.isPieceLayerComplete()) {
// tree finished since last pass
tree_it.remove();
} else {
// lazily fetch the known piece hashes (null entry == hash still missing)
if (pieces == null) {
try {
pieces = torrent.getPieces();
} catch (Throwable e) {
break;
}
}
TOTorrentFile file = tree.getFile();
int start = file.getFirstPieceNumber();
int end = file.getLastPieceNumber();
// find a piece of this file whose hash is missing and that some peer can supply
for (int i = start; i <= end; i++) {
if (pieces[i] == null) {
// lazily resolve availability; a min-availability >= 1 means seeds exist
if (peer_availability == null) {
PiecePicker piece_picker = peer_manager.getPiecePicker();
if (piece_picker.getMinAvailability() >= 1) {
has_seeds = true;
} else {
peer_availability = piece_picker.getAvailability();
}
}
if (has_seeds || peer_availability[i] >= 1) {
PeerHashRequest req = hashRequestSupport(i, null);
if (req != null) {
// one outstanding request per tree at a time
incomplete_tree_reqs.put(tree, req);
break;
}
}
}
}
}
}
}
}
// System.out.println( "Active requests: " + active_requests.size() + ", peers=" + peer_requests + ", piece_req=" + piece_requests + ", incomplete tree req=" + incomplete_tree_reqs);
}
// outside the lock: re-issue soft-timed-out requests, failing listeners that can't be re-queued
for (PeerHashRequest peer_request : retry) {
List<HashListener> listeners = peer_request.getListeners();
if (listeners != null) {
for (HashListener l : listeners) {
if (!hashRequest(l.getPieceNumber(), l)) {
l.complete(false);
}
}
}
}
// hard-timed-out requests simply fail their listeners
for (PeerHashRequest peer_request : expired) {
List<HashListener> listeners = peer_request.getListeners();
if (listeners != null) {
for (HashListener l : listeners) {
try {
l.complete(false);
} catch (Throwable e) {
Debug.out(e);
}
}
}
}
last_update = now;
}
Aggregations