Use of com.biglybt.core.util.ConcurrentHasher in project BiglyBT by BiglySoftware.
The class PEPeerControlHashHandlerImpl, method getPieceTree: it returns the flattened SHA-256 merkle hash tree for a completed piece, serving it from a cache when possible and otherwise reading the piece from disk and hashing it asynchronously via ConcurrentHasher.
public void getPieceTree(PieceTreeReceiver receiver, TOTorrentFileHashTree tree, int piece_offset) {
    TOTorrentFile file = tree.getFile();
    int piece_number = file.getFirstPieceNumber() + piece_offset;
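    // a tree can only be served for a piece that has been fully downloaded and verified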
    if (!disk_manager.isDone(piece_number)) {
        receiver.receivePieceTree(piece_offset, null);
        return;
    }
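    // fast path: return a previously built tree from the cache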
    byte[][] existing;
    synchronized (piece_tree_cache) {
        existing = piece_tree_cache.get(piece_number);
    }
    if (existing != null) {
        last_piece_tree_request = SystemTime.getMonotonousTime();
        receiver.receivePieceTree(piece_offset, existing);
        return;
    }
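    // no cached tree: piggy-back on an in-flight request for the same piece, or start a new one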
    PieceTreeRequest piece_tree_request;
    synchronized (piece_tree_requests) {
        piece_tree_request = piece_tree_requests.get(piece_number);
        if (piece_tree_request != null) {
            piece_tree_request.addListener(receiver);
            return;
        } else {
            piece_tree_request = new PieceTreeRequest(piece_offset, piece_number, receiver);
            piece_tree_requests.put(piece_number, piece_tree_request);
        }
    }
    PieceTreeRequest f_piece_tree_request = piece_tree_request;
    // System.out.println( "building hash tree for " + piece_number );
    boolean went_async = false;
    try {
        byte[] piece_hash = torrent.getPieces()[piece_number];
        int piece_size = disk_manager.getPieceLength(piece_number);
        PEPeerTransport peer = ((HashesReceiverImpl) receiver.getHashesReceiver()).getPeer();
        PeerStats stats = (PeerStats) peer.getUserData(KEY_PEER_STATS);
        stats.pieceTreeRequest(piece_size);
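        // read the whole piece from disk; hashing happens in the read-completion callback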
        disk_manager.enqueueReadRequest(disk_manager.createReadRequest(piece_number, 0, piece_size), new DiskManagerReadRequestListener() {
            public void readCompleted(DiskManagerReadRequest request, DirectByteBuffer data) {
                boolean async_hashing = false;
                try {
                    ByteBuffer byte_buffer = data.getBuffer(DirectByteBuffer.SS_OTHER);
                    DMPieceList pieceList = disk_manager.getPieceList(piece_number);
                    DMPieceMapEntry piece_entry = pieceList.get(0);
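                    // a two-entry piece list means the file ends within this piece; for a v2
                    // torrent the remainder is padding, so limit the buffer to the real data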
                    if (pieceList.size() == 2) {
                        int v2_piece_length = piece_entry.getLength();
                        if (v2_piece_length < piece_size) {
                            // hasher will pad appropriately
                            byte_buffer.limit(byte_buffer.position() + v2_piece_length);
                        }
                    }
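                    // hash with version 2 (SHA-256); the lambda fires once the hash and tree are ready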
                    ConcurrentHasher hasher = ConcurrentHasher.getSingleton();
                    hasher.addRequest(byte_buffer, 2, piece_size, file.getLength(), (completed_request) -> {
                        byte[][] hashes = null;
                        try {
                            if (Arrays.equals(completed_request.getResult(), piece_hash)) {
                                List<List<byte[]>> tree = completed_request.getHashTree();
                                if (tree != null) {
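                                    // flatten each layer of the tree into one contiguous byte array,
                                    // reversing the layer order as it is copied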
                                    hashes = new byte[tree.size()][];
                                    int layer_index = hashes.length - 1;
                                    for (List<byte[]> entry : tree) {
                                        byte[] layer = new byte[entry.size() * SHA256.DIGEST_LENGTH];
                                        hashes[layer_index--] = layer;
                                        int layer_pos = 0;
                                        for (byte[] hash : entry) {
                                            System.arraycopy(hash, 0, layer, layer_pos, SHA256.DIGEST_LENGTH);
                                            layer_pos += SHA256.DIGEST_LENGTH;
                                        }
                                    }
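                                    // cache the flattened tree so later requests are served without re-hashing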
                                    last_piece_tree_request = SystemTime.getMonotonousTime();
                                    synchronized (piece_tree_cache) {
                                        piece_tree_cache.put(piece_number, hashes);
                                    }
                                }
                            }
                        } finally {
                            data.returnToPool();
                            f_piece_tree_request.complete(hashes);
                        }
                    }, false);
                    async_hashing = true;
                } finally {
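                    // hand-off to the hasher failed: release the buffer and fail the request here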
                    if (!async_hashing) {
                        data.returnToPool();
                        f_piece_tree_request.complete(null);
                    }
                }
            }
            public void readFailed(DiskManagerReadRequest request, Throwable cause) {
                f_piece_tree_request.complete(null);
            }
            public int getPriority() {
                return (-1);
            }
            public void requestExecuted(long bytes) {
            }
        });
        went_async = true;
    } catch (Throwable e) {
        Debug.out(e);
    } finally {
        if (!went_async) {
            piece_tree_request.complete(null);
        }
    }
}
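
For orientation, here is a minimal sketch of driving ConcurrentHasher directly, restricted to the calls visible in the method above (getSingleton, addRequest with a completion lambda, getResult). The buffer contents, the reuse of piece_size as the file length, and the meaning of the trailing boolean flag are illustrative assumptions, not documented API behaviour.

import java.nio.ByteBuffer;

import com.biglybt.core.util.ConcurrentHasher;

public class ConcurrentHasherSketch {

    public static void main(String[] args) throws InterruptedException {
        // placeholder piece data; getPieceTree above passes piece bytes read from disk instead
        int piece_size = 16 * 1024;
        ByteBuffer buffer = ByteBuffer.allocate(piece_size);

        ConcurrentHasher hasher = ConcurrentHasher.getSingleton();

        // hash version 2 (SHA-256), mirroring the call in getPieceTree; a single-piece file is
        // assumed, so the file length equals piece_size, and 'false' matches the flag used above
        hasher.addRequest(buffer, 2, piece_size, piece_size, (completed_request) -> {
            byte[] hash = completed_request.getResult();
            System.out.println("piece hashed: " + (hash == null ? "no result" : hash.length + " byte digest"));
        }, false);

        // crude wait so the asynchronous completion callback has a chance to run in this sketch
        Thread.sleep(1000);
    }
}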