Use of com.biglybt.core.torrent.TOTorrentFile in project BiglyBT by BiglySoftware.
The class FMFileImpl, method moveFile:
@Override
public void moveFile(File new_unlinked_file) throws FMFileManagerException {
    try {
        this_mon.enter();
        TOTorrentFile tf = owner.getTorrentFile();
        String new_canonical_path;
        File new_linked_file = manager.getFileLink(tf.getTorrent(), tf.getIndex(), new_unlinked_file);
        try {
            try {
                new_canonical_path = new_linked_file.getCanonicalPath();
            } catch (IOException ioe) {
                String msg = ioe.getMessage();
                if (msg != null && msg.contains("There are no more files")) {
                    String abs_path = new_linked_file.getAbsolutePath();
                    String error = "Caught 'There are no more files' exception during new_file.getCanonicalPath(). " + "os=[" + Constants.OSName + "], new_file.getPath()=[" + new_linked_file.getPath() + "], new_file.getAbsolutePath()=[" + abs_path + "]. ";
                    // "new_canonical_path temporarily set to [" +abs_path+ "]";
                    Debug.out(error, ioe);
                }
                throw ioe;
            }
        } catch (Throwable e) {
            throw (new FMFileManagerException("getCanonicalPath fails", e));
        }
        if (new_linked_file.exists()) {
            throw (new FMFileManagerException("moveFile fails - file '" + new_canonical_path + "' already exists"));
        }
        boolean was_open = isOpen();
        // full close, this will release any slots in the limited file case
        close();
        createDirs(new_linked_file);
        if (!linked_file.exists() || FileUtil.renameFile(linked_file, new_linked_file)) {
            linked_file = new_linked_file;
            canonical_path = new_canonical_path;
            reserveFile();
            if (was_open) {
                // ensure open will regain slots in limited file case
                ensureOpen("moveFile target");
            }
        } else {
            try {
                reserveFile();
            } catch (FMFileManagerException e) {
                Debug.printStackTrace(e);
            }
            if (was_open) {
                try {
                    ensureOpen("moveFile recovery");
                } catch (FMFileManagerException e) {
                    Debug.printStackTrace(e);
                }
            }
            throw (new FMFileManagerException("moveFile fails"));
        }
    } finally {
        this_mon.exit();
    }
}
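The move logic above identifies the file purely by its parent torrent plus index (tf.getTorrent(), tf.getIndex()) when asking the manager for the current file link. As a rough illustration of that keying, the hypothetical helper below keeps a per-torrent map from file index to relocated target. It is only a sketch, not the actual FMFileManager link store; only getTorrent() and getIndex() are taken from the snippet, everything else (FileLinkTable, its fields and methods) is invented for illustration.
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import com.biglybt.core.torrent.TOTorrent;
import com.biglybt.core.torrent.TOTorrentFile;

// Hypothetical helper: a per-torrent link table keyed the same way the snippet's
// manager.getFileLink(torrent, index, default) lookup is keyed.
class FileLinkTable {
    // torrent -> (file index -> relocated on-disk target)
    private final Map<TOTorrent, Map<Integer, File>> links = new HashMap<>();

    // Remember that a torrent file has been moved/renamed on disk.
    void setLink(TOTorrentFile tf, File target) {
        links.computeIfAbsent(tf.getTorrent(), t -> new HashMap<>()).put(tf.getIndex(), target);
    }

    // Resolve the actual on-disk location, falling back to the unlinked default.
    File getLink(TOTorrentFile tf, File unlinked_default) {
        Map<Integer, File> per_torrent = links.get(tf.getTorrent());
        if (per_torrent == null) {
            return unlinked_default;
        }
        return per_torrent.getOrDefault(tf.getIndex(), unlinked_default);
    }
}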
Use of com.biglybt.core.torrent.TOTorrentFile in project BiglyBT by BiglySoftware.
The class CacheFileManagerImpl, method createFile:
@Override
public CacheFile createFile(final CacheFileOwner owner, File file, int type) throws CacheFileManagerException {
    final long my_id;
    // we differentiate the owner names with a unique per-file id
    try {
        this_mon.enter();
        my_id = cache_file_id_next++;
    } finally {
        this_mon.exit();
    }
    int fm_type = convertCacheToFileType(type);
    try {
        FMFile fm_file = file_manager.createFile(new FMFileOwner() {
            @Override
            public String getName() {
                return (owner.getCacheFileOwnerName() + "[" + my_id + "]");
            }
            @Override
            public TOTorrentFile getTorrentFile() {
                return (owner.getCacheFileTorrentFile());
            }
            @Override
            public File getControlFileDir() {
                return (owner.getCacheFileControlFileDir());
            }
        }, file, fm_type);
        TOTorrentFile tf = owner.getCacheFileTorrentFile();
        CacheFile cf;
        int cache_mode = owner.getCacheMode();
        if (cache_mode == CacheFileOwner.CACHE_MODE_EXPERIMENTAL) {
            cf = new CacheFileWithoutCacheMT(this, fm_file, tf);
        } else if ((tf != null && tf.getLength() < cache_files_not_smaller_than) || !cache_enabled || cache_mode == CacheFileOwner.CACHE_MODE_NO_CACHE) {
            cf = new CacheFileWithoutCache(this, fm_file, tf);
        } else {
            cf = new CacheFileWithCache(this, fm_file, tf);
            try {
                this_mon.enter();
                if (updated_cache_files == null) {
                    updated_cache_files = new WeakHashMap(cache_files);
                }
                // copy on write so readers don't need to synchronize or copy
                updated_cache_files.put(cf, null);
                if (tf != null) {
                    torrent_to_cache_file_map.put(tf, cf);
                }
            } finally {
                this_mon.exit();
            }
        }
        return (cf);
    } catch (FMFileManagerException e) {
        rethrow(null, e);
        return (null);
    }
}
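The cache wrapper choice above hinges on TOTorrentFile.getLength(): a known-small file, a disabled cache, or a NO_CACHE owner all get the pass-through implementation. The sketch below isolates that size test; CacheStrategyChooser and MIN_CACHEABLE_LENGTH are hypothetical stand-ins for the real cache_files_not_smaller_than configuration, and the owner cache-mode check is deliberately left out.
import com.biglybt.core.torrent.TOTorrentFile;

// Illustrative only: mirrors the size/enabled test in the selection logic above.
class CacheStrategyChooser {
    // Files below this size bypass the cache in the snippet above
    // (cache_files_not_smaller_than); the value here is an assumed example.
    static final long MIN_CACHEABLE_LENGTH = 1 * 1024 * 1024;

    static boolean shouldCache(TOTorrentFile tf, boolean cache_enabled) {
        if (!cache_enabled) {
            return false;
        }
        // A null torrent file keeps the cached path in the original code,
        // so only a known-small file opts out here.
        return tf == null || tf.getLength() >= MIN_CACHEABLE_LENGTH;
    }
}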
Use of com.biglybt.core.torrent.TOTorrentFile in project BiglyBT by BiglySoftware.
The class CacheFileManagerImpl, method getBytesInCache:
protected boolean[] getBytesInCache(TOTorrent torrent, long[] absoluteOffsets, long[] lengths) {
    // sanity checks
    if (absoluteOffsets.length != lengths.length)
        throw new IllegalArgumentException("Offsets/Lengths mismatch");
    long prevEnding = 0;
    for (int i = 0; i < lengths.length; i++) {
        if (absoluteOffsets[i] < prevEnding || lengths[i] <= 0)
            throw new IllegalArgumentException("Offsets/Lengths are not in ascending order");
        prevEnding = absoluteOffsets[i] + lengths[i];
    }
    TOTorrentFile[] files = torrent.getFiles();
    long[] fileOffsets = new long[files.length];
    boolean[] results = new boolean[absoluteOffsets.length];
    // assume everything to be cached, then check for the opposite
    Arrays.fill(results, true);
    final long first = absoluteOffsets[0];
    final long last = absoluteOffsets[absoluteOffsets.length - 1] + lengths[lengths.length - 1];
    long fileOffset = 0;
    int firstFile = -1;
    boolean lockAcquired = false;
    Map localCacheMap = new LightHashMap();
    try {
        for (int i = 0; i < files.length; i++) {
            TOTorrentFile tf = files[i];
            long length = tf.getLength();
            fileOffsets[i] = fileOffset;
            if (firstFile == -1 && fileOffset <= first && first < fileOffset + length) {
                firstFile = i;
                this_mon.enter();
                lockAcquired = true;
            }
            if (fileOffset > last)
                break;
            if (lockAcquired) {
                CacheFileWithCache cache_file = (CacheFileWithCache) torrent_to_cache_file_map.get(tf);
                localCacheMap.put(tf, cache_file);
            }
            fileOffset += length;
        }
    } finally {
        if (lockAcquired)
            this_mon.exit();
    }
    for (int i = firstFile; -1 < i && i < files.length; i++) {
        TOTorrentFile tf = files[i];
        CacheFileWithCache cache_file = (CacheFileWithCache) localCacheMap.get(tf);
        long length = tf.getLength();
        fileOffset = fileOffsets[i];
        if (fileOffset > last)
            break;
        if (cache_file != null)
            cache_file.getBytesInCache(results, absoluteOffsets, lengths);
        else
            // we have no cache file and thus no cache entries;
            // check if any chunks fall into this non-file
            for (int j = 0; j < results.length; j++)
                if ((absoluteOffsets[j] < fileOffset + length && absoluteOffsets[j] > fileOffset) || (absoluteOffsets[j] + lengths[j] < fileOffset + length && absoluteOffsets[j] + lengths[j] > fileOffset))
                    // no file -> no cache entry
                    results[j] = false;
    }
    // never found a matching torrentfile
    if (!lockAcquired)
        Arrays.fill(results, false);
    return results;
}
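The first loop above converts per-file lengths into cumulative offsets so that absolute torrent offsets can be matched against files. The hypothetical helper below shows the same arithmetic in isolation, using only TOTorrent.getFiles() and TOTorrentFile.getLength() from the snippet; it is a sketch, not part of CacheFileManagerImpl.
import com.biglybt.core.torrent.TOTorrent;
import com.biglybt.core.torrent.TOTorrentFile;

// Hypothetical helper showing the cumulative-offset arithmetic used above:
// torrent files are laid out back to back, so an absolute offset is located
// by walking the files and accumulating each length in turn.
class TorrentOffsetMapper {
    // Returns the index of the file containing absoluteOffset, or -1 if it is
    // past the end of the torrent's data.
    static int fileIndexForOffset(TOTorrent torrent, long absoluteOffset) {
        long fileStart = 0;
        TOTorrentFile[] files = torrent.getFiles();
        for (int i = 0; i < files.length; i++) {
            long length = files[i].getLength();
            if (absoluteOffset >= fileStart && absoluteOffset < fileStart + length) {
                return i;
            }
            fileStart += length;
        }
        return -1;
    }
}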
Use of com.biglybt.core.torrent.TOTorrentFile in project BiglyBT by BiglySoftware.
The class CacheFileManagerImpl, method generate:
@Override
public void generate(IndentWriter writer) {
    writer.println("Cache Manager");
    try {
        writer.indent();
        Iterator it;
        try {
            this_mon.enter();
            it = new ArrayList(cache_entries.keySet()).iterator();
        } finally {
            this_mon.exit();
        }
        writer.println("Entries = " + cache_entries.size());
        Set files = new HashSet();
        while (it.hasNext()) {
            CacheEntry entry = (CacheEntry) it.next();
            CacheFileWithCache file = entry.getFile();
            if (!files.contains(file)) {
                files.add(file);
                TOTorrentFile torrentFile = file.getTorrentFile();
                String fileLength = "";
                try {
                    fileLength = "" + file.getLength();
                } catch (Exception e) {
                    if (torrentFile != null)
                        fileLength = "" + torrentFile.getLength();
                }
                String hash = "<unknown>";
                try {
                    if (torrentFile != null)
                        hash = ByteFormatter.encodeString(torrentFile.getTorrent().getHash());
                } catch (Throwable e) {
                }
                String name = file.getName();
                writer.println("File: " + Debug.secretFileName(name) + ", size " + fileLength + ", torrent " + hash + ", access = " + file.getAccessMode());
            }
        }
    } finally {
        writer.exdent();
    }
}
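The diagnostics above fall back to the torrent metadata when the physical file cannot be queried: TOTorrentFile.getLength() is always available, and the parent torrent supplies the hash. Below is a minimal sketch of that defensive labelling, assuming ByteFormatter lives in com.biglybt.core.util alongside the other utilities used here; CacheFileLabeller itself is hypothetical.
import com.biglybt.core.torrent.TOTorrentFile;
import com.biglybt.core.util.ByteFormatter;

// Hypothetical helper mirroring the defensive labelling above: prefer the
// torrent metadata when the physical file cannot be queried.
class CacheFileLabeller {
    static String describe(TOTorrentFile torrentFile) {
        String hash = "<unknown>";
        try {
            if (torrentFile != null) {
                hash = ByteFormatter.encodeString(torrentFile.getTorrent().getHash());
            }
        } catch (Throwable e) {
            // hash stays "<unknown>" if the torrent metadata is unavailable
        }
        long length = torrentFile == null ? -1 : torrentFile.getLength();
        return "torrent " + hash + ", size " + length;
    }
}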
Use of com.biglybt.core.torrent.TOTorrentFile in project BiglyBT by BiglySoftware.
The class DiskManagerRandomReadController, method executeRequest:
private void executeRequest() {
    DiskManagerRandomReadRequestImpl request;
    synchronized (requests) {
        if (requests.isEmpty()) {
            return;
        }
        request = requests.remove(0);
    }
    if (request.isCancelled()) {
        return;
    }
    DiskManagerFileInfoListener info_listener = null;
    com.biglybt.core.disk.DiskManagerFileInfo core_file = request.getFile().getCore();
    DownloadManager core_download = core_file.getDownloadManager();
    int prev_hint_piece = -1;
    int curr_hint_piece = -1;
    try {
        if (core_download.getTorrent() == null) {
            throw (new DownloadException("Torrent invalid"));
        }
        if (core_download.isDestroyed()) {
            Debug.out("Download has been removed");
            throw (new DownloadException("Download has been removed"));
        }
        TOTorrentFile tf = core_file.getTorrentFile();
        TOTorrent torrent = tf.getTorrent();
        TOTorrentFile[] tfs = torrent.getFiles();
        long core_file_start_byte = 0;
        for (int i = 0; i < core_file.getIndex(); i++) {
            core_file_start_byte += tfs[i].getLength();
        }
        long download_byte_start = core_file_start_byte + request.getOffset();
        long download_byte_end = download_byte_start + request.getLength();
        int piece_size = (int) tf.getTorrent().getPieceLength();
        if (core_file.getDownloaded() != core_file.getLength()) {
            if (core_file.isSkipped()) {
                core_file.setSkipped(false);
            }
            boolean force_start = download.isForceStart();
            if (!force_start) {
                download.setForceStart(true);
                set_force_start = true;
                final AESemaphore running_sem = new AESemaphore("rs");
                DownloadListener dl_listener = new DownloadListener() {
                    @Override
                    public void stateChanged(Download download, int old_state, int new_state) {
                        if (new_state == Download.ST_DOWNLOADING || new_state == Download.ST_SEEDING) {
                            running_sem.release();
                        }
                    }
                    @Override
                    public void positionChanged(Download download, int oldPosition, int newPosition) {
                    }
                };
                download.addListener(dl_listener);
                try {
                    if (download.getState() != Download.ST_DOWNLOADING && download.getState() != Download.ST_SEEDING) {
                        if (!running_sem.reserve(10 * 1000)) {
                            throw (new DownloadException("timeout waiting for download to start"));
                        }
                    }
                } finally {
                    download.removeListener(dl_listener);
                }
            }
        }
        boolean is_reverse = request.isReverse();
        final AESemaphore wait_sem = new AESemaphore("rr:waiter");
        info_listener = new DiskManagerFileInfoListener() {
            @Override
            public void dataWritten(long offset, long length) {
                wait_sem.release();
            }
            @Override
            public void dataChecked(long offset, long length) {
            }
        };
        long start_time = SystemTime.getMonotonousTime();
        boolean has_started = false;
        core_file.addListener(info_listener);
        while (download_byte_start < download_byte_end) {
            if (request.isCancelled()) {
                throw (new Exception("request cancelled"));
            }
            // System.out.println( "Request current: " + download_byte_start + " -> " + download_byte_end );
            long now = SystemTime.getMonotonousTime();
            int piece_start = (int) (download_byte_start / piece_size);
            int piece_start_offset = (int) (download_byte_start % piece_size);
            int piece_end = (int) ((download_byte_end - 1) / piece_size);
            int piece_end_offset = (int) ((download_byte_end - 1) % piece_size) + 1;
            // System.out.println( " piece details: " + piece_start + "/" + piece_start_offset + " -> " + piece_end + "/" + piece_end_offset );
            DiskManagerPiece[] pieces = null;
            DiskManager disk_manager = core_download.getDiskManager();
            if (disk_manager != null) {
                pieces = disk_manager.getPieces();
            }
            long avail_start;
            long avail_end;
            if (pieces == null) {
                if (core_file.getDownloaded() == core_file.getLength()) {
                    avail_start = download_byte_start;
                    avail_end = download_byte_end;
                } else {
                    if (now - start_time < 10000 && !has_started) {
                        wait_sem.reserve(250);
                        continue;
                    }
                    throw (new Exception("download stopped"));
                }
            } else {
                has_started = true;
                if (is_reverse) {
                    long min_done = download_byte_end;
                    for (int i = piece_end; i >= piece_start; i--) {
                        int p_start = i == piece_start ? piece_start_offset : 0;
                        int p_end = i == piece_end ? piece_end_offset : piece_size;
                        DiskManagerPiece piece = pieces[i];
                        boolean[] done = piece.getWritten();
                        if (done == null) {
                            if (piece.isDone()) {
                                min_done = i * (long) piece_size;
                                continue;
                            } else {
                                break;
                            }
                        }
                        int block_size = piece.getBlockSize(0);
                        int first_block = p_start / block_size;
                        int last_block = (p_end - 1) / block_size;
                        for (int j = last_block; j >= first_block; j--) {
                            if (done[j]) {
                                min_done = i * (long) piece_size + j * block_size;
                            } else {
                                break;
                            }
                        }
                    }
                    avail_start = Math.max(download_byte_start, min_done);
                    avail_end = download_byte_end;
                } else {
                    long max_done = download_byte_start;
                    for (int i = piece_start; i <= piece_end; i++) {
                        int p_start = i == piece_start ? piece_start_offset : 0;
                        int p_end = i == piece_end ? piece_end_offset : piece_size;
                        DiskManagerPiece piece = pieces[i];
                        boolean[] done = piece.getWritten();
                        if (done == null) {
                            if (piece.isDone()) {
                                max_done = (i + 1) * (long) piece_size;
                                continue;
                            } else {
                                break;
                            }
                        }
                        int block_size = piece.getBlockSize(0);
                        int first_block = p_start / block_size;
                        int last_block = (p_end - 1) / block_size;
                        for (int j = first_block; j <= last_block; j++) {
                            if (done[j]) {
                                max_done = i * (long) piece_size + (j + 1) * block_size;
                            } else {
                                break;
                            }
                        }
                    }
                    avail_start = download_byte_start;
                    avail_end = Math.min(download_byte_end, max_done);
                }
            }
            // System.out.println( " avail: " + avail_start + " -> " + avail_end );
            int max_chunk = 128 * 1024;
            if (avail_end > avail_start) {
                long length = avail_end - avail_start;
                if (length > max_chunk) {
                    if (is_reverse) {
                        avail_start = avail_end - max_chunk;
                    } else {
                        avail_end = avail_start + max_chunk;
                    }
                }
                // System.out.println( "got data: " + avail_start + " -> " + avail_end );
                long read_offset = avail_start - core_file_start_byte;
                int read_length = (int) (avail_end - avail_start);
                DirectByteBuffer buffer = core_file.read(read_offset, read_length);
                request.dataAvailable(buffer, read_offset, read_length);
                if (is_reverse) {
                    download_byte_end = avail_start;
                } else {
                    download_byte_start = avail_end;
                }
                continue;
            }
            PEPeerManager pm = core_download.getPeerManager();
            if (pm == null) {
                if (now - start_time < 10000 && !has_started) {
                    wait_sem.reserve(250);
                    continue;
                }
                throw (new Exception("download stopped"));
            } else {
                has_started = true;
            }
            PiecePicker picker = pm.getPiecePicker();
            picker.setReverseBlockOrder(is_reverse);
            int hint_piece;
            int hint_offset;
            int hint_length;
            if (piece_start == piece_end) {
                hint_piece = piece_start;
                hint_offset = piece_start_offset;
                hint_length = piece_end_offset - piece_start_offset;
            } else {
                if (is_reverse) {
                    hint_piece = piece_end;
                    hint_offset = 0;
                    hint_length = piece_end_offset;
                } else {
                    hint_piece = piece_start;
                    hint_offset = piece_start_offset;
                    hint_length = piece_size - piece_start_offset;
                }
            }
            if (curr_hint_piece == -1) {
                int[] existing = picker.getGlobalRequestHint();
                if (existing != null) {
                    curr_hint_piece = existing[0];
                }
            }
            // System.out.println( "hint: " + hint_piece + "/" + hint_offset + "/" + hint_length + ": curr=" + curr_hint_piece + ", prev=" + prev_hint_piece );
            picker.setGlobalRequestHint(hint_piece, hint_offset, hint_length);
            if (hint_piece != curr_hint_piece) {
                prev_hint_piece = curr_hint_piece;
                curr_hint_piece = hint_piece;
            }
            if (prev_hint_piece != -1) {
                clearHint(pm, prev_hint_piece);
            }
            wait_sem.reserve(250);
        }
    } catch (Throwable e) {
        request.failed(e);
    } finally {
        PEPeerManager pm = core_download.getPeerManager();
        if (pm != null) {
            PiecePicker picker = pm.getPiecePicker();
            if (picker != null) {
                picker.setReverseBlockOrder(false);
                picker.setGlobalRequestHint(-1, 0, 0);
                if (curr_hint_piece != -1) {
                    clearHint(pm, curr_hint_piece);
                }
            }
        }
        if (info_listener != null) {
            core_file.removeListener(info_listener);
        }
    }
}
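Most of the loop above is driven by two pieces of arithmetic: the file's absolute start byte within the torrent (the sum of the preceding file lengths) and the mapping of a byte range onto piece numbers via the torrent's piece length. The hypothetical PieceRangeCalculator below restates both calculations as stand-alone methods; it is a sketch using only TOTorrentFile/TOTorrent accessors that appear in the snippet, not part of DiskManagerRandomReadController.
import com.biglybt.core.torrent.TOTorrentFile;

// Hypothetical sketch of the offset/piece arithmetic used in the loop above.
class PieceRangeCalculator {
    // The absolute start of a file within the torrent's byte space, as computed
    // at the top of executeRequest(): the sum of all preceding file lengths.
    static long fileStartByte(TOTorrentFile tf) {
        TOTorrentFile[] files = tf.getTorrent().getFiles();
        long start = 0;
        for (int i = 0; i < tf.getIndex(); i++) {
            start += files[i].getLength();
        }
        return start;
    }

    // Converts a byte range [byte_start, byte_end) into
    // { piece_start, piece_start_offset, piece_end, piece_end_offset },
    // using the piece length from the file's parent torrent.
    static int[] toPieceRange(TOTorrentFile tf, long byte_start, long byte_end) {
        int piece_size = (int) tf.getTorrent().getPieceLength();
        int piece_start = (int) (byte_start / piece_size);
        int piece_start_offset = (int) (byte_start % piece_size);
        // byte_end is exclusive, so the last covered byte is byte_end - 1
        int piece_end = (int) ((byte_end - 1) / piece_size);
        int piece_end_offset = (int) ((byte_end - 1) % piece_size) + 1;
        return new int[] { piece_start, piece_start_offset, piece_end, piece_end_offset };
    }
}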