Use of com.biglybt.pif.download.Download in project BiglyBT by BiglySoftware.
From the class TranscodeJobImpl, the method toMap():
protected Map<String, Object> toMap() throws IOException {
try {
Map<String, Object> map = new HashMap<>();
synchronized (this) {
MapUtils.exportInt(map, "state", state);
MapUtils.setMapString(map, "error", error);
MapUtils.setMapString(map, "target", target.getID());
MapUtils.setMapString(map, "profile", profile.getUID());
try {
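// Content backed by a download: persist the torrent hash and file index so the file can be re-resolved later; external files fall back to an absolute path in the catch block below.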
Download download = file.getDownload();
MapUtils.setMapString(map, "dl_hash", ByteFormatter.encodeString(download.getTorrent().getHash()));
MapUtils.exportInt(map, "file_index", file.getIndex());
} catch (DownloadException e) {
// external file
MapUtils.setMapString(map, "file", file.getFile().getAbsolutePath());
}
MapUtils.exportInt(map, "trans_req", transcode_requirement);
MapUtils.exportBooleanAsLong(map, "ar_enable", auto_retry_enabled);
MapUtils.exportBooleanAsLong(map, "pdi", prefer_direct_input);
}
return (map);
} catch (Throwable e) {
throw (new IOException("Export failed: " + Debug.getNestedExceptionMessage(e)));
}
}
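For orientation, the map written above can be turned back into a file reference by reversing each field. The following is a minimal sketch of that inverse lookup, not code from TranscodeJobImpl: the helper name is illustrative, and it assumes ByteFormatter.decodeString and the MapUtils getters are the counterparts of the encode/export calls used in toMap().

protected DiskManagerFileInfo fileFromMap(PluginInterface pi, Map<String, Object> map) throws IOException {
    try {
        // assumed counterparts of the exporters used above (MapUtils.setMapString / exportInt)
        String hash_str = MapUtils.getMapString(map, "dl_hash", null);
        if (hash_str == null) {
            // external-file case: toMap() stored an absolute path under "file" instead
            return null;
        }
        // assumed inverse of the ByteFormatter.encodeString call in toMap()
        byte[] hash = ByteFormatter.decodeString(hash_str);
        Download download = pi.getDownloadManager().getDownload(hash);
        int file_index = MapUtils.getMapInt(map, "file_index", 0);
        return download.getDiskManagerFileInfo(file_index);
    } catch (Throwable e) {
        throw (new IOException("Import failed: " + Debug.getNestedExceptionMessage(e)));
    }
}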
Use of com.biglybt.pif.download.Download in project BiglyBT by BiglySoftware.
From the class DeviceUPnPImpl, the method isVisible():
protected boolean isVisible(ContentDownload file) {
if (getFilterFilesView() || file == null) {
return false;
}
Download download = file.getDownload();
if (download == null) {
return false;
}
if (download.isComplete()) {
return true;
}
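// Incomplete download: treat it as visible if any wanted (non-skipped, non-deleted) file is either fully downloaded or previewable via EMP.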
int numFiles = download.getDiskManagerFileCount();
for (int i = 0; i < numFiles; i++) {
DiskManagerFileInfo fileInfo = download.getDiskManagerFileInfo(i);
if (fileInfo == null || fileInfo.isDeleted() || fileInfo.isSkipped()) {
continue;
}
if (fileInfo.getLength() == fileInfo.getDownloaded()) {
return true;
} else if (PlayUtils.canUseEMP(fileInfo)) {
return (true);
}
}
return false;
}
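The per-file loop above also works as a standalone completeness test against the plugin Download API. Below is a simplified sketch of such a helper (it drops the PlayUtils.canUseEMP preview case and the device filter); the method name is illustrative and it is not part of DeviceUPnPImpl.

private static boolean hasCompleteWantedFile(Download download) {
    if (download == null) {
        return false;
    }
    if (download.isComplete()) {
        return true;
    }
    // same per-file scan as isVisible(), minus the EMP preview special case
    int numFiles = download.getDiskManagerFileCount();
    for (int i = 0; i < numFiles; i++) {
        DiskManagerFileInfo fileInfo = download.getDiskManagerFileInfo(i);
        if (fileInfo == null || fileInfo.isDeleted() || fileInfo.isSkipped()) {
            continue;
        }
        if (fileInfo.getLength() > 0 && fileInfo.getLength() == fileInfo.getDownloaded()) {
            return true;
        }
    }
    return false;
}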
Use of com.biglybt.pif.download.Download in project BiglyBT by BiglySoftware.
From the class TranscodeQueueImpl, the method process():
protected boolean process(final TranscodeJobImpl job) {
TranscodePipe pipe = null;
current_job = job;
DeviceImpl device = job.getDevice();
device.setTranscoding(true);
try {
job.starts();
TranscodeProvider provider = job.getProfile().getProvider();
final TranscodeException[] error = { null };
TranscodeProfile profile = job.getProfile();
final TranscodeFileImpl transcode_file = job.getTranscodeFile();
TranscodeProviderAnalysis provider_analysis;
boolean xcode_required;
if (provider == null) {
xcode_required = false;
provider_analysis = null;
} else {
provider_analysis = analyse(job);
xcode_required = provider_analysis.getBooleanProperty(TranscodeProviderAnalysis.PT_TRANSCODE_REQUIRED);
int tt_req;
if (job.isStream()) {
// already advertised as a transcoded asset so no option not to
// transcode (as name/format would change if decided not to transcode and then
// this would confuse the clients)
tt_req = TranscodeTarget.TRANSCODE_ALWAYS;
} else {
tt_req = job.getTranscodeRequirement();
if (device instanceof TranscodeTarget) {
if (provider_analysis.getLongProperty(TranscodeProviderAnalysis.PT_VIDEO_HEIGHT) == 0) {
if (((TranscodeTarget) device).isAudioCompatible(transcode_file)) {
tt_req = TranscodeTarget.TRANSCODE_NEVER;
}
}
}
}
if (tt_req == TranscodeTarget.TRANSCODE_NEVER) {
xcode_required = false;
} else if (tt_req == TranscodeTarget.TRANSCODE_ALWAYS) {
xcode_required = true;
provider_analysis.setBooleanProperty(TranscodeProviderAnalysis.PT_FORCE_TRANSCODE, true);
}
}
if (xcode_required) {
final AESemaphore xcode_sem = new AESemaphore("xcode:proc");
final TranscodeProviderJob[] provider_job = { null };
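// Adapter receiving callbacks from the transcode provider: it relays progress/ETA to the job, applies pause/cancel state and the byte-rate limit, and (on OS X with auto-retry available) abandons transcodes whose projected output would exceed the source length several times over.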
TranscodeProviderAdapter xcode_adapter = new TranscodeProviderAdapter() {
private boolean resolution_updated;
private final int ETA_AVERAGE_SIZE = 10;
private int last_eta;
private int eta_samples;
private Average eta_average = AverageFactory.MovingAverage(ETA_AVERAGE_SIZE);
private int last_percent;
private long initial_file_downloaded = job.getFile().getDownloaded();
private long file_size = job.getFile().getLength();
@Override
public void updateProgress(int percent, int eta_secs, int new_width, int new_height) {
last_eta = eta_secs;
last_percent = percent;
TranscodeProviderJob prov_job = provider_job[0];
if (prov_job == null) {
return;
}
int job_state = job.getState();
if (job_state == TranscodeJob.ST_CANCELLED || job_state == TranscodeJob.ST_REMOVED) {
prov_job.cancel();
} else if (paused || job_state == TranscodeJob.ST_PAUSED) {
prov_job.pause();
} else {
if (job_state == TranscodeJob.ST_RUNNING) {
prov_job.resume();
}
job.updateProgress(percent, eta_secs);
prov_job.setMaxBytesPerSecond(max_bytes_per_sec);
if (!resolution_updated) {
if (new_width > 0 && new_height > 0) {
transcode_file.setResolution(new_width, new_height);
resolution_updated = true;
}
}
}
}
@Override
public void streamStats(long connect_rate, long write_speed) {
if (Constants.isOSX && job.getEnableAutoRetry() && job.canUseDirectInput() && job.getAutoRetryCount() == 0) {
if (connect_rate > 5 && last_percent < 100) {
long eta = (long) eta_average.update(last_eta);
eta_samples++;
if (eta_samples >= ETA_AVERAGE_SIZE) {
long total_time = (eta * 100) / (100 - last_percent);
long total_write = total_time * write_speed;
DiskManagerFileInfo file = job.getFile();
long length = file.getLength();
if (length > 0) {
double over_write = ((double) total_write) / length;
if (over_write > 5.0) {
failed(new TranscodeException("Overwrite limit exceeded, abandoning transcode"));
provider_job[0].cancel();
}
}
}
} else {
eta_samples = 0;
}
}
}
@Override
public void failed(TranscodeException e) {
try {
if (error[0] == null) {
error[0] = e;
}
if (e.isRetryDisabled()) {
job.setEnableAutoRetry(false);
}
} finally {
xcode_sem.release();
}
}
@Override
public void complete() {
try {
// sanity check: for incomplete files at the start of the process ensure that they have completed
long current_downloaded = job.getFile().getDownloaded();
if (file_size > 0 && initial_file_downloaded < file_size && current_downloaded < file_size) {
if (error[0] == null) {
// actually this ain't so simple as we stream data prior to hash check completion (otherwise for
// large piece sizes we could be waiting for 4MB to complete downloading before playback)
// and getDownloaded() only returns the verified data size
long contiguous_downloaded = 0;
try {
DiskManagerFileInfo _file_info = job.getFile();
Download download = _file_info.getDownload();
com.biglybt.core.disk.DiskManagerFileInfo file_info = PluginCoreUtils.unwrap(_file_info);
TOTorrentFile torrent_file = file_info.getTorrentFile();
TOTorrent torrent = torrent_file.getTorrent();
TOTorrentFile[] torrent_files = torrent.getFiles();
long byte_start = 0;
for (TOTorrentFile tf : torrent_files) {
if (tf == torrent_file) {
break;
}
byte_start += tf.getLength();
}
DiskManager dm = download.getDiskManager();
if (dm == null) {
throw (new Exception("Download stopped"));
}
DiskManagerPiece[] pieces = PluginCoreUtils.unwrap(dm).getPieces();
long piece_size = torrent.getPieceLength();
int first_piece_index = (int) (byte_start / piece_size);
int first_piece_offset = (int) (byte_start % piece_size);
int last_piece_index = torrent_file.getLastPieceNumber();
DiskManagerPiece first_piece = pieces[first_piece_index];
if (!first_piece.isDone()) {
boolean[] blocks = first_piece.getWritten();
if (blocks == null) {
if (first_piece.isDone()) {
contiguous_downloaded = first_piece.getLength() - first_piece_offset;
}
} else {
int piece_offset = 0;
for (int j = 0; j < blocks.length; j++) {
if (blocks[j]) {
int block_size = first_piece.getBlockSize(j);
piece_offset = piece_offset + block_size;
if (contiguous_downloaded == 0) {
if (piece_offset > first_piece_offset) {
contiguous_downloaded = piece_offset - first_piece_offset;
}
} else {
contiguous_downloaded += block_size;
}
} else {
break;
}
}
}
} else {
contiguous_downloaded = first_piece.getLength() - first_piece_offset;
for (int i = first_piece_index + 1; i <= last_piece_index; i++) {
DiskManagerPiece piece = pieces[i];
if (piece.isDone()) {
contiguous_downloaded += piece.getLength();
} else {
boolean[] blocks = piece.getWritten();
if (blocks == null) {
if (piece.isDone()) {
contiguous_downloaded += piece.getLength();
} else {
break;
}
} else {
for (int j = 0; j < blocks.length; j++) {
if (blocks[j]) {
contiguous_downloaded += piece.getBlockSize(j);
} else {
break;
}
}
}
break;
}
}
}
} catch (Throwable e) {
// Debug.out( e );
}
if (contiguous_downloaded < file_size) {
// things might have improved, check again
current_downloaded = job.getFile().getDownloaded();
if (current_downloaded < file_size) {
Debug.out("Premature transcode termination: init=" + initial_file_downloaded + ", curr=" + current_downloaded + ", len=" + file_size);
error[0] = new TranscodeException("Transcode terminated prematurely");
}
}
}
}
} finally {
xcode_sem.release();
}
}
};
boolean direct_input = job.useDirectInput();
if (job.isStream()) {
/*
provider_job[0] =
provider.transcode(
adapter,
job.getFile(),
profile,
new File( "C:\\temp\\arse").toURI().toURL());
*/
pipe = new TranscodePipeStreamSource2(new TranscodePipeStreamSource2.streamListener() {
@Override
public void gotStream(InputStream is) {
job.setStream(is);
}
});
provider_job[0] = provider.transcode(xcode_adapter, provider_analysis, direct_input, job.getFile(), profile, new URL("tcp://127.0.0.1:" + pipe.getPort()));
} else {
File output_file = transcode_file.getCacheFile();
provider_job[0] = provider.transcode(xcode_adapter, provider_analysis, direct_input, job.getFile(), profile, output_file.toURI().toURL());
}
provider_job[0].setMaxBytesPerSecond(max_bytes_per_sec);
TranscodeQueueListener listener = new TranscodeQueueListener() {
@Override
public void jobAdded(TranscodeJob job) {
}
@Override
public void jobChanged(TranscodeJob changed_job) {
if (changed_job == job) {
int state = job.getState();
if (state == TranscodeJob.ST_PAUSED) {
provider_job[0].pause();
} else if (state == TranscodeJob.ST_RUNNING) {
provider_job[0].resume();
} else if (state == TranscodeJob.ST_CANCELLED || state == TranscodeJob.ST_STOPPED) {
provider_job[0].cancel();
}
}
}
@Override
public void jobRemoved(TranscodeJob removed_job) {
if (removed_job == job) {
provider_job[0].cancel();
}
}
};
try {
addListener(listener);
xcode_sem.reserve();
} finally {
removeListener(listener);
}
if (error[0] != null) {
throw (error[0]);
}
} else {
// no transcode required...
DiskManagerFileInfo source = job.getFile();
transcode_file.setTranscodeRequired(false);
if (job.isStream()) {
PluginInterface av_pi = PluginInitializer.getDefaultInterface().getPluginManager().getPluginInterfaceByID("azupnpav");
if (av_pi == null) {
throw (new TranscodeException("Media Server plugin not found"));
}
IPCInterface av_ipc = av_pi.getIPC();
String url_str = (String) av_ipc.invoke("getContentURL", new Object[] { source });
if (url_str == null || url_str.length() == 0) {
// see if we can use the file directly
File source_file = source.getFile();
if (source_file.exists()) {
job.setStream(new BufferedInputStream(new FileInputStream(source_file)));
} else {
throw (new TranscodeException("No UPnPAV URL and file doesn't exist"));
}
} else {
URL source_url = new URL(url_str);
job.setStream(source_url.openConnection().getInputStream());
}
} else {
boolean url_input_source = source instanceof DiskManagerFileInfoURL;
if (device.getAlwaysCacheFiles() || url_input_source) {
PluginInterface av_pi = PluginInitializer.getDefaultInterface().getPluginManager().getPluginInterfaceByID("azupnpav");
if (av_pi == null) {
throw (new TranscodeException("Media Server plugin not found"));
}
IPCInterface av_ipc = av_pi.getIPC();
String url_str = (String) av_ipc.invoke("getContentURL", new Object[] { source });
InputStream is;
long length;
if (url_str == null || url_str.length() == 0) {
if (url_input_source) {
((DiskManagerFileInfoURL) source).download();
}
File source_file = source.getFile();
if (source_file.exists()) {
is = new BufferedInputStream(new FileInputStream(source_file));
length = source_file.length();
} else {
throw (new TranscodeException("No UPnPAV URL and file doesn't exist"));
}
} else {
URL source_url = new URL(url_str);
URLConnection connection = source_url.openConnection();
is = connection.getInputStream();
String s = connection.getHeaderField("content-length");
if (s != null) {
length = Long.parseLong(s);
} else {
length = -1;
}
}
OutputStream os = null;
final boolean[] cancel_copy = { false };
TranscodeQueueListener copy_listener = new TranscodeQueueListener() {
@Override
public void jobAdded(TranscodeJob job) {
}
@Override
public void jobChanged(TranscodeJob changed_job) {
if (changed_job == job) {
int state = job.getState();
if (state == TranscodeJob.ST_PAUSED) {
} else if (state == TranscodeJob.ST_RUNNING) {
} else if (state == TranscodeJob.ST_CANCELLED || state == TranscodeJob.ST_STOPPED) {
cancel_copy[0] = true;
}
}
}
@Override
public void jobRemoved(TranscodeJob removed_job) {
if (removed_job == job) {
cancel_copy[0] = true;
}
}
};
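// Copy the source stream into the transcode cache file, updating job progress when the content length is known and honouring cancellation via the copy_listener defined above.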
try {
addListener(copy_listener);
os = new FileOutputStream(transcode_file.getCacheFile());
long total_copied = 0;
byte[] buffer = new byte[128 * 1024];
while (true) {
if (cancel_copy[0]) {
throw (new TranscodeException("Copy cancelled"));
}
int len = is.read(buffer);
if (len <= 0) {
break;
}
os.write(buffer, 0, len);
total_copied += len;
if (length > 0) {
job.updateProgress((int) (total_copied * 100 / length), -1);
}
}
} finally {
try {
is.close();
} catch (Throwable e) {
Debug.out(e);
}
try {
if (os != null) {
os.close();
}
} catch (Throwable e) {
Debug.out(e);
}
removeListener(copy_listener);
}
}
}
}
job.complete();
return (true);
} catch (Throwable e) {
job.failed(e);
e.printStackTrace();
if (!job.isStream() && job.getEnableAutoRetry() && job.getAutoRetryCount() == 0 && job.canUseDirectInput() && !job.useDirectInput()) {
log("Auto-retrying transcode with direct input");
job.setUseDirectInput();
job.setAutoRetry(true);
queue_sem.release();
}
return (false);
} finally {
if (pipe != null) {
pipe.destroy();
}
device.setTranscoding(false);
current_job = null;
}
}
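The contiguous-data check in complete() starts from the same piece arithmetic each time: the file's byte offset within the torrent and the pieces it spans. A minimal sketch of just that mapping is shown below, using only the TOTorrent/TOTorrentFile calls that already appear in the method; the helper itself is illustrative and not part of TranscodeQueueImpl.

private static int[] piecesSpannedBy(TOTorrentFile torrent_file) {
    TOTorrent torrent = torrent_file.getTorrent();
    // byte offset of this file within the torrent = sum of the lengths of the files before it
    long byte_start = 0;
    for (TOTorrentFile tf : torrent.getFiles()) {
        if (tf == torrent_file) {
            break;
        }
        byte_start += tf.getLength();
    }
    long piece_size = torrent.getPieceLength();
    int first_piece_index = (int) (byte_start / piece_size);
    int last_piece_index = torrent_file.getLastPieceNumber();
    return new int[] { first_piece_index, last_piece_index };
}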
Use of com.biglybt.pif.download.Download in project BiglyBT by BiglySoftware.
From the class DiskManagerRandomReadController, the method executeRequest():
private void executeRequest() {
DiskManagerRandomReadRequestImpl request;
synchronized (requests) {
if (requests.isEmpty()) {
return;
}
request = requests.remove(0);
}
if (request.isCancelled()) {
return;
}
DiskManagerFileInfoListener info_listener = null;
com.biglybt.core.disk.DiskManagerFileInfo core_file = request.getFile().getCore();
DownloadManager core_download = core_file.getDownloadManager();
int prev_hint_piece = -1;
int curr_hint_piece = -1;
try {
if (core_download.getTorrent() == null) {
throw (new DownloadException("Torrent invalid"));
}
if (core_download.isDestroyed()) {
Debug.out("Download has been removed");
throw (new DownloadException("Download has been removed"));
}
TOTorrentFile tf = core_file.getTorrentFile();
TOTorrent torrent = tf.getTorrent();
TOTorrentFile[] tfs = torrent.getFiles();
long core_file_start_byte = 0;
for (int i = 0; i < core_file.getIndex(); i++) {
core_file_start_byte += tfs[i].getLength();
}
long download_byte_start = core_file_start_byte + request.getOffset();
long download_byte_end = download_byte_start + request.getLength();
int piece_size = (int) tf.getTorrent().getPieceLength();
if (core_file.getDownloaded() != core_file.getLength()) {
if (core_file.isSkipped()) {
core_file.setSkipped(false);
}
boolean force_start = download.isForceStart();
if (!force_start) {
download.setForceStart(true);
set_force_start = true;
final AESemaphore running_sem = new AESemaphore("rs");
DownloadListener dl_listener = new DownloadListener() {
@Override
public void stateChanged(Download download, int old_state, int new_state) {
if (new_state == Download.ST_DOWNLOADING || new_state == Download.ST_SEEDING) {
running_sem.release();
}
}
@Override
public void positionChanged(Download download, int oldPosition, int newPosition) {
}
};
download.addListener(dl_listener);
try {
if (download.getState() != Download.ST_DOWNLOADING && download.getState() != Download.ST_SEEDING) {
if (!running_sem.reserve(10 * 1000)) {
throw (new DownloadException("timeout waiting for download to start"));
}
}
} finally {
download.removeListener(dl_listener);
}
}
}
boolean is_reverse = request.isReverse();
final AESemaphore wait_sem = new AESemaphore("rr:waiter");
info_listener = new DiskManagerFileInfoListener() {
@Override
public void dataWritten(long offset, long length, Object originator) {
wait_sem.release();
}
@Override
public void dataChecked(long offset, long length) {
}
};
long start_time = SystemTime.getMonotonousTime();
boolean has_started = false;
core_file.addListener(info_listener);
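// Serve the requested range incrementally: hand back whatever contiguous verified/written data is already available, otherwise hint the piece picker at the missing blocks and wait briefly before re-checking.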
while (download_byte_start < download_byte_end) {
if (request.isCancelled()) {
throw (new Exception("request cancelled"));
}
// System.out.println( "Request current: " + download_byte_start + " -> " + download_byte_end );
long now = SystemTime.getMonotonousTime();
int piece_start = (int) (download_byte_start / piece_size);
int piece_start_offset = (int) (download_byte_start % piece_size);
int piece_end = (int) ((download_byte_end - 1) / piece_size);
int piece_end_offset = (int) ((download_byte_end - 1) % piece_size) + 1;
// System.out.println( " piece details: " + piece_start + "/" + piece_start_offset + " -> " + piece_end + "/" + piece_end_offset );
DiskManagerPiece[] pieces = null;
DiskManager disk_manager = core_download.getDiskManager();
if (disk_manager != null) {
pieces = disk_manager.getPieces();
}
long avail_start;
long avail_end;
if (pieces == null) {
if (core_file.getDownloaded() == core_file.getLength()) {
avail_start = download_byte_start;
avail_end = download_byte_end;
} else {
if (now - start_time < 10000 && !has_started) {
wait_sem.reserve(250);
continue;
}
throw (new Exception("download stopped"));
}
} else {
has_started = true;
if (is_reverse) {
long min_done = download_byte_end;
for (int i = piece_end; i >= piece_start; i--) {
int p_start = i == piece_start ? piece_start_offset : 0;
int p_end = i == piece_end ? piece_end_offset : piece_size;
DiskManagerPiece piece = pieces[i];
boolean[] done = piece.getWritten();
if (done == null) {
if (piece.isDone()) {
min_done = i * (long) piece_size;
continue;
} else {
break;
}
}
int block_size = piece.getBlockSize(0);
int first_block = p_start / block_size;
int last_block = (p_end - 1) / block_size;
for (int j = last_block; j >= first_block; j--) {
if (done[j]) {
min_done = i * (long) piece_size + j * block_size;
} else {
break;
}
}
}
avail_start = Math.max(download_byte_start, min_done);
avail_end = download_byte_end;
} else {
long max_done = download_byte_start;
for (int i = piece_start; i <= piece_end; i++) {
int p_start = i == piece_start ? piece_start_offset : 0;
int p_end = i == piece_end ? piece_end_offset : piece_size;
DiskManagerPiece piece = pieces[i];
boolean[] done = piece.getWritten();
if (done == null) {
if (piece.isDone()) {
max_done = (i + 1) * (long) piece_size;
continue;
} else {
break;
}
}
int block_size = piece.getBlockSize(0);
int first_block = p_start / block_size;
int last_block = (p_end - 1) / block_size;
for (int j = first_block; j <= last_block; j++) {
if (done[j]) {
max_done = i * (long) piece_size + (j + 1) * block_size;
} else {
break;
}
}
}
avail_start = download_byte_start;
avail_end = Math.min(download_byte_end, max_done);
}
}
// System.out.println( " avail: " + avail_start + " -> " + avail_end );
int max_chunk = 128 * 1024;
if (avail_end > avail_start) {
long length = avail_end - avail_start;
if (length > max_chunk) {
if (is_reverse) {
avail_start = avail_end - max_chunk;
} else {
avail_end = avail_start + max_chunk;
}
}
// System.out.println( "got data: " + avail_start + " -> " + avail_end );
long read_offset = avail_start - core_file_start_byte;
int read_length = (int) (avail_end - avail_start);
DirectByteBuffer buffer = core_file.read(read_offset, read_length);
request.dataAvailable(buffer, read_offset, read_length);
if (is_reverse) {
download_byte_end = avail_start;
} else {
download_byte_start = avail_end;
}
continue;
}
PEPeerManager pm = core_download.getPeerManager();
if (pm == null) {
if (now - start_time < 10000 && !has_started) {
wait_sem.reserve(250);
continue;
}
throw (new Exception("download stopped"));
} else {
has_started = true;
}
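// Nothing readable yet: bias the piece picker towards the blocks at the head (or tail, for reverse reads) of the outstanding range via a global request hint.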
PiecePicker picker = pm.getPiecePicker();
picker.setReverseBlockOrder(is_reverse);
int hint_piece;
int hint_offset;
int hint_length;
if (piece_start == piece_end) {
hint_piece = piece_start;
hint_offset = piece_start_offset;
hint_length = piece_end_offset - piece_start_offset;
} else {
if (is_reverse) {
hint_piece = piece_end;
hint_offset = 0;
hint_length = piece_end_offset;
} else {
hint_piece = piece_start;
hint_offset = piece_start_offset;
hint_length = piece_size - piece_start_offset;
}
}
if (curr_hint_piece == -1) {
int[] existing = picker.getGlobalRequestHint();
if (existing != null) {
curr_hint_piece = existing[0];
}
}
// System.out.println( "hint: " + hint_piece + "/" + hint_offset + "/" + hint_length + ": curr=" + curr_hint_piece + ", prev=" + prev_hint_piece );
picker.setGlobalRequestHint(hint_piece, hint_offset, hint_length);
if (hint_piece != curr_hint_piece) {
prev_hint_piece = curr_hint_piece;
curr_hint_piece = hint_piece;
}
if (prev_hint_piece != -1) {
clearHint(pm, prev_hint_piece);
}
wait_sem.reserve(250);
}
} catch (Throwable e) {
request.failed(e);
} finally {
PEPeerManager pm = core_download.getPeerManager();
if (pm != null) {
PiecePicker picker = pm.getPiecePicker();
if (picker != null) {
picker.setReverseBlockOrder(false);
picker.setGlobalRequestHint(-1, 0, 0);
if (curr_hint_piece != -1) {
clearHint(pm, curr_hint_piece);
}
}
}
if (info_listener != null) {
core_file.removeListener(info_listener);
}
}
}
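The forward availability scan above can be read as a self-contained calculation: given a byte range, how far does contiguously written data extend from its start? The sketch below condenses that forward case (reverse order and request hinting are omitted, and it stops at the first unwritten block); it is illustrative rather than a method of DiskManagerRandomReadController.

private static long contiguousEndForward(DiskManagerPiece[] pieces, long byte_start, long byte_end, int piece_size) {
    int piece_start = (int) (byte_start / piece_size);
    int piece_start_offset = (int) (byte_start % piece_size);
    int piece_end = (int) ((byte_end - 1) / piece_size);
    int piece_end_offset = (int) ((byte_end - 1) % piece_size) + 1;
    long max_done = byte_start;
    for (int i = piece_start; i <= piece_end; i++) {
        int p_start = i == piece_start ? piece_start_offset : 0;
        int p_end = i == piece_end ? piece_end_offset : piece_size;
        DiskManagerPiece piece = pieces[i];
        boolean[] done = piece.getWritten();
        if (done == null) {
            // null block map: the piece is either complete or untouched
            if (!piece.isDone()) {
                break;
            }
            max_done = (i + 1) * (long) piece_size;
            continue;
        }
        int block_size = piece.getBlockSize(0);
        for (int j = p_start / block_size; j <= (p_end - 1) / block_size; j++) {
            if (!done[j]) {
                return Math.min(byte_end, max_done);
            }
            max_done = i * (long) piece_size + (j + 1) * block_size;
        }
    }
    return Math.min(byte_end, max_done);
}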
Use of com.biglybt.pif.download.Download in project BiglyBT by BiglySoftware.
From the class BuddyPluginViewBetaChat, the method handleDrop():
private void handleDrop(Object payload, DropAccepter accepter) {
if (payload instanceof String[]) {
String[] files = (String[]) payload;
if (files.length == 0) {
Debug.out("Nothing to drop");
} else {
int hits = 0;
for (String file : files) {
File f = new File(file);
if (f.exists()) {
dropFile(f, accepter);
hits++;
}
}
if (hits == 0) {
Debug.out("Nothing files found to drop");
}
}
} else if (payload instanceof String) {
String stuff = (String) payload;
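// Drags from the torrent/file tables arrive as a newline-separated list: a type tag followed by base32 hashes, each optionally suffixed with ';'-separated file indices.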
if (stuff.startsWith("DownloadManager\n") || stuff.startsWith("DiskManagerFileInfo\n")) {
String[] bits = RegExUtil.PAT_SPLIT_SLASH_N.split(stuff);
for (int i = 1; i < bits.length; i++) {
String hash_str = bits[i];
int pos = hash_str.indexOf(';');
try {
if (pos == -1) {
byte[] hash = Base32.decode(bits[i]);
Download download = CoreFactory.getSingleton().getPluginManager().getDefaultPluginInterface().getShortCuts().getDownload(hash);
dropDownload(download, accepter);
} else {
String[] files = hash_str.split(";");
byte[] hash = Base32.decode(files[0].trim());
DiskManagerFileInfo[] dm_files = CoreFactory.getSingleton().getPluginManager().getDefaultPluginInterface().getShortCuts().getDownload(hash).getDiskManagerFileInfo();
for (int j = 1; j < files.length; j++) {
DiskManagerFileInfo dm_file = dm_files[Integer.parseInt(files[j].trim())];
dropDownloadFile(dm_file, accepter);
}
}
} catch (Throwable e) {
Debug.out("Failed to get download for hash " + bits[1]);
}
}
} else if (stuff.startsWith("TranscodeFile\n")) {
String[] bits = RegExUtil.PAT_SPLIT_SLASH_N.split(stuff);
for (int i = 1; i < bits.length; i++) {
File f = new File(bits[i]);
if (f.isFile()) {
dropFile(f, accepter);
}
}
} else {
File f = new File(stuff);
if (f.exists()) {
dropFile(f, accepter);
} else {
String lc_stuff = stuff.toLowerCase(Locale.US);
if (lc_stuff.startsWith("http:") || lc_stuff.startsWith("https:") || lc_stuff.startsWith("magnet: ")) {
dropURL(stuff, accepter);
} else {
Debug.out("Failed to handle drop for '" + stuff + "'");
}
}
}
} else if (payload instanceof FixedURLTransfer.URLType) {
String url = ((FixedURLTransfer.URLType) payload).linkURL;
if (url != null) {
dropURL(url, accepter);
} else {
Debug.out("Failed to handle drop for '" + payload + "'");
}
}
}
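All of the hash handling above funnels through one lookup path. As a usage note, it can be isolated into a small helper like the sketch below (the method name is illustrative); it uses the same ShortCuts call as handleDrop and simply returns null when the hash does not resolve to a local download.

private static Download downloadFromBase32(String hash_str) {
    try {
        // same resolution path as handleDrop: base32 hash -> Download via the default plugin interface
        byte[] hash = Base32.decode(hash_str.trim());
        return CoreFactory.getSingleton().getPluginManager().getDefaultPluginInterface().getShortCuts().getDownload(hash);
    } catch (Throwable e) {
        // unknown hash or shortcut failure
        return null;
    }
}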