Usage of com.biglybt.core.torrent.TOTorrentFile in the BiglyBT project (BiglySoftware): class TranscodeQueueImpl, method process.
/**
 * Processes a single transcode job synchronously: analyses the source, runs the
 * transcode provider (or falls back to a straight copy/stream when no transcode
 * is required) and blocks until the job completes, fails or is cancelled.
 *
 * Fixes relative to the previous revision:
 *  - the plain-copy loop no longer double-counts copied bytes (progress used to
 *    climb at twice the real rate),
 *  - the cached-copy path reads its InputStream from the SAME URLConnection it
 *    takes the content-length header from (previously a second connection was
 *    opened, so the length could describe a different response).
 *
 * @param job the job to run; its device is flagged as transcoding for the duration
 * @return true on success, false on failure (an auto-retry may have been queued)
 */
protected boolean process(final TranscodeJobImpl job) {
	TranscodePipe pipe = null;

	current_job = job;

	DeviceImpl device = job.getDevice();

	device.setTranscoding(true);

	try {
		job.starts();

		TranscodeProvider provider = job.getProfile().getProvider();

		// first error reported by the async adapter wins
		final TranscodeException[] error = { null };

		TranscodeProfile profile = job.getProfile();

		final TranscodeFileImpl transcode_file = job.getTranscodeFile();

		TranscodeProviderAnalysis provider_analysis;

		boolean xcode_required;

		if (provider == null) {
			// no provider available -> we can only copy/stream the file as-is
			xcode_required = false;
			provider_analysis = null;
		} else {
			provider_analysis = analyse(job);

			xcode_required = provider_analysis.getBooleanProperty(TranscodeProviderAnalysis.PT_TRANSCODE_REQUIRED);

			int tt_req;

			if (job.isStream()) {
				// already advertised as a transcoded asset so no option not to
				// transcode (as name/format would change if decided not to transcode and then
				// this would confuse the clients)
				tt_req = TranscodeTarget.TRANSCODE_ALWAYS;
			} else {
				tt_req = job.getTranscodeRequirement();

				if (device instanceof TranscodeTarget) {
					// audio-only source (no video stream detected): skip the transcode
					// entirely if the device can play the audio natively
					if (provider_analysis.getLongProperty(TranscodeProviderAnalysis.PT_VIDEO_HEIGHT) == 0) {
						if (((TranscodeTarget) device).isAudioCompatible(transcode_file)) {
							tt_req = TranscodeTarget.TRANSCODE_NEVER;
						}
					}
				}
			}

			if (tt_req == TranscodeTarget.TRANSCODE_NEVER) {
				xcode_required = false;
			} else if (tt_req == TranscodeTarget.TRANSCODE_ALWAYS) {
				xcode_required = true;
				provider_analysis.setBooleanProperty(TranscodeProviderAnalysis.PT_FORCE_TRANSCODE, true);
			}
		}

		if (xcode_required) {
			// real transcode: drive the provider asynchronously and block on a semaphore
			// until the adapter reports completion or failure

			final AESemaphore xcode_sem = new AESemaphore("xcode:proc");

			final TranscodeProviderJob[] provider_job = { null };

			TranscodeProviderAdapter xcode_adapter = new TranscodeProviderAdapter() {
				private boolean resolution_updated;

				private final int ETA_AVERAGE_SIZE = 10;

				private int last_eta;
				private int eta_samples;
				private Average eta_average = AverageFactory.MovingAverage(ETA_AVERAGE_SIZE);
				private int last_percent;

				// snapshot taken when the job starts, used by complete() to sanity
				// check that an initially-incomplete file finished downloading
				private long initial_file_downloaded = job.getFile().getDownloaded();
				private long file_size = job.getFile().getLength();

				@Override
				public void updateProgress(int percent, int eta_secs, int new_width, int new_height) {
					last_eta = eta_secs;
					last_percent = percent;

					TranscodeProviderJob prov_job = provider_job[0];

					if (prov_job == null) {
						return;
					}

					int job_state = job.getState();

					// propagate queue/job state changes down to the provider job
					if (job_state == TranscodeJob.ST_CANCELLED || job_state == TranscodeJob.ST_REMOVED) {
						prov_job.cancel();
					} else if (paused || job_state == TranscodeJob.ST_PAUSED) {
						prov_job.pause();
					} else {
						if (job_state == TranscodeJob.ST_RUNNING) {
							prov_job.resume();
						}

						job.updateProgress(percent, eta_secs);

						prov_job.setMaxBytesPerSecond(max_bytes_per_sec);

						if (!resolution_updated) {
							if (new_width > 0 && new_height > 0) {
								transcode_file.setResolution(new_width, new_height);
								resolution_updated = true;
							}
						}
					}
				}

				@Override
				public void streamStats(long connect_rate, long write_speed) {
					// OSX-only heuristic: if the transcode keeps reconnecting and is
					// projected to write far more data than the source length, give up
					// so the auto-retry (with direct input) can kick in
					if (Constants.isOSX && job.getEnableAutoRetry() && job.canUseDirectInput() && job.getAutoRetryCount() == 0) {
						if (connect_rate > 5 && last_percent < 100) {
							long eta = (long) eta_average.update(last_eta);

							eta_samples++;

							if (eta_samples >= ETA_AVERAGE_SIZE) {
								long total_time = (eta * 100) / (100 - last_percent);

								long total_write = total_time * write_speed;

								DiskManagerFileInfo file = job.getFile();

								long length = file.getLength();

								if (length > 0) {
									double over_write = ((double) total_write) / length;

									if (over_write > 5.0) {
										failed(new TranscodeException("Overwrite limit exceeded, abandoning transcode"));

										provider_job[0].cancel();
									}
								}
							}
						} else {
							eta_samples = 0;
						}
					}
				}

				@Override
				public void failed(TranscodeException e) {
					try {
						// keep only the first error reported
						if (error[0] == null) {
							error[0] = e;
						}

						if (e.isRetryDisabled()) {
							job.setEnableAutoRetry(false);
						}
					} finally {
						xcode_sem.release();
					}
				}

				@Override
				public void complete() {
					try {
						// sanity check: for incomplete files at the start of the process ensure that they have completed

						long current_downloaded = job.getFile().getDownloaded();

						if (file_size > 0 && initial_file_downloaded < file_size && current_downloaded < file_size) {
							if (error[0] == null) {
								// actually this ain't so simple as we stream data prior to hash check completion (otherwise for
								// large piece sizes we could be waiting for 4MB to complete downloading before playback)
								// and getDownloaded() only returns the verified data size

								// compute how much CONTIGUOUS data (from the file start) is on
								// disk, counting written-but-unverified blocks as present

								long contiguous_downloaded = 0;

								try {
									DiskManagerFileInfo _file_info = job.getFile();

									Download download = _file_info.getDownload();

									com.biglybt.core.disk.DiskManagerFileInfo file_info = PluginCoreUtils.unwrap(_file_info);

									TOTorrentFile torrent_file = file_info.getTorrentFile();

									TOTorrent torrent = torrent_file.getTorrent();

									TOTorrentFile[] torrent_files = torrent.getFiles();

									// byte offset of this file within the torrent's data
									long byte_start = 0;

									for (TOTorrentFile tf : torrent_files) {
										if (tf == torrent_file) {
											break;
										}

										byte_start += tf.getLength();
									}

									DiskManager dm = download.getDiskManager();

									if (dm == null) {
										throw (new Exception("Download stopped"));
									}

									DiskManagerPiece[] pieces = PluginCoreUtils.unwrap(dm).getPieces();

									long piece_size = torrent.getPieceLength();

									int first_piece_index = (int) (byte_start / piece_size);
									int first_piece_offset = (int) (byte_start % piece_size);
									int last_piece_index = torrent_file.getLastPieceNumber();

									DiskManagerPiece first_piece = pieces[first_piece_index];

									if (!first_piece.isDone()) {
										boolean[] blocks = first_piece.getWritten();

										if (blocks == null) {
											// NOTE(review): re-checking isDone() here looks redundant with the
											// enclosing !isDone() test — presumably guards against a concurrent
											// completion; left unchanged
											if (first_piece.isDone()) {
												contiguous_downloaded = first_piece.getLength() - first_piece_offset;
											}
										} else {
											int piece_offset = 0;

											for (int j = 0; j < blocks.length; j++) {
												if (blocks[j]) {
													int block_size = first_piece.getBlockSize(j);

													piece_offset = piece_offset + block_size;

													if (contiguous_downloaded == 0) {
														if (piece_offset > first_piece_offset) {
															contiguous_downloaded = piece_offset - first_piece_offset;
														}
													} else {
														contiguous_downloaded += block_size;
													}
												} else {
													break;
												}
											}
										}
									} else {
										contiguous_downloaded = first_piece.getLength() - first_piece_offset;

										for (int i = first_piece_index + 1; i <= last_piece_index; i++) {
											DiskManagerPiece piece = pieces[i];

											if (piece.isDone()) {
												contiguous_downloaded += piece.getLength();
											} else {
												boolean[] blocks = piece.getWritten();

												if (blocks == null) {
													if (piece.isDone()) {
														contiguous_downloaded += piece.getLength();
													} else {
														break;
													}
												} else {
													for (int j = 0; j < blocks.length; j++) {
														if (blocks[j]) {
															contiguous_downloaded += piece.getBlockSize(j);
														} else {
															break;
														}
													}
												}

												// first not-fully-done piece ends the contiguous run
												break;
											}
										}
									}
								} catch (Throwable e) {
									// Debug.out( e );
								}

								if (contiguous_downloaded < file_size) {
									// things might have improved, check again

									current_downloaded = job.getFile().getDownloaded();

									if (current_downloaded < file_size) {
										Debug.out("Premature transcode termination: init=" + initial_file_downloaded + ", curr=" + current_downloaded + ", len=" + file_size);

										error[0] = new TranscodeException("Transcode terminated prematurely");
									}
								}
							}
						}
					} finally {
						xcode_sem.release();
					}
				}
			};

			boolean direct_input = job.useDirectInput();

			if (job.isStream()) {
				// stream target: transcode into a local TCP pipe and hand the resulting
				// InputStream to the job
				pipe = new TranscodePipeStreamSource2(new TranscodePipeStreamSource2.streamListener() {
					@Override
					public void gotStream(InputStream is) {
						job.setStream(is);
					}
				});

				provider_job[0] = provider.transcode(xcode_adapter, provider_analysis, direct_input, job.getFile(), profile, new URL("tcp://127.0.0.1:" + pipe.getPort()));
			} else {
				File output_file = transcode_file.getCacheFile();

				provider_job[0] = provider.transcode(xcode_adapter, provider_analysis, direct_input, job.getFile(), profile, output_file.toURI().toURL());
			}

			provider_job[0].setMaxBytesPerSecond(max_bytes_per_sec);

			// mirror queue-level job state changes onto the running provider job
			TranscodeQueueListener listener = new TranscodeQueueListener() {
				@Override
				public void jobAdded(TranscodeJob job) {
				}

				@Override
				public void jobChanged(TranscodeJob changed_job) {
					if (changed_job == job) {
						int state = job.getState();

						if (state == TranscodeJob.ST_PAUSED) {
							provider_job[0].pause();
						} else if (state == TranscodeJob.ST_RUNNING) {
							provider_job[0].resume();
						} else if (state == TranscodeJob.ST_CANCELLED || state == TranscodeJob.ST_STOPPED) {
							provider_job[0].cancel();
						}
					}
				}

				@Override
				public void jobRemoved(TranscodeJob removed_job) {
					if (removed_job == job) {
						provider_job[0].cancel();
					}
				}
			};

			try {
				addListener(listener);

				// block until the adapter signals completion or failure
				xcode_sem.reserve();
			} finally {
				removeListener(listener);
			}

			if (error[0] != null) {
				throw (error[0]);
			}
		} else {
			// no transcode required...

			DiskManagerFileInfo source = job.getFile();

			transcode_file.setTranscodeRequired(false);

			if (job.isStream()) {
				// stream the source directly, preferring the Media Server's content URL
				PluginInterface av_pi = PluginInitializer.getDefaultInterface().getPluginManager().getPluginInterfaceByID("azupnpav");

				if (av_pi == null) {
					throw (new TranscodeException("Media Server plugin not found"));
				}

				IPCInterface av_ipc = av_pi.getIPC();

				String url_str = (String) av_ipc.invoke("getContentURL", new Object[] { source });

				if (url_str == null || url_str.length() == 0) {
					// see if we can use the file directly

					File source_file = source.getFile();

					if (source_file.exists()) {
						job.setStream(new BufferedInputStream(new FileInputStream(source_file)));
					} else {
						throw (new TranscodeException("No UPnPAV URL and file doesn't exist"));
					}
				} else {
					URL source_url = new URL(url_str);

					job.setStream(source_url.openConnection().getInputStream());
				}
			} else {
				boolean url_input_source = source instanceof DiskManagerFileInfoURL;

				if (device.getAlwaysCacheFiles() || url_input_source) {
					// copy the source into the device's cache file

					PluginInterface av_pi = PluginInitializer.getDefaultInterface().getPluginManager().getPluginInterfaceByID("azupnpav");

					if (av_pi == null) {
						throw (new TranscodeException("Media Server plugin not found"));
					}

					IPCInterface av_ipc = av_pi.getIPC();

					String url_str = (String) av_ipc.invoke("getContentURL", new Object[] { source });

					InputStream is;
					long length;

					if (url_str == null || url_str.length() == 0) {
						if (url_input_source) {
							((DiskManagerFileInfoURL) source).download();
						}

						File source_file = source.getFile();

						if (source_file.exists()) {
							is = new BufferedInputStream(new FileInputStream(source_file));
							length = source_file.length();
						} else {
							throw (new TranscodeException("No UPnPAV URL and file doesn't exist"));
						}
					} else {
						URL source_url = new URL(url_str);

						URLConnection connection = source_url.openConnection();

						// FIX: read the stream from the SAME connection the content-length
						// header is taken from (previously a second connection was opened)
						is = connection.getInputStream();

						String s = connection.getHeaderField("content-length");

						if (s != null) {
							length = Long.parseLong(s);
						} else {
							length = -1;
						}
					}

					OutputStream os = null;

					final boolean[] cancel_copy = { false };

					// cancellation flag flipped by queue events while we copy
					TranscodeQueueListener copy_listener = new TranscodeQueueListener() {
						@Override
						public void jobAdded(TranscodeJob job) {
						}

						@Override
						public void jobChanged(TranscodeJob changed_job) {
							if (changed_job == job) {
								int state = job.getState();

								if (state == TranscodeJob.ST_PAUSED) {
								} else if (state == TranscodeJob.ST_RUNNING) {
								} else if (state == TranscodeJob.ST_CANCELLED || state == TranscodeJob.ST_STOPPED) {
									cancel_copy[0] = true;
								}
							}
						}

						@Override
						public void jobRemoved(TranscodeJob removed_job) {
							if (removed_job == job) {
								cancel_copy[0] = true;
							}
						}
					};

					try {
						addListener(copy_listener);

						os = new FileOutputStream(transcode_file.getCacheFile());

						long total_copied = 0;

						byte[] buffer = new byte[128 * 1024];

						while (true) {
							if (cancel_copy[0]) {
								throw (new TranscodeException("Copy cancelled"));
							}

							int len = is.read(buffer);

							if (len <= 0) {
								break;
							}

							os.write(buffer, 0, len);

							// FIX: count each chunk exactly once (previously 'len' was added
							// twice per iteration, double-counting copy progress)
							total_copied += len;

							if (length > 0) {
								job.updateProgress((int) (total_copied * 100 / length), -1);
							}
						}
					} finally {
						try {
							is.close();
						} catch (Throwable e) {
							Debug.out(e);
						}

						try {
							if (os != null) {
								os.close();
							}
						} catch (Throwable e) {
							Debug.out(e);
						}

						removeListener(copy_listener);
					}
				}
			}
		}

		job.complete();

		return (true);
	} catch (Throwable e) {
		job.failed(e);

		// use project logging rather than printStackTrace for consistency
		Debug.out(e);

		// one-shot auto-retry with direct input for non-stream jobs
		if (!job.isStream() && job.getEnableAutoRetry() && job.getAutoRetryCount() == 0 && job.canUseDirectInput() && !job.useDirectInput()) {
			log("Auto-retrying transcode with direct input");

			job.setUseDirectInput();

			job.setAutoRetry(true);

			queue_sem.release();
		}

		return (false);
	} finally {
		if (pipe != null) {
			pipe.destroy();
		}

		device.setTranscoding(false);

		current_job = null;
	}
}
Usage of com.biglybt.core.torrent.TOTorrentFile in the BiglyBT project (BiglySoftware): class DiskManagerImpl, method countDataFiles.
/**
 * Counts the torrent's data files that currently exist on disk (as plain files,
 * not directories) under the given save location.
 *
 * Files whose link target resolves outside the save directory are skipped.
 *
 * @param torrent           the torrent whose files are inspected
 * @param torrent_save_dir  directory the torrent is saved under
 * @param torrent_save_file top-level save name within that directory
 * @return the number of data files found, or -1 on error
 */
private static int countDataFiles(TOTorrent torrent, String torrent_save_dir, String torrent_save_file) {
	try {
		LocaleUtilDecoder decoder = LocaleTorrentUtil.getTorrentEncoding(torrent);

		TOTorrentFile[] torrent_files = torrent.getFiles();

		String base_path = torrent_save_dir + File.separator + torrent_save_file + File.separator;

		int found = 0;

		for (int file_index = 0; file_index < torrent_files.length; file_index++) {
			byte[][] components = torrent_files[file_index].getPathComponents();

			// rebuild the relative path, decoding each component with the
			// torrent's character encoding and sanitising it for the local OS
			StringBuilder path = new StringBuilder(base_path);

			for (int c = 0; c < components.length; c++) {
				String part = decoder.decodeString(components[c]);

				part = FileUtil.convertOSSpecificChars(part, c != components.length - 1);

				if (c > 0) {
					path.append(File.separator);
				}

				path.append(part);
			}

			File data_file = new File(path.toString()).getCanonicalFile();

			// reference comparison is deliberate: getFileLink returns the very
			// same File object when no link is in place
			File linked = FMFileManagerFactory.getSingleton().getFileLink(torrent, file_index, data_file);

			// ignore linked files that live outside the save directory
			boolean outside = linked != data_file
					&& !linked.getCanonicalPath().startsWith(new File(torrent_save_dir).getCanonicalPath());

			if (!outside && data_file.exists() && !data_file.isDirectory()) {
				found++;
			}
		}

		return (found);
	} catch (Throwable e) {
		Debug.printStackTrace(e);

		return (-1);
	}
}
Usage of com.biglybt.core.torrent.TOTorrentFile in the BiglyBT project (BiglySoftware): class DiskManagerImpl, method deleteDataFileContents.
/**
 * Deletes the torrent's data files from disk (honouring any file links) and then
 * removes any empty directories left under the save location.
 *
 * Linked files outside the save directory are only deleted when the
 * "File.delete.include_files_outside_save_dir" option is enabled.
 *
 * @param torrent           the torrent whose data is being removed
 * @param torrent_save_dir  directory the torrent is saved under
 * @param torrent_save_file top-level save name within that directory
 * @param force_no_recycle  true to bypass the recycle bin / trash
 */
private static void deleteDataFileContents(TOTorrent torrent, String torrent_save_dir, String torrent_save_file, boolean force_no_recycle) throws TOTorrentException, UnsupportedEncodingException, LocaleUtilEncodingException {
	LocaleUtilDecoder decoder = LocaleTorrentUtil.getTorrentEncoding(torrent);

	TOTorrentFile[] torrent_files = torrent.getFiles();

	String root_path = torrent_save_dir + File.separator + torrent_save_file + File.separator;

	boolean delete_outside_save_dir = COConfigurationManager.getBooleanParameter("File.delete.include_files_outside_save_dir");

	for (int file_index = 0; file_index < torrent_files.length; file_index++) {
		byte[][] components = torrent_files[file_index].getPathComponents();

		// rebuild the on-disk path; undecodable components are logged and
		// simply omitted, matching long-standing behaviour
		StringBuilder path = new StringBuilder(root_path);

		for (int c = 0; c < components.length; c++) {
			try {
				String part = decoder.decodeString(components[c]);

				part = FileUtil.convertOSSpecificChars(part, c != components.length - 1);

				if (c > 0) {
					path.append(File.separator);
				}

				path.append(part);
			} catch (UnsupportedEncodingException e) {
				Debug.out("file - unsupported encoding!!!!");
			}
		}

		File target = new File(path.toString());

		// reference comparison is deliberate: getFileLink returns the very same
		// File object when no link is in place
		File linked = FMFileManagerFactory.getSingleton().getFileLink(torrent, file_index, target);

		boolean delete;

		if (linked == target) {
			delete = true;
		} else {
			try {
				// delete link targets only if allowed by config or they still
				// live under the save location
				if (delete_outside_save_dir || linked.getCanonicalPath().startsWith(new File(root_path).getCanonicalPath())) {
					target = linked;

					delete = true;
				} else {
					delete = false;
				}
			} catch (Throwable e) {
				Debug.printStackTrace(e);

				delete = false;
			}
		}

		if (delete && target.exists() && !target.isDirectory()) {
			try {
				FileUtil.deleteWithRecycle(target, force_no_recycle);
			} catch (Exception e) {
				Debug.out(e.toString());
			}
		}
	}

	// clean up any now-empty directory tree
	TorrentUtils.recursiveEmptyDirDelete(new File(torrent_save_dir, torrent_save_file));
}
Usage of com.biglybt.core.torrent.TOTorrentFile in the BiglyBT project (BiglySoftware): class FMFileAccessController, method setControlFile.
/**
 * Derives this file's control-file name and directory from its position within
 * the owning torrent.
 *
 * The name is "fmfile&lt;index&gt;.dat" where index is the file's ordinal in the
 * torrent; both fields are cleared when there is no torrent file or the file
 * cannot be located in the torrent.
 */
protected void setControlFile() {
	TOTorrentFile torrent_file = owner.getOwner().getTorrentFile();

	if (torrent_file == null) {
		// no associated torrent file -> no control file
		controlFileName = null;
		control_dir = null;

		return;
	}

	TOTorrentFile[] all_files = torrent_file.getTorrent().getFiles();

	// locate this file's index within the torrent (identity comparison is
	// sufficient here)
	int index = -1;

	for (int i = 0; i < all_files.length; i++) {
		if (all_files[i] == torrent_file) {
			index = i;

			break;
		}
	}

	if (index < 0) {
		Debug.out("File '" + owner.getName() + "' not found in torrent!");

		controlFileName = null;
		control_dir = null;
	} else {
		control_dir = owner.getOwner().getControlFileDir();

		// interned as the same names recur across many files
		controlFileName = StringInterner.intern("fmfile" + index + ".dat");
	}
}
Usage of com.biglybt.core.torrent.TOTorrentFile in the BiglyBT project (BiglySoftware): class FMFileAccessPieceReorderer, method recoverConfig.
/**
 * Rebuilds a piece-reorder control file for an already-existing data file.
 *
 * Most likely an add-for-seeding, which means a recheck will occur: we simply
 * map the pieces the file currently covers to their natural positions and let
 * the recheck sort out correctness; unmapped pieces are recorded as -1.
 *
 * @param torrent_file the torrent file whose layout is being recovered
 * @param data_file    the existing data file on disk
 * @param config_file  destination for the rebuilt control data
 * @param storage_type storage type constant recorded in the config
 * @throws FMFileManagerException if the control file cannot be written
 */
protected static void recoverConfig(TOTorrentFile torrent_file, File data_file, File config_file, int storage_type) throws FMFileManagerException {
	int first_piece = torrent_file.getFirstPieceNumber();

	int num_pieces = torrent_file.getLastPieceNumber() - first_piece + 1;

	int piece_length = (int) torrent_file.getTorrent().getPieceLength();

	int[] piece_map = new int[num_pieces];

	Arrays.fill(piece_map, -1);

	piece_map[0] = 0;

	long data_length = data_file.length();

	// number of pieces the current file length spans (rounded up) plus one,
	// capped at the torrent file's total piece count
	int mapped_pieces = Math.min((int) ((data_length + piece_length - 1) / piece_length) + 1, num_pieces);

	for (int i = 1; i < mapped_pieces; i++) {
		piece_map[i] = i;
	}

	Map config = encodeConfig(storage_type, data_length, mapped_pieces, piece_map);

	File parent_dir = config_file.getParentFile();

	if (!parent_dir.exists()) {
		parent_dir.mkdirs();
	}

	if (!FileUtil.writeResilientFileWithResult(parent_dir, config_file.getName(), config)) {
		throw (new FMFileManagerException("Failed to write control file " + config_file.getAbsolutePath()));
	}
}
Aggregations