Use of org.ffmpeg.android.FfmpegController in project storymaker by StoryMaker.
From the class MediaRenderer, method prerenderAudio:
private MediaDesc prerenderAudio(MediaDesc mediaIn) throws Exception {
    FfmpegController ffmpegc = new FfmpegController(mContext, mFileExternDir);

    // output path for the pre-rendered clip (createOutputFile is a helper not shown in this excerpt)
    File outPath = createOutputFile(mediaIn.path, "mp4");

    mMediaManager.applyExportSettings(mediaIn);

    // audio-only pre-render: no explicit video codec, audio mime type
    mediaIn.videoCodec = null;
    mediaIn.mimeType = "audio/3gp";

    MediaDesc mediaOut = ffmpegc.convertToMP4Stream(mediaIn, mediaIn.startTime, mediaIn.duration, outPath.getAbsolutePath(), mShellCallback);
    return mediaOut;
}
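The createOutputFile helper referenced above is not part of this excerpt. A minimal sketch of what such a helper might look like, assuming it only derives an output file name inside the same external working directory passed to FfmpegController (an assumption for illustration, not the project's actual implementation):

// Hypothetical reconstruction of the helper used in prerenderAudio; the real method may differ.
private File createOutputFile(String inputPath, String extension) {
    String baseName = new File(inputPath).getName();
    // place the rendered output next to the other working files with the requested extension
    return new File(mFileExternDir, baseName + "." + extension);
}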
Use of org.ffmpeg.android.FfmpegController in project storymaker by StoryMaker.
From the class MediaVideoExporter, method export:
public void export() {
    try {
        // get lengths of all clips
        ffmpegc = new FfmpegController(mContext, mFileProject);

        // first let's get the audio done
        maOut = new MediaDesc();
        maOut.path = new File(mFileProject, "tmp.wav").getCanonicalPath();

        Message msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.processing_audio));
        mHandler.sendMessage(msg);

        // export video clips and crossfade
        maExport = new MediaAudioExporter(mContext, mHandler, mMediaList, mFileProject, maOut);
        maExport.setFadeLength(mFadeLen);
        maExport.export();

        SoxController sxCon = new SoxController(mContext, sc);

        // now merge all audio tracks into main audio track
        if (mAudioTracks.size() > 0) {
            ArrayList<MediaDesc> mAudioTracksPaths = new ArrayList<MediaDesc>();
            int idxAudioTracks = 0;
            for (MediaDesc audioTrack : mAudioTracks) {
                msg = mHandler.obtainMessage(0);
                msg.getData().putString("status", mContext.getString(R.string.processing_audio_multiple) + " " + (idxAudioTracks + 1) + "/" + mAudioTracks.size());
                mHandler.sendMessage(msg);

                File fileAudioTrack = new File(mFileProject, idxAudioTracks + "-tmp.wav");
                MediaDesc out = ffmpegc.convertToWaveAudio(audioTrack, fileAudioTrack.getCanonicalPath(), mAudioSampleRate, MediaAudioExporter.CHANNELS, sc);

                Double startTime = Double.parseDouble(audioTrack.startTime);
                Double length = Double.parseDouble(audioTrack.duration);
                if (startTime < 0) {
                    // negative start time means we need to push it over to the right
                    startTime = -startTime;
                }

                // FIXME temporarily disable narration delay to match position in story while we switch to fixing localization
                // out.path = sxCon.delayAudio(out.path, startTime, length);
                mAudioTracksPaths.add(out);
                idxAudioTracks++;
            }

            mAudioTracksPaths.add(maOut);

            MediaDesc finalAudioMix = new MediaDesc();
            finalAudioMix.path = maOut.path + "-mix.wav";

            msg = mHandler.obtainMessage(0);
            msg.getData().putString("status", mContext.getString(R.string.mixing_tracks));
            mHandler.sendMessage(msg);

            sxCon.combineMix(mAudioTracksPaths, finalAudioMix);
            if (!new File(finalAudioMix.path).exists()) {
                throw new Exception("Audio rendering error");
            }
            maOut.path = finalAudioMix.path;
        }

        MediaDesc mMerge = new MediaDesc();
        mMerge.path = new File(mFileProject, "merge.mp4").getCanonicalPath();

        float videoFadeLen = mFadeLen;
        for (int i = 0; i < mMediaList.size(); i++) {
            MediaDesc media = mMediaList.get(i);
            if (media.duration == null) {
                media = ffmpegc.getInfo(media);
                media.duration = String.format(Locale.US, "%f", Float.parseFloat(media.duration) - (videoFadeLen));
            } else {
                float newDuration = Float.parseFloat(media.duration) - (videoFadeLen);
                media.duration = String.format(Locale.US, "%f", newDuration);
            }
            Timber.d("video clip start=" + media.startTime + " length=" + media.duration);
        }

        msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.trimming_merging));
        mHandler.sendMessage(msg);

        ffmpegc.concatAndTrimFilesMP4Stream(mMediaList, mMerge, mPreconvertClipsToMP4, mUseCatCmd, sc);

        msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.merging_video_audio));
        mHandler.sendMessage(msg);

        ffmpegc.combineAudioAndVideo(mMerge, maOut, mOut, sc);

        // processing complete message
        msg = mHandler.obtainMessage(0);
        mHandler.sendMessage(msg);

        // now scan for media to add to gallery
        File fileTest = new File(mOut.path);
        if (fileTest.exists() && fileTest.length() > 0) {
            MediaScannerConnection.scanFile(mContext, new String[] { mOut.path }, new String[] { mOut.mimeType }, new OnScanCompletedListener() {

                @Override
                public void onScanCompleted(String path, Uri uri) {
                    Message msg = mHandler.obtainMessage(4);
                    msg.getData().putString("path", mOut.path);
                    mHandler.sendMessage(msg);
                }
            });
        } else {
            msg = mHandler.obtainMessage(0);
            msg.getData().putString("error", mContext.getString(R.string.export_error_3));
            mHandler.sendMessage(msg);
        }
    } catch (Exception e) {
        Message msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.export_error_4) + ": " + e.getMessage());
        mHandler.sendMessage(msg);
        Timber.e(e, "error exporting");
    }
}
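export() reports progress entirely through mHandler: what = 0 messages carry a "status" or "error" string (or no data at all, for the bare completion message), and what = 4 signals that the finished file has been registered with the MediaScanner, with its location under "path". A minimal sketch of a Handler on the UI side that consumes these messages, assuming those codes and keys are the whole contract; the helper calls showError, updateProgressText, and onExportFinished are illustrative placeholders, not from the project:

// Hypothetical consumer of the messages posted by MediaVideoExporter.export().
// Requires android.os.Handler, android.os.Looper, android.os.Message, android.os.Bundle.
Handler mHandler = new Handler(Looper.getMainLooper()) {

    @Override
    public void handleMessage(Message msg) {
        Bundle data = msg.getData();
        switch (msg.what) {
            case 0:
                String error = data.getString("error");
                if (error != null) {
                    // surface the export failure to the user
                    showError(error);                      // illustrative UI helper
                } else {
                    String status = data.getString("status");
                    if (status != null) {
                        updateProgressText(status);        // illustrative UI helper
                    }
                    // a what = 0 message with no data is the plain "processing complete" signal
                }
                break;
            case 4:
                // the exported file exists and has been scanned into the gallery
                onExportFinished(data.getString("path"));  // illustrative UI helper
                break;
        }
    }
};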