Example 1 with SoxController

Use of net.sourceforge.sox.SoxController in project storymaker by StoryMaker.

The class MediaAudioExporter, method concatMediaFiles:

private void concatMediaFiles(ArrayList<MediaDesc> listMediaDesc, MediaDesc mdout) throws Exception {
    //now add 1 second cross fade to each audio file and cat them together
    SoxController sxCon = new SoxController(mContext, sc);
    int exportBitRate = mdout.audioBitrate;
    String exportCodec = mdout.audioCodec;
    FfmpegController ffmpegc = new FfmpegController(mContext, mFileTemp);
    ArrayList<MediaDesc> alAudio = new ArrayList<MediaDesc>();
    Message msg = null;
    //convert each input file to a WAV so we can use Sox to process
    int wavIdx = 0;
    for (MediaDesc mediaIn : listMediaDesc) {
        if (new File(mediaIn.path).exists()) {
            msg = mHandler.obtainMessage(0);
            msg.getData().putString("status", mContext.getString(R.string.extracting_audio_multiple) + " " + (wavIdx + 1) + "/" + listMediaDesc.size());
            mHandler.sendMessage(msg);
            MediaDesc audioOut = ffmpegc.convertToWaveAudio(mediaIn, new File(mFileTemp, wavIdx + ".wav").getCanonicalPath(), mAudioSampleRate, CHANNELS, sc);
            alAudio.add(audioOut);
            float duration = (float) sxCon.getLength(new File(audioOut.path).getCanonicalPath());
            Timber.d("got clip " + wavIdx + " length: " + duration);
            if (mediaIn.duration == null) {
                mediaIn.duration = String.format(Locale.US, "%f", duration);
            } else {
                Timber.d("found clip " + wavIdx + " existing length: " + mediaIn.duration);
            }
            wavIdx++;
        } else {
            throw new FileNotFoundException(mediaIn.path);
        }
    }
    String fileOut = alAudio.get(0).path;
    msg = mHandler.obtainMessage(0);
    msg.getData().putString("status", mContext.getString(R.string.crossfading_audio));
    mHandler.sendMessage(msg);
    for (int i = 1; i < alAudio.size(); i++) {
        if (i > 1)
            alAudio.get(0).audioVolume = 1.0f;
        //String fileAdd = new File(alAudio.get(i).path).getCanonicalPath();
        CrossfadeCat xCat = new CrossfadeCat(sxCon, alAudio.get(0), alAudio.get(i), fadeLen, alAudio.get(0));
        xCat.start();
        msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.crossfading_audio_multiple) + " " + (i + 1) + "/" + alAudio.size());
        mHandler.sendMessage(msg);
    }
    msg = mHandler.obtainMessage(0);
    msg.getData().putString("status", mContext.getString(R.string.fade_audio));
    mHandler.sendMessage(msg);
    //1 second fade in and fade out, t = triangle or linear
    //String fadeLenStr = sxCon.formatTimePeriod(fadeLen);
    String fadeFileOut = sxCon.fadeAudio(fileOut, fadeType, fadeLen, 0, fadeLen);
    //now export the final file to our requested output format
    //mOut.mimeType = AppConstants.MimeTypes.MP4_AUDIO;
    MediaDesc mdFinalIn = new MediaDesc();
    mdFinalIn.path = fadeFileOut;
    mdout.audioBitrate = exportBitRate;
    mdout.audioCodec = exportCodec;
    msg = mHandler.obtainMessage(0);
    msg.getData().putString("status", mContext.getString(R.string.converting_audio));
    mHandler.sendMessage(msg);
    MediaDesc exportOut = ffmpegc.convertTo3GPAudio(mdFinalIn, mdout, sc);
}
Also used: Message (android.os.Message), ArrayList (java.util.ArrayList), FileNotFoundException (java.io.FileNotFoundException), FfmpegController (org.ffmpeg.android.FfmpegController), MediaDesc (org.ffmpeg.android.MediaDesc), SoxController (net.sourceforge.sox.SoxController), File (java.io.File), CrossfadeCat (net.sourceforge.sox.CrossfadeCat)
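
The crossfade loop above folds each converted WAV into the first clip one pass at a time, then fades the combined file in and out. A minimal sketch of that pattern in isolation, with the CrossfadeCat constructor and SoxController.fadeAudio call shapes inferred from the call sites above (parameter types and the helper name crossfadeAndFade are assumptions, not the library's documented API):

private String crossfadeAndFade(SoxController sxCon, ArrayList<MediaDesc> clips,
        float fadeLen, String fadeType) throws Exception {
    // the running mix lives in the first clip; each pass appends the next clip with a crossfade
    MediaDesc running = clips.get(0);
    for (int i = 1; i < clips.size(); i++) {
        // constructor arguments mirror the usage above: controller, running mix, next clip, fade length, output
        new CrossfadeCat(sxCon, running, clips.get(i), fadeLen, running).start();
    }
    // fade the combined file in and out; fadeType "t" is the sox triangle/linear fade
    return sxCon.fadeAudio(running.path, fadeType, fadeLen, 0, fadeLen);
}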

Example 2 with SoxController

Use of net.sourceforge.sox.SoxController in project storymaker by StoryMaker.

The class MediaVideoExporter, method export:

public void export() {
    try {
        //get lengths of all clips
        ffmpegc = new FfmpegController(mContext, mFileProject);
        //first let's get the audio done
        maOut = new MediaDesc();
        maOut.path = new File(mFileProject, "tmp.wav").getCanonicalPath();
        Message msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.processing_audio));
        mHandler.sendMessage(msg);
        //export video clips and crossfade
        maExport = new MediaAudioExporter(mContext, mHandler, mMediaList, mFileProject, maOut);
        maExport.setFadeLength(mFadeLen);
        maExport.export();
        SoxController sxCon = new SoxController(mContext, sc);
        //now merge all audio tracks into main audio track
        if (mAudioTracks.size() > 0) {
            ArrayList<MediaDesc> mAudioTracksPaths = new ArrayList<MediaDesc>();
            int idxAudioTracks = 0;
            for (MediaDesc audioTrack : mAudioTracks) {
                msg = mHandler.obtainMessage(0);
                msg.getData().putString("status", mContext.getString(R.string.processing_audio_multiple) + " " + (idxAudioTracks + 1) + "/" + mAudioTracks.size());
                mHandler.sendMessage(msg);
                File fileAudioTrack = new File(mFileProject, idxAudioTracks + "-tmp.wav");
                MediaDesc out = ffmpegc.convertToWaveAudio(audioTrack, fileAudioTrack.getCanonicalPath(), mAudioSampleRate, MediaAudioExporter.CHANNELS, sc);
                Double startTime = Double.parseDouble(audioTrack.startTime);
                Double length = Double.parseDouble(audioTrack.duration);
                if (startTime < 0) {
                    // negative start time means we need to push it over to the right
                    startTime = -startTime;
                }
                // FIXME temporarily disable narration delay to match position in story while we switch to fixing localization
                //out.path = sxCon.delayAudio(out.path, startTime, length);
                mAudioTracksPaths.add(out);
                idxAudioTracks++;
            }
            mAudioTracksPaths.add(maOut);
            MediaDesc finalAudioMix = new MediaDesc();
            finalAudioMix.path = maOut.path + "-mix.wav";
            msg = mHandler.obtainMessage(0);
            msg.getData().putString("status", mContext.getString(R.string.mixing_tracks));
            mHandler.sendMessage(msg);
            sxCon.combineMix(mAudioTracksPaths, finalAudioMix);
            if (!new File(finalAudioMix.path).exists()) {
                throw new Exception("Audio rendering error");
            }
            maOut.path = finalAudioMix.path;
        }
        MediaDesc mMerge = new MediaDesc();
        mMerge.path = new File(mFileProject, "merge.mp4").getCanonicalPath();
        float videoFadeLen = mFadeLen;
        for (int i = 0; i < mMediaList.size(); i++) {
            MediaDesc media = mMediaList.get(i);
            if (media.duration == null) {
                media = ffmpegc.getInfo(media);
                media.duration = String.format(Locale.US, "%f", Float.parseFloat(media.duration) - (videoFadeLen));
            } else {
                float newDuration = Float.parseFloat(media.duration) - (videoFadeLen);
                media.duration = String.format(Locale.US, "%f", newDuration);
            }
            Timber.d("video clip start=" + media.startTime + " length=" + media.duration);
        }
        msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.trimming_merging));
        mHandler.sendMessage(msg);
        ffmpegc.concatAndTrimFilesMP4Stream(mMediaList, mMerge, mPreconvertClipsToMP4, mUseCatCmd, sc);
        msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.merging_video_audio));
        mHandler.sendMessage(msg);
        ffmpegc.combineAudioAndVideo(mMerge, maOut, mOut, sc);
        //processing complete message
        msg = mHandler.obtainMessage(0);
        mHandler.sendMessage(msg);
        //now scan for media to add to gallery
        File fileTest = new File(mOut.path);
        if (fileTest.exists() && fileTest.length() > 0) {
            MediaScannerConnection.scanFile(mContext, new String[] { mOut.path }, new String[] { mOut.mimeType }, new OnScanCompletedListener() {

                @Override
                public void onScanCompleted(String path, Uri uri) {
                    Message msg = mHandler.obtainMessage(4);
                    msg.getData().putString("path", mOut.path);
                    mHandler.sendMessage(msg);
                }
            });
        } else {
            msg = mHandler.obtainMessage(0);
            msg.getData().putString("error", mContext.getString(R.string.export_error_3));
            mHandler.sendMessage(msg);
        }
    } catch (Exception e) {
        Message msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.export_error_4) + ": " + e.getMessage());
        mHandler.sendMessage(msg);
        Timber.e(e, "error exporting");
    }
}
Also used: OnScanCompletedListener (android.media.MediaScannerConnection.OnScanCompletedListener), Message (android.os.Message), ArrayList (java.util.ArrayList), MediaDesc (org.ffmpeg.android.MediaDesc), Uri (android.net.Uri), FfmpegController (org.ffmpeg.android.FfmpegController), File (java.io.File), SoxController (net.sourceforge.sox.SoxController)
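
After the per-clip audio export, the method mixes any extra narration tracks into the master WAV with combineMix and finally muxes the result with the merged video. A minimal sketch of that tail end of the pipeline, with the combineMix and combineAudioAndVideo call shapes inferred from the call sites above; the ShellCallback type for sc and the helper name mixAndMux are assumptions:

private void mixAndMux(SoxController sxCon, FfmpegController ffmpegc,
        ArrayList<MediaDesc> audioTracks, MediaDesc masterAudio,
        MediaDesc mergedVideo, MediaDesc out, ShellUtils.ShellCallback sc) throws Exception {
    // mix the extra tracks together with the master clip/narration audio
    audioTracks.add(masterAudio);
    MediaDesc finalMix = new MediaDesc();
    finalMix.path = masterAudio.path + "-mix.wav";
    sxCon.combineMix(audioTracks, finalMix);
    if (!new File(finalMix.path).exists()) {
        throw new Exception("Audio rendering error");
    }
    // mux the mixed audio track with the already-concatenated video
    ffmpegc.combineAudioAndVideo(mergedVideo, finalMix, out, sc);
}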

Aggregations

Message (android.os.Message): 2
File (java.io.File): 2
ArrayList (java.util.ArrayList): 2
SoxController (net.sourceforge.sox.SoxController): 2
FfmpegController (org.ffmpeg.android.FfmpegController): 2
MediaDesc (org.ffmpeg.android.MediaDesc): 2
OnScanCompletedListener (android.media.MediaScannerConnection.OnScanCompletedListener): 1
Uri (android.net.Uri): 1
FileNotFoundException (java.io.FileNotFoundException): 1
CrossfadeCat (net.sourceforge.sox.CrossfadeCat): 1