
Example 1 with OnScanCompletedListener

Use of android.media.MediaScannerConnection.OnScanCompletedListener in the project storymaker by StoryMaker.

The export method of the class MediaVideoExporter:

public void export() {
    try {
        //get lengths of all clips
        ffmpegc = new FfmpegController(mContext, mFileProject);
        //first let's get the audio done
        maOut = new MediaDesc();
        maOut.path = new File(mFileProject, "tmp.wav").getCanonicalPath();
        Message msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.processing_audio));
        mHandler.sendMessage(msg);
        //extract the audio from the video clips and crossfade it
        maExport = new MediaAudioExporter(mContext, mHandler, mMediaList, mFileProject, maOut);
        maExport.setFadeLength(mFadeLen);
        maExport.export();
        SoxController sxCon = new SoxController(mContext, sc);
        //now merge all audio tracks into main audio track
        if (mAudioTracks.size() > 0) {
            ArrayList<MediaDesc> mAudioTracksPaths = new ArrayList<MediaDesc>();
            int idxAudioTracks = 0;
            for (MediaDesc audioTrack : mAudioTracks) {
                msg = mHandler.obtainMessage(0);
                msg.getData().putString("status", mContext.getString(R.string.processing_audio_multiple) + " " + (idxAudioTracks + 1) + "/" + mAudioTracks.size());
                mHandler.sendMessage(msg);
                File fileAudioTrack = new File(mFileProject, idxAudioTracks + "-tmp.wav");
                MediaDesc out = ffmpegc.convertToWaveAudio(audioTrack, fileAudioTrack.getCanonicalPath(), mAudioSampleRate, MediaAudioExporter.CHANNELS, sc);
                double startTime = Double.parseDouble(audioTrack.startTime);
                double length = Double.parseDouble(audioTrack.duration);
                if (startTime < 0) {
                    // negative start time means we need to push it over to the right
                    startTime = -startTime;
                }
                // FIXME temporarily disable narration delay to match position in story while we switch to fixing localization
                // out.path = sxCon.delayAudio(out.path, startTime, length);
                mAudioTracksPaths.add(out);
                idxAudioTracks++;
            }
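            // include the main clip audio rendered above (maOut) in the mix alongside the narration tracks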
            mAudioTracksPaths.add(maOut);
            MediaDesc finalAudioMix = new MediaDesc();
            finalAudioMix.path = maOut.path + "-mix.wav";
            msg = mHandler.obtainMessage(0);
            msg.getData().putString("status", mContext.getString(R.string.mixing_tracks));
            mHandler.sendMessage(msg);
            sxCon.combineMix(mAudioTracksPaths, finalAudioMix);
            if (!new File(finalAudioMix.path).exists()) {
                throw new Exception("Audio rendering error");
            }
            maOut.path = finalAudioMix.path;
        }
        MediaDesc mMerge = new MediaDesc();
        mMerge.path = new File(mFileProject, "merge.mp4").getCanonicalPath();
        float videoFadeLen = mFadeLen;
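        // reduce each clip's stated duration by the fade length to account for the crossfade overlap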
        for (int i = 0; i < mMediaList.size(); i++) {
            MediaDesc media = mMediaList.get(i);
            if (media.duration == null) {
                media = ffmpegc.getInfo(media);
                media.duration = String.format(Locale.US, "%f", Float.parseFloat(media.duration) - (videoFadeLen));
            } else {
                float newDuration = Float.parseFloat(media.duration) - (videoFadeLen);
                media.duration = String.format(Locale.US, "%f", newDuration);
            }
            Timber.d("video clip start=" + media.startTime + " length=" + media.duration);
        }
        msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.trimming_merging));
        mHandler.sendMessage(msg);
        ffmpegc.concatAndTrimFilesMP4Stream(mMediaList, mMerge, mPreconvertClipsToMP4, mUseCatCmd, sc);
        msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.merging_video_audio));
        mHandler.sendMessage(msg);
        ffmpegc.combineAudioAndVideo(mMerge, maOut, mOut, sc);
        //processing complete message
        msg = mHandler.obtainMessage(0);
        mHandler.sendMessage(msg);
        //now scan for media to add to gallery
        File fileTest = new File(mOut.path);
        if (fileTest.exists() && fileTest.length() > 0) {
            MediaScannerConnection.scanFile(mContext, new String[] { mOut.path }, new String[] { mOut.mimeType }, new OnScanCompletedListener() {

                @Override
                public void onScanCompleted(String path, Uri uri) {
                    Message msg = mHandler.obtainMessage(4);
                    msg.getData().putString("path", mOut.path);
                    mHandler.sendMessage(msg);
                }
            });
        } else {
            msg = mHandler.obtainMessage(0);
            msg.getData().putString("error", mContext.getString(R.string.export_error_3));
            mHandler.sendMessage(msg);
        }
    } catch (Exception e) {
        Message msg = mHandler.obtainMessage(0);
        msg.getData().putString("status", mContext.getString(R.string.export_error_4) + ": " + e.getMessage());
        mHandler.sendMessage(msg);
        Timber.e(e, "error exporting");
    }
}
Also used: OnScanCompletedListener (android.media.MediaScannerConnection.OnScanCompletedListener), Message (android.os.Message), ArrayList (java.util.ArrayList), MediaDesc (org.ffmpeg.android.MediaDesc), Uri (android.net.Uri), FfmpegController (org.ffmpeg.android.FfmpegController), File (java.io.File), SoxController (net.sourceforge.sox.SoxController)
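
The part of this example that actually exercises OnScanCompletedListener is the final block: once the exported file exists and is non-empty, MediaScannerConnection.scanFile hands it to the system media scanner so it shows up in gallery apps, and the listener receives the scanned path and its content Uri. Below is a minimal, self-contained sketch of that step on its own; the class name GalleryScanExample, the addToGallery helper, and the hard-coded video/mp4 mime type are illustrative placeholders, not part of the storymaker code.

import java.io.File;

import android.content.Context;
import android.media.MediaScannerConnection;
import android.media.MediaScannerConnection.OnScanCompletedListener;
import android.net.Uri;
import android.util.Log;

public class GalleryScanExample {

    // Hypothetical helper for illustration: "context" and "exportedFile"
    // stand in for whatever the calling code has at hand.
    public static void addToGallery(Context context, File exportedFile) {
        MediaScannerConnection.scanFile(
            context,
            new String[] { exportedFile.getAbsolutePath() },
            // assumed mime type; the export() above passes mOut.mimeType instead
            new String[] { "video/mp4" },
            new OnScanCompletedListener() {
                @Override
                public void onScanCompleted(String path, Uri uri) {
                    // Runs once the scanner has indexed the file; uri is the
                    // content:// Uri other apps can use to open it.
                    Log.d("GalleryScan", "scanned " + path + " -> " + uri);
                }
            });
    }
}

Note that onScanCompleted is not guaranteed to arrive on the main thread, which is why the export() method above forwards the result through mHandler rather than touching UI state directly in the callback.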

Aggregations

OnScanCompletedListener (android.media.MediaScannerConnection.OnScanCompletedListener): 1
Uri (android.net.Uri): 1
Message (android.os.Message): 1
File (java.io.File): 1
ArrayList (java.util.ArrayList): 1
SoxController (net.sourceforge.sox.SoxController): 1
FfmpegController (org.ffmpeg.android.FfmpegController): 1
MediaDesc (org.ffmpeg.android.MediaDesc): 1