Example 11 with MediaFormat

Use of android.media.MediaFormat in project ExoPlayer by google.

From the class MediaCodecVideoRenderer, method configureCodec.

@Override
protected void configureCodec(MediaCodecInfo codecInfo, MediaCodec codec, Format format, MediaCrypto crypto) throws DecoderQueryException {
    codecMaxValues = getCodecMaxValues(codecInfo, format, streamFormats);
    MediaFormat mediaFormat = getMediaFormat(format, codecMaxValues, deviceNeedsAutoFrcWorkaround, tunnelingAudioSessionId);
    codec.configure(mediaFormat, surface, crypto, 0);
    if (Util.SDK_INT >= 23 && tunneling) {
        tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(codec);
    }
}
Also used : MediaFormat(android.media.MediaFormat)
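
getCodecMaxValues and getMediaFormat are ExoPlayer-internal helpers that are not shown here. Below is a minimal plain-framework sketch of the same configure step, assuming an H.264 stream at a fixed 1920x1080 resolution, a caller-supplied Surface, and no DRM; ExoPlayer instead derives these values from Format and the computed CodecMaxValues. Passing flags = 0 configures the codec as a decoder.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;
import java.io.IOException;

public final class VideoDecoderConfigSketch {

    public static MediaCodec configureAvcDecoder(Surface surface) throws IOException {
        // Build a decoder input format; ExoPlayer's getMediaFormat() does this from Format
        // and adds the max-value hints computed in getCodecMaxValues().
        MediaFormat mediaFormat =
                MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 2 * 1024 * 1024);

        MediaCodec codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        // crypto is null for clear (non-DRM) content; flags = 0 means "configure as decoder".
        codec.configure(mediaFormat, surface, null, 0);
        return codec;
    }
}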

Example 12 with MediaFormat

Use of android.media.MediaFormat in project android_frameworks_base by crdroidandroid.

From the class MediaPlayer, method addSubtitleSource.

/** @hide */
public void addSubtitleSource(InputStream is, MediaFormat format) throws IllegalStateException {
    final InputStream fIs = is;
    final MediaFormat fFormat = format;
    if (is != null) {
        // Ensure all input streams are closed. It is also a handy
        // way to implement timeouts in the future.
        synchronized (mOpenSubtitleSources) {
            mOpenSubtitleSources.add(is);
        }
    } else {
        Log.w(TAG, "addSubtitleSource called with null InputStream");
    }
    getMediaTimeProvider();
    // process each subtitle in its own thread
    final HandlerThread thread = new HandlerThread("SubtitleReadThread", Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
    thread.start();
    Handler handler = new Handler(thread.getLooper());
    handler.post(new Runnable() {

        private int addTrack() {
            if (fIs == null || mSubtitleController == null) {
                return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
            }
            SubtitleTrack track = mSubtitleController.addTrack(fFormat);
            if (track == null) {
                return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
            }
            // TODO: do the conversion in the subtitle track
            Scanner scanner = new Scanner(fIs, "UTF-8");
            String contents = scanner.useDelimiter("\\A").next();
            synchronized (mOpenSubtitleSources) {
                mOpenSubtitleSources.remove(fIs);
            }
            scanner.close();
            synchronized (mIndexTrackPairs) {
                mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
            }
            Handler h = mTimeProvider.mEventHandler;
            int what = TimeProvider.NOTIFY;
            int arg1 = TimeProvider.NOTIFY_TRACK_DATA;
            Pair<SubtitleTrack, byte[]> trackData = Pair.create(track, contents.getBytes());
            Message m = h.obtainMessage(what, arg1, 0, trackData);
            h.sendMessage(m);
            return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
        }

        public void run() {
            int res = addTrack();
            if (mEventHandler != null) {
                Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
                mEventHandler.sendMessage(m);
            }
            thread.getLooper().quitSafely();
        }
    });
}
Also used : MediaFormat(android.media.MediaFormat) Scanner(java.util.Scanner) HandlerThread(android.os.HandlerThread) Message(android.os.Message) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) Runnable(java.lang.Runnable) Handler(android.os.Handler) Pair(android.util.Pair)
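
MediaPlayer.addSubtitleSource is marked @hide, so applications normally reach it through the public VideoView.addSubtitleSource(InputStream, MediaFormat), which either forwards the pair immediately or queues it until openVideo() creates the player (see Example 14). A small usage sketch, assuming a WebVTT file named captions.vtt in the app's assets and an English language tag:

import android.content.Context;
import android.media.MediaFormat;
import android.widget.VideoView;
import java.io.IOException;
import java.io.InputStream;

public final class SubtitleSourceSketch {

    public static void attachWebVttTrack(Context context, VideoView videoView) throws IOException {
        // "captions.vtt" and the "en" language tag are illustrative assumptions.
        InputStream subtitles = context.getAssets().open("captions.vtt");
        MediaFormat format =
                MediaFormat.createSubtitleFormat(MediaFormat.MIMETYPE_TEXT_VTT, "en");
        // The pair ends up in MediaPlayer.addSubtitleSource(), which reads the stream on
        // its own HandlerThread as shown above and closes it when parsing is done.
        videoView.addSubtitleSource(subtitles, format);
    }
}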

Example 13 with MediaFormat

Use of android.media.MediaFormat in project android_frameworks_base by crdroidandroid.

From the class MediaPlayer, method addTimedTextSource.

/**
     * Adds an external timed text file (FileDescriptor).
     *
     * It is the caller's responsibility to close the file descriptor.
     * It is safe to do so as soon as this call returns.
     *
     * Currently supported format is SubRip. Note that a single external timed text source may
     * contain multiple tracks in it. One can find the total number of available tracks
     * using {@link #getTrackInfo()} to see what additional tracks become available
     * after this method call.
     *
     * @param fd the FileDescriptor for the file you want to play
     * @param offset the offset into the file where the data to be played starts, in bytes
     * @param length the length in bytes of the data to be played
     * @param mime The mime type of the file. Must be one of the mime types listed above.
     * @throws IllegalArgumentException if the mimeType is not supported.
     * @throws IllegalStateException if called in an invalid state.
     */
public void addTimedTextSource(FileDescriptor fd, long offset, long length, String mime) throws IllegalArgumentException, IllegalStateException {
    if (!availableMimeTypeForExternalSource(mime)) {
        throw new IllegalArgumentException("Illegal mimeType for timed text source: " + mime);
    }
    final FileDescriptor dupedFd;
    try {
        dupedFd = Libcore.os.dup(fd);
    } catch (ErrnoException ex) {
        Log.e(TAG, ex.getMessage(), ex);
        throw new RuntimeException(ex);
    }
    final MediaFormat fFormat = new MediaFormat();
    fFormat.setString(MediaFormat.KEY_MIME, mime);
    fFormat.setInteger(MediaFormat.KEY_IS_TIMED_TEXT, 1);
    // A MediaPlayer created by a VideoView should already have its mSubtitleController set.
    if (mSubtitleController == null) {
        setSubtitleAnchor();
    }
    if (!mSubtitleController.hasRendererFor(fFormat)) {
        // test and add not atomic
        Context context = ActivityThread.currentApplication();
        mSubtitleController.registerRenderer(new SRTRenderer(context, mEventHandler));
    }
    final SubtitleTrack track = mSubtitleController.addTrack(fFormat);
    synchronized (mIndexTrackPairs) {
        mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
    }
    getMediaTimeProvider();
    final long offset2 = offset;
    final long length2 = length;
    final HandlerThread thread = new HandlerThread("TimedTextReadThread", Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
    thread.start();
    Handler handler = new Handler(thread.getLooper());
    handler.post(new Runnable() {

        private int addTrack() {
            final ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try {
                Libcore.os.lseek(dupedFd, offset2, OsConstants.SEEK_SET);
                byte[] buffer = new byte[4096];
                for (long total = 0; total < length2; ) {
                    int bytesToRead = (int) Math.min(buffer.length, length2 - total);
                    int bytes = IoBridge.read(dupedFd, buffer, 0, bytesToRead);
                    if (bytes < 0) {
                        break;
                    } else {
                        bos.write(buffer, 0, bytes);
                        total += bytes;
                    }
                }
                Handler h = mTimeProvider.mEventHandler;
                int what = TimeProvider.NOTIFY;
                int arg1 = TimeProvider.NOTIFY_TRACK_DATA;
                Pair<SubtitleTrack, byte[]> trackData = Pair.create(track, bos.toByteArray());
                Message m = h.obtainMessage(what, arg1, 0, trackData);
                h.sendMessage(m);
                return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
            } catch (Exception e) {
                Log.e(TAG, e.getMessage(), e);
                return MEDIA_INFO_TIMED_TEXT_ERROR;
            } finally {
                try {
                    Libcore.os.close(dupedFd);
                } catch (ErrnoException e) {
                    Log.e(TAG, e.getMessage(), e);
                }
            }
        }

        public void run() {
            int res = addTrack();
            if (mEventHandler != null) {
                Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
                mEventHandler.sendMessage(m);
            }
            thread.getLooper().quitSafely();
        }
    });
}
Also used : MediaFormat(android.media.MediaFormat) Context(android.content.Context) Message(android.os.Message) Handler(android.os.Handler) ByteArrayOutputStream(java.io.ByteArrayOutputStream) AssetFileDescriptor(android.content.res.AssetFileDescriptor) FileDescriptor(java.io.FileDescriptor) ErrnoException(android.system.ErrnoException) IOException(java.io.IOException) ErrnoException(android.system.ErrnoException) HandlerThread(android.os.HandlerThread) Runnable(java.lang.Runnable) Pair(android.util.Pair)
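
The public counterpart of this path is MediaPlayer.addTimedTextSource(FileDescriptor, long, long, String). A usage sketch, assuming a SubRip file named captions.srt in the app's assets; because the method dups the descriptor internally (as above), the AssetFileDescriptor can be closed as soon as the call returns:

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.media.MediaPlayer;
import java.io.IOException;

public final class TimedTextSourceSketch {

    public static void attachSrtTrack(Context context, MediaPlayer player) throws IOException {
        // "captions.srt" is an illustrative asset name.
        AssetFileDescriptor afd = context.getAssets().openFd("captions.srt");
        try {
            player.addTimedTextSource(
                    afd.getFileDescriptor(),
                    afd.getStartOffset(),
                    afd.getLength(),
                    MediaPlayer.MEDIA_MIMETYPE_TEXT_SUBRIP);
        } finally {
            // Safe to close immediately: the method dups the descriptor before returning.
            afd.close();
        }
    }
}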

Example 14 with MediaFormat

Use of android.media.MediaFormat in project android_frameworks_base by crdroidandroid.

From the class VideoView, method openVideo.

private void openVideo() {
    if (mUri == null || mSurfaceHolder == null) {
        // not ready for playback just yet, will try again later
        return;
    }
    // we shouldn't clear the target state, because somebody might have
    // called start() previously
    release(false);
    AudioManager am = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
    am.requestAudioFocus(null, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
    try {
        mMediaPlayer = new MediaPlayer();
        // TODO: create SubtitleController in MediaPlayer, but we need
        // a context for the subtitle renderers
        final Context context = getContext();
        final SubtitleController controller = new SubtitleController(context, mMediaPlayer.getMediaTimeProvider(), mMediaPlayer);
        controller.registerRenderer(new WebVttRenderer(context));
        controller.registerRenderer(new TtmlRenderer(context));
        controller.registerRenderer(new Cea708CaptionRenderer(context));
        controller.registerRenderer(new ClosedCaptionRenderer(context));
        mMediaPlayer.setSubtitleAnchor(controller, this);
        if (mAudioSession != 0) {
            mMediaPlayer.setAudioSessionId(mAudioSession);
        } else {
            mAudioSession = mMediaPlayer.getAudioSessionId();
        }
        mMediaPlayer.setOnPreparedListener(mPreparedListener);
        mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener);
        mMediaPlayer.setOnCompletionListener(mCompletionListener);
        mMediaPlayer.setOnErrorListener(mErrorListener);
        mMediaPlayer.setOnInfoListener(mInfoListener);
        mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
        mCurrentBufferPercentage = 0;
        mMediaPlayer.setDataSource(mContext, mUri, mHeaders);
        mMediaPlayer.setDisplay(mSurfaceHolder);
        mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
        mMediaPlayer.setScreenOnWhilePlaying(true);
        mMediaPlayer.prepareAsync();
        for (Pair<InputStream, MediaFormat> pending : mPendingSubtitleTracks) {
            try {
                mMediaPlayer.addSubtitleSource(pending.first, pending.second);
            } catch (IllegalStateException e) {
                mInfoListener.onInfo(mMediaPlayer, MediaPlayer.MEDIA_INFO_UNSUPPORTED_SUBTITLE, 0);
            }
        }
        // we don't set the target state here either, but preserve the
        // target state that was there before.
        mCurrentState = STATE_PREPARING;
        attachMediaController();
    } catch (IOException ex) {
        Log.w(TAG, "Unable to open content: " + mUri, ex);
        mCurrentState = STATE_ERROR;
        mTargetState = STATE_ERROR;
        mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
        return;
    } catch (IllegalArgumentException ex) {
        Log.w(TAG, "Unable to open content: " + mUri, ex);
        mCurrentState = STATE_ERROR;
        mTargetState = STATE_ERROR;
        mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
        return;
    } finally {
        mPendingSubtitleTracks.clear();
    }
}
Also used : Context(android.content.Context) MediaFormat(android.media.MediaFormat) InputStream(java.io.InputStream) TtmlRenderer(android.media.TtmlRenderer) IOException(java.io.IOException) WebVttRenderer(android.media.WebVttRenderer) AudioManager(android.media.AudioManager) SubtitleController(android.media.SubtitleController) Cea708CaptionRenderer(android.media.Cea708CaptionRenderer) ClosedCaptionRenderer(android.media.ClosedCaptionRenderer) MediaPlayer(android.media.MediaPlayer)
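
openVideo() is private plumbing that runs once both a URI and a SurfaceHolder are available. A minimal usage sketch from the public side, assuming a programmatically created VideoView and an illustrative stream URL:

import android.app.Activity;
import android.net.Uri;
import android.os.Bundle;
import android.widget.MediaController;
import android.widget.VideoView;

public class PlayerActivitySketch extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // The stream URL is an illustrative assumption.
        VideoView videoView = new VideoView(this);
        setContentView(videoView);
        videoView.setMediaController(new MediaController(this));
        videoView.setVideoURI(Uri.parse("https://example.com/movie.mp4"));
        // Before the player is prepared, start() just records the target state;
        // playback begins once openVideo() has created and prepared the MediaPlayer.
        videoView.start();
    }
}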

Example 15 with MediaFormat

Use of android.media.MediaFormat in project android_frameworks_base by AOSPA.

From the class Camera2RecordingTest, method validateRecording.

private void validateRecording(Size sz, int expectedDurationMs) throws Exception {
    File outFile = new File(mOutMediaFileName);
    assertTrue("No video is recorded", outFile.exists());
    MediaExtractor extractor = new MediaExtractor();
    try {
        extractor.setDataSource(mOutMediaFileName);
        long durationUs = 0;
        int width = -1, height = -1;
        int numTracks = extractor.getTrackCount();
        final String VIDEO_MIME_TYPE = "video";
        for (int i = 0; i < numTracks; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.contains(VIDEO_MIME_TYPE)) {
                Log.i(TAG, "video format is: " + format.toString());
                durationUs = format.getLong(MediaFormat.KEY_DURATION);
                width = format.getInteger(MediaFormat.KEY_WIDTH);
                height = format.getInteger(MediaFormat.KEY_HEIGHT);
                break;
            }
        }
        Size videoSz = new Size(width, height);
        assertTrue("Video size doesn't match, expected " + sz.toString() + " got " + videoSz.toString(), videoSz.equals(sz));
        int duration = (int) (durationUs / 1000);
        if (VERBOSE) {
            Log.v(TAG, String.format("Video duration: recorded %dms, expected %dms", duration, expectedDurationMs));
        }
        // TODO: Don't skip this for video snapshot
        if (!mStaticInfo.isHardwareLevelLegacy()) {
            assertTrue(String.format("Camera %s: Video duration doesn't match: recorded %dms, expected %dms.", mCamera.getId(), duration, expectedDurationMs), Math.abs(duration - expectedDurationMs) < DURATION_MARGIN * expectedDurationMs);
        }
    } finally {
        extractor.release();
        if (!DEBUG_DUMP) {
            outFile.delete();
        }
    }
}
Also used : MediaFormat(android.media.MediaFormat) Size(android.util.Size) MediaExtractor(android.media.MediaExtractor) File(java.io.File)
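
The validation boils down to a reusable MediaExtractor pattern: iterate the track formats, find the video track by MIME prefix, and read the size keys. A small helper distilled from it, assuming only a caller-supplied file path (the duration and margin checks from the test are omitted):

import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Size;
import java.io.IOException;

public final class VideoTrackInspector {

    // Returns the width/height of the first video track, or null if there is none.
    public static Size readVideoSize(String path) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        try {
            extractor.setDataSource(path);
            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);
                if (mime != null && mime.startsWith("video/")) {
                    return new Size(
                            format.getInteger(MediaFormat.KEY_WIDTH),
                            format.getInteger(MediaFormat.KEY_HEIGHT));
                }
            }
            return null;
        } finally {
            extractor.release();
        }
    }
}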

Aggregations

MediaFormat (android.media.MediaFormat) 130
IOException (java.io.IOException) 29
MediaCodec (android.media.MediaCodec) 24
ByteBuffer (java.nio.ByteBuffer) 22
MediaExtractor (android.media.MediaExtractor) 18
SuppressLint (android.annotation.SuppressLint) 16
Handler (android.os.Handler) 15
Test (org.junit.Test) 14
TargetApi (android.annotation.TargetApi) 13
InputStream (java.io.InputStream) 12
Context (android.content.Context) 10
HandlerThread (android.os.HandlerThread) 10
Message (android.os.Message) 10
Pair (android.util.Pair) 10
Format (com.google.android.exoplayer2.Format) 10
Runnable (java.lang.Runnable) 10
Nullable (androidx.annotation.Nullable) 9
File (java.io.File) 9
BufferInfo (android.media.MediaCodec.BufferInfo) 8
Surface (android.view.Surface) 8