Search in sources :

Example 81 with MediaFormat

use of android.media.MediaFormat in project grafika by google.

the class MoviePlayer method play.

/**
 * Decodes the video stream, sending frames to the surface.
 * <p>
 * Does not return until video playback is complete, or we get a "stop" signal from
 * frameCallback.
 */
public void play() throws IOException {
    MediaExtractor extractor = null;
    MediaCodec decoder = null;
    // The MediaExtractor error messages aren't very useful.  Check to see if the input
    // file exists so we can throw a better one if it's not there.
    if (!mSourceFile.canRead()) {
        throw new FileNotFoundException("Unable to read " + mSourceFile);
    }
    try {
        extractor = new MediaExtractor();
        extractor.setDataSource(mSourceFile.toString());
        int trackIndex = selectTrack(extractor);
        if (trackIndex < 0) {
            throw new RuntimeException("No video track found in " + mSourceFile);
        }
        extractor.selectTrack(trackIndex);
        MediaFormat format = extractor.getTrackFormat(trackIndex);
        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor.  It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.getString(MediaFormat.KEY_MIME);
        decoder = MediaCodec.createDecoderByType(mime);
        decoder.configure(format, mOutputSurface, null, 0);
        decoder.start();
        doExtract(extractor, trackIndex, decoder, mFrameCallback);
    } finally {
        // release everything we grabbed
        if (decoder != null) {
            decoder.stop();
            decoder.release();
            decoder = null;
        }
        if (extractor != null) {
            extractor.release();
            extractor = null;
        }
    }
}
Also used : MediaFormat(android.media.MediaFormat) MediaCodec(android.media.MediaCodec) FileNotFoundException(java.io.FileNotFoundException) MediaExtractor(android.media.MediaExtractor)
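
Note: play() relies on two helpers that aren't reproduced above, selectTrack() and doExtract(). A minimal sketch of the track-selection step, assuming the usual MediaExtractor/MediaFormat idiom (an illustration, not the exact grafika source):

// Hypothetical helper: returns the index of the first video track, or -1 if none exists.
private static int selectTrack(MediaExtractor extractor) {
    int numTracks = extractor.getTrackCount();
    for (int i = 0; i < numTracks; i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime != null && mime.startsWith("video/")) {
            return i;
        }
    }
    return -1;
}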

Example 82 with MediaFormat

use of android.media.MediaFormat in project grafika by google.

the class GeneratedMovie method drainEncoder.

/**
 * Extracts all pending data from the encoder.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 */
protected void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE)
        Log.d(TAG, "drainEncoder(" + endOfStream + ")");
    if (endOfStream) {
        if (VERBOSE)
            Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }
    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                // out of while
                break;
            } else {
                if (VERBOSE)
                    Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);
            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
        // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE)
                    Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }
                // adjust the ByteBuffer values to match BufferInfo
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE)
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
            }
            mEncoder.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE)
                        Log.d(TAG, "end of stream reached");
                }
                // out of while
                break;
            }
        }
    }
}
Also used : MediaFormat(android.media.MediaFormat) ByteBuffer(java.nio.ByteBuffer)
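
Note: drainEncoder() assumes that mEncoder, mMuxer, mBufferInfo, and mMuxerStarted were prepared elsewhere in GeneratedMovie. A rough sketch of the surface-input encoder setup this drain loop pairs with (codec, resolution, bit rate, and file path are placeholder assumptions, not the project's actual values):

// Sketch of encoder/muxer preparation; MediaMuxer's constructor throws IOException.
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 640, 480);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);

mEncoder = MediaCodec.createEncoderByType("video/avc");
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();  // frames are rendered into this Surface
mEncoder.start();

mBufferInfo = new MediaCodec.BufferInfo();
mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);  // outputPath: placeholder
mMuxerStarted = false;  // drainEncoder() starts the muxer once the output format is known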

Example 83 with MediaFormat

use of android.media.MediaFormat in project grafika by google.

the class SoftInputSurfaceActivity method drainEncoder.

/**
 * Extracts all pending data from the encoder.
 * <p>
 * If endOfStream is not set, this returns when there is no more data to drain.  If it
 * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
 * Calling this with endOfStream set should be done once, right before stopping the muxer.
 */
private void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (VERBOSE)
        Log.d(TAG, "drainEncoder(" + endOfStream + ")");
    if (endOfStream) {
        if (VERBOSE)
            Log.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }
    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                // out of while
                break;
            } else {
                if (VERBOSE)
                    Log.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // should happen before receiving buffers, and should only happen once
            if (mMuxerStarted) {
                throw new RuntimeException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            Log.d(TAG, "encoder output format changed: " + newFormat);
            // now that we have the Magic Goodies, start the muxer
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
        // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                if (VERBOSE)
                    Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }
                // adjust the ByteBuffer values to match BufferInfo
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                mBufferInfo.presentationTimeUs = mFakePts;
                mFakePts += 1000000L / FRAMES_PER_SECOND;
                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE)
                    Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
            }
            mEncoder.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE)
                        Log.d(TAG, "end of stream reached");
                }
                // out of while
                break;
            }
        }
    }
}
Also used : MediaFormat(android.media.MediaFormat) ByteBuffer(java.nio.ByteBuffer) Paint(android.graphics.Paint)
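
Note: this variant differs from the previous one only in that it overwrites each buffer's presentationTimeUs with a synthetic timestamp (mFakePts) derived from FRAMES_PER_SECOND. Both examples also use the buffer-array API that was deprecated in API 21; on Lollipop and later the body of the loop can fetch each output buffer directly, roughly like this:

// API 21+ alternative to the getOutputBuffers() array (sketch of the relevant lines only):
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus >= 0) {
    ByteBuffer encodedData = mEncoder.getOutputBuffer(encoderStatus);
    // ... adjust position/limit and write to the muxer exactly as above ...
    mEncoder.releaseOutputBuffer(encoderStatus, false);
}
// With getOutputBuffer(int), INFO_OUTPUT_BUFFERS_CHANGED can simply be ignored.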

Example 84 with MediaFormat

use of android.media.MediaFormat in project speechutils by Kaljurand.

the class AudioUtils method getAvailableEncoders.

// TODO: use MediaFormat.MIMETYPE_AUDIO_FLAC on API >= 21
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public static List<String> getAvailableEncoders(String mime, int sampleRate) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        MediaFormat format = MediaFormatFactory.createMediaFormat(mime, sampleRate);
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        String encoderAsStr = mcl.findEncoderForFormat(format);
        List<String> encoders = new ArrayList<>();
        for (MediaCodecInfo info : mcl.getCodecInfos()) {
            if (info.isEncoder()) {
                String name = info.getName();
                String infoAsStr = name + ": " + TextUtils.join(", ", info.getSupportedTypes());
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
                    infoAsStr += String.format(": %s/%s/%s/%s", info.isHardwareAccelerated(), info.isSoftwareOnly(), info.isAlias(), info.isVendor());
                }
                if (name.equals(encoderAsStr)) {
                    infoAsStr = '#' + infoAsStr;
                }
                encoders.add(infoAsStr);
            }
        }
        return encoders;
    }
    return Collections.emptyList();
}
Also used : MediaFormat(android.media.MediaFormat) MediaCodecInfo(android.media.MediaCodecInfo) MediaCodecList(android.media.MediaCodecList) ArrayList(java.util.ArrayList) TargetApi(android.annotation.TargetApi)
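
Note: MediaFormatFactory.createMediaFormat() is a speechutils helper that isn't shown here. For an audio MIME type it presumably boils down to MediaFormat.createAudioFormat; a hypothetical stand-in (channel count and bit rate are assumptions, not the speechutils values):

// Hypothetical sketch of a MediaFormat factory for audio encoding.
public static MediaFormat createMediaFormat(String mime, int sampleRate) {
    int channelCount = 1;  // assumed: mono recognition audio
    MediaFormat format = MediaFormat.createAudioFormat(mime, sampleRate, channelCount);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);  // placeholder bit rate
    return format;
}

A caller would then pass the same MIME string and sample rate used for recording, e.g. getAvailableEncoders("audio/flac", 16000).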

Example 85 with MediaFormat

use of android.media.MediaFormat in project libstreaming by fyhertz.

the class EncoderDebugger method searchSPSandPPS.

/**
 * Tries to obtain the SPS and the PPS for the encoder.
 */
private long searchSPSandPPS() {
    ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();
    byte[] csd = new byte[128];
    int len = 0, p = 4, q = 4;
    long elapsed = 0, now = timestamp();
    while (elapsed < 3000000 && (mSPS == null || mPPS == null)) {
        // Some encoders won't give us the SPS and PPS unless they receive something to encode first...
        int bufferIndex = mEncoder.dequeueInputBuffer(1000000 / FRAMERATE);
        if (bufferIndex >= 0) {
            check(inputBuffers[bufferIndex].capacity() >= mData.length, "The input buffer is not big enough.");
            inputBuffers[bufferIndex].clear();
            inputBuffers[bufferIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(bufferIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE)
                Log.e(TAG, "No buffer available !");
        }
        // We are looking for the SPS and the PPS here. As always, Android is very inconsistent; I have observed that some
        // encoders will give those parameters through the MediaFormat object (that is the normal behaviour).
        // But some others will not, in which case we try to find a NAL unit of type 7 or 8 in the byte stream output by the encoder...
        int index = mEncoder.dequeueOutputBuffer(info, 1000000 / FRAMERATE);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The SPS and PPS should be there
            MediaFormat format = mEncoder.getOutputFormat();
            ByteBuffer spsb = format.getByteBuffer("csd-0");
            ByteBuffer ppsb = format.getByteBuffer("csd-1");
            mSPS = new byte[spsb.capacity() - 4];
            spsb.position(4);
            spsb.get(mSPS, 0, mSPS.length);
            mPPS = new byte[ppsb.capacity() - 4];
            ppsb.position(4);
            ppsb.get(mPPS, 0, mPPS.length);
            break;
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = mEncoder.getOutputBuffers();
        } else if (index >= 0) {
            len = info.size;
            if (len < 128) {
                outputBuffers[index].get(csd, 0, len);
                if (len > 0 && csd[0] == 0 && csd[1] == 0 && csd[2] == 0 && csd[3] == 1) {
                    // Parse the SPS and PPS; they could be in two different packets and in a different order
                    // depending on the phone so we don't make any assumption about that
                    while (p < len) {
                        while (!(csd[p + 0] == 0 && csd[p + 1] == 0 && csd[p + 2] == 0 && csd[p + 3] == 1) && p + 3 < len) p++;
                        if (p + 3 >= len)
                            p = len;
                        if ((csd[q] & 0x1F) == 7) {
                            mSPS = new byte[p - q];
                            System.arraycopy(csd, q, mSPS, 0, p - q);
                        } else {
                            mPPS = new byte[p - q];
                            System.arraycopy(csd, q, mPPS, 0, p - q);
                        }
                        p += 4;
                        q = p;
                    }
                }
            }
            mEncoder.releaseOutputBuffer(index, false);
        }
        elapsed = timestamp() - now;
    }
    check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
    mB64PPS = Base64.encodeToString(mPPS, 0, mPPS.length, Base64.NO_WRAP);
    mB64SPS = Base64.encodeToString(mSPS, 0, mSPS.length, Base64.NO_WRAP);
    return elapsed;
}
Also used : MediaFormat(android.media.MediaFormat) BufferInfo(android.media.MediaCodec.BufferInfo) ByteBuffer(java.nio.ByteBuffer) SuppressLint(android.annotation.SuppressLint)
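
Note: in libstreaming the Base64 strings typically end up in the SDP sprop-parameter-sets attribute. The recovered SPS/PPS can also be fed back into a decoder configuration by hand; a minimal sketch, assuming the 4-byte Annex-B start codes stripped above must be restored and using placeholder dimensions:

// Sketch: building a decoder MediaFormat from the recovered SPS/PPS.
MediaFormat decoderFormat = MediaFormat.createVideoFormat("video/avc", 640, 480);
byte[] startCode = {0x00, 0x00, 0x00, 0x01};
ByteBuffer csd0 = ByteBuffer.allocate(startCode.length + mSPS.length);
csd0.put(startCode).put(mSPS).flip();
ByteBuffer csd1 = ByteBuffer.allocate(startCode.length + mPPS.length);
csd1.put(startCode).put(mPPS).flip();
decoderFormat.setByteBuffer("csd-0", csd0);
decoderFormat.setByteBuffer("csd-1", csd1);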

Aggregations

MediaFormat (android.media.MediaFormat): 87
IOException (java.io.IOException): 27
ByteBuffer (java.nio.ByteBuffer): 18
MediaExtractor (android.media.MediaExtractor): 16
MediaCodec (android.media.MediaCodec): 15
InputStream (java.io.InputStream): 12
TargetApi (android.annotation.TargetApi): 11
Context (android.content.Context): 10
Handler (android.os.Handler): 10
HandlerThread (android.os.HandlerThread): 10
Message (android.os.Message): 10
Pair (android.util.Pair): 10
Runnable (java.lang.Runnable): 10
SuppressLint (android.annotation.SuppressLint): 9
File (java.io.File): 8
BufferInfo (android.media.MediaCodec.BufferInfo): 7
FileInputStream (java.io.FileInputStream): 7
MediaPlayer (android.media.MediaPlayer): 6
Surface (android.view.Surface): 6
AssetFileDescriptor (android.content.res.AssetFileDescriptor): 5