Search in sources:

Example 1 with EncoderDebugger

use of net.majorkernelpanic.streaming.hw.EncoderDebugger in project libstreaming by fyhertz.

The class H264Stream, method testMediaCodecAPI: probes the MediaCodec API for the current quality settings and falls back to the MediaRecorder API if the resolution is not supported.

@SuppressLint("NewApi")
private MP4Config testMediaCodecAPI() throws RuntimeException, IOException {
    createCamera();
    updateCamera();
    try {
        if (mQuality.resX >= 640) {
            // Using the MediaCodec API with the buffer method is too slow for high resolutions
            mMode = MODE_MEDIARECORDER_API;
        }
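        // EncoderDebugger runs a short test encode and, on success, exposes the
        // Base64 SPS/PPS that the hardware encoder produced for this resolution.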
        EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
        return new MP4Config(debugger.getB64SPS(), debugger.getB64PPS());
    } catch (Exception e) {
        // Fall back on the old streaming method that uses the MediaRecorder API
        Log.e(TAG, "Resolution not supported with the MediaCodec API, falling back on the old streaming method.");
        mMode = MODE_MEDIARECORDER_API;
        return testH264();
    }
}
Also used: EncoderDebugger(net.majorkernelpanic.streaming.hw.EncoderDebugger) MP4Config(net.majorkernelpanic.streaming.mp4.MP4Config) ConfNotSupportedException(net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException) IOException(java.io.IOException) StorageUnavailableException(net.majorkernelpanic.streaming.exceptions.StorageUnavailableException) SuppressLint(android.annotation.SuppressLint)
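
For context, here is a minimal sketch of what the Base64 SPS/PPS carried by the returned MP4Config are typically used for: building the sprop-parameter-sets attribute of an H.264 SDP media description (RFC 6184). The SpropSketch class, its buildSpropLine helper and the payload type 96 are illustrative assumptions, not part of the library.

// Illustrative sketch only: shows how Base64 SPS/PPS are consumed downstream.
public class SpropSketch {

    // RFC 6184: sprop-parameter-sets is a comma-separated list of
    // Base64-encoded parameter set NAL units, SPS first, then PPS.
    static String buildSpropLine(String b64Sps, String b64Pps) {
        return "a=fmtp:96 packetization-mode=1;sprop-parameter-sets="
                + b64Sps + "," + b64Pps + ";";
    }

    public static void main(String[] args) {
        // Placeholder values; real ones come from MP4Config.getB64SPS()/getB64PPS().
        System.out.println(buildSpropLine("Z0KAHtoHgUZA", "aM4NiA=="));
    }
}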

Example 2 with EncoderDebugger

use of net.majorkernelpanic.streaming.hw.EncoderDebugger in project libstreaming by fyhertz.

The class VideoStream, method encodeWithMediaCodecMethod1: encodes video with MediaCodec using the buffer method, feeding converted NV21 preview frames into the codec's input buffers.

/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a buffer");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    // Starts the preview if needed
    if (!mPreviewStarted) {
        try {
            mCamera.startPreview();
            mPreviewStarted = true;
        } catch (RuntimeException e) {
            destroyCamera();
            throw e;
        }
    }
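    // EncoderDebugger probes the device's encoders and reports a working
    // encoder name, color format, and NV21-to-encoder-format converter.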
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    final NV21Convertor convertor = debugger.getNV21Convertor();
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, debugger.getEncoderColorFormat());
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    Camera.PreviewCallback callback = new Camera.PreviewCallback() {

        long now = System.nanoTime() / 1000, oldnow = now, i = 0;

        ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            oldnow = now;
            now = System.nanoTime() / 1000;
            if (i++ > 3) {
                i = 0;
                // Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
            }
            try {
                int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
                if (bufferIndex >= 0) {
                    inputBuffers[bufferIndex].clear();
                    if (data == null)
                        Log.e(TAG, "Symptom of the \"Callback buffer was to small\" problem...");
                    else
                        convertor.convert(data, inputBuffers[bufferIndex]);
                    mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
                } else {
                    Log.e(TAG, "No buffer available !");
                }
            } finally {
                mCamera.addCallbackBuffer(data);
            }
        }
    };
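    // Hand the camera ten reusable preview buffers, each sized for one NV21 frame.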
    for (int i = 0; i < 10; i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
    mCamera.setPreviewCallbackWithBuffer(callback);
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
Also used: MediaFormat(android.media.MediaFormat) EncoderDebugger(net.majorkernelpanic.streaming.hw.EncoderDebugger) NV21Convertor(net.majorkernelpanic.streaming.hw.NV21Convertor) MediaCodecInputStream(net.majorkernelpanic.streaming.rtp.MediaCodecInputStream) Camera(android.hardware.Camera) SuppressLint(android.annotation.SuppressLint)
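
The callback above covers only the input side of the codec. Below is a hedged sketch of the kind of draining loop that MediaCodecInputStream wraps on the output side; the DrainSketch class and drainOnce method are illustrative names, only the MediaCodec calls are real, and the deprecated getOutputBuffers() style is kept to match the API level the snippet above targets.

import java.nio.ByteBuffer;

import android.media.MediaCodec;

final class DrainSketch {

    // Pulls one encoded access unit out of the codec, if one is ready.
    static int drainOnce(MediaCodec codec, byte[] out) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        // Block for up to 500 ms, mirroring the timeout used on the input side.
        int index = codec.dequeueOutputBuffer(info, 500000);
        if (index < 0) {
            // No output ready, or the output format/buffers changed; a real
            // implementation would handle those INFO_* return codes here.
            return 0;
        }
        ByteBuffer buffer = codec.getOutputBuffers()[index];
        buffer.position(info.offset);
        buffer.limit(info.offset + info.size);
        // Copy the encoded NAL unit(s) out before releasing the buffer.
        buffer.get(out, 0, info.size);
        codec.releaseOutputBuffer(index, false);
        // out[0..info.size) now holds data ready for the RTP packetizer.
        return info.size;
    }
}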

Example 3 with EncoderDebugger

use of net.majorkernelpanic.streaming.hw.EncoderDebugger in project libstreaming by fyhertz.

The class VideoStream, method encodeWithMediaCodecMethod2: encodes video with MediaCodec using an input surface, so preview frames reach the encoder without passing through byte buffers.

/**
 * Video encoding is done by a MediaCodec,
 * but here we use the buffer-to-surface method.
 */
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
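    // The encoder gets its frames from an input surface: the camera preview is
    // rendered into it (via OpenGL in the library's SurfaceView) instead of
    // being copied through byte buffers.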
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
Also used: MediaFormat(android.media.MediaFormat) EncoderDebugger(net.majorkernelpanic.streaming.hw.EncoderDebugger) MediaCodecInputStream(net.majorkernelpanic.streaming.rtp.MediaCodecInputStream) SurfaceView(net.majorkernelpanic.streaming.gl.SurfaceView) Surface(android.view.Surface) SuppressLint(android.annotation.SuppressLint)
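
Stopping this variant needs one step the buffer method does not: with an input surface there is no queueInputBuffer() call on which to set BUFFER_FLAG_END_OF_STREAM, so the encoder must be told explicitly that input has ended. Here is a hedged sketch of the teardown order, assuming the library's SurfaceView exposes a removeMediaCodecSurface() counterpart to the addMediaCodecSurface() call above.

import android.media.MediaCodec;

import net.majorkernelpanic.streaming.gl.SurfaceView;

final class SurfaceTeardownSketch {

    static void stop(MediaCodec codec, SurfaceView view) {
        // Pushes a final end-of-stream marker through the encoder (API 18+).
        codec.signalEndOfInputStream();
        // Assumed counterpart of addMediaCodecSurface(); detach before releasing.
        view.removeMediaCodecSurface();
        codec.stop();
        codec.release();
    }
}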

Aggregations

SuppressLint (android.annotation.SuppressLint): 3 uses
EncoderDebugger (net.majorkernelpanic.streaming.hw.EncoderDebugger): 3 uses
MediaFormat (android.media.MediaFormat): 2 uses
MediaCodecInputStream (net.majorkernelpanic.streaming.rtp.MediaCodecInputStream): 2 uses
Camera (android.hardware.Camera): 1 use
Surface (android.view.Surface): 1 use
IOException (java.io.IOException): 1 use
ConfNotSupportedException (net.majorkernelpanic.streaming.exceptions.ConfNotSupportedException): 1 use
StorageUnavailableException (net.majorkernelpanic.streaming.exceptions.StorageUnavailableException): 1 use
SurfaceView (net.majorkernelpanic.streaming.gl.SurfaceView): 1 use
NV21Convertor (net.majorkernelpanic.streaming.hw.NV21Convertor): 1 use
MP4Config (net.majorkernelpanic.streaming.mp4.MP4Config): 1 use