Example 1 with MediaCodecInputStream

Use of net.majorkernelpanic.streaming.rtp.MediaCodecInputStream in project libstreaming by fyhertz.

From the class AACStream, method encodeWithMediaCodec:

@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2; // twice the minimum buffer size, for headroom
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();
    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    mThread = new Thread(new Runnable() {

        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000); // timeout in microseconds (10 ms)
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API!");
                        } else {
                            // Log.v(TAG,"Pushing raw audio to the encoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0); // presentation time in microseconds
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();
    mStreaming = true;
}
Also used: AudioRecord(android.media.AudioRecord) MediaFormat(android.media.MediaFormat) AACLATMPacketizer(net.majorkernelpanic.streaming.rtp.AACLATMPacketizer) MediaCodecInputStream(net.majorkernelpanic.streaming.rtp.MediaCodecInputStream) ByteBuffer(java.nio.ByteBuffer) SuppressLint(android.annotation.SuppressLint)
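
For context, MediaCodecInputStream sits on the output side of the encoder: it dequeues the MediaCodec's output buffers and exposes the encoded bit stream as a regular java.io.InputStream, which is what lets the packetizer above consume it. A minimal consumer sketch, assuming only the standard InputStream contract (the drainTo helper, the sink parameter, and the 4096-byte chunk size are illustrative choices, not libstreaming code):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

// Sketch: drain an encoded stream the way a packetizer might (hypothetical helper).
static void drainTo(InputStream encodedStream, OutputStream sink) throws IOException {
    byte[] chunk = new byte[4096]; // arbitrary chunk size for illustration
    int len;
    // read() blocks until the MediaCodec produces an output buffer,
    // then copies up to chunk.length encoded bytes into 'chunk'.
    while ((len = encodedStream.read(chunk)) != -1) {
        sink.write(chunk, 0, len); // in libstreaming, the RTP layer would packetize these bytes
    }
}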

Example 2 with MediaCodecInputStream

Use of net.majorkernelpanic.streaming.rtp.MediaCodecInputStream in project libstreaming by fyhertz.

From the class VideoStream, method encodeWithMediaCodecMethod1:

/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a buffer");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    // Starts the preview if needed
    if (!mPreviewStarted) {
        try {
            mCamera.startPreview();
            mPreviewStarted = true;
        } catch (RuntimeException e) {
            destroyCamera();
            throw e;
        }
    }
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    final NV21Convertor convertor = debugger.getNV21Convertor();
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, debugger.getEncoderColorFormat());
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    Camera.PreviewCallback callback = new Camera.PreviewCallback() {

        long now = System.nanoTime() / 1000, oldnow = now, i = 0;

        ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            oldnow = now;
            now = System.nanoTime() / 1000;
            if (i++ > 3) {
                i = 0;
                // Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
            }
            try {
                int bufferIndex = mMediaCodec.dequeueInputBuffer(500000); // timeout in microseconds (500 ms)
                if (bufferIndex >= 0) {
                    inputBuffers[bufferIndex].clear();
                    if (data == null)
                        Log.e(TAG, "Symptom of the \"Callback buffer was too small\" problem...");
                    else
                        convertor.convert(data, inputBuffers[bufferIndex]);
                    mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
                } else {
                    Log.e(TAG, "No buffer available!");
                }
            } finally {
                mCamera.addCallbackBuffer(data);
            }
        }
    };
    // Pre-allocate preview callback buffers, each large enough for one NV21 frame
    for (int i = 0; i < 10; i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
    mCamera.setPreviewCallbackWithBuffer(callback);
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
Also used: MediaFormat(android.media.MediaFormat) EncoderDebugger(net.majorkernelpanic.streaming.hw.EncoderDebugger) NV21Convertor(net.majorkernelpanic.streaming.hw.NV21Convertor) MediaCodecInputStream(net.majorkernelpanic.streaming.rtp.MediaCodecInputStream) Camera(android.hardware.Camera) SuppressLint(android.annotation.SuppressLint)
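
A note on the callback buffers above: each must hold one raw NV21 preview frame, and NV21Convertor.getBufferSize() accounts for whatever stride alignment the encoder requires. Assuming tightly packed planes with no row padding, the frame size works out to width x height x 3/2 bytes; a minimal sketch of that arithmetic (the nv21FrameSize helper is hypothetical, not part of libstreaming):

// Sketch: raw NV21 frame size, assuming tightly packed planes with no row padding.
// NV21 stores a full-resolution Y plane followed by an interleaved VU plane
// subsampled 2x2, i.e. 1.5 bytes per pixel on average.
static int nv21FrameSize(int width, int height) {
    int ySize = width * height;      // 1 byte per pixel (luma)
    int vuSize = width * height / 2; // 2 bytes per 2x2 pixel block (chroma)
    return ySize + vuSize;           // e.g. 640x480 -> 460800 bytes
}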

Example 3 with MediaCodecInputStream

Use of net.majorkernelpanic.streaming.rtp.MediaCodecInputStream in project libstreaming by fyhertz.

From the class VideoStream, method encodeWithMediaCodecMethod2:

/**
 * Video encoding is done by a MediaCodec.
 * Here the encoder is fed through an input Surface instead of input buffers.
 */
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");
    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();
    // Estimates the frame rate of the camera
    measureFramerate();
    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();
    // The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
    mPacketizer.start();
    mStreaming = true;
}
Also used: MediaFormat(android.media.MediaFormat) EncoderDebugger(net.majorkernelpanic.streaming.hw.EncoderDebugger) MediaCodecInputStream(net.majorkernelpanic.streaming.rtp.MediaCodecInputStream) SurfaceView(net.majorkernelpanic.streaming.gl.SurfaceView) Surface(android.view.Surface) SuppressLint(android.annotation.SuppressLint)
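
Since the surface path has no input buffers, shutting the encoder down also differs from the buffer path: end of stream is signaled on the codec itself rather than by queueing a buffer with BUFFER_FLAG_END_OF_STREAM. A hedged sketch of the teardown order (signalEndOfInputStream, stop, and release are standard MediaCodec/Surface API, API 18+; the surrounding method is an assumption, not libstreaming's actual stop logic):

import android.media.MediaCodec;
import android.view.Surface;

// Sketch: stopping a surface-fed encoder (assumed teardown order, API 18+).
static void stopSurfaceEncoder(MediaCodec codec, Surface inputSurface) {
    codec.signalEndOfInputStream(); // no input buffer to flag; signal EOS on the codec
    // In libstreaming, the MediaCodecInputStream would have drained the output by this point.
    codec.stop();
    codec.release();
    inputSurface.release();         // the input surface must be released separately
}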

Aggregations

SuppressLint (android.annotation.SuppressLint): 3 uses
MediaFormat (android.media.MediaFormat): 3 uses
MediaCodecInputStream (net.majorkernelpanic.streaming.rtp.MediaCodecInputStream): 3 uses
EncoderDebugger (net.majorkernelpanic.streaming.hw.EncoderDebugger): 2 uses
Camera (android.hardware.Camera): 1 use
AudioRecord (android.media.AudioRecord): 1 use
Surface (android.view.Surface): 1 use
ByteBuffer (java.nio.ByteBuffer): 1 use
SurfaceView (net.majorkernelpanic.streaming.gl.SurfaceView): 1 use
NV21Convertor (net.majorkernelpanic.streaming.hw.NV21Convertor): 1 use
AACLATMPacketizer (net.majorkernelpanic.streaming.rtp.AACLATMPacketizer): 1 use