
Example 16 with AudioRecord

use of android.media.AudioRecord in project libstreaming by fyhertz.

the class AACStream method encodeWithMediaCodec.

@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
    // Use twice the minimum buffer size reported for this capture configuration
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;
    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    // Describe the desired AAC output: LC profile, mono, quality-driven bit rate and sample rate
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();
    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    // Pump raw PCM from the AudioRecord into the encoder's input buffers on a dedicated thread
    mThread = new Thread(new Runnable() {

        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        if (len == AudioRecord.ERROR_INVALID_OPERATION || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API!");
                        } else {
                            // Log.v(TAG,"Pushing raw audio to the decoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });
    mThread.start();
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();
    mStreaming = true;
}
Also used : AudioRecord(android.media.AudioRecord) MediaFormat(android.media.MediaFormat) AACLATMPacketizer(net.majorkernelpanic.streaming.rtp.AACLATMPacketizer) MediaCodecInputStream(net.majorkernelpanic.streaming.rtp.MediaCodecInputStream) ByteBuffer(java.nio.ByteBuffer) SuppressLint(android.annotation.SuppressLint)
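
This snippet only shows the start path; the matching teardown is not part of this excerpt. A rough sketch of how the resources started above could be released is shown below. The method name stopEncoding, the ordering, and the assumption that the packetizer exposes a stop() counterpart to the start() call above are guesses, not libstreaming's actual stop() implementation.

// Hypothetical teardown sketch; not libstreaming's actual stop() code.
protected void stopEncoding() {
    // Assumes the packetizer offers a stop() counterpart to start().
    mPacketizer.stop();
    // Interrupting the thread breaks the dequeue/read/queue loop started above.
    mThread.interrupt();
    mAudioRecord.stop();
    mAudioRecord.release();
    mMediaCodec.stop();
    mMediaCodec.release();
    mStreaming = false;
}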

Example 17 with AudioRecord

use of android.media.AudioRecord in project robolectric by robolectric.

the class ShadowAudioRecordTest method setSource_instanceCreatedBeforeSetSourceIsCalled.

@Test
public void setSource_instanceCreatedBeforeSetSourceIsCalled() {
    AudioRecord audioRecord = createAudioRecord();
    audioRecord.startRecording();
    byte[] audioRecordInput = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 };
    ShadowAudioRecord.setSource(createAudioRecordSource(audioRecordInput));
    byte[] audioRecordData = new byte[100];
    int audioRecordBytesRead = audioRecord.read(audioRecordData, 0, 100);
    audioRecord.stop();
    audioRecord.release();
    assertThat(audioRecordBytesRead).isEqualTo(audioRecordInput.length);
    assertThat(Arrays.copyOf(audioRecordData, audioRecordInput.length)).isEqualTo(audioRecordInput);
}
Also used : AudioRecord(android.media.AudioRecord) Test(org.junit.Test)
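
The createAudioRecordSource helper is not shown in this excerpt. A minimal sketch of what it could look like follows, assuming AudioRecordSource is an interface with default read hooks (so only the byte-array hook needs overriding) and that readInByteArray returns the number of bytes written; the method signature is taken from the verify() call in Example 18.

// Hypothetical helper; the real createAudioRecordSource() is not part of this excerpt.
private static AudioRecordSource createAudioRecordSource(final byte[] input) {
    return new AudioRecordSource() {
        private int position = 0;

        @Override
        public int readInByteArray(byte[] audioData, int offsetInBytes, int sizeInBytes, boolean isBlocking) {
            // Copy whatever is left of the canned input into the caller's buffer.
            int length = Math.min(sizeInBytes, input.length - position);
            System.arraycopy(input, position, audioData, offsetInBytes, length);
            position += length;
            return length;
        }
    };
}

With the eight-byte input above, a 100-byte read then returns 8, which is exactly what the assertions check.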

Example 18 with AudioRecord

use of android.media.AudioRecord in project robolectric by robolectric.

the class ShadowAudioRecordTest method nativeReadByteCallsAudioRecordSourceWhenSetBlockingMOnwards.

@Test
@Config(minSdk = M)
public void nativeReadByteCallsAudioRecordSourceWhenSetBlockingMOnwards() {
    AudioRecordSource source = Mockito.mock(AudioRecordSource.class);
    ShadowAudioRecord.setSource(source);
    AudioRecord audioRecord = createAudioRecord();
    audioRecord.startRecording();
    audioRecord.read(new byte[100], 0, 100, AudioRecord.READ_BLOCKING);
    verify(source).readInByteArray(any(byte[].class), eq(0), eq(100), /* isBlocking= */ eq(true));
    verifyNoMoreInteractions(source);
}
Also used : AudioRecord(android.media.AudioRecord) AudioRecordSource(org.robolectric.shadows.ShadowAudioRecord.AudioRecordSource) Test(org.junit.Test) Config(org.robolectric.annotation.Config)
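
The createAudioRecord() helper shared by Examples 17, 18, and 20 is also not shown here. Judging from the inline construction in Example 19, a plausible 16-bit PCM variant would look like the sketch below; the exact sample rate, encoding, and buffer size are assumptions.

// Hypothetical helper; parameters mirror the inline construction in Example 19,
// but with 16-bit PCM samples instead of floats (values are assumed).
private static AudioRecord createAudioRecord() {
    return new AudioRecord(
        AudioSource.MIC,                 // audio source
        16000,                           // sample rate in Hz
        AudioFormat.CHANNEL_IN_MONO,     // channel configuration
        AudioFormat.ENCODING_PCM_16BIT,  // sample encoding
        1024);                           // buffer size in bytes
}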

Example 19 with AudioRecord

use of android.media.AudioRecord in project robolectric by robolectric.

the class ShadowAudioRecordTest method nativeReadFloatCallsAudioRecordSourceWhenSetNonBlocking.

@Test
@Config(minSdk = M)
public void nativeReadFloatCallsAudioRecordSourceWhenSetNonBlocking() {
    AudioRecordSource source = Mockito.mock(AudioRecordSource.class);
    ShadowAudioRecord.setSource(source);
    AudioRecord audioRecord = new AudioRecord(AudioSource.MIC, 16000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_FLOAT, 1024);
    audioRecord.startRecording();
    audioRecord.read(new float[100], 0, 100, AudioRecord.READ_NON_BLOCKING);
    verify(source).readInFloatArray(any(float[].class), eq(0), eq(100), /* isBlocking= */ eq(false));
    verifyNoMoreInteractions(source);
}
Also used : AudioRecord(android.media.AudioRecord) AudioRecordSource(org.robolectric.shadows.ShadowAudioRecord.AudioRecordSource) Test(org.junit.Test) Config(org.robolectric.annotation.Config)

Example 20 with AudioRecord

use of android.media.AudioRecord in project robolectric by robolectric.

the class ShadowAudioRecordTest method nativeReadByteBufferCallsAudioRecordSourceWhenSetBlockingMOnwards.

@Test
@Config(minSdk = M)
public void nativeReadByteBufferCallsAudioRecordSourceWhenSetBlockingMOnwards() {
    AudioRecordSource source = Mockito.mock(AudioRecordSource.class);
    ShadowAudioRecord.setSource(source);
    AudioRecord audioRecord = createAudioRecord();
    audioRecord.startRecording();
    audioRecord.read(ByteBuffer.allocate(100), 100, AudioRecord.READ_BLOCKING);
    verify(source).readInDirectBuffer(any(ByteBuffer.class), eq(100), /* isBlocking= */ eq(true));
    verifyNoMoreInteractions(source);
}
Also used : AudioRecord(android.media.AudioRecord) AudioRecordSource(org.robolectric.shadows.ShadowAudioRecord.AudioRecordSource) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test) Config(org.robolectric.annotation.Config)
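
For completeness, a non-mock source serving this ByteBuffer path could be sketched as below. The readInDirectBuffer signature comes from the verify() call above, while the silence-filling behaviour and the reliance on the interface's default hooks for the other read paths are assumptions.

// Hypothetical fake source for the direct-buffer read path; fills the request with silence.
AudioRecordSource silenceSource = new AudioRecordSource() {
    @Override
    public int readInDirectBuffer(ByteBuffer buffer, int sizeInBytes, boolean isBlocking) {
        int length = Math.min(sizeInBytes, buffer.remaining());
        for (int i = 0; i < length; i++) {
            buffer.put((byte) 0);
        }
        return length;
    }
};
ShadowAudioRecord.setSource(silenceSource);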

Aggregations

AudioRecord (android.media.AudioRecord) 64
Test (org.junit.Test) 22
AudioRecordSource (org.robolectric.shadows.ShadowAudioRecord.AudioRecordSource) 13
AssetFileDescriptor (android.content.res.AssetFileDescriptor) 12
AudioEffect (android.media.audiofx.AudioEffect) 12
LargeTest (android.test.suitebuilder.annotation.LargeTest) 12
Config (org.robolectric.annotation.Config) 12
AudioFormat (android.media.AudioFormat) 6
SystemApi (android.annotation.SystemApi) 5
ByteBuffer (java.nio.ByteBuffer) 4
SuppressLint (android.annotation.SuppressLint) 1
MediaFormat (android.media.MediaFormat) 1
MediaRecorder (android.media.MediaRecorder) 1
Handler (android.os.Handler) 1
RequiresPermission (androidx.annotation.RequiresPermission) 1
AudioDispatcher (be.tarsos.dsp.AudioDispatcher) 1
TarsosDSPAudioFormat (be.tarsos.dsp.io.TarsosDSPAudioFormat) 1
TarsosDSPAudioInputStream (be.tarsos.dsp.io.TarsosDSPAudioInputStream) 1
BufferData (com.libra.sinvoice.Buffer.BufferData) 1
ActorCreator (im.actor.runtime.actors.ActorCreator) 1