Search in sources:

Example 11 with AudioFormat

use of android.media.AudioFormat in project android_frameworks_base by crdroidandroid.

the class BlockingAudioTrack method createStreamingAudioTrack.

private AudioTrack createStreamingAudioTrack() {
    final int channelConfig = getChannelConfig(mChannelCount);
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(mSampleRateInHz, channelConfig, mAudioFormat);
    int bufferSizeInBytes = Math.max(MIN_AUDIO_BUFFER_SIZE, minBufferSizeInBytes);
    AudioFormat audioFormat = new AudioFormat.Builder()
            .setChannelMask(channelConfig)
            .setEncoding(mAudioFormat)
            .setSampleRate(mSampleRateInHz)
            .build();
    AudioTrack audioTrack = new AudioTrack(mAudioParams.mAudioAttributes, audioFormat,
            bufferSizeInBytes, AudioTrack.MODE_STREAM, mAudioParams.mSessionId);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        Log.w(TAG, "Unable to create audio track.");
        audioTrack.release();
        return null;
    }
    mAudioBufferSize = bufferSizeInBytes;
    setupVolume(audioTrack, mAudioParams.mVolume, mAudioParams.mPan);
    return audioTrack;
}
Also used : AudioTrack(android.media.AudioTrack) AudioFormat(android.media.AudioFormat)
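
The getChannelConfig helper called above is not part of the snippet. A plausible reconstruction, under the assumption that it simply maps the synthesis engine's channel count to an AudioTrack output channel mask (the exact mapping is an assumption, not taken from the snippet):

private static int getChannelConfig(int channelCount) {
    // Assumed mapping: mono and stereo are supported, anything else is rejected with 0.
    if (channelCount == 1) {
        return AudioFormat.CHANNEL_OUT_MONO;
    } else if (channelCount == 2) {
        return AudioFormat.CHANNEL_OUT_STEREO;
    }
    return 0;
}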

Example 12 with AudioFormat

use of android.media.AudioFormat in project android_frameworks_base by crdroidandroid.

the class RecordingActivityMonitor method updateSnapshot.

/**
     * Update the internal "view" of the active recording sessions.
     * @param event one of AudioManager.RECORD_CONFIG_EVENT_START or RECORD_CONFIG_EVENT_STOP
     * @param session the audio session ID of the recording client
     * @param source the recording source (see MediaRecorder.AudioSource)
     * @param recordingInfo see
     *     {@link AudioSystem.AudioRecordingCallback#onRecordingConfigurationChanged(int, int, int, int[])}
     *     for the definition of the contents of the array
     * @return null if the list of active recording sessions has not been modified, a list
     *     with the current active configurations otherwise.
     */
private List<AudioRecordingConfiguration> updateSnapshot(int event, int session, int source, int[] recordingInfo) {
    final boolean configChanged;
    final ArrayList<AudioRecordingConfiguration> configs;
    synchronized (mRecordConfigs) {
        switch(event) {
            case AudioManager.RECORD_CONFIG_EVENT_STOP:
                // return failure if an unknown recording session stopped
                configChanged = (mRecordConfigs.remove(new Integer(session)) != null);
                break;
            case AudioManager.RECORD_CONFIG_EVENT_START:
                final AudioFormat clientFormat = new AudioFormat.Builder()
                        .setEncoding(recordingInfo[0])
                        .setChannelMask(recordingInfo[1])
                        .setSampleRate(recordingInfo[2])
                        .build();
                final AudioFormat deviceFormat = new AudioFormat.Builder()
                        .setEncoding(recordingInfo[3])
                        .setChannelMask(recordingInfo[4])
                        .setSampleRate(recordingInfo[5])
                        .build();
                final int patchHandle = recordingInfo[6];
                final Integer sessionKey = new Integer(session);
                if (mRecordConfigs.containsKey(sessionKey)) {
                    final AudioRecordingConfiguration updatedConfig = new AudioRecordingConfiguration(session, source, clientFormat, deviceFormat, patchHandle);
                    if (updatedConfig.equals(mRecordConfigs.get(sessionKey))) {
                        configChanged = false;
                    } else {
                        // config exists but has been modified
                        mRecordConfigs.remove(sessionKey);
                        mRecordConfigs.put(sessionKey, updatedConfig);
                        configChanged = true;
                    }
                } else {
                    mRecordConfigs.put(sessionKey, new AudioRecordingConfiguration(session, source, clientFormat, deviceFormat, patchHandle));
                    configChanged = true;
                }
                break;
            default:
                Log.e(TAG, String.format("Unknown event %d for session %d, source %d", event, session, source));
                configChanged = false;
        }
        if (configChanged) {
            configs = new ArrayList<AudioRecordingConfiguration>(mRecordConfigs.values());
        } else {
            configs = null;
        }
    }
    return configs;
}
Also used : AudioRecordingConfiguration(android.media.AudioRecordingConfiguration) AudioFormat(android.media.AudioFormat)
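
For context, applications observe the snapshots produced by this method through the public AudioManager API (API 24+). A minimal client-side sketch; the RecordingSnapshotLogger class and its log format are illustrative, not part of the framework:

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecordingConfiguration;
import android.util.Log;
import java.util.List;

public final class RecordingSnapshotLogger {
    private static final String TAG = "RecordingSnapshotLogger";

    static void logActiveRecordings(AudioManager audioManager) {
        // Public counterpart of the service-side snapshot above (requires API 24+).
        List<AudioRecordingConfiguration> configs =
                audioManager.getActiveRecordingConfigurations();
        for (AudioRecordingConfiguration config : configs) {
            AudioFormat clientFormat = config.getClientFormat();
            Log.d(TAG, "session=" + config.getClientAudioSessionId()
                    + " source=" + config.getClientAudioSource()
                    + " rate=" + clientFormat.getSampleRate()
                    + " encoding=" + clientFormat.getEncoding());
        }
    }
}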

Example 13 with AudioFormat

use of android.media.AudioFormat in project ExoPlayer by google.

the class AudioTrack method createHwAvSyncAudioTrackV21.

/**
   * Instantiates an {@link android.media.AudioTrack} to be used with tunneling video playback.
   */
@TargetApi(21)
private static android.media.AudioTrack createHwAvSyncAudioTrackV21(int sampleRate, int channelConfig, int encoding, int bufferSize, int sessionId) {
    AudioAttributes attributes = new AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_MEDIA)
            .setContentType(AudioAttributes.CONTENT_TYPE_MOVIE)
            .setFlags(AudioAttributes.FLAG_HW_AV_SYNC)
            .build();
    AudioFormat format = new AudioFormat.Builder()
            .setChannelMask(channelConfig)
            .setEncoding(encoding)
            .setSampleRate(sampleRate)
            .build();
    return new android.media.AudioTrack(attributes, format, bufferSize, MODE_STREAM, sessionId);
}
Also used : AudioAttributes(android.media.AudioAttributes) AudioFormat(android.media.AudioFormat) TargetApi(android.annotation.TargetApi)
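
For illustration, a hypothetical companion method in the same class could derive the arguments as follows; the 48 kHz stereo 16-bit PCM values, the 4x buffer multiplier, and the use of AudioManager.generateAudioSessionId() are assumptions, not ExoPlayer's actual configuration path:

@TargetApi(21)
private static android.media.AudioTrack createTunneledTrackForIllustration(AudioManager audioManager) {
    int sampleRate = 48000;
    int channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
    int encoding = AudioFormat.ENCODING_PCM_16BIT;
    // Oversize the platform minimum a little to absorb scheduling jitter (the factor is arbitrary).
    int bufferSize =
            android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding) * 4;
    // Tunneled (HW A/V sync) playback needs an explicit session shared with the video decoder.
    int sessionId = audioManager.generateAudioSessionId();
    return createHwAvSyncAudioTrackV21(sampleRate, channelConfig, encoding, bufferSize, sessionId);
}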

Example 14 with AudioFormat

use of android.media.AudioFormat in project android_frameworks_base by AOSPA.

the class BlockingAudioTrack method createStreamingAudioTrack.

private AudioTrack createStreamingAudioTrack() {
    final int channelConfig = getChannelConfig(mChannelCount);
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(mSampleRateInHz, channelConfig, mAudioFormat);
    int bufferSizeInBytes = Math.max(MIN_AUDIO_BUFFER_SIZE, minBufferSizeInBytes);
    AudioFormat audioFormat = new AudioFormat.Builder()
            .setChannelMask(channelConfig)
            .setEncoding(mAudioFormat)
            .setSampleRate(mSampleRateInHz)
            .build();
    AudioTrack audioTrack = new AudioTrack(mAudioParams.mAudioAttributes, audioFormat,
            bufferSizeInBytes, AudioTrack.MODE_STREAM, mAudioParams.mSessionId);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        Log.w(TAG, "Unable to create audio track.");
        audioTrack.release();
        return null;
    }
    mAudioBufferSize = bufferSizeInBytes;
    setupVolume(audioTrack, mAudioParams.mVolume, mAudioParams.mPan);
    return audioTrack;
}
Also used : AudioTrack(android.media.AudioTrack) AudioFormat(android.media.AudioFormat)
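
The setupVolume helper invoked above is not shown in either copy of this snippet. A plausible sketch of what it does, assuming volume is clamped to [0, 1] and pan lies in [-1, 1]; the exact gain math here is an assumption:

private static void setupVolume(AudioTrack audioTrack, float volume, float pan) {
    // Assumed behavior: clamp the volume, then attenuate one channel according to the pan.
    final float vol = Math.max(0.0f, Math.min(1.0f, volume));
    float leftVolume = vol;
    float rightVolume = vol;
    if (pan > 0.0f) {
        leftVolume *= (1.0f - pan);
    } else if (pan < 0.0f) {
        rightVolume *= (1.0f + pan);
    }
    audioTrack.setStereoVolume(leftVolume, rightVolume);
}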

Example 15 with AudioFormat

use of android.media.AudioFormat in project android_frameworks_base by DirtyUnicorns.

the class AudioPolicy method createAudioRecordSink.

/**
     * Create an {@link AudioRecord} instance that is associated with the given {@link AudioMix}.
     * Audio buffers recorded through the created instance will contain the mix of the audio
     * streams that fed the given mixer.
     * @param mix a non-null {@link AudioMix} instance whose routing flags were defined with
     *     {@link AudioMix#ROUTE_FLAG_LOOP_BACK}, previously added to this policy.
     * @return a new {@link AudioRecord} instance whose data format is the one defined in the
     *     {@link AudioMix}, or null if this policy was not successfully registered
     *     with {@link AudioManager#registerAudioPolicy(AudioPolicy)}.
     * @throws IllegalArgumentException
     */
@SystemApi
public AudioRecord createAudioRecordSink(AudioMix mix) throws IllegalArgumentException {
    if (!policyReadyToUse()) {
        Log.e(TAG, "Cannot create AudioRecord sink for AudioMix");
        return null;
    }
    checkMixReadyToUse(mix, false);
    // create an AudioFormat from the mix format compatible with recording, as the mix
    // was defined for playback
    AudioFormat mixFormat = new AudioFormat.Builder(mix.getFormat())
            .setChannelMask(AudioFormat.inChannelMaskFromOutChannelMask(
                    mix.getFormat().getChannelMask()))
            .build();
    // create the AudioRecord, configured for loop back, using the same format as the mix
    AudioRecord ar = new AudioRecord(
            new AudioAttributes.Builder()
                    .setInternalCapturePreset(MediaRecorder.AudioSource.REMOTE_SUBMIX)
                    .addTag(addressForTag(mix))
                    .build(),
            mixFormat,
            // using stereo for the buffer size to avoid the current poor support for masks
            AudioRecord.getMinBufferSize(mix.getFormat().getSampleRate(),
                    AudioFormat.CHANNEL_IN_STEREO, mix.getFormat().getEncoding()),
            AudioManager.AUDIO_SESSION_ID_GENERATE);
    return ar;
}
Also used : AudioRecord(android.media.AudioRecord) AudioFormat(android.media.AudioFormat) SystemApi(android.annotation.SystemApi)
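
A minimal sketch of consuming the loopback sink returned above, assuming the policy was registered successfully and the mix format is 16-bit PCM; the buffer size and iteration count are illustrative:

static void drainLoopback(AudioRecord loopbackRecord) {
    if (loopbackRecord == null
            || loopbackRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        return;
    }
    short[] pcmBuffer = new short[4096];
    loopbackRecord.startRecording();
    // Read a handful of buffers of the mixed-down audio, then stop.
    for (int i = 0; i < 10; i++) {
        int read = loopbackRecord.read(pcmBuffer, 0, pcmBuffer.length);
        if (read <= 0) {
            break;
        }
        // ... process `read` samples of the loop-back mix here ...
    }
    loopbackRecord.stop();
    loopbackRecord.release();
}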

Aggregations

AudioFormat (android.media.AudioFormat): 20
SystemApi (android.annotation.SystemApi): 5
AudioRecord (android.media.AudioRecord): 5
AudioRecordingConfiguration (android.media.AudioRecordingConfiguration): 5
AudioTrack (android.media.AudioTrack): 5
TargetApi (android.annotation.TargetApi): 1
AudioAttributes (android.media.AudioAttributes): 1