Use of com.google.android.exoplayer2.audio.AudioProcessor in project ExoPlayer by Google.
The class SimpleExoPlayer, method buildAudioRenderers:
/**
* Builds audio renderers for use by the player.
*
* @param context The {@link Context} associated with the player.
* @param mainHandler A handler associated with the main thread's looper.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will
* not be used for DRM protected playbacks.
* @param extensionRendererMode The extension renderer mode.
* @param eventListener An event listener.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio buffers
* before output. May be empty.
* @param out An array to which the built renderers should be appended.
*/
protected void buildAudioRenderers(Context context, Handler mainHandler,
    DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
    @ExtensionRendererMode int extensionRendererMode, AudioRendererEventListener eventListener,
    AudioProcessor[] audioProcessors, ArrayList<Renderer> out) {
  out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true,
      mainHandler, eventListener, AudioCapabilities.getCapabilities(context), audioProcessors));

  if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
    return;
  }
  int extensionRendererIndex = out.size();
  if (extensionRendererMode == EXTENSION_RENDERER_MODE_PREFER) {
    extensionRendererIndex--;
  }

  try {
    Class<?> clazz =
        Class.forName("com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer");
    Constructor<?> constructor = clazz.getConstructor(Handler.class,
        AudioRendererEventListener.class, AudioProcessor[].class);
    Renderer renderer =
        (Renderer) constructor.newInstance(mainHandler, eventListener, audioProcessors);
    out.add(extensionRendererIndex++, renderer);
    Log.i(TAG, "Loaded LibopusAudioRenderer.");
  } catch (ClassNotFoundException e) {
    // Expected if the app was built without the extension.
  } catch (Exception e) {
    throw new RuntimeException(e);
  }

  try {
    Class<?> clazz =
        Class.forName("com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer");
    Constructor<?> constructor = clazz.getConstructor(Handler.class,
        AudioRendererEventListener.class, AudioProcessor[].class);
    Renderer renderer =
        (Renderer) constructor.newInstance(mainHandler, eventListener, audioProcessors);
    out.add(extensionRendererIndex++, renderer);
    Log.i(TAG, "Loaded LibflacAudioRenderer.");
  } catch (ClassNotFoundException e) {
    // Expected if the app was built without the extension.
  } catch (Exception e) {
    throw new RuntimeException(e);
  }

  try {
    Class<?> clazz =
        Class.forName("com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
    Constructor<?> constructor = clazz.getConstructor(Handler.class,
        AudioRendererEventListener.class, AudioProcessor[].class);
    Renderer renderer =
        (Renderer) constructor.newInstance(mainHandler, eventListener, audioProcessors);
    out.add(extensionRendererIndex++, renderer);
    Log.i(TAG, "Loaded FfmpegAudioRenderer.");
  } catch (ClassNotFoundException e) {
    // Expected if the app was built without the extension.
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
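The try/catch blocks above implement a common pattern: optional extension modules are referenced only by class name, so the core library compiles without them, a missing class degrades gracefully, and any other reflection failure is surfaced as a genuine error. The same pattern in isolation, as a minimal self-contained sketch (OptionalDependencyDemo and com.example.OptionalTask are hypothetical names, not part of ExoPlayer):

import java.lang.reflect.Constructor;

/** Demonstrates optional loading of a class that may not be on the classpath. */
public final class OptionalDependencyDemo {

  public static void main(String[] args) {
    // Try to load an optional implementation; fall back silently if absent.
    Runnable task = tryLoad("com.example.OptionalTask"); // Hypothetical class name.
    if (task != null) {
      task.run();
    } else {
      System.out.println("Optional dependency not present; using default behavior.");
    }
  }

  /** Returns an instance of the named Runnable, or null if the class is not available. */
  private static Runnable tryLoad(String className) {
    try {
      Class<?> clazz = Class.forName(className);
      Constructor<?> constructor = clazz.getConstructor();
      return (Runnable) constructor.newInstance();
    } catch (ClassNotFoundException e) {
      // Expected when the optional module is absent: degrade gracefully.
      return null;
    } catch (Exception e) {
      // The class exists but could not be constructed: surface as a real error,
      // mirroring buildAudioRenderers above.
      throw new RuntimeException(e);
    }
  }
}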
Use of com.google.android.exoplayer2.audio.AudioProcessor in project ExoPlayer by Google.
The class DefaultAudioSink, method configure:
@Override
public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels)
    throws ConfigurationException {
  int inputPcmFrameSize;
  @Nullable AudioProcessor[] availableAudioProcessors;
  @OutputMode int outputMode;
  @C.Encoding int outputEncoding;
  int outputSampleRate;
  int outputChannelConfig;
  int outputPcmFrameSize;

  if (MimeTypes.AUDIO_RAW.equals(inputFormat.sampleMimeType)) {
    Assertions.checkArgument(Util.isEncodingLinearPcm(inputFormat.pcmEncoding));
    inputPcmFrameSize = Util.getPcmFrameSize(inputFormat.pcmEncoding, inputFormat.channelCount);
    availableAudioProcessors =
        shouldUseFloatOutput(inputFormat.pcmEncoding)
            ? toFloatPcmAvailableAudioProcessors
            : toIntPcmAvailableAudioProcessors;
    trimmingAudioProcessor.setTrimFrameCount(inputFormat.encoderDelay, inputFormat.encoderPadding);
    if (Util.SDK_INT < 21 && inputFormat.channelCount == 8 && outputChannels == null) {
      // AudioTrack doesn't support 8 channel output before Android L. Discard the last two (side)
      // channels to give a 6 channel stream that is supported.
      outputChannels = new int[6];
      for (int i = 0; i < outputChannels.length; i++) {
        outputChannels[i] = i;
      }
    }
    channelMappingAudioProcessor.setChannelMap(outputChannels);
    AudioProcessor.AudioFormat outputFormat =
        new AudioProcessor.AudioFormat(
            inputFormat.sampleRate, inputFormat.channelCount, inputFormat.pcmEncoding);
    for (AudioProcessor audioProcessor : availableAudioProcessors) {
      try {
        AudioProcessor.AudioFormat nextFormat = audioProcessor.configure(outputFormat);
        if (audioProcessor.isActive()) {
          outputFormat = nextFormat;
        }
      } catch (UnhandledAudioFormatException e) {
        throw new ConfigurationException(e, inputFormat);
      }
    }
    outputMode = OUTPUT_MODE_PCM;
    outputEncoding = outputFormat.encoding;
    outputSampleRate = outputFormat.sampleRate;
    outputChannelConfig = Util.getAudioTrackChannelConfig(outputFormat.channelCount);
    outputPcmFrameSize = Util.getPcmFrameSize(outputEncoding, outputFormat.channelCount);
  } else {
    inputPcmFrameSize = C.LENGTH_UNSET;
    availableAudioProcessors = new AudioProcessor[0];
    outputSampleRate = inputFormat.sampleRate;
    outputPcmFrameSize = C.LENGTH_UNSET;
    if (useOffloadedPlayback(inputFormat, audioAttributes)) {
      outputMode = OUTPUT_MODE_OFFLOAD;
      outputEncoding =
          MimeTypes.getEncoding(checkNotNull(inputFormat.sampleMimeType), inputFormat.codecs);
      outputChannelConfig = Util.getAudioTrackChannelConfig(inputFormat.channelCount);
    } else {
      outputMode = OUTPUT_MODE_PASSTHROUGH;
      @Nullable
      Pair<Integer, Integer> encodingAndChannelConfig =
          getEncodingAndChannelConfigForPassthrough(inputFormat, audioCapabilities);
      if (encodingAndChannelConfig == null) {
        throw new ConfigurationException(
            "Unable to configure passthrough for: " + inputFormat, inputFormat);
      }
      outputEncoding = encodingAndChannelConfig.first;
      outputChannelConfig = encodingAndChannelConfig.second;
    }
  }

  int bufferSize =
      specifiedBufferSize != 0
          ? specifiedBufferSize
          : audioTrackBufferSizeProvider.getBufferSizeInBytes(
              getAudioTrackMinBufferSize(outputSampleRate, outputChannelConfig, outputEncoding),
              outputEncoding,
              outputMode,
              outputPcmFrameSize,
              outputSampleRate,
              enableAudioTrackPlaybackParams ? MAX_PLAYBACK_SPEED : DEFAULT_PLAYBACK_SPEED);

  if (outputEncoding == C.ENCODING_INVALID) {
    throw new ConfigurationException(
        "Invalid output encoding (mode=" + outputMode + ") for: " + inputFormat, inputFormat);
  }
  if (outputChannelConfig == AudioFormat.CHANNEL_INVALID) {
    throw new ConfigurationException(
        "Invalid output channel config (mode=" + outputMode + ") for: " + inputFormat,
        inputFormat);
  }

  offloadDisabledUntilNextConfiguration = false;
  Configuration pendingConfiguration =
      new Configuration(
          inputFormat,
          inputPcmFrameSize,
          outputMode,
          outputPcmFrameSize,
          outputSampleRate,
          outputChannelConfig,
          outputEncoding,
          bufferSize,
          availableAudioProcessors);
  if (isAudioTrackInitialized()) {
    this.pendingConfiguration = pendingConfiguration;
  } else {
    configuration = pendingConfiguration;
  }
}
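The loop over availableAudioProcessors shows how the sink negotiates its PCM output format: each processor is offered the format produced by the chain so far, and only processors that report isActive() after configuration actually change that format. A minimal sketch of the same negotiation in isolation, assuming ExoPlayer's public SonicAudioProcessor and its setOutputSampleRateHz setter (verify the exact API against your ExoPlayer version; the ProcessorChainDemo class is illustrative only):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;

public final class ProcessorChainDemo {

  public static void main(String[] args) throws UnhandledAudioFormatException {
    SonicAudioProcessor sonic = new SonicAudioProcessor();
    sonic.setOutputSampleRateHz(48_000); // Request resampling, which makes the processor active.
    AudioProcessor[] chain = {sonic};

    // 44.1 kHz stereo 16-bit PCM input, as DefaultAudioSink.configure would see it.
    AudioFormat format = new AudioFormat(44_100, 2, C.ENCODING_PCM_16BIT);
    for (AudioProcessor processor : chain) {
      AudioFormat nextFormat = processor.configure(format);
      if (processor.isActive()) {
        format = nextFormat; // Only active processors change the chain's output format.
      }
    }
    System.out.println("Chain output sample rate: " + format.sampleRate); // Expected: 48000.
  }
}

Because an inactive processor leaves the format untouched, processors can sit permanently in the chain and be enabled on demand, which is how configure() above treats trimmingAudioProcessor and channelMappingAudioProcessor.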