Use of com.google.android.exoplayer2.RendererCapabilities in project ExoPlayer by google.
From the class DefaultTrackSelectorTest, method selectTracksWithMultipleVideoTracksWithNonSeamlessAdaptiveness:
@Test
public void selectTracksWithMultipleVideoTracksWithNonSeamlessAdaptiveness() throws Exception {
  FakeRendererCapabilities nonSeamlessVideoCapabilities =
      new FakeRendererCapabilities(
          C.TRACK_TYPE_VIDEO,
          RendererCapabilities.create(FORMAT_HANDLED, ADAPTIVE_NOT_SEAMLESS, TUNNELING_NOT_SUPPORTED));
  // Should do non-seamless adaptiveness by default, so expect an adaptive selection.
  Format.Builder formatBuilder = VIDEO_FORMAT.buildUpon();
  TrackGroupArray trackGroups =
      singleTrackGroup(formatBuilder.setId("0").build(), formatBuilder.setId("1").build());
  trackSelector.setParameters(
      defaultParameters.buildUpon().setAllowVideoNonSeamlessAdaptiveness(true));
  TrackSelectorResult result =
      trackSelector.selectTracks(
          new RendererCapabilities[] {nonSeamlessVideoCapabilities}, trackGroups, periodId, TIMELINE);
  assertThat(result.length).isEqualTo(1);
  assertAdaptiveSelection(result.selections[0], trackGroups.get(0), 0, 1);
  // If we explicitly disable non-seamless adaptiveness, expect a fixed selection.
  trackSelector.setParameters(
      defaultParameters.buildUpon().setAllowVideoNonSeamlessAdaptiveness(false));
  result =
      trackSelector.selectTracks(
          new RendererCapabilities[] {nonSeamlessVideoCapabilities}, trackGroups, periodId, TIMELINE);
  assertThat(result.length).isEqualTo(1);
  assertFixedSelection(result.selections[0], trackGroups.get(0), 0);
}
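RendererCapabilities.create packs the three flag groups above into a single capabilities int. As a hedged sketch (reusing the test's statically imported constants; the getAdaptiveSupport helper is assumed to be available in this version of RendererCapabilities), the adaptive bits can be read back out to see why the two halves of the test expect different selections:

  // Hedged sketch, not part of the test above: pack the three flag groups into one
  // capabilities int and read the adaptive-support bits back out. Helper and constant
  // names are assumed to match the RendererCapabilities version used by the test.
  int capabilities =
      RendererCapabilities.create(FORMAT_HANDLED, ADAPTIVE_NOT_SEAMLESS, TUNNELING_NOT_SUPPORTED);
  int adaptiveSupport = RendererCapabilities.getAdaptiveSupport(capabilities);
  boolean seamless = adaptiveSupport == RendererCapabilities.ADAPTIVE_SEAMLESS;          // false
  boolean adaptive = adaptiveSupport != RendererCapabilities.ADAPTIVE_NOT_SUPPORTED;     // true
  // The selector therefore only builds an adaptive selection for this renderer when
  // non-seamless adaptiveness is allowed, which is exactly what the two assertions verify.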
Use of com.google.android.exoplayer2.RendererCapabilities in project ExoPlayer by google.
From the class MediaPeriodQueueTest, method setUp:
@Before
public void setUp() {
  AnalyticsCollector analyticsCollector = new DefaultAnalyticsCollector(Clock.DEFAULT);
  analyticsCollector.setPlayer(
      new ExoPlayer.Builder(ApplicationProvider.getApplicationContext()).build(),
      Looper.getMainLooper());
  mediaPeriodQueue =
      new MediaPeriodQueue(analyticsCollector, new Handler(Looper.getMainLooper()));
  mediaSourceList =
      new MediaSourceList(
          mock(MediaSourceList.MediaSourceListInfoRefreshListener.class),
          analyticsCollector,
          new Handler(Looper.getMainLooper()),
          PlayerId.UNSET);
  rendererCapabilities = new RendererCapabilities[0];
  trackSelector = mock(TrackSelector.class);
  allocator = mock(Allocator.class);
}
Use of com.google.android.exoplayer2.RendererCapabilities in project ExoPlayer by google.
From the class DefaultTrackSelector, method selectAdaptiveVideoTrack:
private static TrackSelection selectAdaptiveVideoTrack(
    RendererCapabilities rendererCapabilities, TrackGroupArray groups, int[][] formatSupport,
    int maxVideoWidth, int maxVideoHeight, int maxVideoBitrate,
    boolean allowNonSeamlessAdaptiveness, boolean allowMixedMimeAdaptiveness,
    int viewportWidth, int viewportHeight, boolean orientationMayChange,
    TrackSelection.Factory adaptiveVideoTrackSelectionFactory) throws ExoPlaybackException {
  int requiredAdaptiveSupport = allowNonSeamlessAdaptiveness
      ? (RendererCapabilities.ADAPTIVE_NOT_SEAMLESS | RendererCapabilities.ADAPTIVE_SEAMLESS)
      : RendererCapabilities.ADAPTIVE_SEAMLESS;
  boolean allowMixedMimeTypes = allowMixedMimeAdaptiveness
      && (rendererCapabilities.supportsMixedMimeTypeAdaptation() & requiredAdaptiveSupport) != 0;
  for (int i = 0; i < groups.length; i++) {
    TrackGroup group = groups.get(i);
    int[] adaptiveTracks = getAdaptiveTracksForGroup(group, formatSupport[i], allowMixedMimeTypes,
        requiredAdaptiveSupport, maxVideoWidth, maxVideoHeight, maxVideoBitrate,
        viewportWidth, viewportHeight, orientationMayChange);
    if (adaptiveTracks.length > 0) {
      return adaptiveVideoTrackSelectionFactory.createTrackSelection(group, adaptiveTracks);
    }
  }
  return null;
}
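The mask built at the top of the method does the real work: a track can join an adaptive selection only if its own adaptive-support bits intersect requiredAdaptiveSupport. A small illustrative check with hypothetical values (not the body of getAdaptiveTracksForGroup):

  // Illustrative only, with hypothetical values; getAdaptiveTracksForGroup applies this kind of
  // mask per track in addition to its size, bitrate and viewport constraints.
  int requiredAdaptiveSupport =
      RendererCapabilities.ADAPTIVE_NOT_SEAMLESS | RendererCapabilities.ADAPTIVE_SEAMLESS;
  int trackAdaptiveSupport = RendererCapabilities.ADAPTIVE_NOT_SEAMLESS; // hypothetical track
  boolean qualifies = (trackAdaptiveSupport & requiredAdaptiveSupport) != 0;             // true
  boolean qualifiesSeamlessOnly =
      (trackAdaptiveSupport & RendererCapabilities.ADAPTIVE_SEAMLESS) != 0;              // false
  // With allowNonSeamlessAdaptiveness == false the mask collapses to ADAPTIVE_SEAMLESS, this
  // track is filtered out, and the caller falls back to a fixed selection.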
Use of com.google.android.exoplayer2.RendererCapabilities in project ExoPlayer by google.
From the class MappingTrackSelector, method maybeConfigureRenderersForTunneling:
/**
 * Determines whether tunneling should be enabled, replacing {@link RendererConfiguration}s in
 * {@code rendererConfigurations} with configurations that enable tunneling on the appropriate
 * renderers if so.
 *
 * @param rendererCapabilities The {@link RendererCapabilities} of the renderers for which
 *     {@link TrackSelection}s are to be generated.
 * @param rendererTrackGroupArrays An array of {@link TrackGroupArray}s where each entry
 *     corresponds to the renderer of equal index in {@code rendererCapabilities}.
 * @param rendererFormatSupports Maps every available track to a specific level of support as
 *     defined by the renderer {@code FORMAT_*} constants.
 * @param rendererConfigurations The renderer configurations. Configurations may be replaced with
 *     ones that enable tunneling as a result of this call.
 * @param trackSelections The renderer track selections.
 * @param tunnelingAudioSessionId The audio session id to use when tunneling, or
 *     {@link C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled.
 */
private static void maybeConfigureRenderersForTunneling(
    RendererCapabilities[] rendererCapabilities, TrackGroupArray[] rendererTrackGroupArrays,
    int[][][] rendererFormatSupports, RendererConfiguration[] rendererConfigurations,
    TrackSelection[] trackSelections, int tunnelingAudioSessionId) {
  if (tunnelingAudioSessionId == C.AUDIO_SESSION_ID_UNSET) {
    return;
  }
  // Check whether we can enable tunneling. To enable tunneling we require exactly one audio and
  // one video renderer to support tunneling and have a selection.
  int tunnelingAudioRendererIndex = -1;
  int tunnelingVideoRendererIndex = -1;
  boolean enableTunneling = true;
  for (int i = 0; i < rendererCapabilities.length; i++) {
    int rendererType = rendererCapabilities[i].getTrackType();
    TrackSelection trackSelection = trackSelections[i];
    if ((rendererType == C.TRACK_TYPE_AUDIO || rendererType == C.TRACK_TYPE_VIDEO)
        && trackSelection != null) {
      if (rendererSupportsTunneling(
          rendererFormatSupports[i], rendererTrackGroupArrays[i], trackSelection)) {
        if (rendererType == C.TRACK_TYPE_AUDIO) {
          if (tunnelingAudioRendererIndex != -1) {
            enableTunneling = false;
            break;
          } else {
            tunnelingAudioRendererIndex = i;
          }
        } else {
          if (tunnelingVideoRendererIndex != -1) {
            enableTunneling = false;
            break;
          } else {
            tunnelingVideoRendererIndex = i;
          }
        }
      }
    }
  }
  enableTunneling &= tunnelingAudioRendererIndex != -1 && tunnelingVideoRendererIndex != -1;
  if (enableTunneling) {
    RendererConfiguration tunnelingRendererConfiguration =
        new RendererConfiguration(tunnelingAudioSessionId);
    rendererConfigurations[tunnelingAudioRendererIndex] = tunnelingRendererConfiguration;
    rendererConfigurations[tunnelingVideoRendererIndex] = tunnelingRendererConfiguration;
  }
}
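rendererSupportsTunneling is not shown in this excerpt; conceptually it has to confirm that every track in the selection advertises tunneling support. A hedged sketch of that check (hypothetical helper name; the real MappingTrackSelector implementation may differ, and the TUNNELING_* constants are assumed to live on RendererCapabilities in this version):

  // Hedged sketch of the per-selection check that rendererSupportsTunneling is expected to do;
  // not the actual MappingTrackSelector implementation.
  private static boolean selectionSupportsTunnelingSketch(
      int[][] formatSupport, TrackGroupArray trackGroups, TrackSelection selection) {
    int groupIndex = trackGroups.indexOf(selection.getTrackGroup());
    for (int i = 0; i < selection.length(); i++) {
      int trackFlags = formatSupport[groupIndex][selection.getIndexInTrackGroup(i)];
      // Every selected track must carry the tunneling-support bit, otherwise tunneling stays off.
      if ((trackFlags & RendererCapabilities.TUNNELING_SUPPORT_MASK)
          != RendererCapabilities.TUNNELING_SUPPORTED) {
        return false;
      }
    }
    return true;
  }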
Use of com.google.android.exoplayer2.RendererCapabilities in project ExoPlayer by google.
From the class MappingTrackSelector, method selectTracks:
// TrackSelector implementation.
@Override
public final TrackSelectorResult selectTracks(
    RendererCapabilities[] rendererCapabilities, TrackGroupArray trackGroups)
    throws ExoPlaybackException {
  // Structures into which data will be written during the selection. The extra item at the end
  // of each array is to store data associated with track groups that cannot be associated with
  // any renderer.
  int[] rendererTrackGroupCounts = new int[rendererCapabilities.length + 1];
  TrackGroup[][] rendererTrackGroups = new TrackGroup[rendererCapabilities.length + 1][];
  int[][][] rendererFormatSupports = new int[rendererCapabilities.length + 1][][];
  for (int i = 0; i < rendererTrackGroups.length; i++) {
    rendererTrackGroups[i] = new TrackGroup[trackGroups.length];
    rendererFormatSupports[i] = new int[trackGroups.length][];
  }
  // Determine the extent to which each renderer supports mixed mimeType adaptation.
  int[] mixedMimeTypeAdaptationSupport = getMixedMimeTypeAdaptationSupport(rendererCapabilities);
  // Associate each track group to a preferred renderer, and evaluate the support that the
  // renderer provides for each track in the group.
  for (int groupIndex = 0; groupIndex < trackGroups.length; groupIndex++) {
    TrackGroup group = trackGroups.get(groupIndex);
    // Associate the group to a preferred renderer.
    int rendererIndex = findRenderer(rendererCapabilities, group);
    // Evaluate the support that the renderer provides for each track in the group.
    int[] rendererFormatSupport =
        rendererIndex == rendererCapabilities.length
            ? new int[group.length]
            : getFormatSupport(rendererCapabilities[rendererIndex], group);
    // Stash the results.
    int rendererTrackGroupCount = rendererTrackGroupCounts[rendererIndex];
    rendererTrackGroups[rendererIndex][rendererTrackGroupCount] = group;
    rendererFormatSupports[rendererIndex][rendererTrackGroupCount] = rendererFormatSupport;
    rendererTrackGroupCounts[rendererIndex]++;
  }
  // Create a track group array for each renderer, and trim each rendererFormatSupports entry.
  TrackGroupArray[] rendererTrackGroupArrays = new TrackGroupArray[rendererCapabilities.length];
  int[] rendererTrackTypes = new int[rendererCapabilities.length];
  for (int i = 0; i < rendererCapabilities.length; i++) {
    int rendererTrackGroupCount = rendererTrackGroupCounts[i];
    rendererTrackGroupArrays[i] =
        new TrackGroupArray(Arrays.copyOf(rendererTrackGroups[i], rendererTrackGroupCount));
    rendererFormatSupports[i] = Arrays.copyOf(rendererFormatSupports[i], rendererTrackGroupCount);
    rendererTrackTypes[i] = rendererCapabilities[i].getTrackType();
  }
  // Create a track group array for track groups not associated with a renderer.
  int unassociatedTrackGroupCount = rendererTrackGroupCounts[rendererCapabilities.length];
  TrackGroupArray unassociatedTrackGroupArray =
      new TrackGroupArray(
          Arrays.copyOf(rendererTrackGroups[rendererCapabilities.length], unassociatedTrackGroupCount));
  TrackSelection[] trackSelections =
      selectTracks(rendererCapabilities, rendererTrackGroupArrays, rendererFormatSupports);
  // Apply track disabling and overriding.
  for (int i = 0; i < rendererCapabilities.length; i++) {
    if (rendererDisabledFlags.get(i)) {
      trackSelections[i] = null;
    } else {
      TrackGroupArray rendererTrackGroup = rendererTrackGroupArrays[i];
      Map<TrackGroupArray, SelectionOverride> overrides = selectionOverrides.get(i);
      SelectionOverride override = overrides == null ? null : overrides.get(rendererTrackGroup);
      if (override != null) {
        trackSelections[i] = override.createTrackSelection(rendererTrackGroup);
      }
    }
  }
  // Package up the track information and selections.
  MappedTrackInfo mappedTrackInfo =
      new MappedTrackInfo(rendererTrackTypes, rendererTrackGroupArrays,
          mixedMimeTypeAdaptationSupport, rendererFormatSupports, unassociatedTrackGroupArray);
  // Initialize the renderer configurations to the default configuration for all renderers with
  // selections, and null otherwise.
  RendererConfiguration[] rendererConfigurations =
      new RendererConfiguration[rendererCapabilities.length];
  for (int i = 0; i < rendererCapabilities.length; i++) {
    rendererConfigurations[i] = trackSelections[i] != null ? RendererConfiguration.DEFAULT : null;
  }
  // Configure audio and video renderers to use tunneling if appropriate.
  maybeConfigureRenderersForTunneling(rendererCapabilities, rendererTrackGroupArrays,
      rendererFormatSupports, rendererConfigurations, trackSelections, tunnelingAudioSessionId);
  return new TrackSelectorResult(
      trackGroups, new TrackSelectionArray(trackSelections), mappedTrackInfo, rendererConfigurations);
}
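The disabling and override loop in the middle of selectTracks only consumes state that callers configure beforehand. A hedged usage sketch follows; the setter and override signatures are assumed from this older MappingTrackSelector API and may not exist in later versions, where they moved into DefaultTrackSelector.Parameters:

  // Hedged usage sketch, not from the source: configure the state that the disabling/override
  // loop above reads. Method and constructor signatures are assumed for this older API version.
  DefaultTrackSelector trackSelector = new DefaultTrackSelector();
  MappingTrackSelector.MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
  if (mappedTrackInfo != null) {
    // Disable a renderer entirely; its entry in trackSelections becomes null.
    trackSelector.setRendererDisabled(/* rendererIndex= */ 2, /* disabled= */ true);
    // Pin another renderer to track 0 of group 0; the override replaces the automatic selection.
    TrackGroupArray rendererGroups = mappedTrackInfo.getTrackGroups(/* rendererIndex= */ 0);
    MappingTrackSelector.SelectionOverride override =
        new MappingTrackSelector.SelectionOverride(
            new FixedTrackSelection.Factory(), /* groupIndex= */ 0, /* tracks= */ 0);
    trackSelector.setSelectionOverride(/* rendererIndex= */ 0, rendererGroups, override);
  }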