
Example 1 with TrackGroupArray

Use of com.google.android.exoplayer2.source.TrackGroupArray in the ExoPlayer project by Google.

From the class PlayerActivity, the method updateButtonVisibilities.

// User controls
private void updateButtonVisibilities() {
    debugRootView.removeAllViews();
    retryButton.setVisibility(needRetrySource ? View.VISIBLE : View.GONE);
    debugRootView.addView(retryButton);
    if (player == null) {
        return;
    }
    MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
    if (mappedTrackInfo == null) {
        return;
    }
    for (int i = 0; i < mappedTrackInfo.length; i++) {
        TrackGroupArray trackGroups = mappedTrackInfo.getTrackGroups(i);
        if (trackGroups.length != 0) {
            Button button = new Button(this);
            int label;
            switch(player.getRendererType(i)) {
                case C.TRACK_TYPE_AUDIO:
                    label = R.string.audio;
                    break;
                case C.TRACK_TYPE_VIDEO:
                    label = R.string.video;
                    break;
                case C.TRACK_TYPE_TEXT:
                    label = R.string.text;
                    break;
                default:
                    continue;
            }
            button.setText(label);
            button.setTag(i);
            button.setOnClickListener(this);
            debugRootView.addView(button, debugRootView.getChildCount() - 1);
        }
    }
}
Also used: Button (android.widget.Button), TrackGroupArray (com.google.android.exoplayer2.source.TrackGroupArray), MappedTrackInfo (com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo)
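
The buttons created above store the renderer index via setTag(i), so a click handler can recover it and open a selection UI for that renderer. A minimal sketch of such a handler follows; initializePlayer and showTrackSelectionDialog are assumed helpers standing in for whatever the activity actually provides.

@Override
public void onClick(View view) {
    if (view == retryButton) {
        // initializePlayer() is an assumed helper that (re)creates the player after a failure.
        initializePlayer();
    } else if (view.getParent() == debugRootView) {
        MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
        if (mappedTrackInfo != null) {
            // The renderer index was stored on the button via setTag(i) in updateButtonVisibilities.
            int rendererIndex = (int) view.getTag();
            // showTrackSelectionDialog is a hypothetical helper standing in for the activity's selection UI.
            showTrackSelectionDialog(rendererIndex, mappedTrackInfo);
        }
    }
}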

Example 2 with TrackGroupArray

Use of com.google.android.exoplayer2.source.TrackGroupArray in the ExoPlayer project by Google.

From the class PlayerActivity, the method onTracksChanged.

@Override
public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
    updateButtonVisibilities();
    MappedTrackInfo mappedTrackInfo = trackSelector.getCurrentMappedTrackInfo();
    if (mappedTrackInfo != null) {
        if (mappedTrackInfo.getTrackTypeRendererSupport(C.TRACK_TYPE_VIDEO) == MappedTrackInfo.RENDERER_SUPPORT_UNSUPPORTED_TRACKS) {
            showToast(R.string.error_unsupported_video);
        }
        if (mappedTrackInfo.getTrackTypeRendererSupport(C.TRACK_TYPE_AUDIO) == MappedTrackInfo.RENDERER_SUPPORT_UNSUPPORTED_TRACKS) {
            showToast(R.string.error_unsupported_audio);
        }
    }
}
Also used: MappedTrackInfo (com.google.android.exoplayer2.trackselection.MappingTrackSelector.MappedTrackInfo)
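
This callback only fires if the activity is registered as an event listener, and showToast is presumably a thin wrapper around Android's Toast. A minimal sketch of both, under those assumptions:

// Registering the activity as an event listener (ExoPlayer 2.x API of the same era as these examples).
player.addListener(this);

// Assumed implementation of the showToast helper used in onTracksChanged.
private void showToast(int messageId) {
    Toast.makeText(getApplicationContext(), messageId, Toast.LENGTH_LONG).show();
}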

Example 3 with TrackGroupArray

Use of com.google.android.exoplayer2.source.TrackGroupArray in the ExoPlayer project by Google.

From the class ExoPlayerTest, the method testPlaySinglePeriodTimeline.

/**
   * Tests playback of a source that exposes a single period.
   */
public void testPlaySinglePeriodTimeline() throws Exception {
    PlayerWrapper playerWrapper = new PlayerWrapper();
    Timeline timeline = new FakeTimeline(new TimelineWindowDefinition(false, false, 0));
    Object manifest = new Object();
    MediaSource mediaSource = new FakeMediaSource(timeline, manifest, TEST_VIDEO_FORMAT);
    FakeRenderer renderer = new FakeRenderer(TEST_VIDEO_FORMAT);
    playerWrapper.setup(mediaSource, renderer);
    playerWrapper.blockUntilEnded(TIMEOUT_MS);
    assertEquals(0, playerWrapper.positionDiscontinuityCount);
    assertEquals(1, renderer.formatReadCount);
    assertEquals(1, renderer.bufferReadCount);
    assertTrue(renderer.isEnded);
    assertEquals(timeline, playerWrapper.timeline);
    assertEquals(manifest, playerWrapper.manifest);
    assertEquals(new TrackGroupArray(new TrackGroup(TEST_VIDEO_FORMAT)), playerWrapper.trackGroups);
}
Also used: MediaSource (com.google.android.exoplayer2.source.MediaSource), TrackGroup (com.google.android.exoplayer2.source.TrackGroup), TrackGroupArray (com.google.android.exoplayer2.source.TrackGroupArray)
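
The final assertion only passes because TrackGroupArray (and TrackGroup) define structural equality. A minimal sketch of that property, assuming format is any com.google.android.exoplayer2.Format instance:

// Two arrays built from equal groups compare equal, which is what the assertEquals above relies on.
TrackGroupArray first = new TrackGroupArray(new TrackGroup(format));
TrackGroupArray second = new TrackGroupArray(new TrackGroup(format));
assertTrue(first.equals(second));
assertEquals(1, first.length);                      // one group...
assertEquals(format, first.get(0).getFormat(0));    // ...containing the single format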

Example 4 with TrackGroupArray

Use of com.google.android.exoplayer2.source.TrackGroupArray in the ExoPlayer project by Google.

From the class ExtractorMediaPeriod, the method maybeFinishPrepare.

// Internal methods.
private void maybeFinishPrepare() {
    if (released || prepared || seekMap == null || !tracksBuilt) {
        return;
    }
    int trackCount = sampleQueues.size();
    for (int i = 0; i < trackCount; i++) {
        if (sampleQueues.valueAt(i).getUpstreamFormat() == null) {
            return;
        }
    }
    loadCondition.close();
    TrackGroup[] trackArray = new TrackGroup[trackCount];
    trackIsAudioVideoFlags = new boolean[trackCount];
    trackEnabledStates = new boolean[trackCount];
    durationUs = seekMap.getDurationUs();
    for (int i = 0; i < trackCount; i++) {
        Format trackFormat = sampleQueues.valueAt(i).getUpstreamFormat();
        trackArray[i] = new TrackGroup(trackFormat);
        String mimeType = trackFormat.sampleMimeType;
        boolean isAudioVideo = MimeTypes.isVideo(mimeType) || MimeTypes.isAudio(mimeType);
        trackIsAudioVideoFlags[i] = isAudioVideo;
        haveAudioVideoTracks |= isAudioVideo;
    }
    tracks = new TrackGroupArray(trackArray);
    prepared = true;
    sourceListener.onSourceInfoRefreshed(new SinglePeriodTimeline(durationUs, seekMap.isSeekable()), null);
    callback.onPrepared(this);
}
Also used: Format (com.google.android.exoplayer2.Format)
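
Once prepared is set, the TrackGroupArray built here is exposed through the period's getTrackGroups() accessor during track selection. A hypothetical consumer sketch that simply walks the exposed groups and formats:

// mediaPeriod is an assumed, already-prepared MediaPeriod (such as the ExtractorMediaPeriod above).
TrackGroupArray trackGroups = mediaPeriod.getTrackGroups();
for (int group = 0; group < trackGroups.length; group++) {
    TrackGroup trackGroup = trackGroups.get(group);
    for (int track = 0; track < trackGroup.length; track++) {
        Format format = trackGroup.getFormat(track);
        Log.d("Tracks", "group=" + group + " track=" + track + " mime=" + format.sampleMimeType);
    }
}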

Example 5 with TrackGroupArray

Use of com.google.android.exoplayer2.source.TrackGroupArray in the ExoPlayer project by Google.

From the class HlsSampleStreamWrapper, the method buildTracks.

/**
   * Builds tracks that are exposed by this {@link HlsSampleStreamWrapper} instance, as well as
   * internal data-structures required for operation.
   * <p>
   * Tracks in HLS are complicated. An HLS master playlist contains a number of "variants". Each
   * variant stream typically contains muxed video, audio and (possibly) additional audio, metadata
   * and caption tracks. We wish to allow the user to select between an adaptive track that spans
   * all variants, as well as each individual variant. If multiple audio tracks are present within
   * each variant then we wish to allow the user to select between those also.
   * <p>
   * To do this, tracks are constructed as follows. The {@link HlsChunkSource} exposes (N+1) tracks,
   * where N is the number of variants defined in the HLS master playlist. These consist of one
   * adaptive track defined to span all variants and a track for each individual variant. The
   * adaptive track is initially selected. The extractor is then prepared to discover the tracks
   * inside of each variant stream. The two sets of tracks are then combined by this method to
   * create a third set, which is the set exposed by this {@link HlsSampleStreamWrapper}:
   * <ul>
   * <li>The extractor tracks are inspected to infer a "primary" track type. If a video track is
   * present then it is always the primary type. If not, audio is the primary type if present.
   * Else text is the primary type if present. Else there is no primary type.</li>
   * <li>If there is exactly one extractor track of the primary type, it's expanded into (N+1)
   * exposed tracks, all of which correspond to the primary extractor track and each of which
   * corresponds to a different chunk source track. Selecting one of these tracks has the effect
   * of switching the selected track on the chunk source.</li>
   * <li>All other extractor tracks are exposed directly. Selecting one of these tracks has the
   * effect of selecting an extractor track, leaving the selected track on the chunk source
   * unchanged.</li>
   * </ul>
   */
private void buildTracks() {
    // Iterate through the extractor tracks to discover the "primary" track type, and the index
    // of the single track of this type.
    int primaryExtractorTrackType = PRIMARY_TYPE_NONE;
    int primaryExtractorTrackIndex = C.INDEX_UNSET;
    int extractorTrackCount = sampleQueues.size();
    for (int i = 0; i < extractorTrackCount; i++) {
        String sampleMimeType = sampleQueues.valueAt(i).getUpstreamFormat().sampleMimeType;
        int trackType;
        if (MimeTypes.isVideo(sampleMimeType)) {
            trackType = PRIMARY_TYPE_VIDEO;
        } else if (MimeTypes.isAudio(sampleMimeType)) {
            trackType = PRIMARY_TYPE_AUDIO;
        } else if (MimeTypes.isText(sampleMimeType)) {
            trackType = PRIMARY_TYPE_TEXT;
        } else {
            trackType = PRIMARY_TYPE_NONE;
        }
        if (trackType > primaryExtractorTrackType) {
            primaryExtractorTrackType = trackType;
            primaryExtractorTrackIndex = i;
        } else if (trackType == primaryExtractorTrackType && primaryExtractorTrackIndex != C.INDEX_UNSET) {
            // We have multiple tracks of the primary type. We only want an index if there only exists a
            // single track of the primary type, so unset the index again.
            primaryExtractorTrackIndex = C.INDEX_UNSET;
        }
    }
    TrackGroup chunkSourceTrackGroup = chunkSource.getTrackGroup();
    int chunkSourceTrackCount = chunkSourceTrackGroup.length;
    // Instantiate the necessary internal data-structures.
    primaryTrackGroupIndex = C.INDEX_UNSET;
    groupEnabledStates = new boolean[extractorTrackCount];
    // Construct the set of exposed track groups.
    TrackGroup[] trackGroups = new TrackGroup[extractorTrackCount];
    for (int i = 0; i < extractorTrackCount; i++) {
        Format sampleFormat = sampleQueues.valueAt(i).getUpstreamFormat();
        if (i == primaryExtractorTrackIndex) {
            Format[] formats = new Format[chunkSourceTrackCount];
            for (int j = 0; j < chunkSourceTrackCount; j++) {
                formats[j] = deriveFormat(chunkSourceTrackGroup.getFormat(j), sampleFormat);
            }
            trackGroups[i] = new TrackGroup(formats);
            primaryTrackGroupIndex = i;
        } else {
            Format trackFormat = primaryExtractorTrackType == PRIMARY_TYPE_VIDEO && MimeTypes.isAudio(sampleFormat.sampleMimeType) ? muxedAudioFormat : null;
            trackGroups[i] = new TrackGroup(deriveFormat(trackFormat, sampleFormat));
        }
    }
    this.trackGroups = new TrackGroupArray(trackGroups);
}
Also used: Format (com.google.android.exoplayer2.Format), TrackGroup (com.google.android.exoplayer2.source.TrackGroup), TrackGroupArray (com.google.android.exoplayer2.source.TrackGroupArray)
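
The primary-type inference in the first loop relies on the PRIMARY_TYPE_* constants being ordered so that video outranks audio, which outranks text. The sketch below makes that ordering explicit; the constant values are assumptions chosen only to satisfy the trackType > primaryExtractorTrackType comparison, not the library's actual declarations.

// Assumed ordering: NONE < TEXT < AUDIO < VIDEO, so a higher-priority type wins the comparison.
private static final int PRIMARY_TYPE_NONE = 0;
private static final int PRIMARY_TYPE_TEXT = 1;
private static final int PRIMARY_TYPE_AUDIO = 2;
private static final int PRIMARY_TYPE_VIDEO = 3;

// The same inference factored into a helper, under the assumed constants above.
private static int primaryTrackType(String sampleMimeType) {
    if (MimeTypes.isVideo(sampleMimeType)) {
        return PRIMARY_TYPE_VIDEO;
    } else if (MimeTypes.isAudio(sampleMimeType)) {
        return PRIMARY_TYPE_AUDIO;
    } else if (MimeTypes.isText(sampleMimeType)) {
        return PRIMARY_TYPE_TEXT;
    } else {
        return PRIMARY_TYPE_NONE;
    }
}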

Aggregations

TrackGroupArray (com.google.android.exoplayer2.source.TrackGroupArray): 125 usages
Test (org.junit.Test): 92 usages
Format (com.google.android.exoplayer2.Format): 67 usages
TrackGroup (com.google.android.exoplayer2.source.TrackGroup): 50 usages
RendererCapabilities (com.google.android.exoplayer2.RendererCapabilities): 36 usages
Nullable (androidx.annotation.Nullable): 18 usages
FakeMediaSource (com.google.android.exoplayer2.testutil.FakeMediaSource): 17 usages
FakeTimeline (com.google.android.exoplayer2.testutil.FakeTimeline): 17 usages
TransferListener (com.google.android.exoplayer2.upstream.TransferListener): 14 usages
DrmSessionManager (com.google.android.exoplayer2.drm.DrmSessionManager): 13 usages
MediaSource (com.google.android.exoplayer2.source.MediaSource): 13 usages
MediaPeriodId (com.google.android.exoplayer2.source.MediaSource.MediaPeriodId): 13 usages
FakeMediaPeriod (com.google.android.exoplayer2.testutil.FakeMediaPeriod): 13 usages
TestExoPlayerBuilder (com.google.android.exoplayer2.testutil.TestExoPlayerBuilder): 13 usages
Allocator (com.google.android.exoplayer2.upstream.Allocator): 12 usages
ArrayList (java.util.ArrayList): 11 usages
ClippingMediaSource (com.google.android.exoplayer2.source.ClippingMediaSource): 9 usages
CompositeMediaSource (com.google.android.exoplayer2.source.CompositeMediaSource): 9 usages
ConcatenatingMediaSource (com.google.android.exoplayer2.source.ConcatenatingMediaSource): 9 usages
MaskingMediaSource (com.google.android.exoplayer2.source.MaskingMediaSource): 9 usages