Use of com.google.android.exoplayer2.source.SampleStream in project ExoPlayer by Google.
The class HlsMediaPeriod, method buildAndPrepareMainSampleStreamWrapper.
/**
* This method creates and starts preparation of the main {@link HlsSampleStreamWrapper}.
*
* <p>The main sample stream wrapper is the first element of {@link #sampleStreamWrappers}. It
* provides {@link SampleStream}s for the variant URLs in the multivariant playlist. It may be
* adaptive and may contain multiple muxed tracks.
*
* <p>If chunkless preparation is allowed, the media period will try preparation without segment
* downloads. This is only possible if variants contain the CODECS attribute (see the codec-gating
* sketch after this method). If not, traditional preparation with segment downloads will take
* place. The following points apply to chunkless preparation:
*
* <ul>
* <li>A muxed audio track will be exposed if the codecs list contains an audio entry and the
* multivariant playlist either contains an EXT-X-MEDIA tag without the URI attribute or
* does not contain any EXT-X-MEDIA tag.
* <li>Closed captions will only be exposed if they are declared by the multivariant playlist.
* <li>An ID3 track is exposed preemptively, in case the segments contain an ID3 track.
* </ul>
*
* @param multivariantPlaylist The HLS multivariant playlist.
* @param positionUs If preparation requires any chunk downloads, the position in microseconds at
* which downloading should start. Ignored otherwise.
* @param sampleStreamWrappers List to which the built main sample stream wrapper should be added.
* @param manifestUrlIndicesPerWrapper List to which the selected variant indices should be added.
* @param overridingDrmInitData Overriding {@link DrmInitData}, keyed by protection scheme type
* (i.e. {@link DrmInitData#schemeType}).
*/
private void buildAndPrepareMainSampleStreamWrapper(
    HlsMultivariantPlaylist multivariantPlaylist,
    long positionUs,
    List<HlsSampleStreamWrapper> sampleStreamWrappers,
    List<int[]> manifestUrlIndicesPerWrapper,
    Map<String, DrmInitData> overridingDrmInitData) {
int[] variantTypes = new int[multivariantPlaylist.variants.size()];
int videoVariantCount = 0;
int audioVariantCount = 0;
for (int i = 0; i < multivariantPlaylist.variants.size(); i++) {
Variant variant = multivariantPlaylist.variants.get(i);
Format format = variant.format;
if (format.height > 0 || Util.getCodecsOfType(format.codecs, C.TRACK_TYPE_VIDEO) != null) {
variantTypes[i] = C.TRACK_TYPE_VIDEO;
videoVariantCount++;
} else if (Util.getCodecsOfType(format.codecs, C.TRACK_TYPE_AUDIO) != null) {
variantTypes[i] = C.TRACK_TYPE_AUDIO;
audioVariantCount++;
} else {
variantTypes[i] = C.TRACK_TYPE_UNKNOWN;
}
}
boolean useVideoVariantsOnly = false;
boolean useNonAudioVariantsOnly = false;
int selectedVariantsCount = variantTypes.length;
if (videoVariantCount > 0) {
// We've identified some variants as definitely containing video. Assume variants within the
// multivariant playlist are marked consistently, and hence that we have the full set. Filter
// out any other variants, which are likely to be audio only.
useVideoVariantsOnly = true;
selectedVariantsCount = videoVariantCount;
} else if (audioVariantCount < variantTypes.length) {
// We've identified some variants, but not all, as being audio only. Filter them out to leave
// the remaining variants, which are likely to contain video.
useNonAudioVariantsOnly = true;
selectedVariantsCount = variantTypes.length - audioVariantCount;
}
Uri[] selectedPlaylistUrls = new Uri[selectedVariantsCount];
Format[] selectedPlaylistFormats = new Format[selectedVariantsCount];
int[] selectedVariantIndices = new int[selectedVariantsCount];
int outIndex = 0;
for (int i = 0; i < multivariantPlaylist.variants.size(); i++) {
if ((!useVideoVariantsOnly || variantTypes[i] == C.TRACK_TYPE_VIDEO)
    && (!useNonAudioVariantsOnly || variantTypes[i] != C.TRACK_TYPE_AUDIO)) {
Variant variant = multivariantPlaylist.variants.get(i);
selectedPlaylistUrls[outIndex] = variant.url;
selectedPlaylistFormats[outIndex] = variant.format;
selectedVariantIndices[outIndex++] = i;
}
}
String codecs = selectedPlaylistFormats[0].codecs;
int numberOfVideoCodecs = Util.getCodecCountOfType(codecs, C.TRACK_TYPE_VIDEO);
int numberOfAudioCodecs = Util.getCodecCountOfType(codecs, C.TRACK_TYPE_AUDIO);
boolean codecsStringAllowsChunklessPreparation =
    numberOfAudioCodecs <= 1 && numberOfVideoCodecs <= 1 && numberOfAudioCodecs + numberOfVideoCodecs > 0;
@C.TrackType
int trackType =
    !useVideoVariantsOnly && numberOfAudioCodecs > 0 ? C.TRACK_TYPE_AUDIO : C.TRACK_TYPE_DEFAULT;
String sampleStreamWrapperUid = "main";
HlsSampleStreamWrapper sampleStreamWrapper =
    buildSampleStreamWrapper(
        sampleStreamWrapperUid,
        trackType,
        selectedPlaylistUrls,
        selectedPlaylistFormats,
        multivariantPlaylist.muxedAudioFormat,
        multivariantPlaylist.muxedCaptionFormats,
        overridingDrmInitData,
        positionUs);
sampleStreamWrappers.add(sampleStreamWrapper);
manifestUrlIndicesPerWrapper.add(selectedVariantIndices);
if (allowChunklessPreparation && codecsStringAllowsChunklessPreparation) {
List<TrackGroup> muxedTrackGroups = new ArrayList<>();
if (numberOfVideoCodecs > 0) {
Format[] videoFormats = new Format[selectedVariantsCount];
for (int i = 0; i < videoFormats.length; i++) {
videoFormats[i] = deriveVideoFormat(selectedPlaylistFormats[i]);
}
muxedTrackGroups.add(new TrackGroup(sampleStreamWrapperUid, videoFormats));
if (numberOfAudioCodecs > 0
    && (multivariantPlaylist.muxedAudioFormat != null || multivariantPlaylist.audios.isEmpty())) {
muxedTrackGroups.add(
    new TrackGroup(
        /* id= */ sampleStreamWrapperUid + ":audio",
        deriveAudioFormat(
            selectedPlaylistFormats[0],
            multivariantPlaylist.muxedAudioFormat,
            /* isPrimaryTrackInVariant= */ false)));
}
List<Format> ccFormats = multivariantPlaylist.muxedCaptionFormats;
if (ccFormats != null) {
for (int i = 0; i < ccFormats.size(); i++) {
String ccId = sampleStreamWrapperUid + ":cc:" + i;
muxedTrackGroups.add(new TrackGroup(ccId, ccFormats.get(i)));
}
}
} else /* numberOfAudioCodecs > 0 */ {
// Variants only contain audio.
Format[] audioFormats = new Format[selectedVariantsCount];
for (int i = 0; i < audioFormats.length; i++) {
audioFormats[i] =
    deriveAudioFormat(
        /* variantFormat= */ selectedPlaylistFormats[i],
        multivariantPlaylist.muxedAudioFormat,
        /* isPrimaryTrackInVariant= */ true);
}
muxedTrackGroups.add(new TrackGroup(sampleStreamWrapperUid, audioFormats));
}
TrackGroup id3TrackGroup =
    new TrackGroup(
        /* id= */ sampleStreamWrapperUid + ":id3",
        new Format.Builder()
            .setId("ID3")
            .setSampleMimeType(MimeTypes.APPLICATION_ID3)
            .build());
muxedTrackGroups.add(id3TrackGroup);
sampleStreamWrapper.prepareWithMultivariantPlaylistInfo(
    muxedTrackGroups.toArray(new TrackGroup[0]),
    /* primaryTrackGroupIndex= */ 0,
    /* optionalTrackGroupsIndices...= */ muxedTrackGroups.indexOf(id3TrackGroup));
}
}
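The chunkless-preparation gate described in the javadoc above boils down to two codec counts derived from the first selected variant's codecs string. Below is a minimal, self-contained sketch of that check in isolation, using the same Util helpers as the method; the codecs value is a hypothetical example, not taken from a real playlist.
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;

public final class ChunklessGateDemo {
  public static void main(String[] args) {
    // Hypothetical CODECS attribute value declaring one video and one audio codec.
    String codecs = "avc1.64001f,mp4a.40.2";
    int videoCodecs = Util.getCodecCountOfType(codecs, C.TRACK_TYPE_VIDEO); // 1
    int audioCodecs = Util.getCodecCountOfType(codecs, C.TRACK_TYPE_AUDIO); // 1
    // Same condition as codecsStringAllowsChunklessPreparation above: at most one codec per
    // type and at least one codec overall.
    boolean allowsChunkless =
        videoCodecs <= 1 && audioCodecs <= 1 && videoCodecs + audioCodecs > 0;
    System.out.println("chunkless preparation allowed: " + allowsChunkless); // true
  }
}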
Use of com.google.android.exoplayer2.source.SampleStream in project ExoPlayer by Google.
The class HlsMediaPeriod, method selectTracks.
@Override
public long selectTracks(
    @NullableType ExoTrackSelection[] selections,
    boolean[] mayRetainStreamFlags,
    @NullableType SampleStream[] streams,
    boolean[] streamResetFlags,
    long positionUs) {
// Map each selection and stream onto a child period index.
int[] streamChildIndices = new int[selections.length];
int[] selectionChildIndices = new int[selections.length];
for (int i = 0; i < selections.length; i++) {
streamChildIndices[i] = streams[i] == null ? C.INDEX_UNSET : streamWrapperIndices.get(streams[i]);
selectionChildIndices[i] = C.INDEX_UNSET;
if (selections[i] != null) {
TrackGroup trackGroup = selections[i].getTrackGroup();
for (int j = 0; j < sampleStreamWrappers.length; j++) {
if (sampleStreamWrappers[j].getTrackGroups().indexOf(trackGroup) != C.INDEX_UNSET) {
selectionChildIndices[i] = j;
break;
}
}
}
}
boolean forceReset = false;
streamWrapperIndices.clear();
// Select tracks for each child, copying the resulting streams back into a new streams array.
SampleStream[] newStreams = new SampleStream[selections.length];
@NullableType SampleStream[] childStreams = new SampleStream[selections.length];
@NullableType ExoTrackSelection[] childSelections = new ExoTrackSelection[selections.length];
int newEnabledSampleStreamWrapperCount = 0;
HlsSampleStreamWrapper[] newEnabledSampleStreamWrappers = new HlsSampleStreamWrapper[sampleStreamWrappers.length];
for (int i = 0; i < sampleStreamWrappers.length; i++) {
for (int j = 0; j < selections.length; j++) {
childStreams[j] = streamChildIndices[j] == i ? streams[j] : null;
childSelections[j] = selectionChildIndices[j] == i ? selections[j] : null;
}
HlsSampleStreamWrapper sampleStreamWrapper = sampleStreamWrappers[i];
boolean wasReset =
    sampleStreamWrapper.selectTracks(
        childSelections, mayRetainStreamFlags, childStreams, streamResetFlags, positionUs, forceReset);
boolean wrapperEnabled = false;
for (int j = 0; j < selections.length; j++) {
SampleStream childStream = childStreams[j];
if (selectionChildIndices[j] == i) {
// Assert that the child provided a stream for the selection.
Assertions.checkNotNull(childStream);
newStreams[j] = childStream;
wrapperEnabled = true;
streamWrapperIndices.put(childStream, i);
} else if (streamChildIndices[j] == i) {
// Assert that the child cleared any previous stream.
Assertions.checkState(childStream == null);
}
}
if (wrapperEnabled) {
newEnabledSampleStreamWrappers[newEnabledSampleStreamWrapperCount] = sampleStreamWrapper;
if (newEnabledSampleStreamWrapperCount++ == 0) {
// The first enabled wrapper is always allowed to initialize timestamp adjusters. Note
// that the first wrapper will correspond to a variant, or else an audio rendition, or
// else a text rendition, in that order.
sampleStreamWrapper.setIsTimestampMaster(true);
if (wasReset || enabledSampleStreamWrappers.length == 0 || sampleStreamWrapper != enabledSampleStreamWrappers[0]) {
// The wrapper responsible for initializing the timestamp adjusters was reset or
// changed. We need to reset the timestamp adjuster provider and all other wrappers.
timestampAdjusterProvider.reset();
forceReset = true;
}
} else {
// Additional wrappers are also allowed to initialize timestamp adjusters if they contain
// audio or video, since they are expected to contain dense samples. Text wrappers are not
// permitted except in the case above in which no variant or audio rendition wrappers are
// enabled.
sampleStreamWrapper.setIsTimestampMaster(i < audioVideoSampleStreamWrapperCount);
}
}
}
// Copy the new streams back into the streams array.
System.arraycopy(newStreams, 0, streams, 0, newStreams.length);
// Update the local state.
enabledSampleStreamWrappers =
    Util.nullSafeArrayCopy(newEnabledSampleStreamWrappers, newEnabledSampleStreamWrapperCount);
compositeSequenceableLoader =
    compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(enabledSampleStreamWrappers);
return positionUs;
}
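selectTracks above routes each renderer slot to at most one child wrapper, hands every child only its own slice of the selections and streams (other slots nulled out), and then merges the per-child results back into a single array. Here is a self-contained sketch of that slicing pattern in plain Java; the names are hypothetical and this is not ExoPlayer API.
import java.util.Arrays;

public final class ChildSliceDemo {
  public static void main(String[] args) {
    // Three renderer slots; slots 0 and 1 map to child wrapper 0, slot 2 to child wrapper 1.
    String[] selections = {"video", "audio", "text"};
    int[] selectionChildIndices = {0, 0, 1};
    int childCount = 2;
    for (int child = 0; child < childCount; child++) {
      String[] childSelections = new String[selections.length];
      for (int j = 0; j < selections.length; j++) {
        // Null out slots owned by other children, as in the per-wrapper loop above.
        childSelections[j] = selectionChildIndices[j] == child ? selections[j] : null;
      }
      System.out.println("child " + child + " sees " + Arrays.toString(childSelections));
    }
  }
}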
Use of com.google.android.exoplayer2.source.SampleStream in project ExoPlayer by Google.
The class ExoPlayerImplInternal, method maybeUpdateReadingPeriod.
private void maybeUpdateReadingPeriod() {
@Nullable MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod();
if (readingPeriodHolder == null) {
return;
}
if (readingPeriodHolder.getNext() == null || pendingPauseAtEndOfPeriod) {
// There is no successor period to advance the reading period to, or playback should pause
// intentionally at the end of the current period.
if (readingPeriodHolder.info.isFinal || pendingPauseAtEndOfPeriod) {
for (int i = 0; i < renderers.length; i++) {
Renderer renderer = renderers[i];
SampleStream sampleStream = readingPeriodHolder.sampleStreams[i];
// Defer setting the stream as final until the renderer has actually read the whole stream, in
// case playlist changes cause the stream to no longer be final.
if (sampleStream != null && renderer.getStream() == sampleStream && renderer.hasReadStreamToEnd()) {
long streamEndPositionUs =
    readingPeriodHolder.info.durationUs != C.TIME_UNSET
            && readingPeriodHolder.info.durationUs != C.TIME_END_OF_SOURCE
        ? readingPeriodHolder.getRendererOffset() + readingPeriodHolder.info.durationUs
        : C.TIME_UNSET;
setCurrentStreamFinal(renderer, streamEndPositionUs);
}
}
}
return;
}
if (!hasReadingPeriodFinishedReading()) {
return;
}
if (!readingPeriodHolder.getNext().prepared
    && rendererPositionUs < readingPeriodHolder.getNext().getStartPositionRendererTime()) {
// The successor is not prepared yet and playback hasn't reached the transition point.
return;
}
MediaPeriodHolder oldReadingPeriodHolder = readingPeriodHolder;
TrackSelectorResult oldTrackSelectorResult = readingPeriodHolder.getTrackSelectorResult();
readingPeriodHolder = queue.advanceReadingPeriod();
TrackSelectorResult newTrackSelectorResult = readingPeriodHolder.getTrackSelectorResult();
updatePlaybackSpeedSettingsForNewPeriod(
    /* newTimeline= */ playbackInfo.timeline,
    /* newPeriodId= */ readingPeriodHolder.info.id,
    /* oldTimeline= */ playbackInfo.timeline,
    /* oldPeriodId= */ oldReadingPeriodHolder.info.id,
    /* positionForTargetOffsetOverrideUs= */ C.TIME_UNSET);
if (readingPeriodHolder.prepared
    && readingPeriodHolder.mediaPeriod.readDiscontinuity() != C.TIME_UNSET) {
// The new period starts with a discontinuity, so the renderers will play out all data, then
// be disabled and re-enabled when they start playing the next period.
setAllRendererStreamsFinal(
    /* streamEndPositionUs= */ readingPeriodHolder.getStartPositionRendererTime());
return;
}
for (int i = 0; i < renderers.length; i++) {
boolean oldRendererEnabled = oldTrackSelectorResult.isRendererEnabled(i);
boolean newRendererEnabled = newTrackSelectorResult.isRendererEnabled(i);
if (oldRendererEnabled && !renderers[i].isCurrentStreamFinal()) {
boolean isNoSampleRenderer = rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE;
RendererConfiguration oldConfig = oldTrackSelectorResult.rendererConfigurations[i];
RendererConfiguration newConfig = newTrackSelectorResult.rendererConfigurations[i];
if (!newRendererEnabled || !newConfig.equals(oldConfig) || isNoSampleRenderer) {
// The renderer will be disabled when transitioning to playing the next period, because
// there's no new selection, or because a configuration change is required, or because
// it's a no-sample renderer for which rendererOffsetUs should be updated only when
// starting to play the next period. Mark the SampleStream as final to play out any
// remaining data.
setCurrentStreamFinal(
    renderers[i],
    /* streamEndPositionUs= */ readingPeriodHolder.getStartPositionRendererTime());
}
}
}
}
Use of com.google.android.exoplayer2.source.SampleStream in project ExoPlayer by Google.
The class ExoPlayerImplInternal, method hasReadingPeriodFinishedReading.
private boolean hasReadingPeriodFinishedReading() {
MediaPeriodHolder readingPeriodHolder = queue.getReadingPeriod();
if (!readingPeriodHolder.prepared) {
return false;
}
for (int i = 0; i < renderers.length; i++) {
Renderer renderer = renderers[i];
SampleStream sampleStream = readingPeriodHolder.sampleStreams[i];
if (renderer.getStream() != sampleStream
    || (sampleStream != null
        && !renderer.hasReadStreamToEnd()
        && !hasReachedServerSideInsertedAdsTransition(renderer, readingPeriodHolder))) {
// The current reading period is still being read by at least one renderer.
return false;
}
}
return true;
}
Use of com.google.android.exoplayer2.source.SampleStream in project ExoPlayer by Google.
The class EventSampleStreamTest, method readData_omitSampleData_doesOmitSampleData.
@Test
public void readData_omitSampleData_doesOmitSampleData() {
long presentationTimeUs = 1000000;
EventMessage eventMessage = newEventMessageWithId(1);
EventStream eventStream =
    new EventStream(
        SCHEME_ID,
        VALUE,
        TIME_SCALE,
        new long[] {presentationTimeUs},
        new EventMessage[] {eventMessage});
EventSampleStream sampleStream = new EventSampleStream(eventStream, FORMAT, false);
// First read: returns the format.
readData(sampleStream);
// Read the event with FLAG_OMIT_SAMPLE_DATA; the sample data should be omitted.
int result = readData(sampleStream, SampleStream.FLAG_OMIT_SAMPLE_DATA);
assertThat(result).isEqualTo(C.RESULT_BUFFER_READ);
assertThat(inputBuffer.data).isNull();
}
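The readData(...) helpers called by this test are not shown in the snippet. Below is a minimal sketch of what they plausibly look like, assuming the test holds a FormatHolder and a DecoderInputBuffer as fields; the names and details here are assumptions rather than the actual EventSampleStreamTest code.
private int readData(EventSampleStream sampleStream) {
  return readData(sampleStream, /* readFlags= */ 0);
}

private int readData(EventSampleStream sampleStream, int readFlags) {
  // Delegates to SampleStream.readData; the result is C.RESULT_FORMAT_READ,
  // C.RESULT_BUFFER_READ, or C.RESULT_NOTHING_READ.
  return sampleStream.readData(formatHolder, inputBuffer, readFlags);
}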