
Example 21 with Format

Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.

Class MediaCodecVideoRenderer, method getCodecMaxValues.

/**
   * Returns {@link CodecMaxValues} suitable for configuring a codec for {@code format} in a way
   * that will allow possible adaptation to other compatible formats in {@code streamFormats}.
   *
   * @param codecInfo Information about the {@link MediaCodec} being configured.
   * @param format The format for which the codec is being configured.
   * @param streamFormats The possible stream formats.
   * @return Suitable {@link CodecMaxValues}.
   * @throws DecoderQueryException If an error occurs querying {@code codecInfo}.
   */
private static CodecMaxValues getCodecMaxValues(MediaCodecInfo codecInfo, Format format, Format[] streamFormats) throws DecoderQueryException {
    int maxWidth = format.width;
    int maxHeight = format.height;
    int maxInputSize = getMaxInputSize(format);
    if (streamFormats.length == 1) {
        // The single entry in streamFormats must correspond to the format for which the codec is
        // being configured.
        return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
    }
    boolean haveUnknownDimensions = false;
    for (Format streamFormat : streamFormats) {
        if (areAdaptationCompatible(format, streamFormat)) {
            haveUnknownDimensions |= (streamFormat.width == Format.NO_VALUE || streamFormat.height == Format.NO_VALUE);
            maxWidth = Math.max(maxWidth, streamFormat.width);
            maxHeight = Math.max(maxHeight, streamFormat.height);
            maxInputSize = Math.max(maxInputSize, getMaxInputSize(streamFormat));
        }
    }
    if (haveUnknownDimensions) {
        Log.w(TAG, "Resolutions unknown. Codec max resolution: " + maxWidth + "x" + maxHeight);
        Point codecMaxSize = getCodecMaxSize(codecInfo, format);
        if (codecMaxSize != null) {
            maxWidth = Math.max(maxWidth, codecMaxSize.x);
            maxHeight = Math.max(maxHeight, codecMaxSize.y);
            maxInputSize = Math.max(maxInputSize, getMaxInputSize(format.sampleMimeType, maxWidth, maxHeight));
            Log.w(TAG, "Codec max resolution adjusted to: " + maxWidth + "x" + maxHeight);
        }
    }
    return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
}
Also used : MediaFormat(android.media.MediaFormat) Format(com.google.android.exoplayer2.Format) Point(android.graphics.Point) SuppressLint(android.annotation.SuppressLint)
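
For context, a minimal sketch of how a CodecMaxValues result typically feeds into codec configuration: the maximum width, height and input size are written into the MediaFormat before the codec is configured, so that buffers are sized for the largest format an adaptive switch might select. The MediaFormat keys are real Android constants, but the helper below is illustrative and is not ExoPlayer's actual configuration code.

// Illustrative helper, assuming the three values computed by getCodecMaxValues above.
// Not ExoPlayer's configureCodec implementation.
static void applyCodecMaxValues(MediaFormat mediaFormat, int maxWidth, int maxHeight, int maxInputSize) {
    mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, maxWidth);
    mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, maxHeight);
    if (maxInputSize != Format.NO_VALUE) {
        // Only set the key when an input size could actually be computed.
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
    }
}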

Example 22 with Format

Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.

Class Util, method parseXsDateTime.

/**
   * Parses an xs:dateTime attribute value, returning the parsed timestamp in milliseconds since
   * the epoch.
   *
   * @param value The attribute value to decode.
   * @return The parsed timestamp in milliseconds since the epoch.
   * @throws ParserException if an error occurs parsing the dateTime attribute value.
   */
public static long parseXsDateTime(String value) throws ParserException {
    Matcher matcher = XS_DATE_TIME_PATTERN.matcher(value);
    if (!matcher.matches()) {
        throw new ParserException("Invalid date/time format: " + value);
    }
    int timezoneShift;
    if (matcher.group(9) == null) {
        // No time zone specified.
        timezoneShift = 0;
    } else if (matcher.group(9).equalsIgnoreCase("Z")) {
        timezoneShift = 0;
    } else {
        timezoneShift = Integer.parseInt(matcher.group(12)) * 60 + Integer.parseInt(matcher.group(13));
        if (matcher.group(11).equals("-")) {
            timezoneShift *= -1;
        }
    }
    Calendar dateTime = new GregorianCalendar(TimeZone.getTimeZone("GMT"));
    dateTime.clear();
    // Note: The month value is 0-based, hence the -1 on group(2)
    dateTime.set(Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2)) - 1, Integer.parseInt(matcher.group(3)), Integer.parseInt(matcher.group(4)), Integer.parseInt(matcher.group(5)), Integer.parseInt(matcher.group(6)));
    if (!TextUtils.isEmpty(matcher.group(8))) {
        final BigDecimal bd = new BigDecimal("0." + matcher.group(8));
        // we care only for milliseconds, so movePointRight(3)
        dateTime.set(Calendar.MILLISECOND, bd.movePointRight(3).intValue());
    }
    long time = dateTime.getTimeInMillis();
    if (timezoneShift != 0) {
        time -= timezoneShift * 60000;
    }
    return time;
}
Also used : ParserException(com.google.android.exoplayer2.ParserException) Matcher(java.util.regex.Matcher) Calendar(java.util.Calendar) GregorianCalendar(java.util.GregorianCalendar) Point(android.graphics.Point) BigDecimal(java.math.BigDecimal)
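
A short usage sketch (the date strings are invented for illustration): two xs:dateTime values that denote the same instant parse to the same millisecond timestamp, because the timezone offset is folded into the result.

static void parseXsDateTimeExample() throws ParserException {
    // Both strings denote the same instant; the "+01:00" offset is subtracted back out.
    long utc = Util.parseXsDateTime("2017-01-01T00:00:00Z");
    long shifted = Util.parseXsDateTime("2017-01-01T01:00:00+01:00");
    // utc == shifted == 1483228800000L (2017-01-01T00:00:00Z in milliseconds since the epoch).
}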

Example 23 with Format

Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.

Class HlsChunkSource, method getNextChunk.

/**
   * Returns the next chunk to load.
   * <p>
   * If a chunk is available then {@link HlsChunkHolder#chunk} is set. If the end of the stream has
   * been reached then {@link HlsChunkHolder#endOfStream} is set. If a chunk is not available but
   * the end of the stream has not been reached, {@link HlsChunkHolder#playlist} is set to
   * contain the {@link HlsUrl} that refers to the playlist that needs refreshing.
   *
   * @param previous The most recently loaded media chunk.
   * @param playbackPositionUs The current playback position. If {@code previous} is null then this
   *     parameter is the position from which playback is expected to start (or restart) and hence
   *     should be interpreted as a seek position.
   * @param out A holder to populate.
   */
public void getNextChunk(HlsMediaChunk previous, long playbackPositionUs, HlsChunkHolder out) {
    int oldVariantIndex = previous == null ? C.INDEX_UNSET : trackGroup.indexOf(previous.trackFormat);
    // Use start time of the previous chunk rather than its end time because switching format will
    // require downloading overlapping segments.
    long bufferedDurationUs = previous == null ? 0 : Math.max(0, previous.startTimeUs - playbackPositionUs);
    // Select the variant.
    trackSelection.updateSelectedTrack(bufferedDurationUs);
    int selectedVariantIndex = trackSelection.getSelectedIndexInTrackGroup();
    boolean switchingVariant = oldVariantIndex != selectedVariantIndex;
    HlsUrl selectedUrl = variants[selectedVariantIndex];
    if (!playlistTracker.isSnapshotValid(selectedUrl)) {
        out.playlist = selectedUrl;
        // Retry when playlist is refreshed.
        return;
    }
    HlsMediaPlaylist mediaPlaylist = playlistTracker.getPlaylistSnapshot(selectedUrl);
    // Select the chunk.
    int chunkMediaSequence;
    if (previous == null || switchingVariant) {
        long targetPositionUs = previous == null ? playbackPositionUs : previous.startTimeUs;
        if (!mediaPlaylist.hasEndTag && targetPositionUs > mediaPlaylist.getEndTimeUs()) {
            // If the playlist is too old to contain the chunk, we need to refresh it.
            chunkMediaSequence = mediaPlaylist.mediaSequence + mediaPlaylist.segments.size();
        } else {
            chunkMediaSequence = Util.binarySearchFloor(mediaPlaylist.segments, targetPositionUs - mediaPlaylist.startTimeUs, true, !playlistTracker.isLive() || previous == null) + mediaPlaylist.mediaSequence;
            if (chunkMediaSequence < mediaPlaylist.mediaSequence && previous != null) {
                // We try getting the next chunk without adapting in case that's the reason for falling
                // behind the live window.
                selectedVariantIndex = oldVariantIndex;
                selectedUrl = variants[selectedVariantIndex];
                mediaPlaylist = playlistTracker.getPlaylistSnapshot(selectedUrl);
                chunkMediaSequence = previous.getNextChunkIndex();
            }
        }
    } else {
        chunkMediaSequence = previous.getNextChunkIndex();
    }
    if (chunkMediaSequence < mediaPlaylist.mediaSequence) {
        fatalError = new BehindLiveWindowException();
        return;
    }
    int chunkIndex = chunkMediaSequence - mediaPlaylist.mediaSequence;
    if (chunkIndex >= mediaPlaylist.segments.size()) {
        if (mediaPlaylist.hasEndTag) {
            out.endOfStream = true;
        } else {
            // Live stream: the playlist needs refreshing before more chunks become available.
            out.playlist = selectedUrl;
        }
        return;
    }
    // Handle encryption.
    HlsMediaPlaylist.Segment segment = mediaPlaylist.segments.get(chunkIndex);
    // Check if encryption is specified.
    if (segment.isEncrypted) {
        Uri keyUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, segment.encryptionKeyUri);
        if (!keyUri.equals(encryptionKeyUri)) {
            // Encryption is specified and the key has changed.
            out.chunk = newEncryptionKeyChunk(keyUri, segment.encryptionIV, selectedVariantIndex, trackSelection.getSelectionReason(), trackSelection.getSelectionData());
            return;
        }
        if (!Util.areEqual(segment.encryptionIV, encryptionIvString)) {
            setEncryptionData(keyUri, segment.encryptionIV, encryptionKey);
        }
    } else {
        clearEncryptionData();
    }
    DataSpec initDataSpec = null;
    Segment initSegment = mediaPlaylist.initializationSegment;
    if (initSegment != null) {
        Uri initSegmentUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, initSegment.url);
        initDataSpec = new DataSpec(initSegmentUri, initSegment.byterangeOffset, initSegment.byterangeLength, null);
    }
    // Compute start time of the next chunk.
    long startTimeUs = mediaPlaylist.startTimeUs + segment.relativeStartTimeUs;
    int discontinuitySequence = mediaPlaylist.discontinuitySequence + segment.relativeDiscontinuitySequence;
    TimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(discontinuitySequence);
    // Configure the data source and spec for the chunk.
    Uri chunkUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, segment.url);
    DataSpec dataSpec = new DataSpec(chunkUri, segment.byterangeOffset, segment.byterangeLength, null);
    out.chunk = new HlsMediaChunk(mediaDataSource, dataSpec, initDataSpec, selectedUrl, muxedCaptionFormats, trackSelection.getSelectionReason(), trackSelection.getSelectionData(), startTimeUs, startTimeUs + segment.durationUs, chunkMediaSequence, discontinuitySequence, isTimestampMaster, timestampAdjuster, previous, encryptionKey, encryptionIv);
}
Also used : Segment(com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment) HlsUrl(com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.HlsUrl) BehindLiveWindowException(com.google.android.exoplayer2.source.BehindLiveWindowException) DataSpec(com.google.android.exoplayer2.upstream.DataSpec) HlsMediaPlaylist(com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist) TimestampAdjuster(com.google.android.exoplayer2.util.TimestampAdjuster) Uri(android.net.Uri)
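
A hedged sketch of the caller's side of this contract: after each call, exactly one of the three holder outcomes described in the Javadoc is acted on. The holder fields come from the documentation above; startLoadingChunk and refreshPlaylist are placeholder names, not ExoPlayer methods.

HlsChunkHolder holder = new HlsChunkHolder();
chunkSource.getNextChunk(previousChunk, playbackPositionUs, holder);
if (holder.endOfStream) {
    // Playback has reached the end of the stream; nothing more to load.
} else if (holder.chunk != null) {
    // A chunk is ready: hand it to the loader (placeholder).
    startLoadingChunk(holder.chunk);
} else if (holder.playlist != null) {
    // No chunk yet: the referenced playlist must be refreshed before retrying (placeholder).
    refreshPlaylist(holder.playlist);
}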

Example 24 with Format

Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.

Class HlsMediaChunk, method loadMedia.

private void loadMedia() throws IOException, InterruptedException {
    // If we previously fed part of this chunk to the extractor, we need to skip it this time. For
    // encrypted content we need to skip the data by reading it through the source, so as to ensure
    // correct decryption of the remainder of the chunk. For clear content, we can request the
    // remainder of the chunk directly.
    DataSpec loadDataSpec;
    boolean skipLoadedBytes;
    if (isEncrypted) {
        loadDataSpec = dataSpec;
        skipLoadedBytes = bytesLoaded != 0;
    } else {
        loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded);
        skipLoadedBytes = false;
    }
    if (!isMasterTimestampSource) {
        timestampAdjuster.waitUntilInitialized();
    } else if (timestampAdjuster.getFirstSampleTimestampUs() == TimestampAdjuster.DO_NOT_OFFSET) {
        // We're the master and we haven't set the desired first sample timestamp yet.
        timestampAdjuster.setFirstSampleTimestampUs(startTimeUs);
    }
    try {
        ExtractorInput input = new DefaultExtractorInput(dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec));
        if (extractor == null) {
            // Media segment format is packed audio.
            long id3Timestamp = peekId3PrivTimestamp(input);
            extractor = buildPackedAudioExtractor(id3Timestamp != C.TIME_UNSET ? timestampAdjuster.adjustTsTimestamp(id3Timestamp) : startTimeUs);
        }
        if (skipLoadedBytes) {
            input.skipFully(bytesLoaded);
        }
        try {
            int result = Extractor.RESULT_CONTINUE;
            while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
                result = extractor.read(input, null);
            }
        } finally {
            bytesLoaded = (int) (input.getPosition() - dataSpec.absoluteStreamPosition);
        }
    } finally {
        Util.closeQuietly(dataSource);
    }
    loadCompleted = true;
}
Also used : ExtractorInput(com.google.android.exoplayer2.extractor.ExtractorInput) DefaultExtractorInput(com.google.android.exoplayer2.extractor.DefaultExtractorInput) DataSpec(com.google.android.exoplayer2.upstream.DataSpec) DefaultExtractorInput(com.google.android.exoplayer2.extractor.DefaultExtractorInput)
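
As a rough sketch of the "remainder" DataSpec used in the clear-content branch: the request is shifted forward by the bytes already loaded so only the tail of the chunk is re-fetched. The field names match the DataSpec seen in these examples, but the body below is illustrative rather than ExoPlayer's actual Util.getRemainderDataSpec.

// Illustrative only; not the real Util.getRemainderDataSpec implementation.
static DataSpec remainderDataSpec(DataSpec dataSpec, int bytesLoaded) {
    if (bytesLoaded == 0) {
        return dataSpec;
    }
    long remainingLength = dataSpec.length == C.LENGTH_UNSET
        ? C.LENGTH_UNSET
        : dataSpec.length - bytesLoaded;
    return new DataSpec(dataSpec.uri, dataSpec.absoluteStreamPosition + bytesLoaded, remainingLength, dataSpec.key);
}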

Example 25 with Format

Use of com.google.android.exoplayer2.Format in project ExoPlayer by google.

Class HlsSampleStreamWrapper, method buildTracks.

/**
   * Builds tracks that are exposed by this {@link HlsSampleStreamWrapper} instance, as well as
   * internal data-structures required for operation.
   * <p>
   * Tracks in HLS are complicated. A HLS master playlist contains a number of "variants". Each
   * variant stream typically contains muxed video, audio and (possibly) additional audio, metadata
   * and caption tracks. We wish to allow the user to select between an adaptive track that spans
   * all variants, as well as each individual variant. If multiple audio tracks are present within
   * each variant then we wish to allow the user to select between those also.
   * <p>
   * To do this, tracks are constructed as follows. The {@link HlsChunkSource} exposes (N+1) tracks,
   * where N is the number of variants defined in the HLS master playlist. These consist of one
   * adaptive track defined to span all variants and a track for each individual variant. The
   * adaptive track is initially selected. The extractor is then prepared to discover the tracks
   * inside of each variant stream. The two sets of tracks are then combined by this method to
   * create a third set, which is the set exposed by this {@link HlsSampleStreamWrapper}:
   * <ul>
   * <li>The extractor tracks are inspected to infer a "primary" track type. If a video track is
   * present then it is always the primary type. If not, audio is the primary type if present.
   * Else text is the primary type if present. Else there is no primary type.</li>
   * <li>If there is exactly one extractor track of the primary type, it's expanded into (N+1)
   * exposed tracks, all of which correspond to the primary extractor track and each of which
   * corresponds to a different chunk source track. Selecting one of these tracks has the effect
   * of switching the selected track on the chunk source.</li>
   * <li>All other extractor tracks are exposed directly. Selecting one of these tracks has the
   * effect of selecting an extractor track, leaving the selected track on the chunk source
   * unchanged.</li>
   * </ul>
   */
private void buildTracks() {
    // Iterate through the extractor tracks to discover the "primary" track type, and the index
    // of the single track of this type.
    int primaryExtractorTrackType = PRIMARY_TYPE_NONE;
    int primaryExtractorTrackIndex = C.INDEX_UNSET;
    int extractorTrackCount = sampleQueues.size();
    for (int i = 0; i < extractorTrackCount; i++) {
        String sampleMimeType = sampleQueues.valueAt(i).getUpstreamFormat().sampleMimeType;
        int trackType;
        if (MimeTypes.isVideo(sampleMimeType)) {
            trackType = PRIMARY_TYPE_VIDEO;
        } else if (MimeTypes.isAudio(sampleMimeType)) {
            trackType = PRIMARY_TYPE_AUDIO;
        } else if (MimeTypes.isText(sampleMimeType)) {
            trackType = PRIMARY_TYPE_TEXT;
        } else {
            trackType = PRIMARY_TYPE_NONE;
        }
        if (trackType > primaryExtractorTrackType) {
            primaryExtractorTrackType = trackType;
            primaryExtractorTrackIndex = i;
        } else if (trackType == primaryExtractorTrackType && primaryExtractorTrackIndex != C.INDEX_UNSET) {
            // We have multiple tracks of the primary type. We only want an index if there only exists a
            // single track of the primary type, so unset the index again.
            primaryExtractorTrackIndex = C.INDEX_UNSET;
        }
    }
    TrackGroup chunkSourceTrackGroup = chunkSource.getTrackGroup();
    int chunkSourceTrackCount = chunkSourceTrackGroup.length;
    // Instantiate the necessary internal data-structures.
    primaryTrackGroupIndex = C.INDEX_UNSET;
    groupEnabledStates = new boolean[extractorTrackCount];
    // Construct the set of exposed track groups.
    TrackGroup[] trackGroups = new TrackGroup[extractorTrackCount];
    for (int i = 0; i < extractorTrackCount; i++) {
        Format sampleFormat = sampleQueues.valueAt(i).getUpstreamFormat();
        if (i == primaryExtractorTrackIndex) {
            Format[] formats = new Format[chunkSourceTrackCount];
            for (int j = 0; j < chunkSourceTrackCount; j++) {
                formats[j] = deriveFormat(chunkSourceTrackGroup.getFormat(j), sampleFormat);
            }
            trackGroups[i] = new TrackGroup(formats);
            primaryTrackGroupIndex = i;
        } else {
            Format trackFormat = primaryExtractorTrackType == PRIMARY_TYPE_VIDEO && MimeTypes.isAudio(sampleFormat.sampleMimeType) ? muxedAudioFormat : null;
            trackGroups[i] = new TrackGroup(deriveFormat(trackFormat, sampleFormat));
        }
    }
    this.trackGroups = new TrackGroupArray(trackGroups);
}
Also used : Format(com.google.android.exoplayer2.Format) TrackGroup(com.google.android.exoplayer2.source.TrackGroup) TrackGroupArray(com.google.android.exoplayer2.source.TrackGroupArray)
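
An illustrative sketch (not from the source) of inspecting the result: after buildTracks() runs, the wrapper exposes one TrackGroup per extractor track, and the primary group carries N + 1 formats, one per chunk-source track.

for (int groupIndex = 0; groupIndex < trackGroups.length; groupIndex++) {
    TrackGroup group = trackGroups.get(groupIndex);
    for (int formatIndex = 0; formatIndex < group.length; formatIndex++) {
        Format exposed = group.getFormat(formatIndex);
        // The primary group has one entry per chunk-source track; the others have a single entry.
        Log.d(TAG, "group=" + groupIndex + " format=" + formatIndex
            + " mimeType=" + exposed.sampleMimeType + " bitrate=" + exposed.bitrate);
    }
}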

Aggregations

Format (com.google.android.exoplayer2.Format) 44
Point (android.graphics.Point) 8
ArrayList (java.util.ArrayList) 8
TrackGroup (com.google.android.exoplayer2.source.TrackGroup) 7
DataSpec (com.google.android.exoplayer2.upstream.DataSpec) 5
SuppressLint (android.annotation.SuppressLint) 4
MediaFormat (android.media.MediaFormat) 4
ParserException (com.google.android.exoplayer2.ParserException) 4
DrmInitData (com.google.android.exoplayer2.drm.DrmInitData) 4
ParsableByteArray (com.google.android.exoplayer2.util.ParsableByteArray) 4
TrackOutput (com.google.android.exoplayer2.extractor.TrackOutput) 3
TrackGroupArray (com.google.android.exoplayer2.source.TrackGroupArray) 3
Representation (com.google.android.exoplayer2.source.dash.manifest.Representation) 3
SchemeData (com.google.android.exoplayer2.drm.DrmInitData.SchemeData) 2
MediaCodecInfo (com.google.android.exoplayer2.mediacodec.MediaCodecInfo) 2
Metadata (com.google.android.exoplayer2.metadata.Metadata) 2
ContainerMediaChunk (com.google.android.exoplayer2.source.chunk.ContainerMediaChunk) 2
AdaptationSet (com.google.android.exoplayer2.source.dash.manifest.AdaptationSet) 2
RangedUri (com.google.android.exoplayer2.source.dash.manifest.RangedUri) 2
SingleSegmentBase (com.google.android.exoplayer2.source.dash.manifest.SegmentBase.SingleSegmentBase) 2