
Example 1 with Chunk

Use of com.google.android.exoplayer2.source.chunk.Chunk in project ExoPlayer by Google.

In the class AtomParsers, the method parseStbl:

/**
   * Parses an stbl atom (defined in ISO/IEC 14496-12).
   *
   * @param track Track to which this sample table corresponds.
   * @param stblAtom stbl (sample table) atom to decode.
   * @param gaplessInfoHolder Holder to populate with gapless playback information.
   * @return Sample table described by the stbl atom.
   * @throws ParserException If the resulting sample sequence does not contain a sync sample.
   */
public static TrackSampleTable parseStbl(Track track, Atom.ContainerAtom stblAtom, GaplessInfoHolder gaplessInfoHolder) throws ParserException {
    SampleSizeBox sampleSizeBox;
    Atom.LeafAtom stszAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stsz);
    if (stszAtom != null) {
        sampleSizeBox = new StszSampleSizeBox(stszAtom);
    } else {
        Atom.LeafAtom stz2Atom = stblAtom.getLeafAtomOfType(Atom.TYPE_stz2);
        if (stz2Atom == null) {
            throw new ParserException("Track has no sample table size information");
        }
        sampleSizeBox = new Stz2SampleSizeBox(stz2Atom);
    }
    int sampleCount = sampleSizeBox.getSampleCount();
    if (sampleCount == 0) {
        return new TrackSampleTable(new long[0], new int[0], 0, new long[0], new int[0]);
    }
    // Entries are byte offsets of chunks.
    boolean chunkOffsetsAreLongs = false;
    Atom.LeafAtom chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stco);
    if (chunkOffsetsAtom == null) {
        chunkOffsetsAreLongs = true;
        chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_co64);
    }
    ParsableByteArray chunkOffsets = chunkOffsetsAtom.data;
    // Entries are (chunk number, number of samples per chunk, sample description index).
    ParsableByteArray stsc = stblAtom.getLeafAtomOfType(Atom.TYPE_stsc).data;
    // Entries are (number of samples, timestamp delta between those samples).
    ParsableByteArray stts = stblAtom.getLeafAtomOfType(Atom.TYPE_stts).data;
    // Entries are the indices of samples that are synchronization samples.
    Atom.LeafAtom stssAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stss);
    ParsableByteArray stss = stssAtom != null ? stssAtom.data : null;
    // Entries are (number of samples, timestamp offset).
    Atom.LeafAtom cttsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_ctts);
    ParsableByteArray ctts = cttsAtom != null ? cttsAtom.data : null;
    // Prepare to read chunk information.
    ChunkIterator chunkIterator = new ChunkIterator(stsc, chunkOffsets, chunkOffsetsAreLongs);
    // Prepare to read sample timestamps.
    stts.setPosition(Atom.FULL_HEADER_SIZE);
    int remainingTimestampDeltaChanges = stts.readUnsignedIntToInt() - 1;
    int remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt();
    int timestampDeltaInTimeUnits = stts.readUnsignedIntToInt();
    // Prepare to read sample timestamp offsets, if ctts is present.
    int remainingSamplesAtTimestampOffset = 0;
    int remainingTimestampOffsetChanges = 0;
    int timestampOffset = 0;
    if (ctts != null) {
        ctts.setPosition(Atom.FULL_HEADER_SIZE);
        remainingTimestampOffsetChanges = ctts.readUnsignedIntToInt();
    }
    int nextSynchronizationSampleIndex = C.INDEX_UNSET;
    int remainingSynchronizationSamples = 0;
    if (stss != null) {
        stss.setPosition(Atom.FULL_HEADER_SIZE);
        remainingSynchronizationSamples = stss.readUnsignedIntToInt();
        if (remainingSynchronizationSamples > 0) {
            nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1;
        } else {
            // Ignore empty stss boxes; ignoring them causes all samples to be treated as sync samples.
            stss = null;
        }
    }
    // True if we can rechunk fixed-sample-size data. Note that we only rechunk raw audio.
    boolean isRechunkable = sampleSizeBox.isFixedSampleSize()
            && MimeTypes.AUDIO_RAW.equals(track.format.sampleMimeType)
            && remainingTimestampDeltaChanges == 0
            && remainingTimestampOffsetChanges == 0
            && remainingSynchronizationSamples == 0;
    long[] offsets;
    int[] sizes;
    int maximumSize = 0;
    long[] timestamps;
    int[] flags;
    long timestampTimeUnits = 0;
    if (!isRechunkable) {
        offsets = new long[sampleCount];
        sizes = new int[sampleCount];
        timestamps = new long[sampleCount];
        flags = new int[sampleCount];
        long offset = 0;
        int remainingSamplesInChunk = 0;
        for (int i = 0; i < sampleCount; i++) {
            // Advance to the next chunk if necessary.
            while (remainingSamplesInChunk == 0) {
                Assertions.checkState(chunkIterator.moveNext());
                offset = chunkIterator.offset;
                remainingSamplesInChunk = chunkIterator.numSamples;
            }
            // Add on the timestamp offset if ctts is present.
            if (ctts != null) {
                while (remainingSamplesAtTimestampOffset == 0 && remainingTimestampOffsetChanges > 0) {
                    remainingSamplesAtTimestampOffset = ctts.readUnsignedIntToInt();
                    // The BMFF spec (ISO 14496-12) states that sample offsets should be unsigned integers
                    // in version 0 ctts boxes, however some streams violate the spec and use signed
                    // integers instead. It's safe to always decode sample offsets as signed integers here,
                    // because unsigned integers will still be parsed correctly (unless their top bit is
                    // set, which is never true in practice because sample offsets are always small).
                    timestampOffset = ctts.readInt();
                    remainingTimestampOffsetChanges--;
                }
                remainingSamplesAtTimestampOffset--;
            }
            offsets[i] = offset;
            sizes[i] = sampleSizeBox.readNextSampleSize();
            if (sizes[i] > maximumSize) {
                maximumSize = sizes[i];
            }
            timestamps[i] = timestampTimeUnits + timestampOffset;
            // All samples are synchronization samples if the stss is not present.
            flags[i] = stss == null ? C.BUFFER_FLAG_KEY_FRAME : 0;
            if (i == nextSynchronizationSampleIndex) {
                flags[i] = C.BUFFER_FLAG_KEY_FRAME;
                remainingSynchronizationSamples--;
                if (remainingSynchronizationSamples > 0) {
                    nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1;
                }
            }
            // Add on the duration of this sample.
            timestampTimeUnits += timestampDeltaInTimeUnits;
            remainingSamplesAtTimestampDelta--;
            if (remainingSamplesAtTimestampDelta == 0 && remainingTimestampDeltaChanges > 0) {
                remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt();
                timestampDeltaInTimeUnits = stts.readUnsignedIntToInt();
                remainingTimestampDeltaChanges--;
            }
            offset += sizes[i];
            remainingSamplesInChunk--;
        }
        Assertions.checkArgument(remainingSamplesAtTimestampOffset == 0);
        // Remove trailing ctts entries with 0-valued sample counts.
        while (remainingTimestampOffsetChanges > 0) {
            Assertions.checkArgument(ctts.readUnsignedIntToInt() == 0);
            // Ignore offset.
            ctts.readInt();
            remainingTimestampOffsetChanges--;
        }
        // If the stbl's child boxes are not consistent the container is malformed, but the
        // stream may still be playable.
        if (remainingSynchronizationSamples != 0 || remainingSamplesAtTimestampDelta != 0
                || remainingSamplesInChunk != 0 || remainingTimestampDeltaChanges != 0) {
            Log.w(TAG, "Inconsistent stbl box for track " + track.id
                    + ": remainingSynchronizationSamples " + remainingSynchronizationSamples
                    + ", remainingSamplesAtTimestampDelta " + remainingSamplesAtTimestampDelta
                    + ", remainingSamplesInChunk " + remainingSamplesInChunk
                    + ", remainingTimestampDeltaChanges " + remainingTimestampDeltaChanges);
        }
    } else {
        long[] chunkOffsetsBytes = new long[chunkIterator.length];
        int[] chunkSampleCounts = new int[chunkIterator.length];
        while (chunkIterator.moveNext()) {
            chunkOffsetsBytes[chunkIterator.index] = chunkIterator.offset;
            chunkSampleCounts[chunkIterator.index] = chunkIterator.numSamples;
        }
        int fixedSampleSize = sampleSizeBox.readNextSampleSize();
        FixedSampleSizeRechunker.Results rechunkedResults = FixedSampleSizeRechunker.rechunk(fixedSampleSize, chunkOffsetsBytes, chunkSampleCounts, timestampDeltaInTimeUnits);
        offsets = rechunkedResults.offsets;
        sizes = rechunkedResults.sizes;
        maximumSize = rechunkedResults.maximumSize;
        timestamps = rechunkedResults.timestamps;
        flags = rechunkedResults.flags;
    }
    if (track.editListDurations == null || gaplessInfoHolder.hasGaplessInfo()) {
        // There is no edit list, or we are ignoring it as we already have gapless metadata to apply.
        // This implementation does not support applying both gapless metadata and an edit list.
        Util.scaleLargeTimestampsInPlace(timestamps, C.MICROS_PER_SECOND, track.timescale);
        return new TrackSampleTable(offsets, sizes, maximumSize, timestamps, flags);
    }
    if (track.editListDurations.length == 1 && track.type == C.TRACK_TYPE_AUDIO && timestamps.length >= 2) {
        // Handle the edit by setting gapless playback metadata, if possible. This implementation
        // assumes that only one "roll" sample is needed, which is the case for AAC, so the start/end
        // points of the edit must lie within the first/last samples respectively.
        long editStartTime = track.editListMediaTimes[0];
        long editEndTime = editStartTime + Util.scaleLargeTimestamp(track.editListDurations[0], track.timescale, track.movieTimescale);
        long lastSampleEndTime = timestampTimeUnits;
        if (timestamps[0] <= editStartTime && editStartTime < timestamps[1] && timestamps[timestamps.length - 1] < editEndTime && editEndTime <= lastSampleEndTime) {
            long paddingTimeUnits = lastSampleEndTime - editEndTime;
            long encoderDelay = Util.scaleLargeTimestamp(editStartTime - timestamps[0], track.format.sampleRate, track.timescale);
            long encoderPadding = Util.scaleLargeTimestamp(paddingTimeUnits, track.format.sampleRate, track.timescale);
            if ((encoderDelay != 0 || encoderPadding != 0) && encoderDelay <= Integer.MAX_VALUE && encoderPadding <= Integer.MAX_VALUE) {
                gaplessInfoHolder.encoderDelay = (int) encoderDelay;
                gaplessInfoHolder.encoderPadding = (int) encoderPadding;
                Util.scaleLargeTimestampsInPlace(timestamps, C.MICROS_PER_SECOND, track.timescale);
                return new TrackSampleTable(offsets, sizes, maximumSize, timestamps, flags);
            }
        }
    }
    if (track.editListDurations.length == 1 && track.editListDurations[0] == 0) {
        // The current version of the spec leaves handling of an edit with zero segment_duration in
        // unfragmented files open to interpretation. We handle this as a special case and include
        // all samples in the edit.
        for (int i = 0; i < timestamps.length; i++) {
            timestamps[i] = Util.scaleLargeTimestamp(timestamps[i] - track.editListMediaTimes[0], C.MICROS_PER_SECOND, track.timescale);
        }
        return new TrackSampleTable(offsets, sizes, maximumSize, timestamps, flags);
    }
    // Omit any sample at the end point of an edit for audio tracks.
    boolean omitClippedSample = track.type == C.TRACK_TYPE_AUDIO;
    // Count the number of samples after applying edits.
    int editedSampleCount = 0;
    int nextSampleIndex = 0;
    boolean copyMetadata = false;
    for (int i = 0; i < track.editListDurations.length; i++) {
        long mediaTime = track.editListMediaTimes[i];
        if (mediaTime != -1) {
            long duration = Util.scaleLargeTimestamp(track.editListDurations[i], track.timescale, track.movieTimescale);
            int startIndex = Util.binarySearchCeil(timestamps, mediaTime, true, true);
            int endIndex = Util.binarySearchCeil(timestamps, mediaTime + duration, omitClippedSample, false);
            editedSampleCount += endIndex - startIndex;
            copyMetadata |= nextSampleIndex != startIndex;
            nextSampleIndex = endIndex;
        }
    }
    copyMetadata |= editedSampleCount != sampleCount;
    // Calculate edited sample timestamps and update the corresponding metadata arrays.
    long[] editedOffsets = copyMetadata ? new long[editedSampleCount] : offsets;
    int[] editedSizes = copyMetadata ? new int[editedSampleCount] : sizes;
    int editedMaximumSize = copyMetadata ? 0 : maximumSize;
    int[] editedFlags = copyMetadata ? new int[editedSampleCount] : flags;
    long[] editedTimestamps = new long[editedSampleCount];
    long pts = 0;
    int sampleIndex = 0;
    for (int i = 0; i < track.editListDurations.length; i++) {
        long mediaTime = track.editListMediaTimes[i];
        long duration = track.editListDurations[i];
        if (mediaTime != -1) {
            long endMediaTime = mediaTime + Util.scaleLargeTimestamp(duration, track.timescale, track.movieTimescale);
            int startIndex = Util.binarySearchCeil(timestamps, mediaTime, true, true);
            int endIndex = Util.binarySearchCeil(timestamps, endMediaTime, omitClippedSample, false);
            if (copyMetadata) {
                int count = endIndex - startIndex;
                System.arraycopy(offsets, startIndex, editedOffsets, sampleIndex, count);
                System.arraycopy(sizes, startIndex, editedSizes, sampleIndex, count);
                System.arraycopy(flags, startIndex, editedFlags, sampleIndex, count);
            }
            for (int j = startIndex; j < endIndex; j++) {
                long ptsUs = Util.scaleLargeTimestamp(pts, C.MICROS_PER_SECOND, track.movieTimescale);
                long timeInSegmentUs = Util.scaleLargeTimestamp(timestamps[j] - mediaTime, C.MICROS_PER_SECOND, track.timescale);
                editedTimestamps[sampleIndex] = ptsUs + timeInSegmentUs;
                if (copyMetadata && editedSizes[sampleIndex] > editedMaximumSize) {
                    editedMaximumSize = sizes[j];
                }
                sampleIndex++;
            }
        }
        pts += duration;
    }
    boolean hasSyncSample = false;
    for (int i = 0; i < editedFlags.length && !hasSyncSample; i++) {
        hasSyncSample |= (editedFlags[i] & C.BUFFER_FLAG_KEY_FRAME) != 0;
    }
    if (!hasSyncSample) {
        throw new ParserException("The edited sample sequence does not contain a sync sample.");
    }
    return new TrackSampleTable(editedOffsets, editedSizes, editedMaximumSize, editedTimestamps, editedFlags);
}
Also used : ParserException(com.google.android.exoplayer2.ParserException) ParsableByteArray(com.google.android.exoplayer2.util.ParsableByteArray)
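
For orientation, here is a minimal sketch of how a caller can reach parseStbl, assuming the Track was already produced by AtomParsers.parseTrak and that the moov container has been parsed into Atom objects; the helper name below is hypothetical, while the atom-walking calls mirror the Atom API used above.

public static TrackSampleTable parseFirstTrackSampleTable(Atom.ContainerAtom moov, Track track) throws ParserException {
    // Walk moov -> trak -> mdia -> minf -> stbl using the same container helpers as above.
    Atom.ContainerAtom trak = moov.getContainerAtomOfType(Atom.TYPE_trak);
    Atom.ContainerAtom stbl = trak.getContainerAtomOfType(Atom.TYPE_mdia)
            .getContainerAtomOfType(Atom.TYPE_minf)
            .getContainerAtomOfType(Atom.TYPE_stbl);
    // Gapless metadata, if any, is written into this holder as a side effect.
    GaplessInfoHolder gaplessInfoHolder = new GaplessInfoHolder();
    // Timestamps in the returned table are already scaled to microseconds.
    return AtomParsers.parseStbl(track, stbl, gaplessInfoHolder);
}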

Example 2 with Chunk

Use of com.google.android.exoplayer2.source.chunk.Chunk in project ExoPlayer by Google.

In the class WavHeaderReader, the method peek:

/**
   * Peeks and returns a {@code WavHeader}.
   *
   * @param input Input stream to peek the WAV header from.
   * @throws ParserException If the input file is an incorrect RIFF WAV.
   * @throws IOException If peeking from the input fails.
   * @throws InterruptedException If interrupted while peeking from input.
   * @return A new {@code WavHeader} peeked from {@code input}, or null if the input is not a
   *     supported WAV format.
   */
public static WavHeader peek(ExtractorInput input) throws IOException, InterruptedException {
    Assertions.checkNotNull(input);
    // Allocate a scratch buffer large enough to store the format chunk.
    ParsableByteArray scratch = new ParsableByteArray(16);
    // Attempt to read the RIFF chunk.
    ChunkHeader chunkHeader = ChunkHeader.peek(input, scratch);
    if (chunkHeader.id != Util.getIntegerCodeForString("RIFF")) {
        return null;
    }
    input.peekFully(scratch.data, 0, 4);
    scratch.setPosition(0);
    int riffFormat = scratch.readInt();
    if (riffFormat != Util.getIntegerCodeForString("WAVE")) {
        Log.e(TAG, "Unsupported RIFF format: " + riffFormat);
        return null;
    }
    // Skip chunks until we find the format chunk.
    chunkHeader = ChunkHeader.peek(input, scratch);
    while (chunkHeader.id != Util.getIntegerCodeForString("fmt ")) {
        input.advancePeekPosition((int) chunkHeader.size);
        chunkHeader = ChunkHeader.peek(input, scratch);
    }
    Assertions.checkState(chunkHeader.size >= 16);
    input.peekFully(scratch.data, 0, 16);
    scratch.setPosition(0);
    int type = scratch.readLittleEndianUnsignedShort();
    int numChannels = scratch.readLittleEndianUnsignedShort();
    int sampleRateHz = scratch.readLittleEndianUnsignedIntToInt();
    int averageBytesPerSecond = scratch.readLittleEndianUnsignedIntToInt();
    int blockAlignment = scratch.readLittleEndianUnsignedShort();
    int bitsPerSample = scratch.readLittleEndianUnsignedShort();
    int expectedBlockAlignment = numChannels * bitsPerSample / 8;
    if (blockAlignment != expectedBlockAlignment) {
        throw new ParserException("Expected block alignment: " + expectedBlockAlignment + "; got: " + blockAlignment);
    }
    @C.PcmEncoding int encoding = Util.getPcmEncoding(bitsPerSample);
    if (encoding == C.ENCODING_INVALID) {
        Log.e(TAG, "Unsupported WAV bit depth: " + bitsPerSample);
        return null;
    }
    if (type != TYPE_PCM && type != TYPE_WAVE_FORMAT_EXTENSIBLE) {
        Log.e(TAG, "Unsupported WAV format type: " + type);
        return null;
    }
    // If present, skip extensionSize, validBitsPerSample, channelMask, subFormatGuid, ...
    input.advancePeekPosition((int) chunkHeader.size - 16);
    return new WavHeader(numChannels, sampleRateHz, averageBytesPerSecond, blockAlignment, bitsPerSample, encoding);
}
Also used : ParsableByteArray(com.google.android.exoplayer2.util.ParsableByteArray) ParserException(com.google.android.exoplayer2.ParserException)
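
Because peek only moves the peek position and returns null for unsupported input, it doubles as a cheap format probe. A minimal sketch of a sniff implementation in the style of WavExtractor (the placement in an Extractor is an assumption here):

@Override
public boolean sniff(ExtractorInput input) throws IOException, InterruptedException {
    // A non-null header means the input starts with a RIFF/WAVE stream whose fmt chunk we support.
    return WavHeaderReader.peek(input) != null;
}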

Example 3 with Chunk

Use of com.google.android.exoplayer2.source.chunk.Chunk in project ExoPlayer by Google.

In the class HlsChunkSource, the method getNextChunk:

/**
   * Returns the next chunk to load.
   * <p>
   * If a chunk is available then {@link HlsChunkHolder#chunk} is set. If the end of the stream has
   * been reached then {@link HlsChunkHolder#endOfStream} is set. If a chunk is not available but
   * the end of the stream has not been reached, {@link HlsChunkHolder#playlist} is set to
   * contain the {@link HlsUrl} that refers to the playlist that needs refreshing.
   *
   * @param previous The most recently loaded media chunk.
   * @param playbackPositionUs The current playback position. If {@code previous} is null then this
   *     parameter is the position from which playback is expected to start (or restart) and hence
   *     should be interpreted as a seek position.
   * @param out A holder to populate.
   */
public void getNextChunk(HlsMediaChunk previous, long playbackPositionUs, HlsChunkHolder out) {
    int oldVariantIndex = previous == null ? C.INDEX_UNSET : trackGroup.indexOf(previous.trackFormat);
    // Use start time of the previous chunk rather than its end time because switching format will
    // require downloading overlapping segments.
    long bufferedDurationUs = previous == null ? 0 : Math.max(0, previous.startTimeUs - playbackPositionUs);
    // Select the variant.
    trackSelection.updateSelectedTrack(bufferedDurationUs);
    int selectedVariantIndex = trackSelection.getSelectedIndexInTrackGroup();
    boolean switchingVariant = oldVariantIndex != selectedVariantIndex;
    HlsUrl selectedUrl = variants[selectedVariantIndex];
    if (!playlistTracker.isSnapshotValid(selectedUrl)) {
        out.playlist = selectedUrl;
        // Retry when playlist is refreshed.
        return;
    }
    HlsMediaPlaylist mediaPlaylist = playlistTracker.getPlaylistSnapshot(selectedUrl);
    // Select the chunk.
    int chunkMediaSequence;
    if (previous == null || switchingVariant) {
        long targetPositionUs = previous == null ? playbackPositionUs : previous.startTimeUs;
        if (!mediaPlaylist.hasEndTag && targetPositionUs > mediaPlaylist.getEndTimeUs()) {
            // If the playlist is too old to contain the chunk, we need to refresh it.
            chunkMediaSequence = mediaPlaylist.mediaSequence + mediaPlaylist.segments.size();
        } else {
            chunkMediaSequence = Util.binarySearchFloor(mediaPlaylist.segments,
                    targetPositionUs - mediaPlaylist.startTimeUs, true,
                    !playlistTracker.isLive() || previous == null) + mediaPlaylist.mediaSequence;
            if (chunkMediaSequence < mediaPlaylist.mediaSequence && previous != null) {
                // We try getting the next chunk without adapting in case that's the reason for falling
                // behind the live window.
                selectedVariantIndex = oldVariantIndex;
                selectedUrl = variants[selectedVariantIndex];
                mediaPlaylist = playlistTracker.getPlaylistSnapshot(selectedUrl);
                chunkMediaSequence = previous.getNextChunkIndex();
            }
        }
    } else {
        chunkMediaSequence = previous.getNextChunkIndex();
    }
    if (chunkMediaSequence < mediaPlaylist.mediaSequence) {
        fatalError = new BehindLiveWindowException();
        return;
    }
    int chunkIndex = chunkMediaSequence - mediaPlaylist.mediaSequence;
    if (chunkIndex >= mediaPlaylist.segments.size()) {
        if (mediaPlaylist.hasEndTag) {
            out.endOfStream = true;
        } else /* Live */ {
            out.playlist = selectedUrl;
        }
        return;
    }
    // Handle encryption.
    HlsMediaPlaylist.Segment segment = mediaPlaylist.segments.get(chunkIndex);
    // Check if encryption is specified.
    if (segment.isEncrypted) {
        Uri keyUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, segment.encryptionKeyUri);
        if (!keyUri.equals(encryptionKeyUri)) {
            // Encryption is specified and the key has changed.
            out.chunk = newEncryptionKeyChunk(keyUri, segment.encryptionIV, selectedVariantIndex, trackSelection.getSelectionReason(), trackSelection.getSelectionData());
            return;
        }
        if (!Util.areEqual(segment.encryptionIV, encryptionIvString)) {
            setEncryptionData(keyUri, segment.encryptionIV, encryptionKey);
        }
    } else {
        clearEncryptionData();
    }
    DataSpec initDataSpec = null;
    Segment initSegment = mediaPlaylist.initializationSegment;
    if (initSegment != null) {
        Uri initSegmentUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, initSegment.url);
        initDataSpec = new DataSpec(initSegmentUri, initSegment.byterangeOffset, initSegment.byterangeLength, null);
    }
    // Compute start time of the next chunk.
    long startTimeUs = mediaPlaylist.startTimeUs + segment.relativeStartTimeUs;
    int discontinuitySequence = mediaPlaylist.discontinuitySequence + segment.relativeDiscontinuitySequence;
    TimestampAdjuster timestampAdjuster = timestampAdjusterProvider.getAdjuster(discontinuitySequence);
    // Configure the data source and spec for the chunk.
    Uri chunkUri = UriUtil.resolveToUri(mediaPlaylist.baseUri, segment.url);
    DataSpec dataSpec = new DataSpec(chunkUri, segment.byterangeOffset, segment.byterangeLength, null);
    out.chunk = new HlsMediaChunk(mediaDataSource, dataSpec, initDataSpec, selectedUrl,
            muxedCaptionFormats, trackSelection.getSelectionReason(),
            trackSelection.getSelectionData(), startTimeUs, startTimeUs + segment.durationUs,
            chunkMediaSequence, discontinuitySequence, isTimestampMaster, timestampAdjuster,
            previous, encryptionKey, encryptionIv);
}
Also used : Segment(com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment) HlsUrl(com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.HlsUrl) BehindLiveWindowException(com.google.android.exoplayer2.source.BehindLiveWindowException) DataSpec(com.google.android.exoplayer2.upstream.DataSpec) HlsMediaPlaylist(com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist) TimestampAdjuster(com.google.android.exoplayer2.util.TimestampAdjuster) Uri(android.net.Uri)
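
The holder contract documented above is three-way: after the call, exactly one of endOfStream, playlist, or chunk is expected to be acted upon. A hedged sketch of a driving loop, loosely modeled on HlsSampleStreamWrapper; the field names (loader, loaderCallback, minLoadableRetryCount) and handleEndOfStream are assumptions for illustration.

HlsChunkSource.HlsChunkHolder holder = new HlsChunkSource.HlsChunkHolder();
chunkSource.getNextChunk(previousChunk, positionUs, holder);
if (holder.endOfStream) {
    // No further media: propagate end of stream downstream.
    handleEndOfStream();
} else if (holder.playlist != null) {
    // The selected variant's snapshot is stale; request a refresh and try again later.
    playlistTracker.refreshPlaylist(holder.playlist);
} else if (holder.chunk != null) {
    // A chunk is ready to load; hand it to the loader.
    loader.startLoading(holder.chunk, loaderCallback, minLoadableRetryCount);
}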

Example 4 with Chunk

Use of com.google.android.exoplayer2.source.chunk.Chunk in project ExoPlayer by Google.

In the class HlsMediaChunk, the method peekId3PrivTimestamp:

/**
   * Peek the presentation timestamp of the first sample in the chunk from an ID3 PRIV as defined
   * in the HLS spec, version 20, Section 3.4. Returns {@link C#TIME_UNSET} if the frame is not
   * found. This method only modifies the peek position.
   *
   * @param input The {@link ExtractorInput} to obtain the PRIV frame from.
   * @return The parsed timestamp, in 90 kHz MPEG-2 time units.
   * @throws IOException If an error occurred peeking from the input.
   * @throws InterruptedException If the thread was interrupted.
   */
private long peekId3PrivTimestamp(ExtractorInput input) throws IOException, InterruptedException {
    input.resetPeekPosition();
    if (!input.peekFully(id3Data.data, 0, Id3Decoder.ID3_HEADER_LENGTH, true)) {
        return C.TIME_UNSET;
    }
    id3Data.reset(Id3Decoder.ID3_HEADER_LENGTH);
    int id = id3Data.readUnsignedInt24();
    if (id != Id3Decoder.ID3_TAG) {
        return C.TIME_UNSET;
    }
    // version(2), flags(1).
    id3Data.skipBytes(3);
    int id3Size = id3Data.readSynchSafeInt();
    int requiredCapacity = id3Size + Id3Decoder.ID3_HEADER_LENGTH;
    if (requiredCapacity > id3Data.capacity()) {
        byte[] data = id3Data.data;
        id3Data.reset(requiredCapacity);
        System.arraycopy(data, 0, id3Data.data, 0, Id3Decoder.ID3_HEADER_LENGTH);
    }
    if (!input.peekFully(id3Data.data, Id3Decoder.ID3_HEADER_LENGTH, id3Size, true)) {
        return C.TIME_UNSET;
    }
    Metadata metadata = id3Decoder.decode(id3Data.data, id3Size);
    if (metadata == null) {
        return C.TIME_UNSET;
    }
    int metadataLength = metadata.length();
    for (int i = 0; i < metadataLength; i++) {
        Metadata.Entry frame = metadata.get(i);
        if (frame instanceof PrivFrame) {
            PrivFrame privFrame = (PrivFrame) frame;
            if (PRIV_TIMESTAMP_FRAME_OWNER.equals(privFrame.owner)) {
                System.arraycopy(privFrame.privateData, 0, id3Data.data, 0, 8);
                id3Data.reset(8);
                return id3Data.readLong();
            }
        }
    }
    return C.TIME_UNSET;
}
Also used : Metadata(com.google.android.exoplayer2.metadata.Metadata) PrivFrame(com.google.android.exoplayer2.metadata.id3.PrivFrame)
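
Per the HLS spec, the matching PRIV frame has owner com.apple.streaming.transportStreamTimestamp and an eight-byte big-endian payload carrying a 33-bit MPEG-2 (90 kHz) timestamp, which is why the method can simply wrap the first 8 payload bytes and call readLong(). A standalone equivalent of that last step, as a hedged sketch with a hypothetical helper name:

private static long readPrivTimestamp(byte[] privateData) {
    long timestamp = 0;
    for (int i = 0; i < 8; i++) {
        // Accumulate the big-endian payload one byte at a time.
        timestamp = (timestamp << 8) | (privateData[i] & 0xFFL);
    }
    // Still in 90 kHz MPEG-2 time units; callers adjust to microseconds elsewhere.
    return timestamp;
}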

Example 5 with Chunk

Use of com.google.android.exoplayer2.source.chunk.Chunk in project ExoPlayer by Google.

In the class HlsMediaChunk, the method loadMedia:

private void loadMedia() throws IOException, InterruptedException {
    // If we previously fed part of this chunk to the extractor, we need to skip it this time. For
    // encrypted content we need to skip the data by reading it through the source, so as to ensure
    // correct decryption of the remainder of the chunk. For clear content, we can request the
    // remainder of the chunk directly.
    DataSpec loadDataSpec;
    boolean skipLoadedBytes;
    if (isEncrypted) {
        loadDataSpec = dataSpec;
        skipLoadedBytes = bytesLoaded != 0;
    } else {
        loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded);
        skipLoadedBytes = false;
    }
    if (!isMasterTimestampSource) {
        timestampAdjuster.waitUntilInitialized();
    } else if (timestampAdjuster.getFirstSampleTimestampUs() == TimestampAdjuster.DO_NOT_OFFSET) {
        // We're the master and we haven't set the desired first sample timestamp yet.
        timestampAdjuster.setFirstSampleTimestampUs(startTimeUs);
    }
    try {
        ExtractorInput input = new DefaultExtractorInput(dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec));
        if (extractor == null) {
            // Media segment format is packed audio.
            long id3Timestamp = peekId3PrivTimestamp(input);
            extractor = buildPackedAudioExtractor(id3Timestamp != C.TIME_UNSET ? timestampAdjuster.adjustTsTimestamp(id3Timestamp) : startTimeUs);
        }
        if (skipLoadedBytes) {
            input.skipFully(bytesLoaded);
        }
        try {
            int result = Extractor.RESULT_CONTINUE;
            while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
                result = extractor.read(input, null);
            }
        } finally {
            bytesLoaded = (int) (input.getPosition() - dataSpec.absoluteStreamPosition);
        }
    } finally {
        Util.closeQuietly(dataSource);
    }
    loadCompleted = true;
}
Also used : ExtractorInput(com.google.android.exoplayer2.extractor.ExtractorInput) DefaultExtractorInput(com.google.android.exoplayer2.extractor.DefaultExtractorInput) DataSpec(com.google.android.exoplayer2.upstream.DataSpec)
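
The clear-content branch above leans on Util.getRemainderDataSpec to resume a partially loaded chunk without re-downloading. Its behavior is approximately the following; this is a hedged reconstruction from the usage above, not necessarily the exact library code.

public static DataSpec getRemainderDataSpec(DataSpec dataSpec, int bytesLoaded) {
    if (bytesLoaded == 0) {
        // Nothing was loaded previously, so the original request can be reused as-is.
        return dataSpec;
    }
    // Shorten an explicit length accordingly; an unbounded request stays unbounded.
    long remainingLength = dataSpec.length == C.LENGTH_UNSET
            ? C.LENGTH_UNSET : dataSpec.length - bytesLoaded;
    return new DataSpec(dataSpec.uri, dataSpec.absoluteStreamPosition + bytesLoaded,
            remainingLength, dataSpec.key, dataSpec.flags);
}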

Aggregations

DataSpec (com.google.android.exoplayer2.upstream.DataSpec): 5 usages
RangedUri (com.google.android.exoplayer2.source.dash.manifest.RangedUri): 4 usages
ParserException (com.google.android.exoplayer2.ParserException): 3 usages
BehindLiveWindowException (com.google.android.exoplayer2.source.BehindLiveWindowException): 3 usages
ParsableByteArray (com.google.android.exoplayer2.util.ParsableByteArray): 3 usages
Uri (android.net.Uri): 2 usages
Format (com.google.android.exoplayer2.Format): 2 usages
ChunkExtractorWrapper (com.google.android.exoplayer2.source.chunk.ChunkExtractorWrapper): 2 usages
ContainerMediaChunk (com.google.android.exoplayer2.source.chunk.ContainerMediaChunk): 2 usages
InitializationChunk (com.google.android.exoplayer2.source.chunk.InitializationChunk): 2 usages
Representation (com.google.android.exoplayer2.source.dash.manifest.Representation): 2 usages
HlsUrl (com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist.HlsUrl): 2 usages
TimestampAdjuster (com.google.android.exoplayer2.util.TimestampAdjuster): 2 usages
ChunkIndex (com.google.android.exoplayer2.extractor.ChunkIndex): 1 usage
DefaultExtractorInput (com.google.android.exoplayer2.extractor.DefaultExtractorInput): 1 usage
DefaultTrackOutput (com.google.android.exoplayer2.extractor.DefaultTrackOutput): 1 usage
Extractor (com.google.android.exoplayer2.extractor.Extractor): 1 usage
ExtractorInput (com.google.android.exoplayer2.extractor.ExtractorInput): 1 usage
SeekMap (com.google.android.exoplayer2.extractor.SeekMap): 1 usage
Mp3Extractor (com.google.android.exoplayer2.extractor.mp3.Mp3Extractor): 1 usage