Example 96 with Metadata

Use of androidx.media3.common.Metadata in project media by androidx.

Class Mp3Extractor, method synchronize:

private boolean synchronize(ExtractorInput input, boolean sniffing) throws IOException {
    int validFrameCount = 0;
    int candidateSynchronizedHeaderData = 0;
    int peekedId3Bytes = 0;
    int searchedBytes = 0;
    int searchLimitBytes = sniffing ? MAX_SNIFF_BYTES : MAX_SYNC_BYTES;
    input.resetPeekPosition();
    if (input.getPosition() == 0) {
        // We need to parse enough ID3 metadata to retrieve any gapless/seeking playback information
        // even if ID3 metadata parsing is disabled.
        boolean parseAllId3Frames = (flags & FLAG_DISABLE_ID3_METADATA) == 0;
        Id3Decoder.FramePredicate id3FramePredicate = parseAllId3Frames ? null : REQUIRED_ID3_FRAME_PREDICATE;
        metadata = id3Peeker.peekId3Data(input, id3FramePredicate);
        if (metadata != null) {
            gaplessInfoHolder.setFromMetadata(metadata);
        }
        peekedId3Bytes = (int) input.getPeekPosition();
        if (!sniffing) {
            input.skipFully(peekedId3Bytes);
        }
    }
    while (true) {
        if (peekEndOfStreamOrHeader(input)) {
            if (validFrameCount > 0) {
                // We reached the end of the stream but found at least one valid frame.
                break;
            }
            throw new EOFException();
        }
        scratch.setPosition(0);
        int headerData = scratch.readInt();
        int frameSize;
        if ((candidateSynchronizedHeaderData != 0 && !headersMatch(headerData, candidateSynchronizedHeaderData)) || (frameSize = MpegAudioUtil.getFrameSize(headerData)) == C.LENGTH_UNSET) {
            // The header doesn't match the candidate header or is invalid. Try the next byte offset.
            if (searchedBytes++ == searchLimitBytes) {
                if (!sniffing) {
                    throw ParserException.createForMalformedContainer("Searched too many bytes.", /* cause= */ null);
                }
                return false;
            }
            validFrameCount = 0;
            candidateSynchronizedHeaderData = 0;
            if (sniffing) {
                input.resetPeekPosition();
                input.advancePeekPosition(peekedId3Bytes + searchedBytes);
            } else {
                input.skipFully(1);
            }
        } else {
            // The header matches the candidate header and/or is valid.
            validFrameCount++;
            if (validFrameCount == 1) {
                synchronizedHeader.setForHeaderData(headerData);
                candidateSynchronizedHeaderData = headerData;
            } else if (validFrameCount == 4) {
                break;
            }
            input.advancePeekPosition(frameSize - 4);
        }
    }
    // Prepare to read the synchronized frame.
    if (sniffing) {
        input.skipFully(peekedId3Bytes + searchedBytes);
    } else {
        input.resetPeekPosition();
    }
    synchronizedHeaderData = candidateSynchronizedHeaderData;
    return true;
}
Also used: FramePredicate (androidx.media3.extractor.metadata.id3.Id3Decoder.FramePredicate), EOFException (java.io.EOFException), Id3Decoder (androidx.media3.extractor.metadata.id3.Id3Decoder)
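
The loop above implements a simple but robust synchronization strategy: treat the first plausible header as a candidate, then only trust the sync point once four consecutive frames both parse as valid and agree with the candidate on the bits that should stay constant between frames (version, layer, sample rate). Below is a minimal standalone sketch of that idea over an in-memory buffer; isValidHeader and frameSize are hypothetical stand-ins for MpegAudioUtil, and the agreement mask is an assumption about which header bits must match, not necessarily media3's exact constant.

// Minimal sketch of "require N consecutive matching frames" synchronization.
// isValidHeader() and frameSize() are illustrative placeholders, not media3 API.
final class SyncSketch {
    private static final int REQUIRED_FRAMES = 4;

    /** Returns the byte offset of a trusted sync point, or -1 if none is found. */
    static int findSync(byte[] data) {
        for (int start = 0; start + 4 <= data.length; start++) {
            int candidate = readInt(data, start);
            if (!isValidHeader(candidate)) {
                continue; // Not a plausible frame header; try the next byte offset.
            }
            int offset = start;
            int matched = 0;
            while (matched < REQUIRED_FRAMES && offset + 4 <= data.length) {
                int header = readInt(data, offset);
                if (!isValidHeader(header) || !headersMatch(header, candidate)) {
                    break; // Chain broken; resume scanning one byte after the candidate.
                }
                matched++;
                offset += frameSize(header); // Hop directly to the expected next header.
            }
            if (matched == REQUIRED_FRAMES) {
                return start;
            }
        }
        return -1;
    }

    // Headers "match" if the fields that never change mid-stream agree (assumed mask).
    static boolean headersMatch(int a, int b) {
        return (a & 0xFFFE0C00) == (b & 0xFFFE0C00);
    }

    static int readInt(byte[] d, int i) {
        return ((d[i] & 0xFF) << 24) | ((d[i + 1] & 0xFF) << 16) | ((d[i + 2] & 0xFF) << 8) | (d[i + 3] & 0xFF);
    }

    // Hypothetical: a real implementation decodes the full MPEG audio header.
    static boolean isValidHeader(int header) {
        return (header & 0xFFE00000) == 0xFFE00000; // 11-bit frame-sync marker present.
    }

    static int frameSize(int header) {
        return 417; // Placeholder; real sizes depend on bitrate, sample rate and padding.
    }
}

Note how sniffing and reading differ only in how the input position is moved: while sniffing, the extractor peeks ahead and leaves the read position untouched, so a failed sniff can return false without consuming any data.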

Example 97 with Metadata

Use of androidx.media3.common.Metadata in project media by androidx.

Class AtomParsers, method parseStbl:

/**
 * Parses an stbl atom (defined in ISO/IEC 14496-12).
 *
 * @param track Track to which this sample table corresponds.
 * @param stblAtom stbl (sample table) atom to decode.
 * @param gaplessInfoHolder Holder to populate with gapless playback information.
 * @return Sample table described by the stbl atom.
 * @throws ParserException Thrown if the stbl atom can't be parsed.
 */
private static TrackSampleTable parseStbl(Track track, Atom.ContainerAtom stblAtom, GaplessInfoHolder gaplessInfoHolder) throws ParserException {
    SampleSizeBox sampleSizeBox;
    @Nullable Atom.LeafAtom stszAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stsz);
    if (stszAtom != null) {
        sampleSizeBox = new StszSampleSizeBox(stszAtom, track.format);
    } else {
        @Nullable Atom.LeafAtom stz2Atom = stblAtom.getLeafAtomOfType(Atom.TYPE_stz2);
        if (stz2Atom == null) {
            throw ParserException.createForMalformedContainer("Track has no sample table size information", /* cause= */ null);
        }
        sampleSizeBox = new Stz2SampleSizeBox(stz2Atom);
    }
    int sampleCount = sampleSizeBox.getSampleCount();
    if (sampleCount == 0) {
        return new TrackSampleTable(
                track,
                /* offsets= */ new long[0],
                /* sizes= */ new int[0],
                /* maximumSize= */ 0,
                /* timestampsUs= */ new long[0],
                /* flags= */ new int[0],
                /* durationUs= */ 0);
    }
    // Entries are byte offsets of chunks.
    boolean chunkOffsetsAreLongs = false;
    @Nullable Atom.LeafAtom chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stco);
    if (chunkOffsetsAtom == null) {
        chunkOffsetsAreLongs = true;
        chunkOffsetsAtom = checkNotNull(stblAtom.getLeafAtomOfType(Atom.TYPE_co64));
    }
    ParsableByteArray chunkOffsets = chunkOffsetsAtom.data;
    // Entries are (chunk number, number of samples per chunk, sample description index).
    ParsableByteArray stsc = checkNotNull(stblAtom.getLeafAtomOfType(Atom.TYPE_stsc)).data;
    // Entries are (number of samples, timestamp delta between those samples).
    ParsableByteArray stts = checkNotNull(stblAtom.getLeafAtomOfType(Atom.TYPE_stts)).data;
    // Entries are the indices of samples that are synchronization samples.
    @Nullable Atom.LeafAtom stssAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stss);
    @Nullable ParsableByteArray stss = stssAtom != null ? stssAtom.data : null;
    // Entries are (number of samples, timestamp offset).
    @Nullable Atom.LeafAtom cttsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_ctts);
    @Nullable ParsableByteArray ctts = cttsAtom != null ? cttsAtom.data : null;
    // Prepare to read chunk information.
    ChunkIterator chunkIterator = new ChunkIterator(stsc, chunkOffsets, chunkOffsetsAreLongs);
    // Prepare to read sample timestamps.
    stts.setPosition(Atom.FULL_HEADER_SIZE);
    int remainingTimestampDeltaChanges = stts.readUnsignedIntToInt() - 1;
    int remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt();
    int timestampDeltaInTimeUnits = stts.readUnsignedIntToInt();
    // Prepare to read sample timestamp offsets, if ctts is present.
    int remainingSamplesAtTimestampOffset = 0;
    int remainingTimestampOffsetChanges = 0;
    int timestampOffset = 0;
    if (ctts != null) {
        ctts.setPosition(Atom.FULL_HEADER_SIZE);
        remainingTimestampOffsetChanges = ctts.readUnsignedIntToInt();
    }
    int nextSynchronizationSampleIndex = C.INDEX_UNSET;
    int remainingSynchronizationSamples = 0;
    if (stss != null) {
        stss.setPosition(Atom.FULL_HEADER_SIZE);
        remainingSynchronizationSamples = stss.readUnsignedIntToInt();
        if (remainingSynchronizationSamples > 0) {
            nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1;
        } else {
            // Ignore empty stss boxes; this causes all samples to be treated as sync samples.
            stss = null;
        }
    }
    // Fixed sample size raw audio may need to be rechunked.
    int fixedSampleSize = sampleSizeBox.getFixedSampleSize();
    @Nullable String sampleMimeType = track.format.sampleMimeType;
    boolean rechunkFixedSizeSamples = fixedSampleSize != C.LENGTH_UNSET && (MimeTypes.AUDIO_RAW.equals(sampleMimeType) || MimeTypes.AUDIO_MLAW.equals(sampleMimeType) || MimeTypes.AUDIO_ALAW.equals(sampleMimeType)) && remainingTimestampDeltaChanges == 0 && remainingTimestampOffsetChanges == 0 && remainingSynchronizationSamples == 0;
    long[] offsets;
    int[] sizes;
    int maximumSize = 0;
    long[] timestamps;
    int[] flags;
    long timestampTimeUnits = 0;
    long duration;
    if (rechunkFixedSizeSamples) {
        long[] chunkOffsetsBytes = new long[chunkIterator.length];
        int[] chunkSampleCounts = new int[chunkIterator.length];
        while (chunkIterator.moveNext()) {
            chunkOffsetsBytes[chunkIterator.index] = chunkIterator.offset;
            chunkSampleCounts[chunkIterator.index] = chunkIterator.numSamples;
        }
        FixedSampleSizeRechunker.Results rechunkedResults = FixedSampleSizeRechunker.rechunk(fixedSampleSize, chunkOffsetsBytes, chunkSampleCounts, timestampDeltaInTimeUnits);
        offsets = rechunkedResults.offsets;
        sizes = rechunkedResults.sizes;
        maximumSize = rechunkedResults.maximumSize;
        timestamps = rechunkedResults.timestamps;
        flags = rechunkedResults.flags;
        duration = rechunkedResults.duration;
    } else {
        offsets = new long[sampleCount];
        sizes = new int[sampleCount];
        timestamps = new long[sampleCount];
        flags = new int[sampleCount];
        long offset = 0;
        int remainingSamplesInChunk = 0;
        for (int i = 0; i < sampleCount; i++) {
            // Advance to the next chunk if necessary.
            boolean chunkDataComplete = true;
            while (remainingSamplesInChunk == 0 && (chunkDataComplete = chunkIterator.moveNext())) {
                offset = chunkIterator.offset;
                remainingSamplesInChunk = chunkIterator.numSamples;
            }
            if (!chunkDataComplete) {
                Log.w(TAG, "Unexpected end of chunk data");
                sampleCount = i;
                offsets = Arrays.copyOf(offsets, sampleCount);
                sizes = Arrays.copyOf(sizes, sampleCount);
                timestamps = Arrays.copyOf(timestamps, sampleCount);
                flags = Arrays.copyOf(flags, sampleCount);
                break;
            }
            // Add on the timestamp offset if ctts is present.
            if (ctts != null) {
                while (remainingSamplesAtTimestampOffset == 0 && remainingTimestampOffsetChanges > 0) {
                    remainingSamplesAtTimestampOffset = ctts.readUnsignedIntToInt();
                    // The BMFF spec (ISO/IEC 14496-12) states that sample offsets should be unsigned
                    // integers in version 0 ctts boxes, however some streams violate the spec and use
                    // signed integers instead. It's safe to always decode sample offsets as signed integers
                    // here, because unsigned integers will still be parsed correctly (unless their top bit
                    // is set, which is never true in practice because sample offsets are always small).
                    timestampOffset = ctts.readInt();
                    remainingTimestampOffsetChanges--;
                }
                remainingSamplesAtTimestampOffset--;
            }
            offsets[i] = offset;
            sizes[i] = sampleSizeBox.readNextSampleSize();
            if (sizes[i] > maximumSize) {
                maximumSize = sizes[i];
            }
            timestamps[i] = timestampTimeUnits + timestampOffset;
            // All samples are synchronization samples if the stss is not present.
            flags[i] = stss == null ? C.BUFFER_FLAG_KEY_FRAME : 0;
            if (i == nextSynchronizationSampleIndex) {
                flags[i] = C.BUFFER_FLAG_KEY_FRAME;
                remainingSynchronizationSamples--;
                if (remainingSynchronizationSamples > 0) {
                    nextSynchronizationSampleIndex = checkNotNull(stss).readUnsignedIntToInt() - 1;
                }
            }
            // Add on the duration of this sample.
            timestampTimeUnits += timestampDeltaInTimeUnits;
            remainingSamplesAtTimestampDelta--;
            if (remainingSamplesAtTimestampDelta == 0 && remainingTimestampDeltaChanges > 0) {
                remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt();
                // The BMFF spec (ISO/IEC 14496-12) states that sample deltas should be unsigned integers
                // in stts boxes, however some streams violate the spec and use signed integers instead.
                // See https://github.com/google/ExoPlayer/issues/3384. It's safe to always decode sample
                // deltas as signed integers here, because unsigned integers will still be parsed
                // correctly (unless their top bit is set, which is never true in practice because sample
                // deltas are always small).
                timestampDeltaInTimeUnits = stts.readInt();
                remainingTimestampDeltaChanges--;
            }
            offset += sizes[i];
            remainingSamplesInChunk--;
        }
        duration = timestampTimeUnits + timestampOffset;
        // If the stbl's child boxes are not consistent the container is malformed, but the stream may
        // still be playable.
        boolean isCttsValid = true;
        if (ctts != null) {
            while (remainingTimestampOffsetChanges > 0) {
                if (ctts.readUnsignedIntToInt() != 0) {
                    isCttsValid = false;
                    break;
                }
                // Ignore offset.
                ctts.readInt();
                remainingTimestampOffsetChanges--;
            }
        }
        if (remainingSynchronizationSamples != 0 || remainingSamplesAtTimestampDelta != 0 || remainingSamplesInChunk != 0 || remainingTimestampDeltaChanges != 0 || remainingSamplesAtTimestampOffset != 0 || !isCttsValid) {
            Log.w(TAG, "Inconsistent stbl box for track " + track.id + ": remainingSynchronizationSamples " + remainingSynchronizationSamples + ", remainingSamplesAtTimestampDelta " + remainingSamplesAtTimestampDelta + ", remainingSamplesInChunk " + remainingSamplesInChunk + ", remainingTimestampDeltaChanges " + remainingTimestampDeltaChanges + ", remainingSamplesAtTimestampOffset " + remainingSamplesAtTimestampOffset + (!isCttsValid ? ", ctts invalid" : ""));
        }
    }
    long durationUs = Util.scaleLargeTimestamp(duration, C.MICROS_PER_SECOND, track.timescale);
    if (track.editListDurations == null) {
        Util.scaleLargeTimestampsInPlace(timestamps, C.MICROS_PER_SECOND, track.timescale);
        return new TrackSampleTable(track, offsets, sizes, maximumSize, timestamps, flags, durationUs);
    }
    if (track.editListDurations.length == 1 && track.type == C.TRACK_TYPE_AUDIO && timestamps.length >= 2) {
        long editStartTime = checkNotNull(track.editListMediaTimes)[0];
        long editEndTime = editStartTime + Util.scaleLargeTimestamp(track.editListDurations[0], track.timescale, track.movieTimescale);
        if (canApplyEditWithGaplessInfo(timestamps, duration, editStartTime, editEndTime)) {
            long paddingTimeUnits = duration - editEndTime;
            long encoderDelay = Util.scaleLargeTimestamp(editStartTime - timestamps[0], track.format.sampleRate, track.timescale);
            long encoderPadding = Util.scaleLargeTimestamp(paddingTimeUnits, track.format.sampleRate, track.timescale);
            if ((encoderDelay != 0 || encoderPadding != 0) && encoderDelay <= Integer.MAX_VALUE && encoderPadding <= Integer.MAX_VALUE) {
                gaplessInfoHolder.encoderDelay = (int) encoderDelay;
                gaplessInfoHolder.encoderPadding = (int) encoderPadding;
                Util.scaleLargeTimestampsInPlace(timestamps, C.MICROS_PER_SECOND, track.timescale);
                long editedDurationUs = Util.scaleLargeTimestamp(track.editListDurations[0], C.MICROS_PER_SECOND, track.movieTimescale);
                return new TrackSampleTable(track, offsets, sizes, maximumSize, timestamps, flags, editedDurationUs);
            }
        }
    }
    if (track.editListDurations.length == 1 && track.editListDurations[0] == 0) {
        // The current version of the spec leaves handling of an edit with zero segment_duration in
        // unfragmented files open to interpretation. We handle this as a special case and include all
        // samples in the edit.
        long editStartTime = checkNotNull(track.editListMediaTimes)[0];
        for (int i = 0; i < timestamps.length; i++) {
            timestamps[i] = Util.scaleLargeTimestamp(timestamps[i] - editStartTime, C.MICROS_PER_SECOND, track.timescale);
        }
        durationUs = Util.scaleLargeTimestamp(duration - editStartTime, C.MICROS_PER_SECOND, track.timescale);
        return new TrackSampleTable(track, offsets, sizes, maximumSize, timestamps, flags, durationUs);
    }
    // Omit any sample at the end point of an edit for audio tracks.
    boolean omitClippedSample = track.type == C.TRACK_TYPE_AUDIO;
    // Count the number of samples after applying edits.
    int editedSampleCount = 0;
    int nextSampleIndex = 0;
    boolean copyMetadata = false;
    int[] startIndices = new int[track.editListDurations.length];
    int[] endIndices = new int[track.editListDurations.length];
    long[] editListMediaTimes = checkNotNull(track.editListMediaTimes);
    for (int i = 0; i < track.editListDurations.length; i++) {
        long editMediaTime = editListMediaTimes[i];
        if (editMediaTime != -1) {
            long editDuration = Util.scaleLargeTimestamp(track.editListDurations[i], track.timescale, track.movieTimescale);
            startIndices[i] = Util.binarySearchFloor(timestamps, editMediaTime, /* inclusive= */ true, /* stayInBounds= */ true);
            endIndices[i] = Util.binarySearchCeil(timestamps, editMediaTime + editDuration, /* inclusive= */ omitClippedSample, /* stayInBounds= */ false);
            while (startIndices[i] < endIndices[i] && (flags[startIndices[i]] & C.BUFFER_FLAG_KEY_FRAME) == 0) {
                // Applying the edit correctly would require prerolling from the previous sync sample. In
                // the current implementation we advance to the next sync sample instead. Only other
                // tracks (i.e. audio) will be rendered until the time of the first sync sample.
                // See https://github.com/google/ExoPlayer/issues/1659.
                startIndices[i]++;
            }
            editedSampleCount += endIndices[i] - startIndices[i];
            copyMetadata |= nextSampleIndex != startIndices[i];
            nextSampleIndex = endIndices[i];
        }
    }
    copyMetadata |= editedSampleCount != sampleCount;
    // Calculate edited sample timestamps and update the corresponding metadata arrays.
    long[] editedOffsets = copyMetadata ? new long[editedSampleCount] : offsets;
    int[] editedSizes = copyMetadata ? new int[editedSampleCount] : sizes;
    int editedMaximumSize = copyMetadata ? 0 : maximumSize;
    int[] editedFlags = copyMetadata ? new int[editedSampleCount] : flags;
    long[] editedTimestamps = new long[editedSampleCount];
    long pts = 0;
    int sampleIndex = 0;
    for (int i = 0; i < track.editListDurations.length; i++) {
        long editMediaTime = track.editListMediaTimes[i];
        int startIndex = startIndices[i];
        int endIndex = endIndices[i];
        if (copyMetadata) {
            int count = endIndex - startIndex;
            System.arraycopy(offsets, startIndex, editedOffsets, sampleIndex, count);
            System.arraycopy(sizes, startIndex, editedSizes, sampleIndex, count);
            System.arraycopy(flags, startIndex, editedFlags, sampleIndex, count);
        }
        for (int j = startIndex; j < endIndex; j++) {
            long ptsUs = Util.scaleLargeTimestamp(pts, C.MICROS_PER_SECOND, track.movieTimescale);
            long timeInSegmentUs = Util.scaleLargeTimestamp(max(0, timestamps[j] - editMediaTime), C.MICROS_PER_SECOND, track.timescale);
            editedTimestamps[sampleIndex] = ptsUs + timeInSegmentUs;
            if (copyMetadata && editedSizes[sampleIndex] > editedMaximumSize) {
                editedMaximumSize = sizes[j];
            }
            sampleIndex++;
        }
        pts += track.editListDurations[i];
    }
    long editedDurationUs = Util.scaleLargeTimestamp(pts, C.MICROS_PER_SECOND, track.movieTimescale);
    return new TrackSampleTable(track, editedOffsets, editedSizes, editedMaximumSize, editedTimestamps, editedFlags, editedDurationUs);
}
Also used: ParsableByteArray (androidx.media3.common.util.ParsableByteArray), Nullable (androidx.annotation.Nullable)
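
Most of the counters in parseStbl track the run-length encoding shared by stts and ctts: each entry is a (sampleCount, value) pair that applies to the next sampleCount samples, which is why the loop decrements remainingSamplesAtTimestampDelta and reloads it when a run ends. Here is a minimal sketch of expanding parsed stts entries into per-sample decode timestamps, assuming the (count, delta) pairs have already been read out of the box payload; SttsEntry is a hypothetical holder, while media3 reads the pairs directly from a ParsableByteArray.

import java.util.List;

// Minimal sketch: expand run-length (sampleCount, delta) stts entries into one
// decode timestamp per sample, in media-timescale units.
final class SttsSketch {
    record SttsEntry(int sampleCount, int delta) {}

    static long[] expandTimestamps(List<SttsEntry> entries, int totalSamples) {
        long[] timestamps = new long[totalSamples];
        long time = 0;
        int sample = 0;
        for (SttsEntry entry : entries) {
            for (int i = 0; i < entry.sampleCount() && sample < totalSamples; i++) {
                timestamps[sample++] = time;
                time += entry.delta(); // Every sample in the run shares one duration.
            }
        }
        return timestamps;
    }
}

For example, entries [(2, 1024), (1, 2048)] expand to timestamps [0, 1024, 2048]; a ctts box layers per-sample composition offsets on top of these decode times in exactly the same run-length fashion.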

Example 98 with Metadata

Use of androidx.media3.common.Metadata in project media by androidx.

Class AtomParsers, method parseIlst:

@Nullable
private static Metadata parseIlst(ParsableByteArray ilst, int limit) {
    ilst.skipBytes(Atom.HEADER_SIZE);
    ArrayList<Metadata.Entry> entries = new ArrayList<>();
    while (ilst.getPosition() < limit) {
        @Nullable Metadata.Entry entry = MetadataUtil.parseIlstElement(ilst);
        if (entry != null) {
            entries.add(entry);
        }
    }
    return entries.isEmpty() ? null : new Metadata(entries);
}
Also used: SmtaMetadataEntry (androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry), ArrayList (java.util.ArrayList), Metadata (androidx.media3.common.Metadata), Nullable (androidx.annotation.Nullable)
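
parseIlst can loop this way because every ISO BMFF box starts with the same 8-byte header: a 32-bit size (which counts the header itself) followed by a 4-character type, so a parser can hop from child to child until it reaches the parent's limit. Below is a minimal sketch of that traversal over a raw buffer; the println handling is illustrative, and media3 does the equivalent via ParsableByteArray and MetadataUtil.parseIlstElement.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Minimal sketch: walk the children of a container box. Each child is
// [32-bit size][4-char type][payload]; size includes the 8-byte header.
// (64-bit "largesize" and the size==0 special case are ignored here.)
final class BoxWalkSketch {
    static void walkChildren(ByteBuffer data, int limit) {
        while (data.position() + 8 <= limit) {
            int start = data.position();
            int size = data.getInt(); // ByteBuffer is big-endian by default, matching BMFF.
            byte[] type = new byte[4];
            data.get(type);
            System.out.println("child box " + new String(type, StandardCharsets.US_ASCII) + ", size " + size);
            if (size < 8 || start + size > limit) {
                break; // Malformed child; stop rather than read out of bounds.
            }
            data.position(start + size); // Jump to the next sibling.
        }
    }
}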

Example 99 with Metadata

Use of androidx.media3.common.Metadata in project media by androidx.

Class MatroskaExtractor, method endMasterElement:

/**
 * Called when the end of a master element is encountered.
 *
 * @see EbmlProcessor#endMasterElement(int)
 */
@CallSuper
protected void endMasterElement(int id) throws ParserException {
    assertInitialized();
    switch(id) {
        case ID_SEGMENT_INFO:
            if (timecodeScale == C.TIME_UNSET) {
                // timecodeScale was omitted. Use the default value.
                timecodeScale = 1000000;
            }
            if (durationTimecode != C.TIME_UNSET) {
                durationUs = scaleTimecodeToUs(durationTimecode);
            }
            break;
        case ID_SEEK:
            if (seekEntryId == UNSET_ENTRY_ID || seekEntryPosition == C.POSITION_UNSET) {
                throw ParserException.createForMalformedContainer("Mandatory element SeekID or SeekPosition not found", /* cause= */ null);
            }
            if (seekEntryId == ID_CUES) {
                cuesContentPosition = seekEntryPosition;
            }
            break;
        case ID_CUES:
            if (!sentSeekMap) {
                extractorOutput.seekMap(buildSeekMap(cueTimesUs, cueClusterPositions));
                sentSeekMap = true;
            } else {
                // We have already built the cues. Ignore.
            }
            this.cueTimesUs = null;
            this.cueClusterPositions = null;
            break;
        case ID_BLOCK_GROUP:
            if (blockState != BLOCK_STATE_DATA) {
                // We've skipped this block (due to incompatible track number).
                return;
            }
            // Commit sample metadata.
            int sampleOffset = 0;
            for (int i = 0; i < blockSampleCount; i++) {
                sampleOffset += blockSampleSizes[i];
            }
            Track track = tracks.get(blockTrackNumber);
            track.assertOutputInitialized();
            for (int i = 0; i < blockSampleCount; i++) {
                long sampleTimeUs = blockTimeUs + (i * track.defaultSampleDurationNs) / 1000;
                int sampleFlags = blockFlags;
                if (i == 0 && !blockHasReferenceBlock) {
                    // If the ReferenceBlock element was not found in this block, then the first frame is a
                    // keyframe.
                    sampleFlags |= C.BUFFER_FLAG_KEY_FRAME;
                }
                int sampleSize = blockSampleSizes[i];
                // The offset is to the end of the sample.
                sampleOffset -= sampleSize;
                commitSampleToOutput(track, sampleTimeUs, sampleFlags, sampleSize, sampleOffset);
            }
            blockState = BLOCK_STATE_START;
            break;
        case ID_CONTENT_ENCODING:
            assertInTrackEntry(id);
            if (currentTrack.hasContentEncryption) {
                if (currentTrack.cryptoData == null) {
                    throw ParserException.createForMalformedContainer("Encrypted Track found but ContentEncKeyID was not found", /* cause= */ null);
                }
                currentTrack.drmInitData = new DrmInitData(new SchemeData(C.UUID_NIL, MimeTypes.VIDEO_WEBM, currentTrack.cryptoData.encryptionKey));
            }
            break;
        case ID_CONTENT_ENCODINGS:
            assertInTrackEntry(id);
            if (currentTrack.hasContentEncryption && currentTrack.sampleStrippedBytes != null) {
                throw ParserException.createForMalformedContainer("Combining encryption and compression is not supported", /* cause= */ null);
            }
            break;
        case ID_TRACK_ENTRY:
            Track currentTrack = checkStateNotNull(this.currentTrack);
            if (currentTrack.codecId == null) {
                throw ParserException.createForMalformedContainer("CodecId is missing in TrackEntry element", /* cause= */ null);
            } else {
                if (isCodecSupported(currentTrack.codecId)) {
                    currentTrack.initializeOutput(extractorOutput, currentTrack.number);
                    tracks.put(currentTrack.number, currentTrack);
                }
            }
            this.currentTrack = null;
            break;
        case ID_TRACKS:
            if (tracks.size() == 0) {
                throw ParserException.createForMalformedContainer("No valid tracks were found", /* cause= */ null);
            }
            extractorOutput.endTracks();
            break;
        default:
            break;
    }
}
Also used: DrmInitData (androidx.media3.common.DrmInitData), SchemeData (androidx.media3.common.DrmInitData.SchemeData), CallSuper (androidx.annotation.CallSuper)
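
The ID_BLOCK_GROUP case is worth unpacking: the laced samples have already been written to the track output in order, so each one is addressed by its distance back from the end of the block, which is why sampleOffset starts as the sum of all sample sizes and shrinks as samples are committed. A minimal sketch of that offset arithmetic follows; the printf stands in for commitSampleToOutput.

// Minimal sketch of the ID_BLOCK_GROUP commit loop: sample data was written to
// the output in order, so each sample is addressed by the total size of the
// samples that follow it (its offset back from the end of the block).
final class LaceOffsetSketch {
    static void commitSamples(int[] sampleSizes, long blockTimeUs, long defaultSampleDurationNs) {
        int offsetFromEnd = 0;
        for (int size : sampleSizes) {
            offsetFromEnd += size;
        }
        for (int i = 0; i < sampleSizes.length; i++) {
            long sampleTimeUs = blockTimeUs + (i * defaultSampleDurationNs) / 1000;
            offsetFromEnd -= sampleSizes[i]; // Now the distance from block end to this sample's end.
            // A real extractor would call TrackOutput.sampleMetadata(...) here.
            System.out.printf("sample %d: timeUs=%d size=%d offsetFromEnd=%d%n",
                    i, sampleTimeUs, sampleSizes[i], offsetFromEnd);
        }
    }
}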

Example 100 with Metadata

Use of androidx.media3.common.Metadata in project media by androidx.

Class FlacReader, method readHeaders:

@Override
@EnsuresNonNullIf(expression = "#3.format", result = false)
protected boolean readHeaders(ParsableByteArray packet, long position, SetupData setupData) {
    byte[] data = packet.getData();
    @Nullable FlacStreamMetadata streamMetadata = this.streamMetadata;
    if (streamMetadata == null) {
        streamMetadata = new FlacStreamMetadata(data, 17);
        this.streamMetadata = streamMetadata;
        byte[] metadata = Arrays.copyOfRange(data, 9, packet.limit());
        setupData.format = streamMetadata.getFormat(metadata, /* id3Metadata= */ null);
        return true;
    }
    if ((data[0] & 0x7F) == FlacConstants.METADATA_TYPE_SEEK_TABLE) {
        SeekTable seekTable = FlacMetadataReader.readSeekTableMetadataBlock(packet);
        streamMetadata = streamMetadata.copyWithSeekTable(seekTable);
        this.streamMetadata = streamMetadata;
        flacOggSeeker = new FlacOggSeeker(streamMetadata, seekTable);
        return true;
    }
    if (isAudioPacket(data)) {
        if (flacOggSeeker != null) {
            flacOggSeeker.setFirstFrameOffset(position);
            setupData.oggSeeker = flacOggSeeker;
        }
        checkNotNull(setupData.format);
        return false;
    }
    return true;
}
Also used: SeekTable (androidx.media3.extractor.FlacStreamMetadata.SeekTable), Nullable (androidx.annotation.Nullable), FlacStreamMetadata (androidx.media3.extractor.FlacStreamMetadata), EnsuresNonNullIf (org.checkerframework.checker.nullness.qual.EnsuresNonNullIf)
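
The (data[0] & 0x7F) test works because every FLAC metadata block begins with a one-byte header: the top bit flags the last metadata block and the low seven bits carry the block type (type 3 is the seek table, matching FlacConstants.METADATA_TYPE_SEEK_TABLE). A minimal sketch of decoding that header byte; the constants are the standard FLAC block types and the println handling is illustrative.

// Minimal sketch: decode the one-byte FLAC metadata block header.
// Bit 7 = "last metadata block" flag; bits 0-6 = block type.
final class FlacBlockHeaderSketch {
    static final int METADATA_TYPE_STREAMINFO = 0;
    static final int METADATA_TYPE_SEEK_TABLE = 3;
    static final int METADATA_TYPE_VORBIS_COMMENT = 4;

    static void describe(byte headerByte) {
        boolean isLastBlock = (headerByte & 0x80) != 0;
        int blockType = headerByte & 0x7F;
        System.out.println("block type " + blockType + (isLastBlock ? " (last metadata block)" : ""));
        if (blockType == METADATA_TYPE_SEEK_TABLE) {
            System.out.println("a seek table follows; FlacReader builds a FlacOggSeeker from it");
        }
    }
}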
