Example 16 with ParserException

Use of androidx.media3.common.ParserException in project media by androidx.

The class FragmentedMp4Extractor, method onMoofContainerAtomRead.

private void onMoofContainerAtomRead(ContainerAtom moof) throws ParserException {
    parseMoof(moof, trackBundles, sideloadedTrack != null, flags, scratchBytes);
    @Nullable DrmInitData drmInitData = getDrmInitDataFromAtoms(moof.leafChildren);
    if (drmInitData != null) {
        int trackCount = trackBundles.size();
        for (int i = 0; i < trackCount; i++) {
            trackBundles.valueAt(i).updateDrmInitData(drmInitData);
        }
    }
    // If we have a pending seek, advance tracks to their preceding sync frames.
    if (pendingSeekTimeUs != C.TIME_UNSET) {
        int trackCount = trackBundles.size();
        for (int i = 0; i < trackCount; i++) {
            trackBundles.valueAt(i).seek(pendingSeekTimeUs);
        }
        pendingSeekTimeUs = C.TIME_UNSET;
    }
}
Also used: DrmInitData (androidx.media3.common.DrmInitData), Nullable (androidx.annotation.Nullable)
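
For context, here is a minimal, hedged sketch of how a ParserException thrown from onMoofContainerAtomRead reaches the code driving the extractor. The extractorOutput, dataReader, and inputLength names are hypothetical placeholders; ParserException extends IOException, so callers can catch it separately from ordinary read failures.

// Sketch only: extractorOutput, dataReader and inputLength are assumed to exist in scope.
Extractor extractor = new FragmentedMp4Extractor();
extractor.init(extractorOutput);
ExtractorInput input = new DefaultExtractorInput(dataReader, /* position= */ 0, inputLength);
PositionHolder positionHolder = new PositionHolder();
try {
    int result = Extractor.RESULT_CONTINUE;
    while (result != Extractor.RESULT_END_OF_INPUT) {
        result = extractor.read(input, positionHolder);
        if (result == Extractor.RESULT_SEEK) {
            // The caller must re-open the input at positionHolder.position before continuing.
            break;
        }
    }
} catch (ParserException e) {
    // Malformed fMP4 input: e.contentIsMalformed and e.dataType describe the failure.
} catch (IOException e) {
    // Ordinary read failure, unrelated to the container structure.
}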

Example 17 with ParserException

Use of androidx.media3.common.ParserException in project media by androidx.

The class FragmentedMp4Extractor, method parseTraf.

/**
 * Parses a traf atom (defined in ISO/IEC 14496-12).
 */
private static void parseTraf(ContainerAtom traf, SparseArray<TrackBundle> trackBundles, boolean haveSideloadedTrack, @Flags int flags, byte[] extendedTypeScratch) throws ParserException {
    LeafAtom tfhd = checkNotNull(traf.getLeafAtomOfType(Atom.TYPE_tfhd));
    @Nullable TrackBundle trackBundle = parseTfhd(tfhd.data, trackBundles, haveSideloadedTrack);
    if (trackBundle == null) {
        return;
    }
    TrackFragment fragment = trackBundle.fragment;
    long fragmentDecodeTime = fragment.nextFragmentDecodeTime;
    boolean fragmentDecodeTimeIncludesMoov = fragment.nextFragmentDecodeTimeIncludesMoov;
    trackBundle.resetFragmentInfo();
    trackBundle.currentlyInFragment = true;
    @Nullable LeafAtom tfdtAtom = traf.getLeafAtomOfType(Atom.TYPE_tfdt);
    if (tfdtAtom != null && (flags & FLAG_WORKAROUND_IGNORE_TFDT_BOX) == 0) {
        fragment.nextFragmentDecodeTime = parseTfdt(tfdtAtom.data);
        fragment.nextFragmentDecodeTimeIncludesMoov = true;
    } else {
        fragment.nextFragmentDecodeTime = fragmentDecodeTime;
        fragment.nextFragmentDecodeTimeIncludesMoov = fragmentDecodeTimeIncludesMoov;
    }
    parseTruns(traf, trackBundle, flags);
    @Nullable TrackEncryptionBox encryptionBox = trackBundle.moovSampleTable.track.getSampleDescriptionEncryptionBox(checkNotNull(fragment.header).sampleDescriptionIndex);
    @Nullable LeafAtom saiz = traf.getLeafAtomOfType(Atom.TYPE_saiz);
    if (saiz != null) {
        parseSaiz(checkNotNull(encryptionBox), saiz.data, fragment);
    }
    @Nullable LeafAtom saio = traf.getLeafAtomOfType(Atom.TYPE_saio);
    if (saio != null) {
        parseSaio(saio.data, fragment);
    }
    @Nullable LeafAtom senc = traf.getLeafAtomOfType(Atom.TYPE_senc);
    if (senc != null) {
        parseSenc(senc.data, fragment);
    }
    parseSampleGroups(traf, encryptionBox != null ? encryptionBox.schemeType : null, fragment);
    int leafChildrenSize = traf.leafChildren.size();
    for (int i = 0; i < leafChildrenSize; i++) {
        LeafAtom atom = traf.leafChildren.get(i);
        if (atom.type == Atom.TYPE_uuid) {
            parseUuid(atom.data, fragment, extendedTypeScratch);
        }
    }
}
Also used: LeafAtom (androidx.media3.extractor.mp4.Atom.LeafAtom), Nullable (androidx.annotation.Nullable)
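
The FLAG_WORKAROUND_IGNORE_TFDT_BOX branch above is controlled by the flags passed when the extractor is constructed. A minimal usage sketch, assuming the flag-accepting constructor available in this snapshot of the library:

// Ignore tfdt boxes, so parseTraf falls back to the accumulated fragment decode time.
Extractor extractor =
        new FragmentedMp4Extractor(FragmentedMp4Extractor.FLAG_WORKAROUND_IGNORE_TFDT_BOX);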

Example 18 with ParserException

Use of androidx.media3.common.ParserException in project media by androidx.

The class Mp4Extractor, method processMoovAtom.

/**
 * Updates the stored track metadata to reflect the contents of the specified moov atom.
 */
private void processMoovAtom(ContainerAtom moov) throws ParserException {
    int firstVideoTrackIndex = C.INDEX_UNSET;
    long durationUs = C.TIME_UNSET;
    List<Mp4Track> tracks = new ArrayList<>();
    // Process metadata.
    @Nullable Metadata udtaMetaMetadata = null;
    @Nullable Metadata smtaMetadata = null;
    boolean isQuickTime = fileType == FILE_TYPE_QUICKTIME;
    GaplessInfoHolder gaplessInfoHolder = new GaplessInfoHolder();
    @Nullable Atom.LeafAtom udta = moov.getLeafAtomOfType(Atom.TYPE_udta);
    if (udta != null) {
        Pair<@NullableType Metadata, @NullableType Metadata> udtaMetadata = AtomParsers.parseUdta(udta);
        udtaMetaMetadata = udtaMetadata.first;
        smtaMetadata = udtaMetadata.second;
        if (udtaMetaMetadata != null) {
            gaplessInfoHolder.setFromMetadata(udtaMetaMetadata);
        }
    }
    @Nullable Metadata mdtaMetadata = null;
    @Nullable Atom.ContainerAtom meta = moov.getContainerAtomOfType(Atom.TYPE_meta);
    if (meta != null) {
        mdtaMetadata = AtomParsers.parseMdtaFromMeta(meta);
    }
    boolean ignoreEditLists = (flags & FLAG_WORKAROUND_IGNORE_EDIT_LISTS) != 0;
    List<TrackSampleTable> trackSampleTables =
        parseTraks(
            moov,
            gaplessInfoHolder,
            /* duration= */ C.TIME_UNSET,
            /* drmInitData= */ null,
            ignoreEditLists,
            isQuickTime,
            /* modifyTrackFunction= */ track -> track);
    ExtractorOutput extractorOutput = checkNotNull(this.extractorOutput);
    int trackCount = trackSampleTables.size();
    for (int i = 0; i < trackCount; i++) {
        TrackSampleTable trackSampleTable = trackSampleTables.get(i);
        if (trackSampleTable.sampleCount == 0) {
            continue;
        }
        Track track = trackSampleTable.track;
        long trackDurationUs = track.durationUs != C.TIME_UNSET ? track.durationUs : trackSampleTable.durationUs;
        durationUs = max(durationUs, trackDurationUs);
        Mp4Track mp4Track = new Mp4Track(track, trackSampleTable, extractorOutput.track(i, track.type));
        int maxInputSize;
        if (MimeTypes.AUDIO_TRUEHD.equals(track.format.sampleMimeType)) {
            // TrueHD groups samples into chunks of TRUEHD_RECHUNK_SAMPLE_COUNT samples.
            maxInputSize = trackSampleTable.maximumSize * Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT;
        } else {
            // Each sample has up to three bytes of overhead for the start code that replaces its
            // length. Allow ten source samples per output sample, like the platform extractor.
            maxInputSize = trackSampleTable.maximumSize + 3 * 10;
        }
        Format.Builder formatBuilder = track.format.buildUpon();
        formatBuilder.setMaxInputSize(maxInputSize);
        if (track.type == C.TRACK_TYPE_VIDEO && trackDurationUs > 0 && trackSampleTable.sampleCount > 1) {
            float frameRate = trackSampleTable.sampleCount / (trackDurationUs / 1000000f);
            formatBuilder.setFrameRate(frameRate);
        }
        MetadataUtil.setFormatGaplessInfo(track.type, gaplessInfoHolder, formatBuilder);
        MetadataUtil.setFormatMetadata(track.type, udtaMetaMetadata, mdtaMetadata, formatBuilder, smtaMetadata, slowMotionMetadataEntries.isEmpty() ? null : new Metadata(slowMotionMetadataEntries));
        mp4Track.trackOutput.format(formatBuilder.build());
        if (track.type == C.TRACK_TYPE_VIDEO && firstVideoTrackIndex == C.INDEX_UNSET) {
            firstVideoTrackIndex = tracks.size();
        }
        tracks.add(mp4Track);
    }
    this.firstVideoTrackIndex = firstVideoTrackIndex;
    this.durationUs = durationUs;
    this.tracks = tracks.toArray(new Mp4Track[0]);
    accumulatedSampleSizes = calculateAccumulatedSampleSizes(this.tracks);
    extractorOutput.endTracks();
    extractorOutput.seekMap(this);
}
Also used: ExtractorOutput (androidx.media3.extractor.ExtractorOutput), ArrayList (java.util.ArrayList), Metadata (androidx.media3.common.Metadata), MotionPhotoMetadata (androidx.media3.extractor.metadata.mp4.MotionPhotoMetadata), ContainerAtom (androidx.media3.extractor.mp4.Atom.ContainerAtom), SeekPoint (androidx.media3.extractor.SeekPoint), Format (androidx.media3.common.Format), GaplessInfoHolder (androidx.media3.extractor.GaplessInfoHolder), Nullable (androidx.annotation.Nullable)
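
For intuition on the frame-rate estimate above, a small worked example with illustrative values:

// 300 video samples spanning a 10-second track duration:
int sampleCount = 300;
long trackDurationUs = 10_000_000L;
float frameRate = sampleCount / (trackDurationUs / 1_000_000f); // 300 / 10.0f = 30.0 fps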

Example 19 with ParserException

Use of androidx.media3.common.ParserException in project media by androidx.

The class AtomParsers, method parseVideoSampleEntry.

// hdrStaticInfo is allocated using allocate() in allocateHdrStaticInfo().
@SuppressWarnings("ByteBufferBackingArray")
private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType, int position, int size, int trackId, int rotationDegrees, @Nullable DrmInitData drmInitData, StsdData out, int entryIndex) throws ParserException {
    parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE);
    parent.skipBytes(16);
    int width = parent.readUnsignedShort();
    int height = parent.readUnsignedShort();
    boolean pixelWidthHeightRatioFromPasp = false;
    float pixelWidthHeightRatio = 1;
    parent.skipBytes(50);
    int childPosition = parent.getPosition();
    if (atomType == Atom.TYPE_encv) {
        @Nullable Pair<Integer, TrackEncryptionBox> sampleEntryEncryptionData = parseSampleEntryEncryptionData(parent, position, size);
        if (sampleEntryEncryptionData != null) {
            atomType = sampleEntryEncryptionData.first;
            drmInitData = drmInitData == null ? null : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType);
            out.trackEncryptionBoxes[entryIndex] = sampleEntryEncryptionData.second;
        }
        parent.setPosition(childPosition);
    }
    // TODO: Uncomment when [Internal: b/63092960] is fixed.
    // else {
    // drmInitData = null;
    // }
    @Nullable String mimeType = null;
    if (atomType == Atom.TYPE_m1v_) {
        mimeType = MimeTypes.VIDEO_MPEG;
    } else if (atomType == Atom.TYPE_H263) {
        mimeType = MimeTypes.VIDEO_H263;
    }
    @Nullable List<byte[]> initializationData = null;
    @Nullable String codecs = null;
    @Nullable byte[] projectionData = null;
    @C.StereoMode int stereoMode = Format.NO_VALUE;
    // HDR related metadata.
    @C.ColorSpace int colorSpace = Format.NO_VALUE;
    @C.ColorRange int colorRange = Format.NO_VALUE;
    @C.ColorTransfer int colorTransfer = Format.NO_VALUE;
    // The format of HDR static info is defined in CTA-861-G:2017, Table 45.
    @Nullable ByteBuffer hdrStaticInfo = null;
    while (childPosition - position < size) {
        parent.setPosition(childPosition);
        int childStartPosition = parent.getPosition();
        int childAtomSize = parent.readInt();
        if (childAtomSize == 0 && parent.getPosition() - position == size) {
            // Handle optional terminating four zero bytes in MOV files.
            break;
        }
        ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive");
        int childAtomType = parent.readInt();
        if (childAtomType == Atom.TYPE_avcC) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_H264;
            parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
            AvcConfig avcConfig = AvcConfig.parse(parent);
            initializationData = avcConfig.initializationData;
            out.nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength;
            if (!pixelWidthHeightRatioFromPasp) {
                pixelWidthHeightRatio = avcConfig.pixelWidthHeightRatio;
            }
            codecs = avcConfig.codecs;
        } else if (childAtomType == Atom.TYPE_hvcC) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_H265;
            parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
            HevcConfig hevcConfig = HevcConfig.parse(parent);
            initializationData = hevcConfig.initializationData;
            out.nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength;
            if (!pixelWidthHeightRatioFromPasp) {
                pixelWidthHeightRatio = hevcConfig.pixelWidthHeightRatio;
            }
            codecs = hevcConfig.codecs;
        } else if (childAtomType == Atom.TYPE_dvcC || childAtomType == Atom.TYPE_dvvC) {
            @Nullable DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(parent);
            if (dolbyVisionConfig != null) {
                codecs = dolbyVisionConfig.codecs;
                mimeType = MimeTypes.VIDEO_DOLBY_VISION;
            }
        } else if (childAtomType == Atom.TYPE_vpcC) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = (atomType == Atom.TYPE_vp08) ? MimeTypes.VIDEO_VP8 : MimeTypes.VIDEO_VP9;
        } else if (childAtomType == Atom.TYPE_av1C) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_AV1;
        } else if (childAtomType == Atom.TYPE_clli) {
            if (hdrStaticInfo == null) {
                hdrStaticInfo = allocateHdrStaticInfo();
            }
            // The contents of the clli box occupy the last 4 bytes of the HDR static info array. Note
            // that each field is read in big endian and written in little endian.
            hdrStaticInfo.position(21);
            // max_content_light_level.
            hdrStaticInfo.putShort(parent.readShort());
            // max_pic_average_light_level.
            hdrStaticInfo.putShort(parent.readShort());
        } else if (childAtomType == Atom.TYPE_mdcv) {
            if (hdrStaticInfo == null) {
                hdrStaticInfo = allocateHdrStaticInfo();
            }
            // The contents of the mdcv box occupy 20 bytes after the first byte of the HDR static info
            // array. Note that each field is read in big endian and written in little endian.
            short displayPrimariesGX = parent.readShort();
            short displayPrimariesGY = parent.readShort();
            short displayPrimariesBX = parent.readShort();
            short displayPrimariesBY = parent.readShort();
            short displayPrimariesRX = parent.readShort();
            short displayPrimariesRY = parent.readShort();
            short whitePointX = parent.readShort();
            short whitePointY = parent.readShort();
            long maxDisplayMasteringLuminance = parent.readUnsignedInt();
            long minDisplayMasteringLuminance = parent.readUnsignedInt();
            hdrStaticInfo.position(1);
            hdrStaticInfo.putShort(displayPrimariesRX);
            hdrStaticInfo.putShort(displayPrimariesRY);
            hdrStaticInfo.putShort(displayPrimariesGX);
            hdrStaticInfo.putShort(displayPrimariesGY);
            hdrStaticInfo.putShort(displayPrimariesBX);
            hdrStaticInfo.putShort(displayPrimariesBY);
            hdrStaticInfo.putShort(whitePointX);
            hdrStaticInfo.putShort(whitePointY);
            hdrStaticInfo.putShort((short) (maxDisplayMasteringLuminance / 10000));
            hdrStaticInfo.putShort((short) (minDisplayMasteringLuminance / 10000));
        } else if (childAtomType == Atom.TYPE_d263) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            mimeType = MimeTypes.VIDEO_H263;
        } else if (childAtomType == Atom.TYPE_esds) {
            ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
            Pair<@NullableType String, byte @NullableType []> mimeTypeAndInitializationDataBytes = parseEsdsFromParent(parent, childStartPosition);
            mimeType = mimeTypeAndInitializationDataBytes.first;
            @Nullable byte[] initializationDataBytes = mimeTypeAndInitializationDataBytes.second;
            if (initializationDataBytes != null) {
                initializationData = ImmutableList.of(initializationDataBytes);
            }
        } else if (childAtomType == Atom.TYPE_pasp) {
            pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
            pixelWidthHeightRatioFromPasp = true;
        } else if (childAtomType == Atom.TYPE_sv3d) {
            projectionData = parseProjFromParent(parent, childStartPosition, childAtomSize);
        } else if (childAtomType == Atom.TYPE_st3d) {
            int version = parent.readUnsignedByte();
            // Flags.
            parent.skipBytes(3);
            if (version == 0) {
                int layout = parent.readUnsignedByte();
                switch(layout) {
                    case 0:
                        stereoMode = C.STEREO_MODE_MONO;
                        break;
                    case 1:
                        stereoMode = C.STEREO_MODE_TOP_BOTTOM;
                        break;
                    case 2:
                        stereoMode = C.STEREO_MODE_LEFT_RIGHT;
                        break;
                    case 3:
                        stereoMode = C.STEREO_MODE_STEREO_MESH;
                        break;
                    default:
                        break;
                }
            }
        } else if (childAtomType == Atom.TYPE_colr) {
            int colorType = parent.readInt();
            if (colorType == TYPE_nclx || colorType == TYPE_nclc) {
                // For more info on syntax, see Section 8.5.2.2 in ISO/IEC 14496-12:2012(E) and
                // https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFChap3/qtff3.html.
                int colorPrimaries = parent.readUnsignedShort();
                int transferCharacteristics = parent.readUnsignedShort();
                // matrix_coefficients.
                parent.skipBytes(2);
                // Only try and read full_range_flag if the box is long enough. It should be present in
                // all colr boxes with type=nclx (Section 8.5.2.2 in ISO/IEC 14496-12:2012(E)) but some
                // device cameras record videos with type=nclx without this final flag (and therefore
                // size=18): https://github.com/google/ExoPlayer/issues/9332
                boolean fullRangeFlag = childAtomSize == 19 && (parent.readUnsignedByte() & 0b10000000) != 0;
                colorSpace = ColorInfo.isoColorPrimariesToColorSpace(colorPrimaries);
                colorRange = fullRangeFlag ? C.COLOR_RANGE_FULL : C.COLOR_RANGE_LIMITED;
                colorTransfer = ColorInfo.isoTransferCharacteristicsToColorTransfer(transferCharacteristics);
            } else {
                Log.w(TAG, "Unsupported color type: " + Atom.getAtomTypeString(colorType));
            }
        }
        childPosition += childAtomSize;
    }
    // If the media type was not recognized, ignore the track.
    if (mimeType == null) {
        return;
    }
    Format.Builder formatBuilder =
        new Format.Builder()
            .setId(trackId)
            .setSampleMimeType(mimeType)
            .setCodecs(codecs)
            .setWidth(width)
            .setHeight(height)
            .setPixelWidthHeightRatio(pixelWidthHeightRatio)
            .setRotationDegrees(rotationDegrees)
            .setProjectionData(projectionData)
            .setStereoMode(stereoMode)
            .setInitializationData(initializationData)
            .setDrmInitData(drmInitData);
    if (colorSpace != Format.NO_VALUE || colorRange != Format.NO_VALUE || colorTransfer != Format.NO_VALUE || hdrStaticInfo != null) {
        // Note that if either mdcv or clli are missing, we leave the corresponding HDR static
        // metadata bytes with value zero. See [Internal ref: b/194535665].
        formatBuilder.setColorInfo(new ColorInfo(colorSpace, colorRange, colorTransfer, hdrStaticInfo != null ? hdrStaticInfo.array() : null));
    }
    out.format = formatBuilder.build();
}
Also used: DolbyVisionConfig (androidx.media3.extractor.DolbyVisionConfig), AvcConfig (androidx.media3.extractor.AvcConfig), ByteBuffer (java.nio.ByteBuffer), ColorInfo (androidx.media3.common.ColorInfo), HevcConfig (androidx.media3.extractor.HevcConfig), Format (androidx.media3.common.Format), Nullable (androidx.annotation.Nullable)
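
The clli and mdcv branches above populate a 25-byte HDR static info buffer (CTA-861-G, Table 45): mdcv contributes 20 bytes starting at offset 1 and clli the final 4 bytes at offset 21. A plausible sketch of allocateHdrStaticInfo(), inferred from those offsets and the little-endian comments rather than copied from the library source:

private static ByteBuffer allocateHdrStaticInfo() {
    // 25 bytes per CTA-861-G Table 45; all fields are written little-endian.
    ByteBuffer hdrStaticInfo = ByteBuffer.allocate(25);
    hdrStaticInfo.order(ByteOrder.LITTLE_ENDIAN);
    return hdrStaticInfo;
}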

Example 20 with ParserException

Use of androidx.media3.common.ParserException in project media by androidx.

The class LatmReader, method parseStreamMuxConfig.

/**
 * Parses a StreamMuxConfig as defined in ISO/IEC 14496-3:2009 Section 1.7.3.1, Table 1.42.
 */
@RequiresNonNull("output")
private void parseStreamMuxConfig(ParsableBitArray data) throws ParserException {
    int audioMuxVersion = data.readBits(1);
    audioMuxVersionA = audioMuxVersion == 1 ? data.readBits(1) : 0;
    if (audioMuxVersionA == 0) {
        if (audioMuxVersion == 1) {
            // Skip taraBufferFullness.
            latmGetValue(data);
        }
        if (!data.readBit()) {
            throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null);
        }
        numSubframes = data.readBits(6);
        int numProgram = data.readBits(4);
        int numLayer = data.readBits(3);
        if (numProgram != 0 || numLayer != 0) {
            throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null);
        }
        if (audioMuxVersion == 0) {
            int startPosition = data.getPosition();
            int readBits = parseAudioSpecificConfig(data);
            data.setPosition(startPosition);
            byte[] initData = new byte[(readBits + 7) / 8];
            data.readBits(initData, 0, readBits);
            Format format =
                new Format.Builder()
                    .setId(formatId)
                    .setSampleMimeType(MimeTypes.AUDIO_AAC)
                    .setCodecs(codecs)
                    .setChannelCount(channelCount)
                    .setSampleRate(sampleRateHz)
                    .setInitializationData(Collections.singletonList(initData))
                    .setLanguage(language)
                    .build();
            if (!format.equals(this.format)) {
                this.format = format;
                sampleDurationUs = (C.MICROS_PER_SECOND * 1024) / format.sampleRate;
                output.format(format);
            }
        } else {
            int ascLen = (int) latmGetValue(data);
            int bitsRead = parseAudioSpecificConfig(data);
            // fillBits.
            data.skipBits(ascLen - bitsRead);
        }
        parseFrameLength(data);
        otherDataPresent = data.readBit();
        otherDataLenBits = 0;
        if (otherDataPresent) {
            if (audioMuxVersion == 1) {
                otherDataLenBits = latmGetValue(data);
            } else {
                boolean otherDataLenEsc;
                do {
                    otherDataLenEsc = data.readBit();
                    otherDataLenBits = (otherDataLenBits << 8) + data.readBits(8);
                } while (otherDataLenEsc);
            }
        }
        boolean crcCheckPresent = data.readBit();
        if (crcCheckPresent) {
            // crcCheckSum.
            data.skipBits(8);
        }
    } else {
        // This is not defined by ISO/IEC 14496-3:2009.
        throw ParserException.createForMalformedContainer(/* message= */ null, /* cause= */ null);
    }
}
Also used: Format (androidx.media3.common.Format), RequiresNonNull (org.checkerframework.checker.nullness.qual.RequiresNonNull)
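
The latmGetValue calls above correspond to LatmValue() in ISO/IEC 14496-3: a 2-bit byte count followed by that many bytes (plus one) of value. A sketch consistent with that definition, offered as an assumption rather than the verbatim library implementation:

private static long latmGetValue(ParsableBitArray data) {
    int bytesForValue = data.readBits(2);
    long value = 0;
    for (int i = 0; i <= bytesForValue; i++) {
        value = (value << 8) | data.readBits(8);
    }
    return value;
}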

Aggregations

Test (org.junit.Test): 25
ParsableByteArray (androidx.media3.common.util.ParsableByteArray): 15
MediaItem (androidx.media3.common.MediaItem): 13
Timeline (androidx.media3.common.Timeline): 13
Nullable (androidx.annotation.Nullable): 12
ParserException (androidx.media3.common.ParserException): 11
Format (androidx.media3.common.Format): 6
PositionHolder (androidx.media3.extractor.PositionHolder): 5
FakeExtractorInput (androidx.media3.test.utils.FakeExtractorInput): 5
LeafAtom (androidx.media3.extractor.mp4.Atom.LeafAtom): 4
DrmInitData (androidx.media3.common.DrmInitData): 3
ContainerAtom (androidx.media3.extractor.mp4.Atom.ContainerAtom): 3
RequiresNonNull (org.checkerframework.checker.nullness.qual.RequiresNonNull): 3
Uri (android.net.Uri): 2
CallSuper (androidx.annotation.CallSuper): 2
SchemeData (androidx.media3.common.DrmInitData.SchemeData): 2
AacUtil (androidx.media3.extractor.AacUtil): 2
AvcConfig (androidx.media3.extractor.AvcConfig): 2
GaplessInfoHolder (androidx.media3.extractor.GaplessInfoHolder): 2
ArrayList (java.util.ArrayList): 2