Search in sources :

Example 6 with DrmInitData

Use of com.google.android.exoplayer2.drm.DrmInitData in the project ExoPlayer by Google.

The class FormatTest defines the method testParcelable.

/**
 * Round-trips a fully-populated {@link Format} through {@link Parcel} and asserts that the
 * reconstructed instance equals the original. The format deliberately sets every optional
 * field (DRM init data, metadata, projection data) so the Parcelable implementation is
 * exercised end to end.
 */
public void testParcelable() {
    // Locals renamed to lowerCamelCase: UPPER_SNAKE_CASE is reserved for constants.
    DrmInitData.SchemeData drmData1 =
        new DrmInitData.SchemeData(WIDEVINE_UUID, VIDEO_MP4, TestUtil.buildTestData(128, 1));
    DrmInitData.SchemeData drmData2 =
        new DrmInitData.SchemeData(C.UUID_NIL, VIDEO_WEBM, TestUtil.buildTestData(128, 1));
    DrmInitData drmInitData = new DrmInitData(drmData1, drmData2);
    byte[] projectionData = new byte[] { 1, 2, 3 };
    Metadata metadata = new Metadata(
        new TextInformationFrame("id1", "description1", "value1"),
        new TextInformationFrame("id2", "description2", "value2"));
    Format formatToParcel = new Format("id", MimeTypes.VIDEO_MP4, MimeTypes.VIDEO_H264, null, 1024, 2048, 1920, 1080, 24, 90, 2, projectionData, C.STEREO_MODE_TOP_BOTTOM, 6, 44100, C.ENCODING_PCM_24BIT, 1001, 1002, 0, "und", Format.NO_VALUE, Format.OFFSET_SAMPLE_RELATIVE, INIT_DATA, drmInitData, metadata);
    Parcel parcel = Parcel.obtain();
    try {
        formatToParcel.writeToParcel(parcel, 0);
        parcel.setDataPosition(0);
        Format formatFromParcel = Format.CREATOR.createFromParcel(parcel);
        assertEquals(formatToParcel, formatFromParcel);
    } finally {
        // Always return the Parcel to the pool, even if the assertion above fails.
        parcel.recycle();
    }
}
Also used : DrmInitData(com.google.android.exoplayer2.drm.DrmInitData) MediaFormat(android.media.MediaFormat) Parcel(android.os.Parcel) Metadata(com.google.android.exoplayer2.metadata.Metadata) TextInformationFrame(com.google.android.exoplayer2.metadata.id3.TextInformationFrame)

Example 7 with DrmInitData

Use of com.google.android.exoplayer2.drm.DrmInitData in the project ExoPlayer by Google.

The class MediaCodecVideoRenderer defines the method supportsFormat.

@Override
protected int supportsFormat(MediaCodecSelector mediaCodecSelector, Format format) throws DecoderQueryException {
    // Non-video formats are rejected outright.
    String mimeType = format.sampleMimeType;
    if (!MimeTypes.isVideo(mimeType)) {
        return FORMAT_UNSUPPORTED_TYPE;
    }
    // A secure decoder is required if any DRM scheme data demands secure decryption.
    boolean secureDecoderRequired = false;
    DrmInitData drmInitData = format.drmInitData;
    if (drmInitData != null) {
        int schemeCount = drmInitData.schemeDataCount;
        for (int index = 0; index < schemeCount; index++) {
            if (drmInitData.get(index).requiresSecureDecryption) {
                secureDecoderRequired = true;
            }
        }
    }
    MediaCodecInfo decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, secureDecoderRequired);
    if (decoderInfo == null) {
        // The mime type is video, but no suitable decoder exists on this device.
        return FORMAT_UNSUPPORTED_SUBTYPE;
    }
    boolean capable = decoderInfo.isCodecSupported(format.codecs);
    boolean dimensionsKnown = format.width > 0 && format.height > 0;
    if (capable && dimensionsKnown) {
        if (Util.SDK_INT >= 21) {
            // API 21+ exposes a direct size/frame-rate capability query.
            capable = decoderInfo.isVideoSizeAndRateSupportedV21(format.width, format.height, format.frameRate);
        } else {
            // Pre-21 fallback: compare against the platform's maximum H.264 frame size.
            int frameSize = format.width * format.height;
            capable = frameSize <= MediaCodecUtil.maxH264DecodableFrameSize();
            if (!capable) {
                Log.d(TAG, "FalseCheck [legacyFrameSize, " + format.width + "x" + format.height + "] [" + Util.DEVICE_DEBUG_INFO + "]");
            }
        }
    }
    // Combine the three orthogonal support flags into a single bitmask.
    int adaptiveSupport = decoderInfo.adaptive ? ADAPTIVE_SEAMLESS : ADAPTIVE_NOT_SEAMLESS;
    int tunnelingSupport = decoderInfo.tunneling ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
    int formatSupport = capable ? FORMAT_HANDLED : FORMAT_EXCEEDS_CAPABILITIES;
    return adaptiveSupport | tunnelingSupport | formatSupport;
}
Also used : DrmInitData(com.google.android.exoplayer2.drm.DrmInitData) MediaCodecInfo(com.google.android.exoplayer2.mediacodec.MediaCodecInfo) SuppressLint(android.annotation.SuppressLint) Point(android.graphics.Point)

Example 8 with DrmInitData

Use of com.google.android.exoplayer2.drm.DrmInitData in the project ExoPlayer by Google.

The class DashManifestParser defines the method buildRepresentation.

/**
 * Builds a {@link Representation} from parsed representation info, merging in any DRM scheme
 * data and in-band event streams declared at a higher level of the manifest.
 *
 * @param representationInfo The parsed representation information.
 * @param contentId The content identifier for the representation.
 * @param extraDrmSchemeDatas DRM scheme data inherited from enclosing elements, appended to the
 *     representation's own scheme data.
 * @param extraInbandEventStreams In-band event streams inherited from enclosing elements,
 *     appended to the representation's own event streams.
 * @return The built {@link Representation}.
 */
protected Representation buildRepresentation(RepresentationInfo representationInfo, String contentId, ArrayList<SchemeData> extraDrmSchemeDatas, ArrayList<SchemeValuePair> extraInbandEventStreams) {
    Format format = representationInfo.format;
    ArrayList<SchemeData> drmSchemeDatas = representationInfo.drmSchemeDatas;
    drmSchemeDatas.addAll(extraDrmSchemeDatas);
    if (!drmSchemeDatas.isEmpty()) {
        // Only attach DrmInitData when there is at least one scheme; an empty DrmInitData
        // would incorrectly mark the format as protected.
        format = format.copyWithDrmInitData(new DrmInitData(drmSchemeDatas));
    }
    // Fixed typo in the local name: "inbandEventStremas" -> "inbandEventStreams".
    ArrayList<SchemeValuePair> inbandEventStreams = representationInfo.inbandEventStreams;
    inbandEventStreams.addAll(extraInbandEventStreams);
    return Representation.newInstance(contentId, Representation.REVISION_ID_DEFAULT, format, representationInfo.baseUrl, representationInfo.segmentBase, inbandEventStreams);
}
Also used : Format(com.google.android.exoplayer2.Format) DrmInitData(com.google.android.exoplayer2.drm.DrmInitData) SchemeData(com.google.android.exoplayer2.drm.DrmInitData.SchemeData)

Example 9 with DrmInitData

Use of com.google.android.exoplayer2.drm.DrmInitData in the project ExoPlayer by Google.

The class AtomParsers defines the method parseVideoSampleEntry.

/**
 * Parses a video sample entry from an stsd box, populating {@code out.format} (and, for
 * H.264/H.265, {@code out.nalUnitLengthFieldLength}). If the sample entry's mime type cannot
 * be determined from its child atoms, the track is ignored and {@code out} is left unchanged.
 *
 * @param parent The byte array holding the sample entry, positioned anywhere (position is set
 *     explicitly below).
 * @param atomType The type of the sample entry atom being parsed.
 * @param position The position of the sample entry atom within {@code parent}.
 * @param size The size of the sample entry atom.
 * @param trackId The id of the track, used as the format id.
 * @param rotationDegrees The rotation to apply to the video.
 * @param drmInitData DRM initialization data for the format, or null.
 * @param out Output holder populated with the parsed format.
 * @param entryIndex The index of this sample entry within the stsd box.
 * @throws ParserException If the sample entry cannot be parsed.
 */
private static void parseVideoSampleEntry(ParsableByteArray parent, int atomType, int position, int size, int trackId, int rotationDegrees, DrmInitData drmInitData, StsdData out, int entryIndex) throws ParserException {
    parent.setPosition(position + Atom.HEADER_SIZE);
    // Skip the fixed fields preceding width/height — presumably the VisualSampleEntry
    // reserved/pre_defined fields per ISO/IEC 14496-12 (TODO confirm layout).
    parent.skipBytes(24);
    int width = parent.readUnsignedShort();
    int height = parent.readUnsignedShort();
    boolean pixelWidthHeightRatioFromPasp = false;
    float pixelWidthHeightRatio = 1;
    // Skip the remaining fixed VisualSampleEntry fields to reach the child atoms.
    parent.skipBytes(50);
    int childPosition = parent.getPosition();
    if (atomType == Atom.TYPE_encv) {
        // Encrypted entry: resolve the underlying sample entry type from the sinf data,
        // then rewind so the child-atom loop below starts from the same position.
        atomType = parseSampleEntryEncryptionData(parent, position, size, out, entryIndex);
        parent.setPosition(childPosition);
    }
    List<byte[]> initializationData = null;
    String mimeType = null;
    byte[] projectionData = null;
    @C.StereoMode int stereoMode = Format.NO_VALUE;
    // Walk the child atoms; each branch below handles one recognized child type.
    while (childPosition - position < size) {
        parent.setPosition(childPosition);
        int childStartPosition = parent.getPosition();
        int childAtomSize = parent.readInt();
        if (childAtomSize == 0 && parent.getPosition() - position == size) {
            // Handle optional terminating four zero bytes in MOV files.
            break;
        }
        Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive");
        int childAtomType = parent.readInt();
        if (childAtomType == Atom.TYPE_avcC) {
            // H.264 decoder configuration. Only one codec config child is expected.
            Assertions.checkState(mimeType == null);
            mimeType = MimeTypes.VIDEO_H264;
            parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
            AvcConfig avcConfig = AvcConfig.parse(parent);
            initializationData = avcConfig.initializationData;
            out.nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength;
            if (!pixelWidthHeightRatioFromPasp) {
                // A pasp atom, if present, takes precedence over the avcC aspect ratio.
                pixelWidthHeightRatio = avcConfig.pixelWidthAspectRatio;
            }
        } else if (childAtomType == Atom.TYPE_hvcC) {
            // H.265 decoder configuration.
            Assertions.checkState(mimeType == null);
            mimeType = MimeTypes.VIDEO_H265;
            parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
            HevcConfig hevcConfig = HevcConfig.parse(parent);
            initializationData = hevcConfig.initializationData;
            out.nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength;
        } else if (childAtomType == Atom.TYPE_vpcC) {
            // VP8/VP9 — distinguished by the (possibly encv-resolved) sample entry type.
            Assertions.checkState(mimeType == null);
            mimeType = (atomType == Atom.TYPE_vp08) ? MimeTypes.VIDEO_VP8 : MimeTypes.VIDEO_VP9;
        } else if (childAtomType == Atom.TYPE_d263) {
            // H.263.
            Assertions.checkState(mimeType == null);
            mimeType = MimeTypes.VIDEO_H263;
        } else if (childAtomType == Atom.TYPE_esds) {
            // MPEG-4 elementary stream descriptor: yields both mime type and init data.
            Assertions.checkState(mimeType == null);
            Pair<String, byte[]> mimeTypeAndInitializationData = parseEsdsFromParent(parent, childStartPosition);
            mimeType = mimeTypeAndInitializationData.first;
            initializationData = Collections.singletonList(mimeTypeAndInitializationData.second);
        } else if (childAtomType == Atom.TYPE_pasp) {
            // Pixel aspect ratio override.
            pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
            pixelWidthHeightRatioFromPasp = true;
        } else if (childAtomType == Atom.TYPE_sv3d) {
            // Spherical video projection data.
            projectionData = parseProjFromParent(parent, childStartPosition, childAtomSize);
        } else if (childAtomType == Atom.TYPE_st3d) {
            // Stereoscopic 3D layout.
            int version = parent.readUnsignedByte();
            // Flags.
            parent.skipBytes(3);
            if (version == 0) {
                int layout = parent.readUnsignedByte();
                switch(layout) {
                    case 0:
                        stereoMode = C.STEREO_MODE_MONO;
                        break;
                    case 1:
                        stereoMode = C.STEREO_MODE_TOP_BOTTOM;
                        break;
                    case 2:
                        stereoMode = C.STEREO_MODE_LEFT_RIGHT;
                        break;
                    case 3:
                        stereoMode = C.STEREO_MODE_STEREO_MESH;
                        break;
                    default:
                        // Unknown layout: leave stereoMode as Format.NO_VALUE.
                        break;
                }
            }
        }
        childPosition += childAtomSize;
    }
    // If the media type was not recognized, ignore the track.
    if (mimeType == null) {
        return;
    }
    out.format = Format.createVideoSampleFormat(Integer.toString(trackId), mimeType, null, Format.NO_VALUE, Format.NO_VALUE, width, height, Format.NO_VALUE, initializationData, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode, drmInitData);
}
Also used : HevcConfig(com.google.android.exoplayer2.video.HevcConfig) AvcConfig(com.google.android.exoplayer2.video.AvcConfig)

Example 10 with DrmInitData

Use of com.google.android.exoplayer2.drm.DrmInitData in the project ExoPlayer by Google.

The class FragmentedMp4Extractor defines the method onMoovContainerAtomRead.

/**
 * Processes a parsed moov container atom: extracts DRM initialization data, reads per-track
 * default sample values from the mvex box, builds {@link Track}s from trak children, and
 * either creates the extractor's track bundles (first moov) or re-initializes the existing
 * ones (subsequent moov, same track count required).
 *
 * @param moov The parsed moov container atom.
 * @throws ParserException If the moov atom cannot be parsed.
 */
private void onMoovContainerAtomRead(ContainerAtom moov) throws ParserException {
    // A moov box is unexpected when a track has been sideloaded by the caller.
    Assertions.checkState(sideloadedTrack == null, "Unexpected moov box.");
    DrmInitData drmInitData = getDrmInitDataFromAtoms(moov.leafChildren);
    // Read declaration of track fragments in the Moov box.
    ContainerAtom mvex = moov.getContainerAtomOfType(Atom.TYPE_mvex);
    SparseArray<DefaultSampleValues> defaultSampleValuesArray = new SparseArray<>();
    long duration = C.TIME_UNSET;
    int mvexChildrenSize = mvex.leafChildren.size();
    for (int i = 0; i < mvexChildrenSize; i++) {
        Atom.LeafAtom atom = mvex.leafChildren.get(i);
        if (atom.type == Atom.TYPE_trex) {
            // trex carries per-track default sample values, keyed by track id.
            Pair<Integer, DefaultSampleValues> trexData = parseTrex(atom.data);
            defaultSampleValuesArray.put(trexData.first, trexData.second);
        } else if (atom.type == Atom.TYPE_mehd) {
            // mehd carries the overall movie extends duration.
            duration = parseMehd(atom.data);
        }
    }
    // Construction of tracks.
    SparseArray<Track> tracks = new SparseArray<>();
    int moovContainerChildrenSize = moov.containerChildren.size();
    for (int i = 0; i < moovContainerChildrenSize; i++) {
        Atom.ContainerAtom atom = moov.containerChildren.get(i);
        if (atom.type == Atom.TYPE_trak) {
            // parseTrak may return null for unsupported tracks; those are skipped.
            Track track = AtomParsers.parseTrak(atom, moov.getLeafAtomOfType(Atom.TYPE_mvhd), duration, drmInitData, false);
            if (track != null) {
                tracks.put(track.id, track);
            }
        }
    }
    int trackCount = tracks.size();
    if (trackBundles.size() == 0) {
        // We need to create the track bundles.
        for (int i = 0; i < trackCount; i++) {
            Track track = tracks.valueAt(i);
            TrackBundle trackBundle = new TrackBundle(extractorOutput.track(i, track.type));
            trackBundle.init(track, defaultSampleValuesArray.get(track.id));
            trackBundles.put(track.id, trackBundle);
            durationUs = Math.max(durationUs, track.durationUs);
        }
        maybeInitExtraTracks();
        extractorOutput.endTracks();
    } else {
        // A moov was already processed; the new one must describe the same set of tracks.
        Assertions.checkState(trackBundles.size() == trackCount);
        for (int i = 0; i < trackCount; i++) {
            Track track = tracks.valueAt(i);
            trackBundles.get(track.id).init(track, defaultSampleValuesArray.get(track.id));
        }
    }
}
Also used : ContainerAtom(com.google.android.exoplayer2.extractor.mp4.Atom.ContainerAtom) LeafAtom(com.google.android.exoplayer2.extractor.mp4.Atom.LeafAtom) ContainerAtom(com.google.android.exoplayer2.extractor.mp4.Atom.ContainerAtom) ContainerAtom(com.google.android.exoplayer2.extractor.mp4.Atom.ContainerAtom) LeafAtom(com.google.android.exoplayer2.extractor.mp4.Atom.LeafAtom) SparseArray(android.util.SparseArray) DrmInitData(com.google.android.exoplayer2.drm.DrmInitData)

Aggregations

DrmInitData (com.google.android.exoplayer2.drm.DrmInitData)8 SchemeData (com.google.android.exoplayer2.drm.DrmInitData.SchemeData)4 SuppressLint (android.annotation.SuppressLint)2 MediaFormat (android.media.MediaFormat)2 Format (com.google.android.exoplayer2.Format)2 LeafAtom (com.google.android.exoplayer2.extractor.mp4.Atom.LeafAtom)2 Point (android.graphics.Point)1 HandlerThread (android.os.HandlerThread)1 Parcel (android.os.Parcel)1 SparseArray (android.util.SparseArray)1 ParserException (com.google.android.exoplayer2.ParserException)1 DrmSessionException (com.google.android.exoplayer2.drm.DrmSession.DrmSessionException)1 ContainerAtom (com.google.android.exoplayer2.extractor.mp4.Atom.ContainerAtom)1 MediaCodecInfo (com.google.android.exoplayer2.mediacodec.MediaCodecInfo)1 Metadata (com.google.android.exoplayer2.metadata.Metadata)1 TextInformationFrame (com.google.android.exoplayer2.metadata.id3.TextInformationFrame)1 AdaptationSet (com.google.android.exoplayer2.source.dash.manifest.AdaptationSet)1 Period (com.google.android.exoplayer2.source.dash.manifest.Period)1 Representation (com.google.android.exoplayer2.source.dash.manifest.Representation)1 ParsableBitArray (com.google.android.exoplayer2.util.ParsableBitArray)1