Use of com.google.android.exoplayer2.drm.DrmInitData in project ExoPlayer by google.
The class HlsMediaPeriod, method buildAndPrepareAudioSampleStreamWrappers.
private void buildAndPrepareAudioSampleStreamWrappers(
    long positionUs,
    List<Rendition> audioRenditions,
    List<HlsSampleStreamWrapper> sampleStreamWrappers,
    List<int[]> manifestUrlsIndicesPerWrapper,
    Map<String, DrmInitData> overridingDrmInitData) {
  ArrayList<Uri> scratchPlaylistUrls = new ArrayList<>(/* initialCapacity= */ audioRenditions.size());
  ArrayList<Format> scratchPlaylistFormats = new ArrayList<>(/* initialCapacity= */ audioRenditions.size());
  ArrayList<Integer> scratchIndicesList = new ArrayList<>(/* initialCapacity= */ audioRenditions.size());
  HashSet<String> alreadyGroupedNames = new HashSet<>();
  for (int renditionByNameIndex = 0; renditionByNameIndex < audioRenditions.size(); renditionByNameIndex++) {
    String name = audioRenditions.get(renditionByNameIndex).name;
    if (!alreadyGroupedNames.add(name)) {
      // This name already has a corresponding group.
      continue;
    }
    boolean codecStringsAllowChunklessPreparation = true;
    scratchPlaylistUrls.clear();
    scratchPlaylistFormats.clear();
    scratchIndicesList.clear();
    // Group all renditions with matching name.
    for (int renditionIndex = 0; renditionIndex < audioRenditions.size(); renditionIndex++) {
      if (Util.areEqual(name, audioRenditions.get(renditionIndex).name)) {
        Rendition rendition = audioRenditions.get(renditionIndex);
        scratchIndicesList.add(renditionIndex);
        scratchPlaylistUrls.add(rendition.url);
        scratchPlaylistFormats.add(rendition.format);
        codecStringsAllowChunklessPreparation &=
            Util.getCodecCountOfType(rendition.format.codecs, C.TRACK_TYPE_AUDIO) == 1;
      }
    }
    String sampleStreamWrapperUid = "audio:" + name;
    HlsSampleStreamWrapper sampleStreamWrapper =
        buildSampleStreamWrapper(
            sampleStreamWrapperUid,
            C.TRACK_TYPE_AUDIO,
            scratchPlaylistUrls.toArray(Util.castNonNullTypeArray(new Uri[0])),
            scratchPlaylistFormats.toArray(new Format[0]),
            /* muxedAudioFormat= */ null,
            /* muxedCaptionFormats= */ Collections.emptyList(),
            overridingDrmInitData,
            positionUs);
    manifestUrlsIndicesPerWrapper.add(Ints.toArray(scratchIndicesList));
    sampleStreamWrappers.add(sampleStreamWrapper);
    if (allowChunklessPreparation && codecStringsAllowChunklessPreparation) {
      Format[] renditionFormats = scratchPlaylistFormats.toArray(new Format[0]);
      sampleStreamWrapper.prepareWithMultivariantPlaylistInfo(
          new TrackGroup[] {new TrackGroup(sampleStreamWrapperUid, renditionFormats)},
          /* primaryTrackGroupIndex= */ 0);
    }
  }
}
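The overridingDrmInitData map passed to the wrappers above appears to be keyed by DRM scheme type, letting playlist-declared session keys override the DrmInitData found in the media. Below is a hedged sketch, not the actual HlsMediaPeriod code, of how such a map could be assembled; the input list name is hypothetical.

import com.google.android.exoplayer2.drm.DrmInitData;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class OverridingDrmInitDataSketch {
  // Builds a scheme-type-keyed map like the one accepted by
  // buildAndPrepareAudioSampleStreamWrappers. The sessionKeyDrmInitDatas list is an
  // assumed, already-parsed input (e.g. from playlist session keys).
  static Map<String, DrmInitData> fromSessionKeys(List<DrmInitData> sessionKeyDrmInitDatas) {
    Map<String, DrmInitData> overridingDrmInitData = new HashMap<>();
    for (DrmInitData drmInitData : sessionKeyDrmInitDatas) {
      if (drmInitData.schemeType != null) {
        // Later entries with the same scheme type overwrite earlier ones.
        overridingDrmInitData.put(drmInitData.schemeType, drmInitData);
      }
    }
    return overridingDrmInitData;
  }
}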
Use of com.google.android.exoplayer2.drm.DrmInitData in project ExoPlayer by google.
The class Format, method withManifestFormatInfo.
@SuppressWarnings("ReferenceEquality")
public Format withManifestFormatInfo(Format manifestFormat) {
  if (this == manifestFormat) {
    // No need to copy from ourselves.
    return this;
  }
  @C.TrackType int trackType = MimeTypes.getTrackType(sampleMimeType);
  // Use manifest value only.
  @Nullable String id = manifestFormat.id;
  // Prefer manifest values, but fill in from sample format if missing.
  @Nullable String label = manifestFormat.label != null ? manifestFormat.label : this.label;
  @Nullable String language = this.language;
  if ((trackType == C.TRACK_TYPE_TEXT || trackType == C.TRACK_TYPE_AUDIO)
      && manifestFormat.language != null) {
    language = manifestFormat.language;
  }
  // Prefer sample format values, but fill in from manifest if missing.
  int averageBitrate =
      this.averageBitrate == NO_VALUE ? manifestFormat.averageBitrate : this.averageBitrate;
  int peakBitrate = this.peakBitrate == NO_VALUE ? manifestFormat.peakBitrate : this.peakBitrate;
  @Nullable String codecs = this.codecs;
  if (codecs == null) {
    // The manifest format may be muxed, so filter only codecs of this format's type. If we still
    // have more than one codec then we're unable to uniquely identify which codec to fill in.
    @Nullable String codecsOfType = Util.getCodecsOfType(manifestFormat.codecs, trackType);
    if (Util.splitCodecs(codecsOfType).length == 1) {
      codecs = codecsOfType;
    }
  }
  @Nullable
  Metadata metadata =
      this.metadata == null
          ? manifestFormat.metadata
          : this.metadata.copyWithAppendedEntriesFrom(manifestFormat.metadata);
  float frameRate = this.frameRate;
  if (frameRate == NO_VALUE && trackType == C.TRACK_TYPE_VIDEO) {
    frameRate = manifestFormat.frameRate;
  }
  // Merge manifest and sample format values.
  @C.SelectionFlags int selectionFlags = this.selectionFlags | manifestFormat.selectionFlags;
  @C.RoleFlags int roleFlags = this.roleFlags | manifestFormat.roleFlags;
  @Nullable
  DrmInitData drmInitData =
      DrmInitData.createSessionCreationData(manifestFormat.drmInitData, this.drmInitData);
  return buildUpon()
      .setId(id)
      .setLabel(label)
      .setLanguage(language)
      .setSelectionFlags(selectionFlags)
      .setRoleFlags(roleFlags)
      .setAverageBitrate(averageBitrate)
      .setPeakBitrate(peakBitrate)
      .setCodecs(codecs)
      .setMetadata(metadata)
      .setDrmInitData(drmInitData)
      .setFrameRate(frameRate)
      .build();
}
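A short usage sketch of the merge above: a sample-derived Format (e.g. parsed from the container) is combined with the Format declared in the manifest, and the resulting DrmInitData comes from DrmInitData.createSessionCreationData. All builder values below are illustrative, not taken from the ExoPlayer sources.

// Hedged usage sketch; field values are placeholders.
Format sampleFormat =
    new Format.Builder()
        .setSampleMimeType(MimeTypes.AUDIO_AAC)
        .setCodecs("mp4a.40.2")
        .setAverageBitrate(128_000)
        .build();
Format manifestFormat =
    new Format.Builder()
        .setId("audio-en")
        .setLabel("English")
        .setLanguage("en")
        .setSelectionFlags(C.SELECTION_FLAG_DEFAULT)
        .build();
// Per the method above: id, label and language come from the manifest format (this is an
// audio track), while codecs and average bitrate are kept from the sample format.
Format mergedFormat = sampleFormat.withManifestFormatInfo(manifestFormat);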
Use of com.google.android.exoplayer2.drm.DrmInitData in project ExoPlayer by google.
The class DefaultDrmSessionManagerTest, method maxConcurrentSessionsExceeded_allKeepAliveSessionsEagerlyReleased.
@Test(timeout = 10_000)
public void maxConcurrentSessionsExceeded_allKeepAliveSessionsEagerlyReleased() throws Exception {
  ImmutableList<DrmInitData.SchemeData> secondSchemeDatas =
      ImmutableList.of(DRM_SCHEME_DATAS.get(0).copyWithData(TestUtil.createByteArray(4, 5, 6)));
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS, secondSchemeDatas);
  Format secondFormatWithDrmInitData =
      new Format.Builder().setDrmInitData(new DrmInitData(secondSchemeDatas)).build();
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm(/* maxConcurrentSessions= */ 1))
          .setSessionKeepaliveMs(10_000)
          .setMultiSession(true)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.prepare();
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  DrmSession firstDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(firstDrmSession);
  firstDrmSession.release(/* eventDispatcher= */ null);
  // All external references to firstDrmSession have been released; it's being kept alive by
  // drmSessionManager's internal reference.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  DrmSession secondDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(/* eventDispatcher= */ null, secondFormatWithDrmInitData));
  // The drmSessionManager had to release firstDrmSession in order to acquire secondDrmSession.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
  waitForOpenedWithKeys(secondDrmSession);
  assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
}
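The test above builds DrmInitData from SchemeData entries supplied by test constants. A minimal sketch of that construction follows; the PSSH bytes are placeholders rather than real initialization data, and the scheme/MIME constants are just example choices.

// Hedged sketch: building DrmInitData from a single SchemeData entry.
byte[] psshData = new byte[] {0, 1, 2, 3}; // Placeholder; real code carries scheme-specific data.
DrmInitData.SchemeData schemeData =
    new DrmInitData.SchemeData(C.WIDEVINE_UUID, MimeTypes.VIDEO_MP4, psshData);
DrmInitData drmInitData = new DrmInitData(schemeData);
Format formatWithDrmInitData = new Format.Builder().setDrmInitData(drmInitData).build();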
Use of com.google.android.exoplayer2.drm.DrmInitData in project ExoPlayer by google.
The class DefaultDrmSessionManagerTest, method maxConcurrentSessionsExceeded_allPreacquiredAndKeepaliveSessionsEagerlyReleased.
@Test(timeout = 10_000)
public void maxConcurrentSessionsExceeded_allPreacquiredAndKeepaliveSessionsEagerlyReleased()
    throws Exception {
  ImmutableList<DrmInitData.SchemeData> secondSchemeDatas =
      ImmutableList.of(DRM_SCHEME_DATAS.get(0).copyWithData(TestUtil.createByteArray(4, 5, 6)));
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS, secondSchemeDatas);
  Format secondFormatWithDrmInitData =
      new Format.Builder().setDrmInitData(new DrmInitData(secondSchemeDatas)).build();
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID,
              uuid -> new FakeExoMediaDrm.Builder().setMaxConcurrentSessions(1).build())
          .setSessionKeepaliveMs(10_000)
          .setMultiSession(true)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.prepare();
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  DrmSessionReference firstDrmSessionReference =
      checkNotNull(
          drmSessionManager.preacquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  DrmSession firstDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(firstDrmSession);
  firstDrmSession.release(/* eventDispatcher= */ null);
  // The direct reference to firstDrmSession has been released; it's being kept alive by both
  // firstDrmSessionReference and drmSessionManager's internal reference.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  DrmSession secondDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(/* eventDispatcher= */ null, secondFormatWithDrmInitData));
  // The drmSessionManager had to release both its internal keep-alive reference and the
  // reference represented by firstDrmSessionReference in order to acquire secondDrmSession.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
  waitForOpenedWithKeys(secondDrmSession);
  assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  // Not needed (because the manager has already released this reference), but we call it anyway
  // for completeness.
  firstDrmSessionReference.release();
  // Clean-up.
  secondDrmSession.release(/* eventDispatcher= */ null);
  drmSessionManager.release();
}
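Outside of tests, the preacquire pattern exercised above could look roughly as follows; this is a sketch that assumes a DrmSessionManager already prepared and bound to the playback thread via setPlayer, and an illustrative "format" carrying DrmInitData.

// Hedged sketch: pre-acquiring a DRM session ahead of playback so the license is ready
// by the time the format is first rendered.
DrmSessionManager.DrmSessionReference sessionReference =
    drmSessionManager.preacquireSession(/* eventDispatcher= */ null, format);
// ... playback proceeds; the manager keeps the underlying session alive ...
// Release the pre-acquired reference once it is no longer needed.
sessionReference.release();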
Use of com.google.android.exoplayer2.drm.DrmInitData in project ExoPlayer by google.
The class AtomParsers, method parseVideoSampleEntry.
// hdrStaticInfo is allocated using allocate() in allocateHdrStaticInfo().
@SuppressWarnings("ByteBufferBackingArray")
private static void parseVideoSampleEntry(
    ParsableByteArray parent,
    int atomType,
    int position,
    int size,
    int trackId,
    int rotationDegrees,
    @Nullable DrmInitData drmInitData,
    StsdData out,
    int entryIndex)
    throws ParserException {
  parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE);
  parent.skipBytes(16);
  int width = parent.readUnsignedShort();
  int height = parent.readUnsignedShort();
  boolean pixelWidthHeightRatioFromPasp = false;
  float pixelWidthHeightRatio = 1;
  parent.skipBytes(50);
  int childPosition = parent.getPosition();

  if (atomType == Atom.TYPE_encv) {
    @Nullable
    Pair<Integer, TrackEncryptionBox> sampleEntryEncryptionData =
        parseSampleEntryEncryptionData(parent, position, size);
    if (sampleEntryEncryptionData != null) {
      atomType = sampleEntryEncryptionData.first;
      drmInitData =
          drmInitData == null
              ? null
              : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType);
      out.trackEncryptionBoxes[entryIndex] = sampleEntryEncryptionData.second;
    }
    parent.setPosition(childPosition);
  }
  // TODO: Uncomment when [Internal: b/63092960] is fixed.
  // else {
  //   drmInitData = null;
  // }

  @Nullable String mimeType = null;
  if (atomType == Atom.TYPE_m1v_) {
    mimeType = MimeTypes.VIDEO_MPEG;
  } else if (atomType == Atom.TYPE_H263) {
    mimeType = MimeTypes.VIDEO_H263;
  }

  @Nullable List<byte[]> initializationData = null;
  @Nullable String codecs = null;
  @Nullable byte[] projectionData = null;
  @C.StereoMode int stereoMode = Format.NO_VALUE;

  // HDR related metadata.
  @C.ColorSpace int colorSpace = Format.NO_VALUE;
  @C.ColorRange int colorRange = Format.NO_VALUE;
  @C.ColorTransfer int colorTransfer = Format.NO_VALUE;
  // The format of HDR static info is defined in CTA-861-G:2017, Table 45.
  @Nullable ByteBuffer hdrStaticInfo = null;

  while (childPosition - position < size) {
    parent.setPosition(childPosition);
    int childStartPosition = parent.getPosition();
    int childAtomSize = parent.readInt();
    if (childAtomSize == 0 && parent.getPosition() - position == size) {
      // Handle optional terminating four zero bytes in MOV files.
      break;
    }
    ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive");
    int childAtomType = parent.readInt();
    if (childAtomType == Atom.TYPE_avcC) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_H264;
      parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
      AvcConfig avcConfig = AvcConfig.parse(parent);
      initializationData = avcConfig.initializationData;
      out.nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength;
      if (!pixelWidthHeightRatioFromPasp) {
        pixelWidthHeightRatio = avcConfig.pixelWidthHeightRatio;
      }
      codecs = avcConfig.codecs;
    } else if (childAtomType == Atom.TYPE_hvcC) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_H265;
      parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
      HevcConfig hevcConfig = HevcConfig.parse(parent);
      initializationData = hevcConfig.initializationData;
      out.nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength;
      if (!pixelWidthHeightRatioFromPasp) {
        pixelWidthHeightRatio = hevcConfig.pixelWidthHeightRatio;
      }
      codecs = hevcConfig.codecs;
    } else if (childAtomType == Atom.TYPE_dvcC || childAtomType == Atom.TYPE_dvvC) {
      @Nullable DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(parent);
      if (dolbyVisionConfig != null) {
        codecs = dolbyVisionConfig.codecs;
        mimeType = MimeTypes.VIDEO_DOLBY_VISION;
      }
    } else if (childAtomType == Atom.TYPE_vpcC) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = (atomType == Atom.TYPE_vp08) ? MimeTypes.VIDEO_VP8 : MimeTypes.VIDEO_VP9;
    } else if (childAtomType == Atom.TYPE_av1C) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_AV1;
    } else if (childAtomType == Atom.TYPE_clli) {
      if (hdrStaticInfo == null) {
        hdrStaticInfo = allocateHdrStaticInfo();
      }
      // The contents of the clli box occupy the last 4 bytes of the HDR static info array. Note
      // that each field is read in big endian and written in little endian.
      hdrStaticInfo.position(21);
      hdrStaticInfo.putShort(parent.readShort()); // max_content_light_level.
      hdrStaticInfo.putShort(parent.readShort()); // max_pic_average_light_level.
    } else if (childAtomType == Atom.TYPE_mdcv) {
      if (hdrStaticInfo == null) {
        hdrStaticInfo = allocateHdrStaticInfo();
      }
      // The contents of the mdcv box occupy 20 bytes after the first byte of the HDR static info
      // array. Note that each field is read in big endian and written in little endian.
      short displayPrimariesGX = parent.readShort();
      short displayPrimariesGY = parent.readShort();
      short displayPrimariesBX = parent.readShort();
      short displayPrimariesBY = parent.readShort();
      short displayPrimariesRX = parent.readShort();
      short displayPrimariesRY = parent.readShort();
      short whitePointX = parent.readShort();
      short whitePointY = parent.readShort();
      long maxDisplayMasteringLuminance = parent.readUnsignedInt();
      long minDisplayMasteringLuminance = parent.readUnsignedInt();
      hdrStaticInfo.position(1);
      hdrStaticInfo.putShort(displayPrimariesRX);
      hdrStaticInfo.putShort(displayPrimariesRY);
      hdrStaticInfo.putShort(displayPrimariesGX);
      hdrStaticInfo.putShort(displayPrimariesGY);
      hdrStaticInfo.putShort(displayPrimariesBX);
      hdrStaticInfo.putShort(displayPrimariesBY);
      hdrStaticInfo.putShort(whitePointX);
      hdrStaticInfo.putShort(whitePointY);
      hdrStaticInfo.putShort((short) (maxDisplayMasteringLuminance / 10000));
      hdrStaticInfo.putShort((short) (minDisplayMasteringLuminance / 10000));
    } else if (childAtomType == Atom.TYPE_d263) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_H263;
    } else if (childAtomType == Atom.TYPE_esds) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      Pair<@NullableType String, byte @NullableType []> mimeTypeAndInitializationDataBytes =
          parseEsdsFromParent(parent, childStartPosition);
      mimeType = mimeTypeAndInitializationDataBytes.first;
      @Nullable byte[] initializationDataBytes = mimeTypeAndInitializationDataBytes.second;
      if (initializationDataBytes != null) {
        initializationData = ImmutableList.of(initializationDataBytes);
      }
    } else if (childAtomType == Atom.TYPE_pasp) {
      pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
      pixelWidthHeightRatioFromPasp = true;
    } else if (childAtomType == Atom.TYPE_sv3d) {
      projectionData = parseProjFromParent(parent, childStartPosition, childAtomSize);
    } else if (childAtomType == Atom.TYPE_st3d) {
      int version = parent.readUnsignedByte();
      parent.skipBytes(3); // Flags.
      if (version == 0) {
        int layout = parent.readUnsignedByte();
        switch (layout) {
          case 0:
            stereoMode = C.STEREO_MODE_MONO;
            break;
          case 1:
            stereoMode = C.STEREO_MODE_TOP_BOTTOM;
            break;
          case 2:
            stereoMode = C.STEREO_MODE_LEFT_RIGHT;
            break;
          case 3:
            stereoMode = C.STEREO_MODE_STEREO_MESH;
            break;
          default:
            break;
        }
      }
    } else if (childAtomType == Atom.TYPE_colr) {
      int colorType = parent.readInt();
      if (colorType == TYPE_nclx || colorType == TYPE_nclc) {
        // For more info on syntax, see Section 8.5.2.2 in ISO/IEC 14496-12:2012(E) and
        // https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFChap3/qtff3.html.
        int colorPrimaries = parent.readUnsignedShort();
        int transferCharacteristics = parent.readUnsignedShort();
        parent.skipBytes(2); // matrix_coefficients.
        // Only try and read full_range_flag if the box is long enough. It should be present in
        // all colr boxes with type=nclx (Section 8.5.2.2 in ISO/IEC 14496-12:2012(E)) but some
        // device cameras record videos with type=nclx without this final flag (and therefore
        // size=18): https://github.com/google/ExoPlayer/issues/9332
        boolean fullRangeFlag =
            childAtomSize == 19 && (parent.readUnsignedByte() & 0b10000000) != 0;
        colorSpace = ColorInfo.isoColorPrimariesToColorSpace(colorPrimaries);
        colorRange = fullRangeFlag ? C.COLOR_RANGE_FULL : C.COLOR_RANGE_LIMITED;
        colorTransfer =
            ColorInfo.isoTransferCharacteristicsToColorTransfer(transferCharacteristics);
      } else {
        Log.w(TAG, "Unsupported color type: " + Atom.getAtomTypeString(colorType));
      }
    }
    childPosition += childAtomSize;
  }

  // If the media type was not recognized, ignore the track.
  if (mimeType == null) {
    return;
  }

  Format.Builder formatBuilder =
      new Format.Builder()
          .setId(trackId)
          .setSampleMimeType(mimeType)
          .setCodecs(codecs)
          .setWidth(width)
          .setHeight(height)
          .setPixelWidthHeightRatio(pixelWidthHeightRatio)
          .setRotationDegrees(rotationDegrees)
          .setProjectionData(projectionData)
          .setStereoMode(stereoMode)
          .setInitializationData(initializationData)
          .setDrmInitData(drmInitData);

  if (colorSpace != Format.NO_VALUE
      || colorRange != Format.NO_VALUE
      || colorTransfer != Format.NO_VALUE
      || hdrStaticInfo != null) {
    // Note that if either mdcv or clli are missing, we leave the corresponding HDR static
    // metadata bytes with value zero. See [Internal ref: b/194535665].
    formatBuilder.setColorInfo(
        new ColorInfo(
            colorSpace,
            colorRange,
            colorTransfer,
            hdrStaticInfo != null ? hdrStaticInfo.array() : null));
  }

  out.format = formatBuilder.build();
}
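The helper allocateHdrStaticInfo() is not shown in this snippet. Given that the method writes at buffer positions 1 and 21, reads big-endian fields and writes them little-endian, and the comment references CTA-861-G:2017 Table 45, a plausible sketch is a 25-byte little-endian buffer; the actual AtomParsers helper may differ in detail. Requires java.nio.ByteBuffer and java.nio.ByteOrder.

// Hedged sketch of the allocateHdrStaticInfo() helper, assuming the 25-byte HDR static
// metadata descriptor defined in CTA-861-G:2017, written in little-endian order. Byte 0
// (the descriptor type) is left as zero here.
private static ByteBuffer allocateHdrStaticInfo() {
  return ByteBuffer.allocate(25).order(ByteOrder.LITTLE_ENDIAN);
}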