Use of androidx.media3.common.util.ParsableByteArray in project media by androidx: class AtomParsers, method parseMdtaFromMeta.
/**
* Parses a metadata meta atom if it contains metadata with handler 'mdta'.
*
* @param meta The metadata atom to decode.
* @return Parsed metadata, or null.
*/
@Nullable
public static Metadata parseMdtaFromMeta(Atom.ContainerAtom meta) {
  @Nullable Atom.LeafAtom hdlrAtom = meta.getLeafAtomOfType(Atom.TYPE_hdlr);
  @Nullable Atom.LeafAtom keysAtom = meta.getLeafAtomOfType(Atom.TYPE_keys);
  @Nullable Atom.LeafAtom ilstAtom = meta.getLeafAtomOfType(Atom.TYPE_ilst);
  if (hdlrAtom == null
      || keysAtom == null
      || ilstAtom == null
      || parseHdlr(hdlrAtom.data) != TYPE_mdta) {
    // There isn't enough information to parse the metadata, or the handler type is unexpected.
    return null;
  }
  // Parse metadata keys.
  ParsableByteArray keys = keysAtom.data;
  keys.setPosition(Atom.FULL_HEADER_SIZE);
  int entryCount = keys.readInt();
  String[] keyNames = new String[entryCount];
  for (int i = 0; i < entryCount; i++) {
    int entrySize = keys.readInt();
    keys.skipBytes(4); // keyNamespace
    int keySize = entrySize - 8;
    keyNames[i] = keys.readString(keySize);
  }
  // Parse metadata items.
  ParsableByteArray ilst = ilstAtom.data;
  ilst.setPosition(Atom.HEADER_SIZE);
  ArrayList<Metadata.Entry> entries = new ArrayList<>();
  while (ilst.bytesLeft() > Atom.HEADER_SIZE) {
    int atomPosition = ilst.getPosition();
    int atomSize = ilst.readInt();
    int keyIndex = ilst.readInt() - 1;
    if (keyIndex >= 0 && keyIndex < keyNames.length) {
      String key = keyNames[keyIndex];
      @Nullable
      Metadata.Entry entry =
          MetadataUtil.parseMdtaMetadataEntryFromIlst(ilst, atomPosition + atomSize, key);
      if (entry != null) {
        entries.add(entry);
      }
    } else {
      Log.w(TAG, "Skipped metadata with unknown key index: " + keyIndex);
    }
    ilst.setPosition(atomPosition + atomSize);
  }
  return entries.isEmpty() ? null : new Metadata(entries);
}
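The keys payload parsed above is just a big-endian entry count followed by [entry size][namespace][name] records. A hypothetical, self-contained sketch of the same read pattern on a hand-built payload (the KeysAtomDemo class and the key string are illustrative, not part of AtomParsers; the demo omits the 12-byte full atom header that the real method skips with setPosition):

import androidx.media3.common.util.ParsableByteArray;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public final class KeysAtomDemo {
  public static void main(String[] args) {
    byte[] name = "com.android.capture.fps".getBytes(StandardCharsets.UTF_8);
    ByteBuffer buf = ByteBuffer.allocate(12 + name.length);
    buf.putInt(1); // entry count
    buf.putInt(8 + name.length); // entry size: 4 size bytes + 4 namespace bytes + name
    buf.put("mdta".getBytes(StandardCharsets.UTF_8)); // key namespace
    buf.put(name); // key name, not NUL-terminated
    ParsableByteArray keys = new ParsableByteArray(buf.array());
    int entryCount = keys.readInt();
    for (int i = 0; i < entryCount; i++) {
      int entrySize = keys.readInt();
      keys.skipBytes(4); // keyNamespace
      System.out.println(keys.readString(entrySize - 8)); // prints com.android.capture.fps
    }
  }
}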
Use of androidx.media3.common.util.ParsableByteArray in project media by androidx: class AtomParsers, method parseVideoSampleEntry.
// hdrStaticInfo is allocated using allocate() in allocateHdrStaticInfo().
@SuppressWarnings("ByteBufferBackingArray")
private static void parseVideoSampleEntry(
    ParsableByteArray parent,
    int atomType,
    int position,
    int size,
    int trackId,
    int rotationDegrees,
    @Nullable DrmInitData drmInitData,
    StsdData out,
    int entryIndex)
    throws ParserException {
  parent.setPosition(position + Atom.HEADER_SIZE + StsdData.STSD_HEADER_SIZE);
  parent.skipBytes(16);
  int width = parent.readUnsignedShort();
  int height = parent.readUnsignedShort();
  boolean pixelWidthHeightRatioFromPasp = false;
  float pixelWidthHeightRatio = 1;
  parent.skipBytes(50);
  int childPosition = parent.getPosition();
  if (atomType == Atom.TYPE_encv) {
    @Nullable
    Pair<Integer, TrackEncryptionBox> sampleEntryEncryptionData =
        parseSampleEntryEncryptionData(parent, position, size);
    if (sampleEntryEncryptionData != null) {
      atomType = sampleEntryEncryptionData.first;
      drmInitData =
          drmInitData == null
              ? null
              : drmInitData.copyWithSchemeType(sampleEntryEncryptionData.second.schemeType);
      out.trackEncryptionBoxes[entryIndex] = sampleEntryEncryptionData.second;
    }
    parent.setPosition(childPosition);
  }
  // TODO: Uncomment when [Internal: b/63092960] is fixed.
  // else {
  //   drmInitData = null;
  // }
  @Nullable String mimeType = null;
  if (atomType == Atom.TYPE_m1v_) {
    mimeType = MimeTypes.VIDEO_MPEG;
  } else if (atomType == Atom.TYPE_H263) {
    mimeType = MimeTypes.VIDEO_H263;
  }
  @Nullable List<byte[]> initializationData = null;
  @Nullable String codecs = null;
  @Nullable byte[] projectionData = null;
  @C.StereoMode int stereoMode = Format.NO_VALUE;
  // HDR related metadata.
  @C.ColorSpace int colorSpace = Format.NO_VALUE;
  @C.ColorRange int colorRange = Format.NO_VALUE;
  @C.ColorTransfer int colorTransfer = Format.NO_VALUE;
  // The format of HDR static info is defined in CTA-861-G:2017, Table 45.
  @Nullable ByteBuffer hdrStaticInfo = null;
  while (childPosition - position < size) {
    parent.setPosition(childPosition);
    int childStartPosition = parent.getPosition();
    int childAtomSize = parent.readInt();
    if (childAtomSize == 0 && parent.getPosition() - position == size) {
      // Handle optional terminating four zero bytes in MOV files.
      break;
    }
    ExtractorUtil.checkContainerInput(childAtomSize > 0, "childAtomSize must be positive");
    int childAtomType = parent.readInt();
    if (childAtomType == Atom.TYPE_avcC) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_H264;
      parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
      AvcConfig avcConfig = AvcConfig.parse(parent);
      initializationData = avcConfig.initializationData;
      out.nalUnitLengthFieldLength = avcConfig.nalUnitLengthFieldLength;
      if (!pixelWidthHeightRatioFromPasp) {
        pixelWidthHeightRatio = avcConfig.pixelWidthHeightRatio;
      }
      codecs = avcConfig.codecs;
    } else if (childAtomType == Atom.TYPE_hvcC) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_H265;
      parent.setPosition(childStartPosition + Atom.HEADER_SIZE);
      HevcConfig hevcConfig = HevcConfig.parse(parent);
      initializationData = hevcConfig.initializationData;
      out.nalUnitLengthFieldLength = hevcConfig.nalUnitLengthFieldLength;
      if (!pixelWidthHeightRatioFromPasp) {
        pixelWidthHeightRatio = hevcConfig.pixelWidthHeightRatio;
      }
      codecs = hevcConfig.codecs;
    } else if (childAtomType == Atom.TYPE_dvcC || childAtomType == Atom.TYPE_dvvC) {
      @Nullable DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(parent);
      if (dolbyVisionConfig != null) {
        codecs = dolbyVisionConfig.codecs;
        mimeType = MimeTypes.VIDEO_DOLBY_VISION;
      }
    } else if (childAtomType == Atom.TYPE_vpcC) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = (atomType == Atom.TYPE_vp08) ? MimeTypes.VIDEO_VP8 : MimeTypes.VIDEO_VP9;
    } else if (childAtomType == Atom.TYPE_av1C) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_AV1;
    } else if (childAtomType == Atom.TYPE_clli) {
      if (hdrStaticInfo == null) {
        hdrStaticInfo = allocateHdrStaticInfo();
      }
      // The contents of the clli box occupy the last 4 bytes of the HDR static info array. Note
      // that each field is read in big endian and written in little endian.
      hdrStaticInfo.position(21);
      hdrStaticInfo.putShort(parent.readShort()); // max_content_light_level.
      hdrStaticInfo.putShort(parent.readShort()); // max_pic_average_light_level.
    } else if (childAtomType == Atom.TYPE_mdcv) {
      if (hdrStaticInfo == null) {
        hdrStaticInfo = allocateHdrStaticInfo();
      }
      // The contents of the mdcv box occupy 20 bytes after the first byte of the HDR static info
      // array. Note that each field is read in big endian and written in little endian.
      short displayPrimariesGX = parent.readShort();
      short displayPrimariesGY = parent.readShort();
      short displayPrimariesBX = parent.readShort();
      short displayPrimariesBY = parent.readShort();
      short displayPrimariesRX = parent.readShort();
      short displayPrimariesRY = parent.readShort();
      short whitePointX = parent.readShort();
      short whitePointY = parent.readShort();
      long maxDisplayMasteringLuminance = parent.readUnsignedInt();
      long minDisplayMasteringLuminance = parent.readUnsignedInt();
      hdrStaticInfo.position(1);
      hdrStaticInfo.putShort(displayPrimariesRX);
      hdrStaticInfo.putShort(displayPrimariesRY);
      hdrStaticInfo.putShort(displayPrimariesGX);
      hdrStaticInfo.putShort(displayPrimariesGY);
      hdrStaticInfo.putShort(displayPrimariesBX);
      hdrStaticInfo.putShort(displayPrimariesBY);
      hdrStaticInfo.putShort(whitePointX);
      hdrStaticInfo.putShort(whitePointY);
      hdrStaticInfo.putShort((short) (maxDisplayMasteringLuminance / 10000));
      hdrStaticInfo.putShort((short) (minDisplayMasteringLuminance / 10000));
    } else if (childAtomType == Atom.TYPE_d263) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      mimeType = MimeTypes.VIDEO_H263;
    } else if (childAtomType == Atom.TYPE_esds) {
      ExtractorUtil.checkContainerInput(mimeType == null, /* message= */ null);
      Pair<@NullableType String, byte @NullableType []> mimeTypeAndInitializationDataBytes =
          parseEsdsFromParent(parent, childStartPosition);
      mimeType = mimeTypeAndInitializationDataBytes.first;
      @Nullable byte[] initializationDataBytes = mimeTypeAndInitializationDataBytes.second;
      if (initializationDataBytes != null) {
        initializationData = ImmutableList.of(initializationDataBytes);
      }
    } else if (childAtomType == Atom.TYPE_pasp) {
      pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
      pixelWidthHeightRatioFromPasp = true;
    } else if (childAtomType == Atom.TYPE_sv3d) {
      projectionData = parseProjFromParent(parent, childStartPosition, childAtomSize);
    } else if (childAtomType == Atom.TYPE_st3d) {
      int version = parent.readUnsignedByte();
      parent.skipBytes(3); // Flags.
      if (version == 0) {
        int layout = parent.readUnsignedByte();
        switch (layout) {
          case 0:
            stereoMode = C.STEREO_MODE_MONO;
            break;
          case 1:
            stereoMode = C.STEREO_MODE_TOP_BOTTOM;
            break;
          case 2:
            stereoMode = C.STEREO_MODE_LEFT_RIGHT;
            break;
          case 3:
            stereoMode = C.STEREO_MODE_STEREO_MESH;
            break;
          default:
            break;
        }
      }
    } else if (childAtomType == Atom.TYPE_colr) {
      int colorType = parent.readInt();
      if (colorType == TYPE_nclx || colorType == TYPE_nclc) {
        // For more info on syntax, see Section 8.5.2.2 in ISO/IEC 14496-12:2012(E) and
        // https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFChap3/qtff3.html.
        int colorPrimaries = parent.readUnsignedShort();
        int transferCharacteristics = parent.readUnsignedShort();
        parent.skipBytes(2); // matrix_coefficients.
        // Only try and read full_range_flag if the box is long enough. It should be present in
        // all colr boxes with type=nclx (Section 8.5.2.2 in ISO/IEC 14496-12:2012(E)) but some
        // device cameras record videos with type=nclx without this final flag (and therefore
        // size=18): https://github.com/google/ExoPlayer/issues/9332
        boolean fullRangeFlag =
            childAtomSize == 19 && (parent.readUnsignedByte() & 0b10000000) != 0;
        colorSpace = ColorInfo.isoColorPrimariesToColorSpace(colorPrimaries);
        colorRange = fullRangeFlag ? C.COLOR_RANGE_FULL : C.COLOR_RANGE_LIMITED;
        colorTransfer =
            ColorInfo.isoTransferCharacteristicsToColorTransfer(transferCharacteristics);
      } else {
        Log.w(TAG, "Unsupported color type: " + Atom.getAtomTypeString(colorType));
      }
    }
    childPosition += childAtomSize;
  }
  // If the media type was not recognized, ignore the track.
  if (mimeType == null) {
    return;
  }
  Format.Builder formatBuilder =
      new Format.Builder()
          .setId(trackId)
          .setSampleMimeType(mimeType)
          .setCodecs(codecs)
          .setWidth(width)
          .setHeight(height)
          .setPixelWidthHeightRatio(pixelWidthHeightRatio)
          .setRotationDegrees(rotationDegrees)
          .setProjectionData(projectionData)
          .setStereoMode(stereoMode)
          .setInitializationData(initializationData)
          .setDrmInitData(drmInitData);
  if (colorSpace != Format.NO_VALUE
      || colorRange != Format.NO_VALUE
      || colorTransfer != Format.NO_VALUE
      || hdrStaticInfo != null) {
    // Note that if either mdcv or clli are missing, we leave the corresponding HDR static
    // metadata bytes with value zero. See [Internal ref: b/194535665].
    formatBuilder.setColorInfo(
        new ColorInfo(
            colorSpace,
            colorRange,
            colorTransfer,
            hdrStaticInfo != null ? hdrStaticInfo.array() : null));
  }
  out.format = formatBuilder.build();
}
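allocateHdrStaticInfo() is private to AtomParsers and not shown on this page. A plausible sketch, inferred from the code above rather than quoted from the library: the buffer must be array-backed (hence allocate(), per the SuppressWarnings note), little-endian (the comments say fields are read big endian and written little endian), and 25 bytes long, with a descriptor byte at position 0, the mdcv fields at positions 1-20, and the clli fields at positions 21-24, matching CTA-861-G:2017, Table 45:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Sketch only, inferred from usage: 1 descriptor byte + 24 bytes of HDR static
// metadata, little-endian so putShort() writes the byte order consumers expect.
private static ByteBuffer allocateHdrStaticInfo() {
  return ByteBuffer.allocate(25).order(ByteOrder.LITTLE_ENDIAN);
}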
Use of androidx.media3.common.util.ParsableByteArray in project media by androidx: class AtomParsers, method parseSmta.
/**
* Parses metadata from a Samsung smta atom.
*
* <p>See [Internal: b/150138465#comment76].
*/
@Nullable
private static Metadata parseSmta(ParsableByteArray smta, int limit) {
  smta.skipBytes(Atom.FULL_HEADER_SIZE);
  while (smta.getPosition() < limit) {
    int atomPosition = smta.getPosition();
    int atomSize = smta.readInt();
    int atomType = smta.readInt();
    if (atomType == Atom.TYPE_saut) {
      if (atomSize < 14) {
        return null;
      }
      smta.skipBytes(5); // author (4), reserved = 0 (1).
      int recordingMode = smta.readUnsignedByte();
      if (recordingMode != 12 && recordingMode != 13) {
        return null;
      }
      float captureFrameRate = recordingMode == 12 ? 240 : 120;
      smta.skipBytes(1); // reserved = 1 (1).
      int svcTemporalLayerCount = smta.readUnsignedByte();
      return new Metadata(new SmtaMetadataEntry(captureFrameRate, svcTemporalLayerCount));
    }
    smta.setPosition(atomPosition + atomSize);
  }
  return null;
}
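The enclosing loop is the standard sibling-atom walk: read the 32-bit size and type at the start of each child, then jump to atomPosition + atomSize so unrecognized boxes are skipped without parsing their payloads. A hypothetical, self-contained demo of that pattern (the AtomWalkDemo class and the box contents are illustrative):

import androidx.media3.common.util.ParsableByteArray;
import java.nio.ByteBuffer;

public final class AtomWalkDemo {
  public static void main(String[] args) {
    ByteBuffer buf = ByteBuffer.allocate(24);
    buf.putInt(8).putInt(0x66726565); // 8-byte 'free' box with no payload
    buf.putInt(16).putInt(0x73617574).putLong(0); // 16-byte 'saut' box
    ParsableByteArray data = new ParsableByteArray(buf.array());
    while (data.bytesLeft() >= 8) {
      int atomPosition = data.getPosition();
      int atomSize = data.readInt();
      int atomType = data.readInt();
      System.out.printf("box 0x%08x, %d bytes%n", atomType, atomSize);
      data.setPosition(atomPosition + atomSize); // skip whatever payload remains
    }
  }
}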
Use of androidx.media3.common.util.ParsableByteArray in project media by androidx: class Mp3Extractor, method maybeReadSeekFrame.
/**
* Consumes the next frame from the {@code input} if it contains VBRI or Xing seeking metadata,
* returning a {@link Seeker} if the metadata was present and valid, or {@code null} otherwise.
* After this method returns, the input position is the start of the first frame of audio.
*
* @param input The {@link ExtractorInput} from which to read.
* @return A {@link Seeker} if seeking metadata was present and valid, or {@code null} otherwise.
* @throws IOException Thrown if there was an error reading from the stream. Not expected if the
* next two frames were already peeked during synchronization.
*/
@Nullable
private Seeker maybeReadSeekFrame(ExtractorInput input) throws IOException {
  ParsableByteArray frame = new ParsableByteArray(synchronizedHeader.frameSize);
  input.peekFully(frame.getData(), 0, synchronizedHeader.frameSize);
  int xingBase =
      (synchronizedHeader.version & 1) != 0
          ? (synchronizedHeader.channels != 1 ? 36 : 21) // MPEG 1
          : (synchronizedHeader.channels != 1 ? 21 : 13); // MPEG 2 or 2.5
  int seekHeader = getSeekFrameHeader(frame, xingBase);
  @Nullable Seeker seeker;
  if (seekHeader == SEEK_HEADER_XING || seekHeader == SEEK_HEADER_INFO) {
    seeker = XingSeeker.create(input.getLength(), input.getPosition(), synchronizedHeader, frame);
    if (seeker != null && !gaplessInfoHolder.hasGaplessInfo()) {
      // If there is a Xing header, read gapless playback metadata at a fixed offset.
      input.resetPeekPosition();
      input.advancePeekPosition(xingBase + 141);
      input.peekFully(scratch.getData(), 0, 3);
      scratch.setPosition(0);
      gaplessInfoHolder.setFromXingHeaderValue(scratch.readUnsignedInt24());
    }
    input.skipFully(synchronizedHeader.frameSize);
    if (seeker != null && !seeker.isSeekable() && seekHeader == SEEK_HEADER_INFO) {
      // Fall back to constant bitrate seeking for Info headers missing a table of contents.
      return getConstantBitrateSeeker(input, /* allowSeeksIfLengthUnknown= */ false);
    }
  } else if (seekHeader == SEEK_HEADER_VBRI) {
    seeker = VbriSeeker.create(input.getLength(), input.getPosition(), synchronizedHeader, frame);
    input.skipFully(synchronizedHeader.frameSize);
  } else {
    // seekHeader == SEEK_HEADER_UNSET
    // This frame doesn't contain seeking information, so reset the peek position.
    seeker = null;
    input.resetPeekPosition();
  }
  return seeker;
}
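The gapless branch above peeks 3 bytes at a fixed offset inside the Xing header and hands the 24-bit value to GaplessInfoHolder. In the LAME tag convention that value packs the encoder delay in the top 12 bits and the encoder padding in the bottom 12. A hypothetical sketch of the decoding (the sample bytes and the GaplessValueDemo class are illustrative):

import androidx.media3.common.util.ParsableByteArray;

public final class GaplessValueDemo {
  public static void main(String[] args) {
    // 0x2406C0 = (576 << 12) | 1728: a typical delay/padding pair.
    ParsableByteArray scratch = new ParsableByteArray(new byte[] {0x24, 0x06, (byte) 0xC0});
    int value = scratch.readUnsignedInt24(); // big-endian 24-bit read
    System.out.println("encoderDelay=" + (value >>> 12)); // 576
    System.out.println("encoderPadding=" + (value & 0xFFF)); // 1728
  }
}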
Use of androidx.media3.common.util.ParsableByteArray in project media by androidx: class OggExtractor, method sniffInternal.
@EnsuresNonNullIf(expression = "streamReader", result = true)
private boolean sniffInternal(ExtractorInput input) throws IOException {
  OggPageHeader header = new OggPageHeader();
  if (!header.populate(input, true) || (header.type & 0x02) != 0x02) {
    return false;
  }
  int length = min(header.bodySize, MAX_VERIFICATION_BYTES);
  ParsableByteArray scratch = new ParsableByteArray(length);
  input.peekFully(scratch.getData(), 0, length);
  if (FlacReader.verifyBitstreamType(resetPosition(scratch))) {
    streamReader = new FlacReader();
  } else if (VorbisReader.verifyBitstreamType(resetPosition(scratch))) {
    streamReader = new VorbisReader();
  } else if (OpusReader.verifyBitstreamType(resetPosition(scratch))) {
    streamReader = new OpusReader();
  } else {
    return false;
  }
  return true;
}
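resetPosition is a private OggExtractor helper that isn't shown on this page. Presumably it rewinds the scratch array and returns it, so the three verifyBitstreamType calls can share one buffer; a minimal sketch of that assumption:

// Sketch only, inferred from usage above: rewind and return the same array.
private static ParsableByteArray resetPosition(ParsableByteArray scratch) {
  scratch.setPosition(0);
  return scratch;
}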