Use of androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry in project ExoPlayer by google: class SefSlowMotionFlattener, method getMetadataInfo.
/** Returns the {@link MetadataInfo} derived from the {@link Metadata} provided. */
private static MetadataInfo getMetadataInfo(@Nullable Metadata metadata) {
  MetadataInfo metadataInfo = new MetadataInfo();
  if (metadata == null) {
    return metadataInfo;
  }
  for (int i = 0; i < metadata.length(); i++) {
    Metadata.Entry entry = metadata.get(i);
    if (entry instanceof SmtaMetadataEntry) {
      SmtaMetadataEntry smtaMetadataEntry = (SmtaMetadataEntry) entry;
      metadataInfo.captureFrameRate = smtaMetadataEntry.captureFrameRate;
      metadataInfo.inputMaxLayer = smtaMetadataEntry.svcTemporalLayerCount - 1;
    } else if (entry instanceof SlowMotionData) {
      metadataInfo.slowMotionData = (SlowMotionData) entry;
    }
  }
  if (metadataInfo.slowMotionData == null) {
    return metadataInfo;
  }
  checkState(metadataInfo.inputMaxLayer != C.INDEX_UNSET, "SVC temporal layer count not found.");
  checkState(metadataInfo.captureFrameRate != C.RATE_UNSET, "Capture frame rate not found.");
  checkState(
      metadataInfo.captureFrameRate % 1 == 0
          && metadataInfo.captureFrameRate % TARGET_OUTPUT_FRAME_RATE == 0,
      "Invalid capture frame rate: " + metadataInfo.captureFrameRate);
  int frameCountDivisor = (int) metadataInfo.captureFrameRate / TARGET_OUTPUT_FRAME_RATE;
  int normalSpeedMaxLayer = metadataInfo.inputMaxLayer;
  while (normalSpeedMaxLayer >= 0) {
    if ((frameCountDivisor & 1) == 1) {
      // Set normalSpeedMaxLayer only if captureFrameRate / TARGET_OUTPUT_FRAME_RATE is a power
      // of 2. Otherwise, the target output frame rate cannot be reached because removing a
      // layer divides the number of frames by 2.
      checkState(
          frameCountDivisor >> 1 == 0,
          "Could not compute normal speed max SVC layer for capture frame rate "
              + metadataInfo.captureFrameRate);
      metadataInfo.normalSpeedMaxLayer = normalSpeedMaxLayer;
      break;
    }
    frameCountDivisor >>= 1;
    normalSpeedMaxLayer--;
  }
  return metadataInfo;
}
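For example, assuming a target output rate of 30 fps and a 240 fps capture, frameCountDivisor is 8 = 2^3, so the loop strips three temporal layers and stops three layers below inputMaxLayer. A minimal standalone sketch of the same computation (the class and method names are illustrative, not part of SefSlowMotionFlattener):

// Illustrative sketch of the layer computation above; names are hypothetical.
final class NormalSpeedLayerDemo {

  /** Mirrors the loop above: only valid when the frame-count divisor is a power of 2. */
  static int normalSpeedMaxLayer(int inputMaxLayer, float captureFrameRate, int targetFrameRate) {
    int frameCountDivisor = (int) captureFrameRate / targetFrameRate;
    int normalSpeedMaxLayer = inputMaxLayer;
    while (normalSpeedMaxLayer >= 0) {
      if ((frameCountDivisor & 1) == 1) {
        if (frameCountDivisor >> 1 != 0) {
          throw new IllegalStateException("Divisor is not a power of 2: " + frameCountDivisor);
        }
        return normalSpeedMaxLayer;
      }
      frameCountDivisor >>= 1;
      normalSpeedMaxLayer--;
    }
    return -1; // Not reached for valid inputs.
  }

  public static void main(String[] args) {
    // 240 fps capture, 4 SVC layers (inputMaxLayer = 3), 30 fps output:
    // divisor = 8 = 2^3, so three layers are dropped and only layer 0 remains.
    System.out.println(normalSpeedMaxLayer(/* inputMaxLayer= */ 3, 240, 30)); // Prints 0.
  }
}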
Use of androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry in project media by androidx: class MetadataRetrieverTest, method retrieveMetadata_sefSlowMotion_outputsExpectedMetadata.
@Test
public void retrieveMetadata_sefSlowMotion_outputsExpectedMetadata() throws Exception {
  MediaItem mediaItem =
      MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample_sef_slow_motion.mp4"));
  SmtaMetadataEntry expectedSmtaEntry =
      new SmtaMetadataEntry(/* captureFrameRate= */ 240, /* svcTemporalLayerCount= */ 4);
  List<SlowMotionData.Segment> segments = new ArrayList<>();
  segments.add(
      new SlowMotionData.Segment(
          /* startTimeMs= */ 88, /* endTimeMs= */ 879, /* speedDivisor= */ 2));
  segments.add(
      new SlowMotionData.Segment(
          /* startTimeMs= */ 1255, /* endTimeMs= */ 1970, /* speedDivisor= */ 8));
  SlowMotionData expectedSlowMotionData = new SlowMotionData(segments);
  MdtaMetadataEntry expectedMdtaEntry =
      new MdtaMetadataEntry(
          KEY_ANDROID_CAPTURE_FPS,
          /* value= */ new byte[] {67, 112, 0, 0},
          /* localeIndicator= */ 0,
          /* typeIndicator= */ 23);

  ListenableFuture<TrackGroupArray> trackGroupsFuture =
      retrieveMetadata(context, mediaItem, clock);
  ShadowLooper.idleMainLooper();
  TrackGroupArray trackGroups = trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);

  // Video and audio
  assertThat(trackGroups.length).isEqualTo(2);
  // Audio
  assertThat(trackGroups.get(0).getFormat(0).metadata.length()).isEqualTo(2);
  assertThat(trackGroups.get(0).getFormat(0).metadata.get(0)).isEqualTo(expectedSmtaEntry);
  assertThat(trackGroups.get(0).getFormat(0).metadata.get(1)).isEqualTo(expectedSlowMotionData);
  // Video
  assertThat(trackGroups.get(1).getFormat(0).metadata.length()).isEqualTo(3);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(0)).isEqualTo(expectedMdtaEntry);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(1)).isEqualTo(expectedSmtaEntry);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(2)).isEqualTo(expectedSlowMotionData);
}
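Outside of this Robolectric test, the same future-based API can be used to inspect the SEF entries of a file at runtime. A minimal sketch, assuming the media3 MetadataRetriever API with package locations as of recent media3 versions and an illustrative URI; the blocking get() call must run off the main thread:

import android.content.Context;
import androidx.media3.common.MediaItem;
import androidx.media3.common.Metadata;
import androidx.media3.exoplayer.MetadataRetriever;
import androidx.media3.exoplayer.source.TrackGroupArray;
import androidx.media3.extractor.metadata.mp4.SlowMotionData;
import androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry;
import com.google.common.util.concurrent.ListenableFuture;

final class SefMetadataReader {

  /** Blocks until track metadata is available; call from a background thread. */
  static void printSefMetadata(Context context, String uri) throws Exception {
    ListenableFuture<TrackGroupArray> future =
        MetadataRetriever.retrieveMetadata(context, MediaItem.fromUri(uri));
    TrackGroupArray trackGroups = future.get();
    for (int i = 0; i < trackGroups.length; i++) {
      Metadata metadata = trackGroups.get(i).getFormat(0).metadata;
      if (metadata == null) {
        continue;
      }
      for (int j = 0; j < metadata.length(); j++) {
        Metadata.Entry entry = metadata.get(j);
        if (entry instanceof SmtaMetadataEntry) {
          SmtaMetadataEntry smta = (SmtaMetadataEntry) entry;
          System.out.println(
              "SMTA: " + smta.captureFrameRate + " fps, "
                  + smta.svcTemporalLayerCount + " SVC layers");
        } else if (entry instanceof SlowMotionData) {
          // Lists the slow-motion ranges and their speed divisors.
          System.out.println("Slow-motion segments: " + ((SlowMotionData) entry).segments);
        }
      }
    }
  }
}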
Use of androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry in project media by androidx: class AtomParsers, method parseSmta.
/**
 * Parses metadata from a Samsung smta atom.
 *
 * <p>See [Internal: b/150138465#comment76].
 */
@Nullable
private static Metadata parseSmta(ParsableByteArray smta, int limit) {
  smta.skipBytes(Atom.FULL_HEADER_SIZE);
  while (smta.getPosition() < limit) {
    int atomPosition = smta.getPosition();
    int atomSize = smta.readInt();
    int atomType = smta.readInt();
    if (atomType == Atom.TYPE_saut) {
      if (atomSize < 14) {
        return null;
      }
      // author (4), reserved = 0 (1).
      smta.skipBytes(5);
      int recordingMode = smta.readUnsignedByte();
      if (recordingMode != 12 && recordingMode != 13) {
        return null;
      }
      float captureFrameRate = recordingMode == 12 ? 240 : 120;
      // reserved = 1 (1).
      smta.skipBytes(1);
      int svcTemporalLayerCount = smta.readUnsignedByte();
      return new Metadata(new SmtaMetadataEntry(captureFrameRate, svcTemporalLayerCount));
    }
    smta.setPosition(atomPosition + atomSize);
  }
  return null;
}
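The saut payload carries a Samsung recording-mode byte (12 for 240 fps, 13 for 120 fps) followed by the SVC temporal layer count, and those two fields are what end up in the SmtaMetadataEntry. A small standalone sketch of that mapping (the helper name is hypothetical, not part of AtomParsers):

import androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry;

final class SautFieldsDemo {

  /** Mirrors parseSmta's mapping; returns null for recording modes it does not recognize. */
  static SmtaMetadataEntry fromSautFields(int recordingMode, int svcTemporalLayerCount) {
    if (recordingMode != 12 && recordingMode != 13) {
      return null; // Only the 240 fps and 120 fps slow-motion modes are handled.
    }
    float captureFrameRate = recordingMode == 12 ? 240 : 120;
    return new SmtaMetadataEntry(captureFrameRate, svcTemporalLayerCount);
  }

  public static void main(String[] args) {
    // Recording mode 12 -> 240 fps capture with 4 SVC temporal layers, as in the test above.
    System.out.println(fromSautFields(/* recordingMode= */ 12, /* svcTemporalLayerCount= */ 4));
  }
}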
Use of androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry in project media by androidx: class SefSlowMotionFlattener, method getMetadataInfo.
/** Returns the {@link MetadataInfo} derived from the {@link Metadata} provided. */
private static MetadataInfo getMetadataInfo(@Nullable Metadata metadata) {
  MetadataInfo metadataInfo = new MetadataInfo();
  if (metadata == null) {
    return metadataInfo;
  }
  for (int i = 0; i < metadata.length(); i++) {
    Metadata.Entry entry = metadata.get(i);
    if (entry instanceof SmtaMetadataEntry) {
      SmtaMetadataEntry smtaMetadataEntry = (SmtaMetadataEntry) entry;
      metadataInfo.captureFrameRate = smtaMetadataEntry.captureFrameRate;
      metadataInfo.inputMaxLayer = smtaMetadataEntry.svcTemporalLayerCount - 1;
    } else if (entry instanceof SlowMotionData) {
      metadataInfo.slowMotionData = (SlowMotionData) entry;
    }
  }
  if (metadataInfo.slowMotionData == null) {
    return metadataInfo;
  }
  checkState(metadataInfo.inputMaxLayer != C.INDEX_UNSET, "SVC temporal layer count not found.");
  checkState(metadataInfo.captureFrameRate != C.RATE_UNSET, "Capture frame rate not found.");
  checkState(
      metadataInfo.captureFrameRate % 1 == 0
          && metadataInfo.captureFrameRate % TARGET_OUTPUT_FRAME_RATE == 0,
      "Invalid capture frame rate: " + metadataInfo.captureFrameRate);
  int frameCountDivisor = (int) metadataInfo.captureFrameRate / TARGET_OUTPUT_FRAME_RATE;
  int normalSpeedMaxLayer = metadataInfo.inputMaxLayer;
  while (normalSpeedMaxLayer >= 0) {
    if ((frameCountDivisor & 1) == 1) {
      // Set normalSpeedMaxLayer only if captureFrameRate / TARGET_OUTPUT_FRAME_RATE is a power
      // of 2. Otherwise, the target output frame rate cannot be reached because removing a
      // layer divides the number of frames by 2.
      checkState(
          frameCountDivisor >> 1 == 0,
          "Could not compute normal speed max SVC layer for capture frame rate "
              + metadataInfo.captureFrameRate);
      metadataInfo.normalSpeedMaxLayer = normalSpeedMaxLayer;
      break;
    }
    frameCountDivisor >>= 1;
    normalSpeedMaxLayer--;
  }
  return metadataInfo;
}
Use of androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry in project ExoPlayer by google: class MetadataRetrieverTest, method retrieveMetadata_sefSlowMotion_outputsExpectedMetadata.
@Test
public void retrieveMetadata_sefSlowMotion_outputsExpectedMetadata() throws Exception {
  MediaItem mediaItem =
      MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample_sef_slow_motion.mp4"));
  SmtaMetadataEntry expectedSmtaEntry =
      new SmtaMetadataEntry(/* captureFrameRate= */ 240, /* svcTemporalLayerCount= */ 4);
  List<SlowMotionData.Segment> segments = new ArrayList<>();
  segments.add(
      new SlowMotionData.Segment(
          /* startTimeMs= */ 88, /* endTimeMs= */ 879, /* speedDivisor= */ 2));
  segments.add(
      new SlowMotionData.Segment(
          /* startTimeMs= */ 1255, /* endTimeMs= */ 1970, /* speedDivisor= */ 8));
  SlowMotionData expectedSlowMotionData = new SlowMotionData(segments);
  MdtaMetadataEntry expectedMdtaEntry =
      new MdtaMetadataEntry(
          KEY_ANDROID_CAPTURE_FPS,
          /* value= */ new byte[] {67, 112, 0, 0},
          /* localeIndicator= */ 0,
          /* typeIndicator= */ 23);

  ListenableFuture<TrackGroupArray> trackGroupsFuture =
      retrieveMetadata(context, mediaItem, clock);
  ShadowLooper.idleMainLooper();
  TrackGroupArray trackGroups = trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);

  // Video and audio
  assertThat(trackGroups.length).isEqualTo(2);
  // Audio
  assertThat(trackGroups.get(0).getFormat(0).metadata.length()).isEqualTo(2);
  assertThat(trackGroups.get(0).getFormat(0).metadata.get(0)).isEqualTo(expectedSmtaEntry);
  assertThat(trackGroups.get(0).getFormat(0).metadata.get(1)).isEqualTo(expectedSlowMotionData);
  // Video
  assertThat(trackGroups.get(1).getFormat(0).metadata.length()).isEqualTo(3);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(0)).isEqualTo(expectedMdtaEntry);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(1)).isEqualTo(expectedSmtaEntry);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(2)).isEqualTo(expectedSlowMotionData);
}
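A quick consistency check on the expected values in this test: the MdtaMetadataEntry value bytes {67, 112, 0, 0} are 0x43700000, which decodes as the big-endian IEEE 754 float 240.0, so the KEY_ANDROID_CAPTURE_FPS entry agrees with the 240 fps capture frame rate in the SmtaMetadataEntry. A worked example, not part of the test:

import java.nio.ByteBuffer;

final class CaptureFpsValueDemo {
  public static void main(String[] args) {
    // The capture-fps mdta value is stored as a big-endian IEEE 754 float (type indicator 23).
    byte[] value = {67, 112, 0, 0}; // 0x43700000
    float captureFps = ByteBuffer.wrap(value).getFloat();
    System.out.println(captureFps); // Prints 240.0, matching SmtaMetadataEntry.captureFrameRate.
  }
}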