Use of androidx.media3.extractor.metadata.mp4.SlowMotionData in the ExoPlayer project by Google.
From the class SlowMotionDataTest, method parcelable.
/**
 * Tests that {@link SlowMotionData} survives a round trip through a {@link Parcel} unchanged,
 * relying on {@code SlowMotionData.equals} for the comparison.
 */
@Test
public void parcelable() {
  List<SlowMotionData.Segment> segments = new ArrayList<>();
  segments.add(
      new SlowMotionData.Segment(/* startTimeMs= */ 1000, /* endTimeMs= */ 2000, /* speedDivisor= */ 4));
  segments.add(
      new SlowMotionData.Segment(/* startTimeMs= */ 2600, /* endTimeMs= */ 4000, /* speedDivisor= */ 8));
  segments.add(
      new SlowMotionData.Segment(/* startTimeMs= */ 8765, /* endTimeMs= */ 12485, /* speedDivisor= */ 16));
  SlowMotionData slowMotionDataToParcel = new SlowMotionData(segments);

  Parcel parcel = Parcel.obtain();
  try {
    slowMotionDataToParcel.writeToParcel(parcel, /* flags= */ 0);
    parcel.setDataPosition(0);
    SlowMotionData slowMotionDataFromParcel = SlowMotionData.CREATOR.createFromParcel(parcel);
    assertThat(slowMotionDataFromParcel).isEqualTo(slowMotionDataToParcel);
  } finally {
    // Return the Parcel to the pool even when the assertion fails; the original code leaked it
    // on a failing assertion because recycle() was only reached after the assert.
    parcel.recycle();
  }
}
Use of androidx.media3.extractor.metadata.mp4.SlowMotionData in the ExoPlayer project by Google.
From the class SegmentSpeedProvider, method extractSlowMotionSegments.
/**
 * Collects the {@link Segment}s of every {@link SlowMotionData} entry found in the format's
 * metadata, returned sorted by {@code BY_START_THEN_END_THEN_DIVISOR}.
 *
 * <p>Returns an empty list when the format carries no metadata or no slow-motion entries.
 */
private static ImmutableList<Segment> extractSlowMotionSegments(Format format) {
  List<Segment> collected = new ArrayList<>();
  @Nullable Metadata metadata = format.metadata;
  if (metadata != null) {
    int entryCount = metadata.length();
    for (int index = 0; index < entryCount; index++) {
      Metadata.Entry entry = metadata.get(index);
      if (entry instanceof SlowMotionData) {
        collected.addAll(((SlowMotionData) entry).segments);
      }
    }
  }
  return ImmutableList.sortedCopyOf(BY_START_THEN_END_THEN_DIVISOR, collected);
}
Use of androidx.media3.extractor.metadata.mp4.SlowMotionData in the ExoPlayer project by Google.
From the class SefSlowMotionFlattener, method getMetadataInfo.
/**
 * Returns the {@link MetadataInfo} derived from the {@link Metadata} provided.
 *
 * <p>Scans the entries for an {@link SmtaMetadataEntry} (capture frame rate and SVC temporal
 * layer count) and a {@link SlowMotionData} entry. When slow-motion data is present, the SMTA
 * values are validated and the highest SVC layer that corresponds to normal-speed output is
 * computed; when it is absent, the partially-filled {@code MetadataInfo} is returned as-is.
 * Throws {@link IllegalStateException} (via {@code checkState}) if slow-motion data exists but
 * the SMTA values are missing or the capture frame rate is not a power-of-2 multiple of
 * {@code TARGET_OUTPUT_FRAME_RATE}.
 */
private static MetadataInfo getMetadataInfo(@Nullable Metadata metadata) {
MetadataInfo metadataInfo = new MetadataInfo();
if (metadata == null) {
return metadataInfo;
}
for (int i = 0; i < metadata.length(); i++) {
Metadata.Entry entry = metadata.get(i);
if (entry instanceof SmtaMetadataEntry) {
// The SMTA entry supplies the capture frame rate; inputMaxLayer is the highest temporal
// layer index, i.e. layer count - 1.
SmtaMetadataEntry smtaMetadataEntry = (SmtaMetadataEntry) entry;
metadataInfo.captureFrameRate = smtaMetadataEntry.captureFrameRate;
metadataInfo.inputMaxLayer = smtaMetadataEntry.svcTemporalLayerCount - 1;
} else if (entry instanceof SlowMotionData) {
metadataInfo.slowMotionData = (SlowMotionData) entry;
}
}
if (metadataInfo.slowMotionData == null) {
// No slow-motion segments: nothing to validate or compute.
return metadataInfo;
}
// Slow-motion flattening requires both SMTA values to have been found above.
checkState(metadataInfo.inputMaxLayer != C.INDEX_UNSET, "SVC temporal layer count not found.");
checkState(metadataInfo.captureFrameRate != C.RATE_UNSET, "Capture frame rate not found.");
// The capture frame rate must be a whole number ('% 1 == 0') and an exact multiple of the
// target output frame rate for the divisor computed below to be meaningful.
checkState(metadataInfo.captureFrameRate % 1 == 0 && metadataInfo.captureFrameRate % TARGET_OUTPUT_FRAME_RATE == 0, "Invalid capture frame rate: " + metadataInfo.captureFrameRate);
int frameCountDivisor = (int) metadataInfo.captureFrameRate / TARGET_OUTPUT_FRAME_RATE;
int normalSpeedMaxLayer = metadataInfo.inputMaxLayer;
// Each dropped SVC temporal layer halves the frame count, so walk down from the top layer,
// halving frameCountDivisor at each step, until the divisor's lowest bit is set.
while (normalSpeedMaxLayer >= 0) {
if ((frameCountDivisor & 1) == 1) {
// Set normalSpeedMaxLayer only if captureFrameRate / TARGET_OUTPUT_FRAME_RATE is a power of
// 2. Otherwise, the target output frame rate cannot be reached because removing a layer
// divides the number of frames by 2.
checkState(frameCountDivisor >> 1 == 0, "Could not compute normal speed max SVC layer for capture frame rate " + metadataInfo.captureFrameRate);
metadataInfo.normalSpeedMaxLayer = normalSpeedMaxLayer;
break;
}
frameCountDivisor >>= 1;
normalSpeedMaxLayer--;
}
return metadataInfo;
}
Use of androidx.media3.extractor.metadata.mp4.SlowMotionData in the media project by androidx.
From the class MetadataRetrieverTest, method retrieveMetadata_sefSlowMotion_outputsExpectedMetadata.
/**
 * Verifies that retrieving metadata from the SEF slow-motion sample surfaces the expected SMTA,
 * slow-motion and MDTA entries on both the audio and video tracks.
 */
@Test
public void retrieveMetadata_sefSlowMotion_outputsExpectedMetadata() throws Exception {
  MediaItem mediaItem =
      MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample_sef_slow_motion.mp4"));
  // Expected entries, mirroring what the extractor should produce for this sample.
  SmtaMetadataEntry expectedSmtaEntry =
      new SmtaMetadataEntry(/* captureFrameRate= */ 240, /* svcTemporalLayerCount= */ 4);
  List<SlowMotionData.Segment> expectedSegments = new ArrayList<>();
  expectedSegments.add(
      new SlowMotionData.Segment(/* startTimeMs= */ 88, /* endTimeMs= */ 879, /* speedDivisor= */ 2));
  expectedSegments.add(
      new SlowMotionData.Segment(/* startTimeMs= */ 1255, /* endTimeMs= */ 1970, /* speedDivisor= */ 8));
  SlowMotionData expectedSlowMotionData = new SlowMotionData(expectedSegments);
  MdtaMetadataEntry expectedMdtaEntry =
      new MdtaMetadataEntry(
          KEY_ANDROID_CAPTURE_FPS,
          /* value= */ new byte[] {67, 112, 0, 0},
          /* localeIndicator= */ 0,
          /* typeIndicator= */ 23);

  ListenableFuture<TrackGroupArray> trackGroupsFuture =
      retrieveMetadata(context, mediaItem, clock);
  ShadowLooper.idleMainLooper();
  TrackGroupArray trackGroups = trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);

  // One video and one audio track group.
  assertThat(trackGroups.length).isEqualTo(2);
  // Audio track: SMTA entry then slow-motion data.
  assertThat(trackGroups.get(0).getFormat(0).metadata.length()).isEqualTo(2);
  assertThat(trackGroups.get(0).getFormat(0).metadata.get(0)).isEqualTo(expectedSmtaEntry);
  assertThat(trackGroups.get(0).getFormat(0).metadata.get(1)).isEqualTo(expectedSlowMotionData);
  // Video track: MDTA capture-fps entry, SMTA entry, then slow-motion data.
  assertThat(trackGroups.get(1).getFormat(0).metadata.length()).isEqualTo(3);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(0)).isEqualTo(expectedMdtaEntry);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(1)).isEqualTo(expectedSmtaEntry);
  assertThat(trackGroups.get(1).getFormat(0).metadata.get(2)).isEqualTo(expectedSlowMotionData);
}
Use of androidx.media3.extractor.metadata.mp4.SlowMotionData in the media project by androidx.
From the class SefSlowMotionFlattener, method getMetadataInfo.
/**
 * Returns the {@link MetadataInfo} derived from the {@link Metadata} provided.
 *
 * <p>Scans the entries for an {@link SmtaMetadataEntry} (capture frame rate and SVC temporal
 * layer count) and a {@link SlowMotionData} entry. When slow-motion data is present, the SMTA
 * values are validated and the highest SVC layer that corresponds to normal-speed output is
 * computed; when it is absent, the partially-filled {@code MetadataInfo} is returned as-is.
 * Throws {@link IllegalStateException} (via {@code checkState}) if slow-motion data exists but
 * the SMTA values are missing or the capture frame rate is not a power-of-2 multiple of
 * {@code TARGET_OUTPUT_FRAME_RATE}.
 */
private static MetadataInfo getMetadataInfo(@Nullable Metadata metadata) {
MetadataInfo metadataInfo = new MetadataInfo();
if (metadata == null) {
return metadataInfo;
}
for (int i = 0; i < metadata.length(); i++) {
Metadata.Entry entry = metadata.get(i);
if (entry instanceof SmtaMetadataEntry) {
// The SMTA entry supplies the capture frame rate; inputMaxLayer is the highest temporal
// layer index, i.e. layer count - 1.
SmtaMetadataEntry smtaMetadataEntry = (SmtaMetadataEntry) entry;
metadataInfo.captureFrameRate = smtaMetadataEntry.captureFrameRate;
metadataInfo.inputMaxLayer = smtaMetadataEntry.svcTemporalLayerCount - 1;
} else if (entry instanceof SlowMotionData) {
metadataInfo.slowMotionData = (SlowMotionData) entry;
}
}
if (metadataInfo.slowMotionData == null) {
// No slow-motion segments: nothing to validate or compute.
return metadataInfo;
}
// Slow-motion flattening requires both SMTA values to have been found above.
checkState(metadataInfo.inputMaxLayer != C.INDEX_UNSET, "SVC temporal layer count not found.");
checkState(metadataInfo.captureFrameRate != C.RATE_UNSET, "Capture frame rate not found.");
// The capture frame rate must be a whole number ('% 1 == 0') and an exact multiple of the
// target output frame rate for the divisor computed below to be meaningful.
checkState(metadataInfo.captureFrameRate % 1 == 0 && metadataInfo.captureFrameRate % TARGET_OUTPUT_FRAME_RATE == 0, "Invalid capture frame rate: " + metadataInfo.captureFrameRate);
int frameCountDivisor = (int) metadataInfo.captureFrameRate / TARGET_OUTPUT_FRAME_RATE;
int normalSpeedMaxLayer = metadataInfo.inputMaxLayer;
// Each dropped SVC temporal layer halves the frame count, so walk down from the top layer,
// halving frameCountDivisor at each step, until the divisor's lowest bit is set.
while (normalSpeedMaxLayer >= 0) {
if ((frameCountDivisor & 1) == 1) {
// Set normalSpeedMaxLayer only if captureFrameRate / TARGET_OUTPUT_FRAME_RATE is a power of
// 2. Otherwise, the target output frame rate cannot be reached because removing a layer
// divides the number of frames by 2.
checkState(frameCountDivisor >> 1 == 0, "Could not compute normal speed max SVC layer for capture frame rate " + metadataInfo.captureFrameRate);
metadataInfo.normalSpeedMaxLayer = normalSpeedMaxLayer;
break;
}
frameCountDivisor >>= 1;
normalSpeedMaxLayer--;
}
return metadataInfo;
}
Aggregations