Use of com.google.android.exoplayer.MediaFormat in project edx-app-android by edx:
class FragmentedMp4Extractor, method parseAvcFromParent.
/**
 * Parses an AVC video sample entry (avc1/avc3/encv) from the parent box.
 *
 * @param parent Buffer positioned over the parent box's data.
 * @param position Offset of the sample entry within {@code parent}.
 * @param size Total size in bytes of the sample entry.
 * @return The derived video {@link MediaFormat} paired with the encryption box
 *     parsed from a sinf child, or {@code null} when no sinf child is present.
 */
private static Pair<MediaFormat, TrackEncryptionBox> parseAvcFromParent(ParsableByteArray parent, int position, int size) {
    parent.setPosition(position + ATOM_HEADER_SIZE);
    // Skip the fixed fields preceding width/height in the visual sample entry.
    parent.skip(24);
    int width = parent.readUnsignedShort();
    int height = parent.readUnsignedShort();
    // Skip the remaining fixed fields up to the child boxes.
    parent.skip(50);

    List<byte[]> initializationData = null;
    TrackEncryptionBox trackEncryptionBox = null;
    // Walk the child boxes, picking up codec configuration (avcC) and
    // encryption metadata (sinf) as they are encountered.
    for (int childPosition = parent.getPosition(); childPosition - position < size; ) {
        parent.setPosition(childPosition);
        int childAtomSize = parent.readInt();
        int childAtomType = parent.readInt();
        if (childAtomType == Atom.TYPE_avcC) {
            initializationData = parseAvcCFromParent(parent, childPosition);
        } else if (childAtomType == Atom.TYPE_sinf) {
            trackEncryptionBox = parseSinfFromParent(parent, childPosition, childAtomSize);
        }
        childPosition += childAtomSize;
    }

    MediaFormat format = MediaFormat.createVideoFormat(MimeTypes.VIDEO_H264, MediaFormat.NO_VALUE, width, height, initializationData);
    return Pair.create(format, trackEncryptionBox);
}
Use of com.google.android.exoplayer.MediaFormat in project edx-app-android by edx:
class FragmentedMp4Extractor, method parseMp4aFromParent.
/**
 * Parses an AAC audio sample entry (mp4a/enca) from the parent box.
 *
 * @param parent Buffer positioned over the parent box's data.
 * @param position Offset of the sample entry within {@code parent}.
 * @param size Total size in bytes of the sample entry.
 * @return The derived audio {@link MediaFormat} paired with the encryption box
 *     parsed from a sinf child, or {@code null} when no sinf child is present.
 */
private static Pair<MediaFormat, TrackEncryptionBox> parseMp4aFromParent(ParsableByteArray parent, int position, int size) {
    parent.setPosition(position + ATOM_HEADER_SIZE);
    // Start of the mp4a atom (defined in 14496-14).
    parent.skip(16);
    int channelCount = parent.readUnsignedShort();
    int sampleSize = parent.readUnsignedShort();
    parent.skip(4);
    int sampleRate = parent.readUnsignedFixedPoint1616();
    // NOTE(review): if no esds child is present this stays null, and the
    // format below is built with singletonList(null) — presumably downstream
    // tolerates that; confirm against callers.
    byte[] initializationData = null;
    TrackEncryptionBox trackEncryptionBox = null;
    int childPosition = parent.getPosition();
    while (childPosition - position < size) {
        parent.setPosition(childPosition);
        int childStartPosition = parent.getPosition();
        int childAtomSize = parent.readInt();
        int childAtomType = parent.readInt();
        if (childAtomType == Atom.TYPE_esds) {
            initializationData = parseEsdsFromParent(parent, childStartPosition);
            // TODO: Do we really need to do this? See [redacted]
            // Update sampleRate and channelCount from the AudioSpecificConfig
            // initialization data, which takes precedence over the box fields.
            Pair<Integer, Integer> audioSpecificConfig = CodecSpecificDataUtil.parseAudioSpecificConfig(initializationData);
            sampleRate = audioSpecificConfig.first;
            channelCount = audioSpecificConfig.second;
        } else if (childAtomType == Atom.TYPE_sinf) {
            trackEncryptionBox = parseSinfFromParent(parent, childStartPosition, childAtomSize);
        }
        childPosition += childAtomSize;
    }
    // Use the MimeTypes constant ("audio/mp4a-latm") rather than a hard-coded
    // literal, consistent with parseAvcFromParent's use of MimeTypes.VIDEO_H264.
    MediaFormat format = MediaFormat.createAudioFormat(MimeTypes.AUDIO_AAC, sampleSize, channelCount, sampleRate, Collections.singletonList(initializationData));
    return Pair.create(format, trackEncryptionBox);
}
Use of com.google.android.exoplayer.MediaFormat in project edx-app-android by edx:
class FragmentedMp4Extractor, method parseStsd.
/**
 * Parses the stsd (sample description) box, deriving a {@link MediaFormat}
 * and the per-entry encryption boxes.
 *
 * @param stsd Buffer positioned over the stsd box's data.
 * @return The format from the last recognized entry (or {@code null} if none
 *     was recognized) paired with an encryption box per sample description entry.
 */
private static Pair<MediaFormat, TrackEncryptionBox[]> parseStsd(ParsableByteArray stsd) {
    stsd.setPosition(FULL_ATOM_HEADER_SIZE);
    int entryCount = stsd.readInt();
    MediaFormat mediaFormat = null;
    TrackEncryptionBox[] encryptionBoxes = new TrackEncryptionBox[entryCount];
    for (int i = 0; i < entryCount; i++) {
        int entryStart = stsd.getPosition();
        int entrySize = stsd.readInt();
        int entryType = stsd.readInt();
        // Dispatch on the sample entry type; unrecognized entries are skipped.
        Pair<MediaFormat, TrackEncryptionBox> parsed = null;
        if (entryType == Atom.TYPE_avc1 || entryType == Atom.TYPE_avc3 || entryType == Atom.TYPE_encv) {
            parsed = parseAvcFromParent(stsd, entryStart, entrySize);
        } else if (entryType == Atom.TYPE_mp4a || entryType == Atom.TYPE_enca) {
            parsed = parseMp4aFromParent(stsd, entryStart, entrySize);
        }
        if (parsed != null) {
            mediaFormat = parsed.first;
            encryptionBoxes[i] = parsed.second;
        }
        // Advance to the next entry regardless of whether this one was parsed.
        stsd.setPosition(entryStart + entrySize);
    }
    return Pair.create(mediaFormat, encryptionBoxes);
}
Use of com.google.android.exoplayer.MediaFormat in project edx-app-android by edx:
class ChunkSampleSource, method readData.
// Reads the next event from the enabled track: a discontinuity notification,
// a format change, a sample, end of stream, or nothing when no data is ready.
// Exact ordering of the checks below matters (discontinuity first, then
// pending-reset errors, then chunk advancement) — do not reorder.
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder, SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException {
Assertions.checkState(state == STATE_ENABLED);
// Only a single track (index 0) is exposed by this source.
Assertions.checkState(track == 0);
if (pendingDiscontinuity) {
pendingDiscontinuity = false;
return DISCONTINUITY_READ;
}
if (onlyReadDiscontinuity) {
return NOTHING_READ;
}
downstreamPositionUs = playbackPositionUs;
if (isPendingReset()) {
// While a seek/reset is pending, surface any outstanding load or chunk
// source error instead of returning data.
if (currentLoadableException != null) {
throw currentLoadableException;
}
IOException chunkSourceException = chunkSource.getError();
if (chunkSourceException != null) {
throw chunkSourceException;
}
return NOTHING_READ;
}
MediaChunk mediaChunk = mediaChunks.getFirst();
if (mediaChunk.isReadFinished()) {
// We've read all of the samples from the current media chunk.
if (mediaChunks.size() > 1) {
// Advance to the next queued chunk and retry the read from its start.
discardDownstreamMediaChunk();
mediaChunk = mediaChunks.getFirst();
mediaChunk.seekToStart();
return readData(track, playbackPositionUs, formatHolder, sampleHolder, false);
} else if (mediaChunk.isLastChunk()) {
return END_OF_STREAM;
}
// No further chunk is queued yet; report any pending source error.
IOException chunkSourceException = chunkSource.getError();
if (chunkSourceException != null) {
throw chunkSourceException;
}
return NOTHING_READ;
}
// Notify listeners when the downstream (container) format changes.
if (downstreamFormat == null || !downstreamFormat.equals(mediaChunk.format)) {
notifyDownstreamFormatChanged(mediaChunk.format.id, mediaChunk.trigger, mediaChunk.startTimeUs);
downstreamFormat = mediaChunk.format;
}
if (!mediaChunk.prepare()) {
// The chunk isn't ready to be read from yet; surface a load error if any.
if (currentLoadableException != null) {
throw currentLoadableException;
}
return NOTHING_READ;
}
// A changed media format is delivered to the caller before any samples.
MediaFormat mediaFormat = mediaChunk.getMediaFormat();
if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormat, true)) {
chunkSource.getMaxVideoDimensions(mediaFormat);
formatHolder.format = mediaFormat;
formatHolder.drmInitData = mediaChunk.getPsshInfo();
downstreamMediaFormat = mediaFormat;
return FORMAT_READ;
}
if (mediaChunk.read(sampleHolder)) {
// Samples earlier than the seek position are decode-only when frame
// accurate seeking is enabled.
sampleHolder.decodeOnly = frameAccurateSeeking && sampleHolder.timeUs < lastSeekPositionUs;
onSampleRead(mediaChunk, sampleHolder);
return SAMPLE_READ;
} else {
if (currentLoadableException != null) {
throw currentLoadableException;
}
return NOTHING_READ;
}
}
Use of com.google.android.exoplayer.MediaFormat in project zype-android by zype:
class PlayerFragment, method showClosedCaptionsDialog.
/**
 * Shows a dialog listing the available closed-caption text tracks plus a
 * trailing "off" entry, and persists the user's choice.
 */
private void showClosedCaptionsDialog() {
    // Nothing to select while the player does not exist.
    if (player == null) {
        return;
    }
    // Collect the track id of every text track, then append the "off" label.
    final List<CharSequence> trackLabels = new ArrayList<>();
    int trackIndex = 0;
    while (trackIndex < player.getTrackCount(CustomPlayer.TYPE_TEXT)) {
        trackLabels.add(player.getTrackFormat(CustomPlayer.TYPE_TEXT, trackIndex).trackId);
        trackIndex++;
    }
    trackLabels.add(getString(R.string.subtitles_off));
    // Preselect the trailing "off" entry when captions are disabled,
    // otherwise the previously saved track.
    int checkedItem;
    if (!ccEnabled) {
        checkedItem = trackLabels.size() - 1;
    } else {
        checkedItem = getClosedCaptionsTrackIndex(SettingsProvider.getInstance().getString(SettingsProvider.SELECTED_CLOSED_CAPTIONS_TRACK));
    }
    // Show selection dialog; the listener persists the choice and refreshes the UI.
    SubtitlesDialogFragment.createAndShowSubtitlesDialogFragment(getActivity(), "Select track", trackLabels.toArray(new CharSequence[trackLabels.size()]), checkedItem, new SubtitlesDialogFragment.ISubtitlesDialogListener() {
        @Override
        public void onItemSelected(SubtitlesDialogFragment dialog, int selectedItem) {
            // The last entry is the "off" sentinel appended above.
            boolean turnedOff = selectedItem == trackLabels.size() - 1;
            ccEnabled = !turnedOff;
            ccTrack = turnedOff ? "" : trackLabels.get(selectedItem).toString();
            SettingsProvider.getInstance().setBoolean(SettingsProvider.CLOSED_CAPTIONS_ENABLED, ccEnabled);
            SettingsProvider.getInstance().setString(SettingsProvider.SELECTED_CLOSED_CAPTIONS_TRACK, ccTrack);
            updateClosedCaptionsTrack();
            getActivity().invalidateOptionsMenu();
            dialog.dismiss();
        }
    });
}
Aggregations