Example use of android.media.MediaFormat in project android_frameworks_base (crdroidandroid): class DisplaySinkService, method updateSurfaceFromUi.
/**
 * Synchronizes the video decoder with the UI's surface.
 *
 * <p>Captures the current surface and its dimensions from {@code holder} (or
 * {@code null}/0x0 when there is no usable surface), and — under
 * {@code mSurfaceAndCodecLock} — tears down any existing decoder and, if a
 * valid surface is present, creates and starts a new {@code video/avc}
 * decoder rendering into it. Finally notifies the remote source of the new
 * sink state via {@code sendSinkStatus()} on the transport thread.
 *
 * @param holder the UI surface holder, or {@code null} if the surface is gone
 */
private void updateSurfaceFromUi(SurfaceHolder holder) {
    Surface surface = null;
    int width = 0, height = 0;
    // Only accept a surface that is fully created and still valid.
    if (holder != null && !holder.isCreating()) {
        surface = holder.getSurface();
        if (surface.isValid()) {
            final Rect frame = holder.getSurfaceFrame();
            width = frame.width();
            height = frame.height();
        } else {
            surface = null;
        }
    }
    synchronized (mSurfaceAndCodecLock) {
        // Nothing changed — keep the current codec running untouched.
        if (mSurface == surface && mSurfaceWidth == width && mSurfaceHeight == height) {
            return;
        }
        mSurface = surface;
        mSurfaceWidth = width;
        mSurfaceHeight = height;
        if (mCodec != null) {
            mCodec.stop();
            // FIX: release() frees the underlying native codec resources;
            // stop() alone leaves them allocated until finalization.
            mCodec.release();
            mCodec = null;
            mCodecInputBuffers = null;
            mCodecBufferInfo = null;
        }
        if (mSurface != null) {
            MediaFormat format = MediaFormat.createVideoFormat("video/avc", mSurfaceWidth, mSurfaceHeight);
            try {
                mCodec = MediaCodec.createDecoderByType("video/avc");
            } catch (IOException e) {
                throw new RuntimeException("failed to create video/avc decoder", e);
            }
            mCodec.configure(format, mSurface, null, 0);
            mCodec.start();
            mCodecBufferInfo = new BufferInfo();
        }
        // Tell the source about the (possibly new) sink dimensions from the
        // transport thread, where protocol messages are sent.
        mTransportHandler.post(new Runnable() {
            @Override
            public void run() {
                sendSinkStatus();
            }
        });
    }
}
Example use of android.media.MediaFormat in project android_frameworks_base (crdroidandroid): class MediaDecoder, method onStart.
/**
 * Opens the media source and prepares decoding.
 *
 * <p>Sets up a {@link MediaExtractor} for {@code mUri}, picks the first video
 * and first audio track, initializes the matching track decoders, selects the
 * chosen tracks on the extractor, and optionally seeks to the requested start
 * position. Marks the decoder as started and notifies the listener.
 *
 * @throws Exception if the data source cannot be opened or a decoder fails to
 *     initialize
 * @throws IllegalArgumentException if the source contains neither a video nor
 *     an audio track
 */
private void onStart() throws Exception {
// When rendering via OpenGL, make this thread's GL context current first so
// decoder init can create GL resources.
if (mOpenGLEnabled) {
getRenderTarget().focus();
}
mMediaExtractor = new MediaExtractor();
mMediaExtractor.setDataSource(mContext, mUri, null);
mVideoTrackIndex = -1;
mAudioTrackIndex = -1;
// Pick the FIRST video track and the FIRST audio track; later ones are ignored.
for (int i = 0; i < mMediaExtractor.getTrackCount(); i++) {
MediaFormat format = mMediaExtractor.getTrackFormat(i);
if (DEBUG) {
Log.i(LOG_TAG, "Uri " + mUri + ", track " + i + ": " + format);
}
if (DecoderUtil.isVideoFormat(format) && mVideoTrackIndex == -1) {
mVideoTrackIndex = i;
} else if (DecoderUtil.isAudioFormat(format) && mAudioTrackIndex == -1) {
mAudioTrackIndex = i;
}
}
if (mVideoTrackIndex == -1 && mAudioTrackIndex == -1) {
throw new IllegalArgumentException("Couldn't find a video or audio track in the provided file");
}
if (mVideoTrackIndex != -1) {
MediaFormat videoFormat = mMediaExtractor.getTrackFormat(mVideoTrackIndex);
// GPU path decodes straight to a GL texture; CPU path decodes to buffers.
mVideoTrackDecoder = mOpenGLEnabled ? new GpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this) : new CpuVideoTrackDecoder(mVideoTrackIndex, videoFormat, this);
mVideoTrackDecoder.init();
mMediaExtractor.selectTrack(mVideoTrackIndex);
// Rotation metadata (KEY_ROTATION) is only available from API 17 onward.
if (Build.VERSION.SDK_INT >= 17) {
retrieveDefaultRotation();
}
}
if (mAudioTrackIndex != -1) {
MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex);
mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);
mAudioTrackDecoder.init();
mMediaExtractor.selectTrack(mAudioTrackIndex);
}
// Seek AFTER selecting tracks so the extractor positions the selected
// streams; lands on the nearest preceding sync frame, not the exact time.
if (mStartMicros > 0) {
mMediaExtractor.seekTo(mStartMicros, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
}
mStarted = true;
mListener.onDecodingStarted();
}
Example use of android.media.MediaFormat in project buglife-android (Buglife): class ScreenFileEncoder, method setUpEncoder.
/**
 * Creates and configures the H.264 screen-capture encoder and its MP4 muxer.
 *
 * <p>Builds a video {@link MediaFormat} sized {@code mWidth}x{@code mHeight},
 * configures an asynchronous encoder whose output buffers are written to
 * {@code mMuxer} once the muxer has started, and exposes the encoder's input
 * surface via {@code mSurface} for MediaProjection to draw into.
 *
 * @throws IllegalStateException if the encoder or muxer cannot be created
 */
private void setUpEncoder() {
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    // Set some required properties. The media codec may fail if these aren't defined.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_ENCODING_BITRATE);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, DEFAULT_MEDIA_CODEC_FRAME_RATE);
    format.setInteger(MediaFormat.KEY_CAPTURE_RATE, DEFAULT_MEDIA_CODEC_FRAME_RATE);
    format.setInteger(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 1000000 / DEFAULT_MEDIA_CODEC_FRAME_RATE);
    // NOTE(review): KEY_CHANNEL_COUNT is an audio-format key; setting it to 0
    // on a video format looks like a mistake — kept for behavior parity, but
    // confirm whether any target device actually requires it.
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 0);
    // 1 seconds between I-frames
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    try {
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mEncoder.setCallback(new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
                // Input buffer will be filled via MediaProjection
            }

            @Override
            public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
                ByteBuffer encodedData = codec.getOutputBuffer(index);
                if (encodedData == null) {
                    throw new RuntimeException("couldn't fetch buffer at index " + index);
                }
                // Codec-config data (SPS/PPS) is delivered via the format
                // change instead; don't write it as a sample.
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    info.size = 0;
                }
                if (info.size != 0) {
                    if (mMuxerStarted) {
                        // Constrain the buffer view to exactly this sample.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);
                        mMuxer.writeSampleData(mTrackIndex, encodedData, info);
                    }
                }
                codec.releaseOutputBuffer(index, false);
            }

            @Override
            public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException error) {
                error.printStackTrace();
            }

            @Override
            public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
                // should happen before receiving buffers, and should only happen once
                if (mTrackIndex >= 0) {
                    throw new RuntimeException("format changed twice");
                }
                mTrackIndex = mMuxer.addTrack(codec.getOutputFormat());
                if (mTrackIndex >= 0) {
                    startMuxer();
                }
            }
        });
        mSurface = mEncoder.createInputSurface();
        mMuxer = new MediaMuxer(mOutputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException e) {
        // FIX: previously this only printed the stack trace, leaving
        // mEncoder/mMuxer/mSurface null and deferring the failure to a
        // confusing NPE later. Fail fast with the cause preserved.
        throw new IllegalStateException("failed to set up screen encoder/muxer", e);
    }
}
Example use of android.media.MediaFormat in project android_frameworks_base (ResurrectionRemix): class MediaPlayer, method addSubtitleSource.
/**
 * Registers an out-of-band subtitle source for this player.
 *
 * <p>The stream is read and parsed on a dedicated background thread; the
 * resulting track (if the format is supported) is handed to the subtitle
 * controller and its data posted to the media time provider. The application
 * is notified of the outcome through a {@code MEDIA_INFO} message
 * ({@code MEDIA_INFO_UNSUPPORTED_SUBTITLE} or
 * {@code MEDIA_INFO_EXTERNAL_METADATA_UPDATE}).
 *
 * @param is subtitle data stream; the player takes ownership and closes it
 * @param format describes the subtitle MIME type and language
 * @throws IllegalStateException if called in an invalid player state
 * @hide
 */
public void addSubtitleSource(InputStream is, MediaFormat format) throws IllegalStateException {
// Capture for use inside the anonymous Runnable below.
final InputStream fIs = is;
final MediaFormat fFormat = format;
if (is != null) {
// way to implement timeouts in the future.
synchronized (mOpenSubtitleSources) {
mOpenSubtitleSources.add(is);
}
} else {
Log.w(TAG, "addSubtitleSource called with null InputStream");
}
getMediaTimeProvider();
// process each subtitle in its own thread
final HandlerThread thread = new HandlerThread("SubtitleReadThread", Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
thread.start();
Handler handler = new Handler(thread.getLooper());
handler.post(new Runnable() {
// Reads and registers the track; returns the MEDIA_INFO code to report.
private int addTrack() {
if (fIs == null || mSubtitleController == null) {
return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
}
SubtitleTrack track = mSubtitleController.addTrack(fFormat);
if (track == null) {
return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
}
// TODO: do the conversion in the subtitle track
// \A delimiter = "start of input", so next() slurps the whole stream.
Scanner scanner = new Scanner(fIs, "UTF-8");
String contents = scanner.useDelimiter("\\A").next();
synchronized (mOpenSubtitleSources) {
mOpenSubtitleSources.remove(fIs);
}
// Closing the scanner also closes the underlying input stream.
scanner.close();
// null index marks this as an external (non-inband) track.
synchronized (mIndexTrackPairs) {
mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
}
// Deliver the parsed subtitle bytes to the time provider's handler.
Handler h = mTimeProvider.mEventHandler;
int what = TimeProvider.NOTIFY;
int arg1 = TimeProvider.NOTIFY_TRACK_DATA;
Pair<SubtitleTrack, byte[]> trackData = Pair.create(track, contents.getBytes());
Message m = h.obtainMessage(what, arg1, 0, trackData);
h.sendMessage(m);
return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
}
public void run() {
int res = addTrack();
// Report the result to the app, then tear down the worker thread.
if (mEventHandler != null) {
Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
mEventHandler.sendMessage(m);
}
thread.getLooper().quitSafely();
}
});
}
Example use of android.media.MediaFormat in project Camera-Roll-Android-App (kollerlukas): class Video, method retrieveFrameRate.
/**
 * Reads the frame rate declared in this video's container metadata.
 *
 * <p>Scans every track of the file at {@code getPath()}; if more than one
 * track declares {@link MediaFormat#KEY_FRAME_RATE}, the value from the last
 * such track wins.
 *
 * @return the declared frame rate in frames per second, or {@code -1} if no
 *     track declares one or the file cannot be read
 */
public int retrieveFrameRate() {
    final MediaExtractor mediaExtractor = new MediaExtractor();
    int declaredFrameRate = -1;
    try {
        // Adjust data source as per the requirement if file, URI, etc.
        mediaExtractor.setDataSource(getPath());
        final int trackCount = mediaExtractor.getTrackCount();
        for (int trackIndex = 0; trackIndex < trackCount; trackIndex++) {
            final MediaFormat trackFormat = mediaExtractor.getTrackFormat(trackIndex);
            if (trackFormat.containsKey(MediaFormat.KEY_FRAME_RATE)) {
                declaredFrameRate = trackFormat.getInteger(MediaFormat.KEY_FRAME_RATE);
            }
        }
    } catch (IOException e) {
        // Best-effort: unreadable source simply yields -1.
        e.printStackTrace();
    } finally {
        // Always free the extractor's native resources.
        mediaExtractor.release();
    }
    return declaredFrameRate;
}
Aggregations