Use of android.media.MediaRecorder in the project android_frameworks_base by ParanoidAndroid.
From the class MediaEncoderFilter, method startRecording.
/**
 * Configures and starts a MediaRecorder encoding session, binding a GPU frame
 * to the currently-bound FBO and registering the recorder's surface with the
 * GL environment. Sets {@code mRecordingActive} on success.
 *
 * @param context filter context supplying the frame manager and GL environment
 * @throws RuntimeException if {@code MediaRecorder.prepare()} fails
 */
private void startRecording(FilterContext context) {
    if (mLogVerbose)
        Log.v(TAG, "Starting recording");
    // Create a frame representing the screen
    MutableFrameFormat screenFormat = new MutableFrameFormat(FrameFormat.TYPE_BYTE, FrameFormat.TARGET_GPU);
    screenFormat.setBytesPerSample(4);
    int width, height;
    boolean widthHeightSpecified = mWidth > 0 && mHeight > 0;
    // If no explicit width/height were set, fall back to the dimensions
    // of that in the profile.
    if (mProfile != null && !widthHeightSpecified) {
        width = mProfile.videoFrameWidth;
        height = mProfile.videoFrameHeight;
    } else {
        width = mWidth;
        height = mHeight;
    }
    screenFormat.setDimensions(width, height);
    // Wrap the FBO that is already bound (no new GL allocation).
    mScreen = (GLFrame) context.getFrameManager().newBoundFrame(screenFormat, GLFrame.EXISTING_FBO_BINDING, 0);
    // Initialize the media recorder
    mMediaRecorder = new MediaRecorder();
    updateMediaRecorderParams();
    try {
        mMediaRecorder.prepare();
    } catch (IllegalStateException e) {
        // Programming error (wrong recorder state) — propagate unchanged.
        throw e;
    } catch (IOException e) {
        // FIX: the two concatenated literals lacked a separating space,
        // producing "IOException inMediaRecorder.prepare()!".
        throw new RuntimeException("IOException in MediaRecorder.prepare()!", e);
    } catch (Exception e) {
        throw new RuntimeException("Unknown Exception in MediaRecorder.prepare()!", e);
    }
    // Make sure start() is called before trying to
    // register the surface. The native window handle needed to create
    // the surface is initiated in start()
    mMediaRecorder.start();
    if (mLogVerbose)
        Log.v(TAG, "Open: registering surface from Mediarecorder");
    mSurfaceId = context.getGLEnvironment().registerSurfaceFromMediaRecorder(mMediaRecorder);
    mNumFramesEncoded = 0;
    mRecordingActive = true;
}
Use of android.media.MediaRecorder in the project android_frameworks_base by ParanoidAndroid.
From the class MediaPlayerPerformance, method stressAudioRecord.
/**
 * Stress test: repeatedly records a short 3GPP/AMR-NB audio clip from the
 * microphone to {@code filePath}, releasing the recorder after each iteration.
 *
 * @param filePath destination file, overwritten on each loop iteration
 */
public void stressAudioRecord(String filePath) {
    // This test is only for the short media file
    for (int i = 0; i < NUM_PLAYBACk_IN_EACH_LOOP; i++) {
        MediaRecorder mRecorder = new MediaRecorder();
        try {
            mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
            mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
            mRecorder.setOutputFile(filePath);
            mRecorder.prepare();
            mRecorder.start();
            Thread.sleep(MEDIA_STRESS_WAIT_TIME);
            mRecorder.stop();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status instead of swallowing it so
            // callers (or subsequent sleeps) can observe the interruption.
            Thread.currentThread().interrupt();
            Log.v(TAG, e.toString());
        } catch (Exception e) {
            Log.v(TAG, e.toString());
        } finally {
            // FIX: release exactly once via finally — the original released on
            // both the success path and the catch path, risking a double
            // release if stop()/release() itself threw.
            mRecorder.release();
        }
    }
}
Use of android.media.MediaRecorder in the project platform_frameworks_base by android.
From the class MediaEncoderFilter, method startRecording.
/**
 * Configures and starts a MediaRecorder encoding session, binding a GPU frame
 * to the currently-bound FBO and registering the recorder's surface with the
 * GL environment. Sets {@code mRecordingActive} on success.
 *
 * @param context filter context supplying the frame manager and GL environment
 * @throws RuntimeException if {@code MediaRecorder.prepare()} fails
 */
private void startRecording(FilterContext context) {
    if (mLogVerbose)
        Log.v(TAG, "Starting recording");
    // Create a frame representing the screen
    MutableFrameFormat screenFormat = new MutableFrameFormat(FrameFormat.TYPE_BYTE, FrameFormat.TARGET_GPU);
    screenFormat.setBytesPerSample(4);
    int width, height;
    boolean widthHeightSpecified = mWidth > 0 && mHeight > 0;
    // If no explicit width/height were set, fall back to the dimensions
    // of that in the profile.
    if (mProfile != null && !widthHeightSpecified) {
        width = mProfile.videoFrameWidth;
        height = mProfile.videoFrameHeight;
    } else {
        width = mWidth;
        height = mHeight;
    }
    screenFormat.setDimensions(width, height);
    // Wrap the FBO that is already bound (no new GL allocation).
    mScreen = (GLFrame) context.getFrameManager().newBoundFrame(screenFormat, GLFrame.EXISTING_FBO_BINDING, 0);
    // Initialize the media recorder
    mMediaRecorder = new MediaRecorder();
    updateMediaRecorderParams();
    try {
        mMediaRecorder.prepare();
    } catch (IllegalStateException e) {
        // Programming error (wrong recorder state) — propagate unchanged.
        throw e;
    } catch (IOException e) {
        // FIX: the two concatenated literals lacked a separating space,
        // producing "IOException inMediaRecorder.prepare()!".
        throw new RuntimeException("IOException in MediaRecorder.prepare()!", e);
    } catch (Exception e) {
        throw new RuntimeException("Unknown Exception in MediaRecorder.prepare()!", e);
    }
    // Make sure start() is called before trying to
    // register the surface. The native window handle needed to create
    // the surface is initiated in start()
    mMediaRecorder.start();
    if (mLogVerbose)
        Log.v(TAG, "Open: registering surface from Mediarecorder");
    mSurfaceId = context.getGLEnvironment().registerSurfaceFromMediaRecorder(mMediaRecorder);
    mNumFramesEncoded = 0;
    mRecordingActive = true;
}
Use of android.media.MediaRecorder in the project platform_frameworks_base by android.
From the class CodecTest, method mediaRecorderRecord.
/**
 * Records ~500ms of 3GPP/AMR-NB microphone audio to {@code filePath}, then
 * verifies the result by probing its duration with MediaPlayer.
 *
 * @param filePath destination file for the recording
 * @return {@code true} if the recorded file has a duration greater than zero
 */
public static boolean mediaRecorderRecord(String filePath) {
    Log.v(TAG, "SoundRecording - " + filePath);
    //This test is only for the short media file
    int duration = 0;
    MediaRecorder mRecorder = new MediaRecorder();
    try {
        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        mRecorder.setOutputFile(filePath);
        mRecorder.prepare();
        mRecorder.start();
        Thread.sleep(500);
        mRecorder.stop();
        Log.v(TAG, "sound recorded");
    } catch (InterruptedException e) {
        // FIX: restore the interrupt status rather than swallowing it.
        Thread.currentThread().interrupt();
        Log.v(TAG, e.toString());
    } catch (Exception e) {
        Log.v(TAG, e.toString());
    } finally {
        // FIX: release in finally so the recorder is freed even when
        // stop()/prepare() throws (was only released on the success path).
        mRecorder.release();
    }
    //Verify the recorded file
    MediaPlayer mp = new MediaPlayer();
    try {
        mp.setDataSource(filePath);
        mp.prepare();
        duration = mp.getDuration();
        Log.v(TAG, "Duration " + duration);
    } catch (Exception e) {
        // FIX: was an empty catch — at least log why verification failed.
        Log.v(TAG, e.toString());
    } finally {
        // FIX: release in finally — the original leaked the MediaPlayer when
        // setDataSource()/prepare() threw.
        mp.release();
    }
    // A positive duration means something was actually recorded.
    return duration > 0;
}
Use of android.media.MediaRecorder in the project platform_frameworks_base by android.
From the class MediaRecorderTest, method recordVideoFromSurface.
/**
 * Records video by drawing frames into the recorder's input surface (or a
 * caller-supplied persistent surface). Draws 10 frames before start() — which
 * must be dropped without corrupting the stream — then 90 frames after.
 *
 * @param frameRate         encoder frame rate
 * @param captureRate       if &gt; 0, enables time-lapse capture at this rate
 * @param width             video width in pixels
 * @param height            video height in pixels
 * @param videoFormat       MediaRecorder.VideoEncoder constant
 * @param outFormat         MediaRecorder.OutputFormat constant
 * @param outFile           destination path for the recording
 * @param videoOnly         if true, no audio track is configured
 * @param persistentSurface optional reusable input surface; may be null
 * @return {@code true} if recording completed without an exception
 */
private boolean recordVideoFromSurface(int frameRate, int captureRate, int width, int height, int videoFormat, int outFormat, String outFile, boolean videoOnly, Surface persistentSurface) {
    Log.v(TAG, "recordVideoFromSurface");
    MediaRecorder recorder = new MediaRecorder();
    // normal capture at 33ms / frame
    int sleepTime = 33;
    Surface surface = null;
    try {
        if (!videoOnly) {
            recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        }
        recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        recorder.setOutputFormat(outFormat);
        recorder.setOutputFile(outFile);
        recorder.setVideoFrameRate(frameRate);
        if (captureRate > 0) {
            // Time-lapse mode: pace frame submission to the capture rate.
            recorder.setCaptureRate(captureRate);
            sleepTime = 1000 / captureRate;
        }
        recorder.setVideoSize(width, height);
        recorder.setVideoEncoder(videoFormat);
        if (!videoOnly) {
            recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        }
        if (persistentSurface != null) {
            Log.v(TAG, "using persistent surface");
            surface = persistentSurface;
            recorder.setInputSurface(surface);
        }
        recorder.prepare();
        if (persistentSurface == null) {
            surface = recorder.getSurface();
        }
        Paint paint = new Paint();
        paint.setTextSize(16);
        paint.setColor(Color.RED);
        /* Test: draw 10 frames at 30fps before start
         * these should be dropped and not causing malformed stream.
         */
        drawNumberedFrames(surface, paint, 0, 10, sleepTime);
        Log.v(TAG, "start");
        recorder.start();
        /* Test: draw another 90 frames at 30fps after start */
        drawNumberedFrames(surface, paint, 10, 100, sleepTime);
        Log.v(TAG, "stop");
        recorder.stop();
    } catch (InterruptedException e) {
        // FIX: restore interrupt status — the broad catch below would have
        // silently swallowed it.
        Thread.currentThread().interrupt();
        Log.v(TAG, "record video failed: " + e.toString());
        return false;
    } catch (Exception e) {
        Log.v(TAG, "record video failed: " + e.toString());
        return false;
    } finally {
        recorder.release();
        // release surface if not using persistent surface
        if (persistentSurface == null && surface != null) {
            surface.release();
        }
    }
    return true;
}

/**
 * Draws frames [start, end) onto {@code surface}: a grayscale background that
 * brightens with the frame index plus a red "Frame #N" label, pausing
 * {@code sleepTime} ms between frames. Extracted from the two previously
 * duplicated drawing loops.
 */
private static void drawNumberedFrames(Surface surface, Paint paint, int start, int end, int sleepTime) throws InterruptedException {
    for (int i = start; i < end; i++) {
        Canvas canvas = surface.lockCanvas(null);
        int background = (i * 255 / 99);
        canvas.drawARGB(255, background, background, background);
        String text = "Frame #" + i;
        canvas.drawText(text, 100, 100, paint);
        surface.unlockCanvasAndPost(canvas);
        Thread.sleep(sleepTime);
    }
}
Aggregations