Use of android.media.MediaRecorder in the project libstreaming by fyhertz:
class AudioStream, method encodeWithMediaRecorder.
/**
 * Streams audio with the MediaRecorder API by redirecting the recorder's
 * output into a local socket (or pipe) whose other end feeds the RTP packetizer.
 *
 * @throws IOException if the local sockets cannot be created or connected
 */
@Override
protected void encodeWithMediaRecorder() throws IOException {
    // We need a local socket to forward data output by the recorder to the packetizer
    createSockets();
    Log.v(TAG, "Requested audio with " + mQuality.bitRate / 1000 + "kbps" + " at " + mQuality.samplingRate / 1000 + "kHz");
    // Configure the recorder: source, container format, codec and quality settings.
    mMediaRecorder = new MediaRecorder();
    mMediaRecorder.setAudioSource(mAudioSource);
    mMediaRecorder.setOutputFormat(mOutputFormat);
    mMediaRecorder.setAudioEncoder(mAudioEncoder);
    mMediaRecorder.setAudioChannels(1);
    mMediaRecorder.setAudioSamplingRate(mQuality.samplingRate);
    mMediaRecorder.setAudioEncodingBitRate(mQuality.bitRate);
    // We write the output of the recorder in a local socket instead of a file!
    // This one little trick makes streaming feasible quite simply: data from the
    // recorder can then be manipulated at the other end of the socket.
    FileDescriptor fd = null;
    if (sPipeApi == PIPE_API_PFD) {
        fd = mParcelWrite.getFileDescriptor();
    } else {
        fd = mSender.getFileDescriptor();
    }
    // Fix: setOutputFile(fd) was previously called twice in a row; once is enough.
    mMediaRecorder.setOutputFile(fd);
    mMediaRecorder.prepare();
    mMediaRecorder.start();
    InputStream is = null;
    if (sPipeApi == PIPE_API_PFD) {
        // ParcelFileDescriptor pipe: the read end is wrapped in an auto-closing stream.
        is = new ParcelFileDescriptor.AutoCloseInputStream(mParcelRead);
    } else {
        try {
            // mReceiver.getInputStream contains the data produced by the recorder
            is = mReceiver.getInputStream();
        } catch (IOException e) {
            stop();
            // Preserve the original exception as the cause instead of discarding it.
            throw new IOException("Something happened with the local sockets :/ Start failed !", e);
        }
    }
    // The packetizer encapsulates this stream in an RTP stream and sends it over the network.
    mPacketizer.setInputStream(is);
    mPacketizer.start();
    mStreaming = true;
}
Use of android.media.MediaRecorder in the project libstreaming by fyhertz:
class H264Stream, method testMediaRecorderAPI.
/**
 * Probes H.264 support of the MediaRecorder API by recording a short dummy clip
 * to external storage and parsing SPS/PPS/profile out of the resulting MP4.
 * The result is cached in SharedPreferences keyed by the requested quality, so
 * the (slow) camera test only runs once per configuration.
 *
 * Must NOT be called from the UI thread: it sleeps and blocks on a semaphore.
 *
 * @return the MP4Config (profile level, Base64 SPS, Base64 PPS) for the requested quality
 * @throws StorageUnavailableException if external storage is absent or not writable
 * @throws ConfNotSupportedException   if the camera/recorder rejects the configuration
 * @throws IOException                 if the test file cannot be parsed
 */
private MP4Config testMediaRecorderAPI() throws RuntimeException, IOException {
    String key = PREF_PREFIX + "h264-mr-" + mRequestedQuality.framerate + "," + mRequestedQuality.resX + "," + mRequestedQuality.resY;
    // Fast path: return the cached result for this exact quality, if any.
    if (mSettings != null && mSettings.contains(key)) {
        String[] s = mSettings.getString(key, "").split(",");
        return new MP4Config(s[0], s[1], s[2]);
    }
    if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
        throw new StorageUnavailableException("No external storage or external storage not ready !");
    }
    final String TESTFILE = Environment.getExternalStorageDirectory().getPath() + "/spydroid-test.mp4";
    Log.i(TAG, "Testing H264 support... Test file saved at: " + TESTFILE);
    try {
        File file = new File(TESTFILE);
        file.createNewFile();
    } catch (IOException e) {
        throw new StorageUnavailableException(e.getMessage());
    }
    // Save flash state & set it to false so that led remains off while testing h264
    boolean savedFlashState = mFlashEnabled;
    mFlashEnabled = false;
    boolean previewStarted = mPreviewStarted;
    boolean cameraOpen = mCamera != null;
    createCamera();
    // Stops the preview if needed
    if (mPreviewStarted) {
        lockCamera();
        try {
            mCamera.stopPreview();
        } catch (Exception e) {
            // Best effort: a failing stopPreview must not abort the capability test.
        }
        mPreviewStarted = false;
    }
    // Give the camera a moment to settle before handing it to MediaRecorder.
    try {
        Thread.sleep(100);
    } catch (InterruptedException e1) {
        // Restore the interrupt status instead of silently swallowing it.
        Thread.currentThread().interrupt();
    }
    unlockCamera();
    try {
        mMediaRecorder = new MediaRecorder();
        mMediaRecorder.setCamera(mCamera);
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mMediaRecorder.setVideoEncoder(mVideoEncoder);
        mMediaRecorder.setPreviewDisplay(mSurfaceView.getHolder().getSurface());
        mMediaRecorder.setVideoSize(mRequestedQuality.resX, mRequestedQuality.resY);
        mMediaRecorder.setVideoFrameRate(mRequestedQuality.framerate);
        mMediaRecorder.setVideoEncodingBitRate((int) (mRequestedQuality.bitrate * 0.8));
        mMediaRecorder.setOutputFile(TESTFILE);
        mMediaRecorder.setMaxDuration(3000);
        // We wait a little and stop recording: the listener releases mLock when
        // the recorder signals MAX_DURATION_REACHED (or any other info event).
        mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
            public void onInfo(MediaRecorder mr, int what, int extra) {
                Log.d(TAG, "MediaRecorder callback called !");
                if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
                    Log.d(TAG, "MediaRecorder: MAX_DURATION_REACHED");
                } else if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
                    Log.d(TAG, "MediaRecorder: MAX_FILESIZE_REACHED");
                } else if (what == MediaRecorder.MEDIA_RECORDER_INFO_UNKNOWN) {
                    Log.d(TAG, "MediaRecorder: INFO_UNKNOWN");
                } else {
                    Log.d(TAG, "WTF ?");
                }
                mLock.release();
            }
        });
        // Start recording
        mMediaRecorder.prepare();
        mMediaRecorder.start();
        // Wait (bounded) for the recorder to reach its 3 s max duration.
        if (mLock.tryAcquire(6, TimeUnit.SECONDS)) {
            Log.d(TAG, "MediaRecorder callback was called :)");
            Thread.sleep(400);
        } else {
            Log.d(TAG, "MediaRecorder callback was not called after 6 seconds... :(");
        }
    } catch (IOException e) {
        throw new ConfNotSupportedException(e.getMessage());
    } catch (RuntimeException e) {
        throw new ConfNotSupportedException(e.getMessage());
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can observe the interruption.
        Thread.currentThread().interrupt();
    } finally {
        // Always tear down the recorder and restore camera/flash/preview state.
        try {
            mMediaRecorder.stop();
        } catch (Exception e) {
            // stop() throws if nothing was recorded; ignore on purpose.
        }
        mMediaRecorder.release();
        mMediaRecorder = null;
        lockCamera();
        if (!cameraOpen)
            destroyCamera();
        // Restore flash state
        mFlashEnabled = savedFlashState;
        if (previewStarted) {
            // If the preview was started before the test, we try to restart it.
            try {
                startPreview();
            } catch (Exception e) {
                // Best effort: restarting the preview is not essential to the test result.
            }
        }
    }
    // Retrieve SPS & PPS & ProfileId with MP4Config
    MP4Config config = new MP4Config(TESTFILE);
    // Delete dummy video
    File file = new File(TESTFILE);
    if (!file.delete())
        Log.e(TAG, "Temp file could not be erased");
    Log.i(TAG, "H264 Test succeeded...");
    // Save test result
    if (mSettings != null) {
        Editor editor = mSettings.edit();
        editor.putString(key, config.getProfileLevel() + "," + config.getB64SPS() + "," + config.getB64PPS());
        editor.commit();
    }
    return config;
}
Use of android.media.MediaRecorder in the project android_frameworks_base by DirtyUnicorns:
class MediaPlayerPerformance, method stressVideoRecord.
// Note: This test is to assume the mediaserver's pid is 34
/**
 * Stress-tests video recording by repeatedly recording short clips.
 *
 * @param frameRate   frames per second to record at
 * @param width       video width in pixels
 * @param height      video height in pixels
 * @param videoFormat MediaRecorder.VideoEncoder constant
 * @param outFormat   MediaRecorder.OutputFormat constant
 * @param outFile     path of the output file (overwritten each iteration)
 * @param videoOnly   if true, no audio track is configured
 * @return true if every iteration completed without an exception
 */
private boolean stressVideoRecord(int frameRate, int width, int height, int videoFormat, int outFormat, String outFile, boolean videoOnly) {
    // Video recording
    boolean doesTestFail = false;
    for (int i = 0; i < NUM_PLAYBACk_IN_EACH_LOOP; i++) {
        MediaRecorder mRecorder = new MediaRecorder();
        try {
            if (!videoOnly) {
                Log.v(TAG, "setAudioSource");
                mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            }
            mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
            mRecorder.setOutputFormat(outFormat);
            Log.v(TAG, "output format " + outFormat);
            mRecorder.setOutputFile(outFile);
            mRecorder.setVideoFrameRate(frameRate);
            mRecorder.setVideoSize(width, height);
            Log.v(TAG, "setEncoder");
            mRecorder.setVideoEncoder(videoFormat);
            if (!videoOnly) {
                mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
            }
            mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder();
            mRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
            mRecorder.prepare();
            mRecorder.start();
            Thread.sleep(MEDIA_STRESS_WAIT_TIME);
            mRecorder.stop();
            mRecorder.release();
            // Insert 2 seconds to make sure the camera released.
            Thread.sleep(SHORT_WAIT);
        } catch (Exception e) {
            // Fix: the failure message was previously passed as the log TAG;
            // use the class TAG and put the exception in the message instead.
            Log.v(TAG, "record video failed: " + e.toString());
            mRecorder.release();
            doesTestFail = true;
            break;
        }
    }
    return !doesTestFail;
}
Use of android.media.MediaRecorder in the project android_frameworks_base by DirtyUnicorns:
class Camera2RecordingTest, method constrainedHighSpeedRecording.
/**
 * Exercises constrained high speed (slow-motion) recording on every camera
 * that advertises support, over every advertised high-speed size and fixed
 * FPS range, validating the recorded clip duration for each combination.
 *
 * @throws Exception on any recording, device or validation failure
 */
private void constrainedHighSpeedRecording() throws Exception {
for (String id : mCameraIds) {
try {
Log.i(TAG, "Testing constrained high speed recording for camera " + id);
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(id);
// Skip cameras without constrained high-speed support entirely.
if (!mStaticInfo.isConstrainedHighSpeedVideoSupported()) {
Log.i(TAG, "Camera " + id + " doesn't support high speed recording, skipping.");
continue;
}
// Test iteration starts...
for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
Log.v(TAG, String.format("Constrained high speed recording: %d/%d", iteration + 1, getIterationCount()));
StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] highSpeedVideoSizes = config.getHighSpeedVideoSizes();
for (Size size : highSpeedVideoSizes) {
List<Range<Integer>> fixedFpsRanges = getHighSpeedFixedFpsRangeForSize(config, size);
mCollector.expectTrue("Unable to find the fixed frame rate fps range for " + "size " + size, fixedFpsRanges.size() > 0);
// Test recording for each FPS range
for (Range<Integer> fpsRange : fixedFpsRanges) {
int captureRate = fpsRange.getLower();
// Playback frame rate of the slow-motion clip (capture runs faster).
final int VIDEO_FRAME_RATE = 30;
// Skip the test if the highest recording FPS supported by CamcorderProfile
if (fpsRange.getUpper() > getFpsFromHighSpeedProfileForSize(size)) {
Log.w(TAG, "high speed recording " + size + "@" + captureRate + "fps" + " is not supported by CamcorderProfile");
continue;
}
mOutMediaFileName = VIDEO_FILE_PATH + "/test_cslowMo_video_" + captureRate + "fps_" + id + "_" + size.toString() + ".mp4";
prepareRecording(size, VIDEO_FRAME_RATE, captureRate);
// prepare preview surface by using video size.
updatePreviewSurfaceWithVideo(size, captureRate);
// Start recording
SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
startSlowMotionRecording(/*useMediaRecorder*/
true, VIDEO_FRAME_RATE, captureRate, fpsRange, resultListener, /*useHighSpeedSession*/
true);
// Record certain duration.
SystemClock.sleep(RECORDING_DURATION_MS);
// Stop recording and preview
stopRecording(/*useMediaRecorder*/
true);
// Convert number of frames camera produced into the duration in unit of ms.
int durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f / VIDEO_FRAME_RATE);
// Validation.
validateRecording(size, durationMs);
}
getResultPrinter().printStatus(getIterationCount(), iteration + 1, id);
Thread.sleep(getTestWaitIntervalMs());
}
}
} finally {
// Always release the device and recorder, even if an iteration threw.
closeDevice();
releaseRecorder();
}
}
}
Use of android.media.MediaRecorder in the project android_frameworks_base by DirtyUnicorns:
class MediaEncoderFilter, method startRecording.
/**
 * Starts a MediaRecorder-backed recording session for this filter: allocates
 * a GL frame bound to the existing FBO for the screen, prepares and starts
 * the recorder, then registers the recorder's surface with the GL environment.
 *
 * @param context the filter context providing the frame manager and GL environment
 * @throws RuntimeException if MediaRecorder.prepare() fails
 */
private void startRecording(FilterContext context) {
    if (mLogVerbose)
        Log.v(TAG, "Starting recording");
    // Create a frame representing the screen
    MutableFrameFormat screenFormat = new MutableFrameFormat(FrameFormat.TYPE_BYTE, FrameFormat.TARGET_GPU);
    screenFormat.setBytesPerSample(4);
    int width, height;
    boolean widthHeightSpecified = mWidth > 0 && mHeight > 0;
    // Explicit dimensions win; otherwise fall back to the CamcorderProfile's.
    if (mProfile != null && !widthHeightSpecified) {
        width = mProfile.videoFrameWidth;
        height = mProfile.videoFrameHeight;
    } else {
        width = mWidth;
        height = mHeight;
    }
    screenFormat.setDimensions(width, height);
    mScreen = (GLFrame) context.getFrameManager().newBoundFrame(screenFormat, GLFrame.EXISTING_FBO_BINDING, 0);
    // Initialize the media recorder
    mMediaRecorder = new MediaRecorder();
    updateMediaRecorderParams();
    try {
        mMediaRecorder.prepare();
    } catch (IllegalStateException e) {
        // Rethrow as-is: must not be wrapped by the broad Exception handler below.
        throw e;
    } catch (IOException e) {
        // Fix: the two concatenated literals previously produced
        // "IOException inMediaRecorder.prepare()!" (missing space).
        throw new RuntimeException("IOException in MediaRecorder.prepare()!", e);
    } catch (Exception e) {
        throw new RuntimeException("Unknown Exception in MediaRecorder.prepare()!", e);
    }
    // Make sure start() is called before trying to
    // register the surface. The native window handle needed to create
    // the surface is initiated in start()
    mMediaRecorder.start();
    if (mLogVerbose)
        Log.v(TAG, "Open: registering surface from Mediarecorder");
    mSurfaceId = context.getGLEnvironment().registerSurfaceFromMediaRecorder(mMediaRecorder);
    mNumFramesEncoded = 0;
    mRecordingActive = true;
}
Aggregations