Use of android.media.MediaRecorder in project android_frameworks_base by AOSPA.
The class Camera2RecordingTest, method videoSnapshotHelper.
/**
 * Simple wrapper to wrap normal/burst video snapshot tests
 */
private void videoSnapshotHelper(boolean burstTest) throws Exception {
    for (String id : mCameraIds) {
        try {
            Log.i(TAG, "Testing video snapshot for camera " + id);
            // Re-use the MediaRecorder object for the same camera device.
            mMediaRecorder = new MediaRecorder();
            openDevice(id);
            if (!mStaticInfo.isColorOutputSupported()) {
                Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                continue;
            }
            initSupportedVideoSize(id);
            // Test iteration starts...
            for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
                Log.v(TAG, String.format("Video snapshot: %d/%d",
                        iteration + 1, getIterationCount()));
                videoSnapshotTestByCamera(burstTest);
                getResultPrinter().printStatus(getIterationCount(), iteration + 1, id);
                Thread.sleep(getTestWaitIntervalMs());
            }
        } finally {
            closeDevice();
            releaseRecorder();
        }
    }
}
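Each pass through the loop above ends by calling closeDevice() and releaseRecorder() in the finally block, so the camera and recorder are torn down even when an iteration fails. The helper itself is not part of this snippet; a minimal sketch of what releaseRecorder() might do, assuming it only needs to reset and free the MediaRecorder, is:

private void releaseRecorder() {
    if (mMediaRecorder != null) {
        // Hypothetical body: return the recorder to its idle state and free native resources.
        mMediaRecorder.reset();
        mMediaRecorder.release();
        mMediaRecorder = null;
    }
}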
Use of android.media.MediaRecorder in project android_frameworks_base by AOSPA.
The class Camera2RecordingTest, method doBasicRecording.
private void doBasicRecording(boolean useVideoStab) throws Exception {
    for (int i = 0; i < mCameraIds.length; i++) {
        try {
            Log.i(TAG, "Testing basic recording for camera " + mCameraIds[i]);
            // Re-use the MediaRecorder object for the same camera device.
            mMediaRecorder = new MediaRecorder();
            openDevice(mCameraIds[i]);
            if (!mStaticInfo.isColorOutputSupported()) {
                Log.i(TAG, "Camera " + mCameraIds[i] + " does not support color outputs, skipping");
                continue;
            }
            if (!mStaticInfo.isVideoStabilizationSupported() && useVideoStab) {
                Log.i(TAG, "Camera " + mCameraIds[i]
                        + " does not support video stabilization, skipping the stabilization test");
                continue;
            }
            initSupportedVideoSize(mCameraIds[i]);
            // Test iteration starts...
            for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
                Log.v(TAG, String.format("Recording video: %d/%d",
                        iteration + 1, getIterationCount()));
                basicRecordingTestByCamera(mCamcorderProfileList, useVideoStab);
                getResultPrinter().printStatus(getIterationCount(), iteration + 1, mCameraIds[i]);
                Thread.sleep(getTestWaitIntervalMs());
            }
        } finally {
            closeDevice();
            releaseRecorder();
        }
    }
}
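basicRecordingTestByCamera(mCamcorderProfileList, useVideoStab) is not reproduced on this page. As a rough, assumed illustration only (configureRecorderFromProfile, profile, and outputPath are hypothetical names), a MediaRecorder driven by a Camera2 device is typically configured from a CamcorderProfile along these lines:

private void configureRecorderFromProfile(CamcorderProfile profile, String outputPath)
        throws IOException {
    mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
    // SURFACE lets the camera capture session feed frames through mMediaRecorder.getSurface().
    mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    mMediaRecorder.setProfile(profile);   // applies encoders, bit rate, size, and frame rate
    mMediaRecorder.setOutputFile(outputPath);
    mMediaRecorder.prepare();             // must be called before getSurface()/start()
}

The prepared recorder's surface can then be added as a target of the camera capture session before start() is called.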
Use of android.media.MediaRecorder in project Remindy by abicelis.
The class RecordAudioDialogFragment, method startRecording.
private void startRecording() {
    TransitionManager.beginDelayedTransition(mContainer);
    mFab.setImageDrawable(ContextCompat.getDrawable(getContext(), R.drawable.icon_stop_fab_mini));
    mFab.setSize(FloatingActionButton.SIZE_MINI);

    RelativeLayout.LayoutParams lp = (RelativeLayout.LayoutParams) mFab.getLayoutParams();
    // Same effect as lp.removeRule(...), but removeRule() is only available from API 17 :(
    lp.addRule(RelativeLayout.CENTER_HORIZONTAL, 0);
    lp.addRule(RelativeLayout.ALIGN_PARENT_RIGHT, 1);
    lp.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM, 1);
    mFab.setLayoutParams(lp);

    mTapToStartRecording.setVisibility(View.GONE);
    mRecordTime.setVisibility(View.VISIBLE);
    mVisualizer.setVisibility(View.VISIBLE);
    mRecIcon.setVisibility(View.VISIBLE);

    mRecorder = new MediaRecorder();
    mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
    mRecorder.setOutputFile(mAudioFilePath);
    mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
    try {
        mRecorder.prepare();
    } catch (IOException e) {
        Log.e(TAG, "prepare() failed");
    }
    mRecorder.start();

    mVizHandler.post(updateVisualizer);
    startTime = SystemClock.uptimeMillis();
    mTextHandler.postDelayed(updateTime, REPEAT_INTERVAL_TIME);
}
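The snippet above only starts the capture; the dialog's stop path is not shown here. A hedged sketch of a matching stopRecording() counterpart, assuming it simply tears down the recorder and removes the UI callbacks posted above, could look like:

private void stopRecording() {
    mVizHandler.removeCallbacks(updateVisualizer);
    mTextHandler.removeCallbacks(updateTime);
    if (mRecorder != null) {
        try {
            // stop() throws a RuntimeException if no valid audio data was recorded.
            mRecorder.stop();
        } catch (RuntimeException e) {
            Log.e(TAG, "stop() failed");
        }
        mRecorder.release();
        mRecorder = null;
    }
}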
Use of android.media.MediaRecorder in project android_frameworks_base by crdroidandroid.
The class Camera2RecordingTest, method constrainedHighSpeedRecording.
private void constrainedHighSpeedRecording() throws Exception {
    for (String id : mCameraIds) {
        try {
            Log.i(TAG, "Testing constrained high speed recording for camera " + id);
            // Re-use the MediaRecorder object for the same camera device.
            mMediaRecorder = new MediaRecorder();
            openDevice(id);
            if (!mStaticInfo.isConstrainedHighSpeedVideoSupported()) {
                Log.i(TAG, "Camera " + id + " doesn't support high speed recording, skipping.");
                continue;
            }
            // Test iteration starts...
            for (int iteration = 0; iteration < getIterationCount(); ++iteration) {
                Log.v(TAG, String.format("Constrained high speed recording: %d/%d",
                        iteration + 1, getIterationCount()));
                StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                Size[] highSpeedVideoSizes = config.getHighSpeedVideoSizes();
                for (Size size : highSpeedVideoSizes) {
                    List<Range<Integer>> fixedFpsRanges =
                            getHighSpeedFixedFpsRangeForSize(config, size);
                    mCollector.expectTrue("Unable to find the fixed frame rate fps range for "
                            + "size " + size, fixedFpsRanges.size() > 0);
                    // Test recording for each FPS range
                    for (Range<Integer> fpsRange : fixedFpsRanges) {
                        int captureRate = fpsRange.getLower();
                        final int VIDEO_FRAME_RATE = 30;
                        // Skip the test if this range exceeds the highest recording FPS
                        // supported by CamcorderProfile.
                        if (fpsRange.getUpper() > getFpsFromHighSpeedProfileForSize(size)) {
                            Log.w(TAG, "high speed recording " + size + "@" + captureRate + "fps"
                                    + " is not supported by CamcorderProfile");
                            continue;
                        }
                        mOutMediaFileName = VIDEO_FILE_PATH + "/test_cslowMo_video_" + captureRate
                                + "fps_" + id + "_" + size.toString() + ".mp4";
                        prepareRecording(size, VIDEO_FRAME_RATE, captureRate);
                        // Prepare preview surface by using video size.
                        updatePreviewSurfaceWithVideo(size, captureRate);
                        // Start recording
                        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
                        startSlowMotionRecording(/*useMediaRecorder*/ true, VIDEO_FRAME_RATE,
                                captureRate, fpsRange, resultListener,
                                /*useHighSpeedSession*/ true);
                        // Record certain duration.
                        SystemClock.sleep(RECORDING_DURATION_MS);
                        // Stop recording and preview
                        stopRecording(/*useMediaRecorder*/ true);
                        // Convert number of frames camera produced into the duration in unit of ms.
                        int durationMs = (int) (resultListener.getTotalNumFrames() * 1000.0f
                                / VIDEO_FRAME_RATE);
                        // Validation.
                        validateRecording(size, durationMs);
                    }
                    getResultPrinter().printStatus(getIterationCount(), iteration + 1, id);
                    Thread.sleep(getTestWaitIntervalMs());
                }
            }
        } finally {
            closeDevice();
            releaseRecorder();
        }
    }
}
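getHighSpeedFixedFpsRangeForSize(config, size) is referenced above but not shown. A sketch of what it could do, assuming a "fixed" range means one whose lower and upper bounds are equal, is:

private List<Range<Integer>> getHighSpeedFixedFpsRangeForSize(StreamConfigurationMap config,
        Size size) {
    List<Range<Integer>> fixedRanges = new ArrayList<>();
    // getHighSpeedVideoFpsRangesFor() lists all FPS ranges supported for this video size.
    for (Range<Integer> range : config.getHighSpeedVideoFpsRangesFor(size)) {
        if (range.getLower().equals(range.getUpper())) {
            fixedRanges.add(range);
        }
    }
    return fixedRanges;
}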