Use of net.sourceforge.opencamera.CameraController.CameraControllerException in project OpenCamera by ageback.
The class Preview, method openCameraCore.
/**
* Open the camera - this should be called from a background thread, to avoid hogging the UI thread.
*/
private CameraController openCameraCore(int cameraId) {
long debug_time = 0;
if (MyDebug.LOG) {
Log.d(TAG, "openCameraCore()");
debug_time = System.currentTimeMillis();
}
// We pass a camera controller back to the UI thread rather than assigning to camera_controller here, because:
// * If we set camera_controller directly, we'd need to synchronize, otherwise risk of memory barrier issues
// * Risk of race conditions if UI thread accesses camera_controller before we have called cameraOpened().
CameraController camera_controller_local;
try {
if (MyDebug.LOG) {
Log.d(TAG, "try to open camera: " + cameraId);
Log.d(TAG, "openCamera: time before opening camera: " + (System.currentTimeMillis() - debug_time));
}
if (test_fail_open_camera) {
if (MyDebug.LOG)
Log.d(TAG, "test failing to open camera");
throw new CameraControllerException();
}
CameraController.ErrorCallback cameraErrorCallback = new CameraController.ErrorCallback() {
public void onError() {
if (MyDebug.LOG)
Log.e(TAG, "error from CameraController: camera device failed");
if (camera_controller != null) {
camera_controller = null;
camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED;
applicationInterface.onCameraError();
}
}
};
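// On Android L+ ("using_android_l") the Camera2-based CameraController2 is used, which also takes a
// callback for preview failures; older devices fall back to CameraController1 (the original Camera API).
// Either constructor may throw CameraControllerException, which is handled by the catch block below.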
if (using_android_l) {
CameraController.ErrorCallback previewErrorCallback = new CameraController.ErrorCallback() {
public void onError() {
if (MyDebug.LOG)
Log.e(TAG, "error from CameraController: preview failed to start");
applicationInterface.onFailedStartPreview();
}
};
camera_controller_local = new CameraController2(Preview.this.getContext(), cameraId, previewErrorCallback, cameraErrorCallback);
if (applicationInterface.useCamera2FakeFlash()) {
camera_controller_local.setUseCamera2FakeFlash(true);
}
} else
camera_controller_local = new CameraController1(cameraId, cameraErrorCallback);
// throw new CameraControllerException(); // uncomment to test camera not opening
} catch (CameraControllerException e) {
if (MyDebug.LOG)
Log.e(TAG, "Failed to open camera: " + e.getMessage());
e.printStackTrace();
camera_controller_local = null;
}
if (MyDebug.LOG) {
Log.d(TAG, "openCamera: total time for openCameraCore: " + (System.currentTimeMillis() - debug_time));
}
return camera_controller_local;
}
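The method above returns the controller (or null if a CameraControllerException was caught) instead of assigning camera_controller itself. Below is a minimal sketch of how a caller might drive it from a background thread and hand the result back to the UI thread; the wrapper name openCameraInBackground() and the exact hand-off to cameraOpened() are illustrative assumptions, not the project's actual code.
private void openCameraInBackground(final int cameraId) {
    new Thread(new Runnable() {
        @Override
        public void run() {
            // off the UI thread: a null result means a CameraControllerException was caught and logged
            final CameraController controller = openCameraCore(cameraId);
            ((Activity) Preview.this.getContext()).runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (controller == null) {
                        applicationInterface.onCameraError(); // opening failed
                    } else {
                        camera_controller = controller; // safe to publish on the UI thread now
                        cameraOpened(); // hypothetical hand-off; the real method may take arguments
                    }
                }
            });
        }
    }).start();
}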
Use of net.sourceforge.opencamera.CameraController.CameraControllerException in project OpenCamera by ageback.
The class Preview, method startVideoRecording.
/**
* Start video recording.
*/
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void startVideoRecording(final boolean max_filesize_restart) {
// clear focus rectangle (don't do for taking photos yet)
focus_success = FOCUS_DONE;
// initialise just in case:
boolean created_video_file = false;
video_method = ApplicationInterface.VIDEOMETHOD_FILE;
video_uri = null;
video_filename = null;
ParcelFileDescriptor pfd_saf = null;
try {
video_method = applicationInterface.createOutputVideoMethod();
if (MyDebug.LOG)
Log.e(TAG, "video_method? " + video_method);
if (video_method == ApplicationInterface.VIDEOMETHOD_FILE) {
File videoFile = applicationInterface.createOutputVideoFile();
video_filename = videoFile.getAbsolutePath();
created_video_file = true;
if (MyDebug.LOG)
Log.d(TAG, "save to: " + video_filename);
} else {
if (video_method == ApplicationInterface.VIDEOMETHOD_SAF) {
video_uri = applicationInterface.createOutputVideoSAF();
} else {
video_uri = applicationInterface.createOutputVideoUri();
}
created_video_file = true;
if (MyDebug.LOG)
Log.d(TAG, "save to: " + video_uri);
pfd_saf = getContext().getContentResolver().openFileDescriptor(video_uri, "rw");
}
} catch (IOException e) {
if (MyDebug.LOG)
Log.e(TAG, "Couldn't create media video file; check storage permissions?");
e.printStackTrace();
applicationInterface.onFailedCreateVideoFileError();
applicationInterface.cameraInOperation(false, true);
}
if (created_video_file) {
final VideoProfile profile = getVideoProfile();
if (MyDebug.LOG) {
Log.d(TAG, "current_video_quality: " + this.video_quality_handler.getCurrentVideoQualityIndex());
if (this.video_quality_handler.getCurrentVideoQualityIndex() != -1)
Log.d(TAG, "current_video_quality value: " + this.video_quality_handler.getCurrentVideoQuality());
Log.d(TAG, "resolution " + profile.videoFrameWidth + " x " + profile.videoFrameHeight);
Log.d(TAG, "bit rate " + profile.videoBitRate);
}
boolean enable_sound = applicationInterface.getShutterSoundPref();
if (MyDebug.LOG)
Log.d(TAG, "enable_sound? " + enable_sound);
// Camera2 API can disable video sound too
camera_controller.enableShutterSound(enable_sound);
MediaRecorder local_video_recorder = new MediaRecorder();
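// unlock the camera from the controller so that the MediaRecorder can use it (required by the old Camera API before recording)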
this.camera_controller.unlock();
if (MyDebug.LOG)
Log.d(TAG, "set video listeners");
local_video_recorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
@Override
public void onInfo(MediaRecorder mr, int what, int extra) {
if (MyDebug.LOG)
Log.d(TAG, "MediaRecorder info: " + what + " extra: " + extra);
final int final_what = what;
final int final_extra = extra;
Activity activity = (Activity) Preview.this.getContext();
activity.runOnUiThread(new Runnable() {
public void run() {
// run on the main thread, to avoid problems if the camera is closed at the same time
onVideoInfo(final_what, final_extra);
}
});
}
});
local_video_recorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
@Override
public void onError(MediaRecorder mr, int what, int extra) {
final int final_what = what;
final int final_extra = extra;
Activity activity = (Activity) Preview.this.getContext();
activity.runOnUiThread(new Runnable() {
public void run() {
// run on the main thread, to avoid problems if the camera is closed at the same time
onVideoError(final_what, final_extra);
}
});
}
});
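// controller-specific recorder setup that must happen before prepare() (the post-prepare half is initVideoRecorderPostPrepare() below)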
camera_controller.initVideoRecorderPrePrepare(local_video_recorder);
if (profile.no_audio_permission) {
showToast(null, R.string.permission_record_audio_not_available);
}
boolean store_location = applicationInterface.getGeotaggingPref();
if (store_location && applicationInterface.getLocation() != null) {
Location location = applicationInterface.getLocation();
if (MyDebug.LOG) {
Log.d(TAG, "set video location: lat " + location.getLatitude() + " long " + location.getLongitude() + " accuracy " + location.getAccuracy());
}
local_video_recorder.setLocation((float) location.getLatitude(), (float) location.getLongitude());
}
if (MyDebug.LOG)
Log.d(TAG, "copy video profile to media recorder");
profile.copyToMediaRecorder(local_video_recorder);
// true if we called applicationInterface.startingVideo()
boolean told_app_starting = false;
try {
ApplicationInterface.VideoMaxFileSize video_max_filesize = applicationInterface.getVideoMaxFileSizePref();
long max_filesize = video_max_filesize.max_filesize;
// max_filesize = 15*1024*1024; // test
if (max_filesize > 0) {
if (MyDebug.LOG)
Log.d(TAG, "set max file size of: " + max_filesize);
try {
local_video_recorder.setMaxFileSize(max_filesize);
} catch (RuntimeException e) {
// Google Camera warns this can happen - for example, if 64-bit filesizes not supported
if (MyDebug.LOG)
Log.e(TAG, "failed to set max filesize of: " + max_filesize);
e.printStackTrace();
}
}
// note, we set this even if max_filesize==0, as it will still apply when hitting device max filesize limit
video_restart_on_max_filesize = video_max_filesize.auto_restart;
// handle restart timer
long video_max_duration = applicationInterface.getVideoMaxDurationPref();
if (MyDebug.LOG)
Log.d(TAG, "user preference video_max_duration: " + video_max_duration);
if (max_filesize_restart) {
if (video_max_duration > 0) {
video_max_duration -= video_accumulated_time;
// this should be greater than or equal to min_safe_restart_video_time, as too short a remaining time should have been caught in restartVideo()
if (video_max_duration < min_safe_restart_video_time) {
if (MyDebug.LOG)
Log.e(TAG, "trying to restart video with too short a time: " + video_max_duration);
video_max_duration = min_safe_restart_video_time;
}
}
} else {
video_accumulated_time = 0;
}
if (MyDebug.LOG)
Log.d(TAG, "actual video_max_duration: " + video_max_duration);
local_video_recorder.setMaxDuration((int) video_max_duration);
if (video_method == ApplicationInterface.VIDEOMETHOD_FILE) {
local_video_recorder.setOutputFile(video_filename);
} else {
local_video_recorder.setOutputFile(pfd_saf.getFileDescriptor());
}
applicationInterface.cameraInOperation(true, true);
told_app_starting = true;
applicationInterface.startingVideo();
/*if( true ) // test
throw new IOException();*/
cameraSurface.setVideoRecorder(local_video_recorder);
local_video_recorder.setOrientationHint(getImageVideoRotation());
if (MyDebug.LOG)
Log.d(TAG, "about to prepare video recorder");
local_video_recorder.prepare();
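// controller-specific setup that must happen after prepare() (e.g. attaching the recorder's surface to the Camera2 capture session)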
camera_controller.initVideoRecorderPostPrepare(local_video_recorder);
if (MyDebug.LOG)
Log.d(TAG, "about to start video recorder");
try {
local_video_recorder.start();
this.video_recorder = local_video_recorder;
videoRecordingStarted(max_filesize_restart);
} catch (RuntimeException e) {
// needed for the emulator at least - although MediaRecorder isn't meant to work with the emulator, it's good to fail gracefully
Log.e(TAG, "runtime exception starting video recorder");
e.printStackTrace();
// still assign, so failedToStartVideoRecorder() will release the video_recorder
this.video_recorder = local_video_recorder;
// told_app_starting must be true if we're here
applicationInterface.stoppingVideo();
failedToStartVideoRecorder(profile);
}
/*final MediaRecorder local_video_recorder_f = local_video_recorder;
new AsyncTask<Void, Void, Boolean>() {
private static final String TAG = "video_recorder.start";
@Override
protected Boolean doInBackground(Void... voids) {
if( MyDebug.LOG )
Log.d(TAG, "doInBackground, async task: " + this);
try {
local_video_recorder_f.start();
}
catch(RuntimeException e) {
// needed for emulator at least - although MediaRecorder not meant to work with emulator, it's good to fail gracefully
Log.e(TAG, "runtime exception starting video recorder");
e.printStackTrace();
return false;
}
return true;
}
@Override
protected void onPostExecute(Boolean success) {
if( MyDebug.LOG ) {
Log.d(TAG, "onPostExecute, async task: " + this);
Log.d(TAG, "success: " + success);
}
// still assign even if success==false, so failedToStartVideoRecorder() will release the video_recorder
Preview.this.video_recorder = local_video_recorder_f;
if( success ) {
videoRecordingStarted(max_filesize_restart);
}
else {
// told_app_starting must be true if we're here
applicationInterface.stoppingVideo();
failedToStartVideoRecorder(profile);
}
}
}.execute();*/
} catch (IOException e) {
if (MyDebug.LOG)
Log.e(TAG, "failed to save video");
e.printStackTrace();
this.video_recorder = local_video_recorder;
if (told_app_starting) {
applicationInterface.stoppingVideo();
}
applicationInterface.onFailedCreateVideoFileError();
video_recorder.reset();
video_recorder.release();
video_recorder = null;
video_recorder_is_paused = false;
applicationInterface.cameraInOperation(false, true);
this.reconnectCamera(true);
} catch (CameraControllerException e) {
if (MyDebug.LOG)
Log.e(TAG, "camera exception starting video recorder");
e.printStackTrace();
// still assign, so failedToStartVideoRecorder() will release the video_recorder
this.video_recorder = local_video_recorder;
if (told_app_starting) {
applicationInterface.stoppingVideo();
}
failedToStartVideoRecorder(profile);
} catch (NoFreeStorageException e) {
if (MyDebug.LOG)
Log.e(TAG, "nofreestorageexception starting video recorder");
e.printStackTrace();
this.video_recorder = local_video_recorder;
if (told_app_starting) {
applicationInterface.stoppingVideo();
}
video_recorder.reset();
video_recorder.release();
video_recorder = null;
video_recorder_is_paused = false;
applicationInterface.cameraInOperation(false, true);
this.reconnectCamera(true);
this.showToast(null, R.string.video_no_free_space);
}
}
}
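The IOException and NoFreeStorageException handlers above share most of their cleanup (the CameraControllerException path delegates to failedToStartVideoRecorder() instead, and the per-error reporting such as onFailedCreateVideoFileError() or the no-free-space toast stays with the individual handlers). A hedged sketch of that shared cleanup, factored into a helper that does not exist in the project, might look like this:
// Illustrative only: the shared cleanup performed by the IOException and
// NoFreeStorageException handlers in startVideoRecording(); the helper name is an assumption.
private void cleanupAfterFailedVideoStart(boolean told_app_starting) {
    if (told_app_starting) {
        applicationInterface.stoppingVideo();
    }
    if (video_recorder != null) {
        video_recorder.reset();   // return the MediaRecorder to its idle state
        video_recorder.release(); // free the native recorder resources
        video_recorder = null;
    }
    video_recorder_is_paused = false;
    applicationInterface.cameraInOperation(false, true); // tell the app we're no longer recording
    reconnectCamera(true); // reconnect/relock the camera after the failed recording attempt
}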
Use of net.sourceforge.opencamera.CameraController.CameraControllerException in project OpenCamera by ageback.
The class Preview, method setupCamera.
/* Should only be called after the camera is first opened, or after the preview is paused.
* take_photo is true if we have been called from the TakePhoto widget (which means
* we'll take a photo immediately after startup).
* Important to call this when switching between photo and video mode, as ApplicationInterface
* preferences/parameters may be different (since we can support taking photos in video snapshot
* mode, but this may have different parameters).
*/
public void setupCamera(boolean take_photo) {
if (MyDebug.LOG)
Log.d(TAG, "setupCamera()");
long debug_time = 0;
if (MyDebug.LOG) {
debug_time = System.currentTimeMillis();
}
if (camera_controller == null) {
if (MyDebug.LOG)
Log.d(TAG, "camera not opened!");
return;
}
boolean do_startup_focus = !take_photo && applicationInterface.getStartupFocusPref();
if (MyDebug.LOG) {
Log.d(TAG, "take_photo? " + take_photo);
Log.d(TAG, "do_startup_focus? " + do_startup_focus);
}
// make sure we're into continuous video mode for reopening
// workaround for bug on Samsung Galaxy S5 with UHD, where if the user switches to another (non-continuous-video) focus mode, then goes to Settings, then returns and records video, the preview freezes and the video is corrupted
// so to be safe, we always reset to continuous video mode
// although I've now fixed this at the level where we close the settings, I've put this guard here, just in case the problem occurs from elsewhere
// we'll switch to the user-requested focus by calling setFocusPref() from setupCameraParameters() below
this.updateFocusForVideo();
try {
setupCameraParameters();
} catch (CameraControllerException e) {
e.printStackTrace();
applicationInterface.onCameraError();
closeCamera(false, null);
return;
}
// now switch to video if saved
boolean saved_is_video = applicationInterface.isVideoPref();
if (MyDebug.LOG) {
Log.d(TAG, "saved_is_video: " + saved_is_video);
}
if (saved_is_video && !supports_video) {
if (MyDebug.LOG)
Log.d(TAG, "but video not supported");
saved_is_video = false;
}
// must switch video before starting preview
if (saved_is_video != this.is_video) {
if (MyDebug.LOG)
Log.d(TAG, "switch video mode as not in correct mode");
this.switchVideo(true, false);
}
updateFlashForVideo();
if (take_photo) {
if (this.is_video) {
if (MyDebug.LOG)
Log.d(TAG, "switch to video for take_photo widget");
this.switchVideo(true, true);
}
}
// in theory it shouldn't matter if we call setVideoHighSpeed(true) if is_video==false, as it should only have an effect
// in video mode; but don't set high speed mode in photo mode just to be safe
// Setup for high speed - must be done after setupCameraParameters() and switching to video mode, but before setPreviewSize() and startCameraPreview()
camera_controller.setVideoHighSpeed(is_video && video_high_speed);
if (do_startup_focus && using_android_l && camera_controller.supportsAutoFocus()) {
// need to switch flash off for autofocus - and for Android L, need to do this before starting preview (otherwise it won't work in time); for old camera API, need to do this after starting preview!
set_flash_value_after_autofocus = "";
String old_flash_value = camera_controller.getFlashValue();
// also set flash_torch - otherwise we get bug where torch doesn't turn on when starting up in video mode (and it's not like we want to turn torch off for startup focus, anyway)
if (old_flash_value.length() > 0 && !old_flash_value.equals("flash_off") && !old_flash_value.equals("flash_torch")) {
set_flash_value_after_autofocus = old_flash_value;
camera_controller.setFlashValue("flash_off");
}
if (MyDebug.LOG)
Log.d(TAG, "set_flash_value_after_autofocus is now: " + set_flash_value_after_autofocus);
}
if (this.supports_raw && applicationInterface.getRawPref() != ApplicationInterface.RawPref.RAWPREF_JPEG_ONLY) {
camera_controller.setRaw(true, applicationInterface.getMaxRawImages());
} else {
camera_controller.setRaw(false, 0);
}
if (this.supports_expo_bracketing && applicationInterface.isExpoBracketingPref()) {
camera_controller.setExpoBracketing(true);
camera_controller.setExpoBracketingNImages(applicationInterface.getExpoBracketingNImagesPref());
camera_controller.setExpoBracketingStops(applicationInterface.getExpoBracketingStopsPref());
// setUseExpoFastBurst called when taking a photo
} else {
camera_controller.setExpoBracketing(false);
}
if (this.supports_burst && applicationInterface.isCameraBurstPref()) {
if (applicationInterface.getBurstForNoiseReduction()) {
if (this.supports_exposure_time) {
// noise reduction mode also needs manual exposure
camera_controller.setWantBurst(true);
camera_controller.setBurstForNoiseReduction(true);
} else {
camera_controller.setWantBurst(false);
}
} else {
camera_controller.setWantBurst(true);
camera_controller.setBurstForNoiseReduction(false);
camera_controller.setBurstNImages(applicationInterface.getBurstNImages());
}
} else {
camera_controller.setWantBurst(false);
}
camera_controller.setOptimiseAEForDRO(applicationInterface.getOptimiseAEForDROPref());
// Must set preview size before starting camera preview
// and must do it after setting photo vs video mode
// need to call this when we switch cameras, not just when we run for the first time
setPreviewSize();
if (MyDebug.LOG) {
Log.d(TAG, "setupCamera: time after setting preview size: " + (System.currentTimeMillis() - debug_time));
}
// Must call startCameraPreview after checking if face detection is present - probably best to call it after setting all parameters that we want
startCameraPreview();
if (MyDebug.LOG) {
Log.d(TAG, "setupCamera: time after starting camera preview: " + (System.currentTimeMillis() - debug_time));
}
// also needs to be done after starting preview for some devices (e.g., Nexus 7)
if (this.has_zoom && applicationInterface.getZoomPref() != 0) {
zoomTo(applicationInterface.getZoomPref());
if (MyDebug.LOG) {
Log.d(TAG, "setupCamera: total time after zoomTo: " + (System.currentTimeMillis() - debug_time));
}
}
/*if( take_photo ) {
if( this.is_video ) {
if( MyDebug.LOG )
Log.d(TAG, "switch to video for take_photo widget");
this.switchVideo(false); // set during_startup to false, as we now need to reset the preview
}
}*/
// must call this after the above take_photo code for calling switchVideo
applicationInterface.cameraSetup();
if (MyDebug.LOG) {
Log.d(TAG, "setupCamera: total time after cameraSetup: " + (System.currentTimeMillis() - debug_time));
}
if (take_photo) {
// take photo after a delay - otherwise we sometimes get a black image?!
// also need a longer delay for continuous picture focus, to allow a chance to focus - 1000ms seems to work okay for Nexus 6, so use 1500ms to be safe
String focus_value = getCurrentFocusValue();
final int delay = (focus_value != null && focus_value.equals("focus_mode_continuous_picture")) ? 1500 : 500;
if (MyDebug.LOG)
Log.d(TAG, "delay for take photo: " + delay);
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
if (MyDebug.LOG)
Log.d(TAG, "do automatic take picture");
takePicture(false, false);
}
}, delay);
}
if (do_startup_focus) {
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
if (MyDebug.LOG)
Log.d(TAG, "do startup autofocus");
// so we get the autofocus when starting up - we do this on a delay, as calling it immediately means the autofocus doesn't seem to work properly sometimes (at least on Galaxy Nexus)
tryAutoFocus(true, false);
}
}, 500);
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCamera: total time after setupCamera: " + (System.currentTimeMillis() - debug_time));
}
}
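Within setupCamera(), the only CameraControllerException handling is around setupCameraParameters(): the exception is converted into an application-level error and the camera is closed, so the Preview returns to a consistent state. A minimal sketch of that recovery pattern follows; the helper name trySetupParameters() is an illustrative assumption, not project code.
// Hedged sketch (not project code) of the recovery pattern used around setupCameraParameters()
private boolean trySetupParameters() {
    try {
        setupCameraParameters();
        return true;
    } catch (CameraControllerException e) {
        e.printStackTrace();
        applicationInterface.onCameraError(); // report that the camera is unusable
        closeCamera(false, null);             // release the controller and reset state
        return false;
    }
}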