Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.
The class Preview, method openCamera().
// private int debug_count_opencamera = 0; // see usage below
/**
* Try to open the camera. Should only be called if camera_controller==null.
* The camera will be opened on a background thread, so it won't yet be available when
* this function returns.
* If camera_open_state is already CAMERAOPENSTATE_OPENING, this method does nothing.
*/
private void openCamera() {
long debug_time = 0;
if (MyDebug.LOG) {
Log.d(TAG, "openCamera()");
debug_time = System.currentTimeMillis();
}
if (camera_open_state == CameraOpenState.CAMERAOPENSTATE_OPENING) {
if (MyDebug.LOG)
Log.d(TAG, "already opening camera in background thread");
return;
} else if (camera_open_state == CameraOpenState.CAMERAOPENSTATE_CLOSING) {
if (MyDebug.LOG)
Log.d(TAG, "tried to open camera while camera is still closing in background thread");
return;
}
// need to init everything now, in case we don't open the camera (but these may already be initialised from an earlier call - e.g., if we are now switching to another camera)
// n.b., don't reset has_set_location, as we can remember the location when switching camera
// theoretically should be false anyway, but I had one RuntimeException from surfaceCreated()->openCamera()->setupCamera()->setPreviewSize() because is_preview_started was true, even though the preview couldn't have been started
is_preview_started = false;
set_preview_size = false;
preview_w = 0;
preview_h = 0;
has_focus_area = false;
focus_success = FOCUS_DONE;
focus_started_time = -1;
synchronized (this) {
// synchronise for consistency (keep FindBugs happy)
take_photo_after_autofocus = false;
// no need to call camera_controller.setCaptureFollowAutofocusHint() as we're opening the camera
}
set_flash_value_after_autofocus = "";
successfully_focused = false;
preview_targetRatio = 0.0;
scene_modes = null;
has_zoom = false;
max_zoom_factor = 0;
minimum_focus_distance = 0.0f;
zoom_ratios = null;
faces_detected = null;
supports_face_detection = false;
using_face_detection = false;
supports_video_stabilization = false;
supports_photo_video_recording = false;
can_disable_shutter_sound = false;
color_effects = null;
white_balances = null;
isos = null;
supports_white_balance_temperature = false;
min_temperature = 0;
max_temperature = 0;
supports_iso_range = false;
min_iso = 0;
max_iso = 0;
supports_exposure_time = false;
min_exposure_time = 0L;
max_exposure_time = 0L;
exposures = null;
min_exposure = 0;
max_exposure = 0;
exposure_step = 0.0f;
supports_expo_bracketing = false;
max_expo_bracketing_n_images = 0;
supports_raw = false;
supports_burst = false;
// set a sensible default
view_angle_x = 55.0f;
// set a sensible default
view_angle_y = 43.0f;
sizes = null;
current_size_index = -1;
has_capture_rate_factor = false;
capture_rate_factor = 1.0f;
video_high_speed = false;
supports_video = true;
supports_video_high_speed = false;
video_quality_handler.resetCurrentQuality();
supported_flash_values = null;
current_flash_index = -1;
supported_focus_values = null;
current_focus_index = -1;
max_num_focus_areas = 0;
applicationInterface.cameraInOperation(false, false);
if (is_video)
applicationInterface.cameraInOperation(false, true);
if (!this.has_surface) {
if (MyDebug.LOG) {
Log.d(TAG, "preview surface not yet available");
}
return;
}
if (this.app_is_paused) {
if (MyDebug.LOG) {
Log.d(TAG, "don't open camera as app is paused");
}
return;
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
// we restrict the checks to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener()
if (MyDebug.LOG)
Log.d(TAG, "check for permissions");
if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
if (MyDebug.LOG)
Log.d(TAG, "camera permission not available");
has_permissions = false;
applicationInterface.requestCameraPermission();
// return for now - the application should try to reopen the camera if permission is granted
return;
}
if (ContextCompat.checkSelfPermission(getContext(), Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
if (MyDebug.LOG)
Log.d(TAG, "storage permission not available");
has_permissions = false;
applicationInterface.requestStoragePermission();
// return for now - the application should try to reopen the camera if permission is granted
return;
}
if (MyDebug.LOG)
Log.d(TAG, "permissions available");
}
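// Illustrative Activity-side counterpart (not in this file): once the user grants the permission,
// the app needs to try opening the camera again, e.g. something along these lines:
//   @Override
//   public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
//       if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED)
//           preview.openCamera(); // hypothetical call; the real app re-opens via its own resume/retry flow
//   }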
// set in case this was previously set to false
has_permissions = true;
/*{
// debug
if( debug_count_opencamera++ == 0 ) {
if( MyDebug.LOG )
Log.d(TAG, "debug: don't open camera yet");
return;
}
}*/
camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENING;
int cameraId = applicationInterface.getCameraIdPref();
if (cameraId < 0 || cameraId >= camera_controller_manager.getNumberOfCameras()) {
if (MyDebug.LOG)
Log.d(TAG, "invalid cameraId: " + cameraId);
cameraId = 0;
applicationInterface.setCameraIdPref(cameraId);
}
// final boolean use_background_thread = false;
// final boolean use_background_thread = true;
final boolean use_background_thread = Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
/* Opening the camera on a background thread is important so that we don't block the UI thread:
* - For the old Camera API, this is the behaviour recommended by Google for Camera.open().
* - For Camera2, the manager.openCamera() call is asynchronous, but CameraController2
*   waits for it to open, so it's still important that we run that in a background thread.
* In theory this works for all Android versions, but this caused problems on the Galaxy Nexus
* with tests testTakePhotoAutoLevel(), testTakePhotoAutoLevelAngles() (various camera
* errors/exceptions, failing to take photos). Since this is a significant change, it is
* for now limited to modern devices.
*/
if (use_background_thread) {
final int cameraId_f = cameraId;
open_camera_task = new AsyncTask<Void, Void, CameraController>() {
private static final String TAG = "Preview/openCamera";
@Override
protected CameraController doInBackground(Void... voids) {
if (MyDebug.LOG)
Log.d(TAG, "doInBackground, async task: " + this);
return openCameraCore(cameraId_f);
}
/**
* The system calls this to perform work in the UI thread and delivers
* the result from doInBackground()
*/
protected void onPostExecute(CameraController camera_controller) {
if (MyDebug.LOG)
Log.d(TAG, "onPostExecute, async task: " + this);
// see note in openCameraCore() for why we set camera_controller here
Preview.this.camera_controller = camera_controller;
cameraOpened();
// set camera_open_state after cameraOpened, just in case a non-UI thread is listening for this - also
// important for test code waitUntilCameraOpened(), as test code runs on a different thread
camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENED;
// just to be safe
open_camera_task = null;
if (MyDebug.LOG)
Log.d(TAG, "onPostExecute done, async task: " + this);
}
protected void onCancelled(CameraController camera_controller) {
if (MyDebug.LOG) {
Log.d(TAG, "onCancelled, async task: " + this);
Log.d(TAG, "camera_controller: " + camera_controller);
}
// dispose of the camera controller
if (camera_controller != null) {
// this is the local camera_controller, not Preview.this.camera_controller!
camera_controller.release();
}
// n.b., still set OPENED state - important for test thread to know that this callback is complete
camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENED;
// just to be safe
open_camera_task = null;
if (MyDebug.LOG)
Log.d(TAG, "onCancelled done, async task: " + this);
}
}.execute();
} else {
this.camera_controller = openCameraCore(cameraId);
if (MyDebug.LOG) {
Log.d(TAG, "openCamera: time after opening camera: " + (System.currentTimeMillis() - debug_time));
}
cameraOpened();
camera_open_state = CameraOpenState.CAMERAOPENSTATE_OPENED;
}
if (MyDebug.LOG) {
Log.d(TAG, "openCamera: total time to open camera: " + (System.currentTimeMillis() - debug_time));
}
}
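The open and close paths above are coordinated through a small state machine held in camera_open_state. The enum itself is declared elsewhere in Preview; the following is a minimal sketch reconstructed only from the values referenced in this code, with meanings inferred from how they are used:

enum CameraOpenState {
    CAMERAOPENSTATE_CLOSED,   // no camera open and no background open/close in progress
    CAMERAOPENSTATE_OPENING,  // openCamera() has kicked off an open (possibly on a background thread)
    CAMERAOPENSTATE_OPENED,   // the open attempt has finished (camera_controller may still be null if it failed)
    CAMERAOPENSTATE_CLOSING   // closeCamera() is releasing the controller on a background thread
}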
Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.
The class Preview, method openCameraCore().
/**
* Open the camera - this should be called from a background thread, to avoid hogging the UI thread.
*/
private CameraController openCameraCore(int cameraId) {
long debug_time = 0;
if (MyDebug.LOG) {
Log.d(TAG, "openCameraCore()");
debug_time = System.currentTimeMillis();
}
// We pass a camera controller back to the UI thread rather than assigning to camera_controller here, because:
// * If we set camera_controller directly, we'd need to synchronize, otherwise risk of memory barrier issues
// * Risk of race conditions if UI thread accesses camera_controller before we have called cameraOpened().
CameraController camera_controller_local;
try {
if (MyDebug.LOG) {
Log.d(TAG, "try to open camera: " + cameraId);
Log.d(TAG, "openCamera: time before opening camera: " + (System.currentTimeMillis() - debug_time));
}
if (test_fail_open_camera) {
if (MyDebug.LOG)
Log.d(TAG, "test failing to open camera");
throw new CameraControllerException();
}
CameraController.ErrorCallback cameraErrorCallback = new CameraController.ErrorCallback() {
public void onError() {
if (MyDebug.LOG)
Log.e(TAG, "error from CameraController: camera device failed");
if (camera_controller != null) {
camera_controller = null;
camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED;
applicationInterface.onCameraError();
}
}
};
if (using_android_l) {
CameraController.ErrorCallback previewErrorCallback = new CameraController.ErrorCallback() {
public void onError() {
if (MyDebug.LOG)
Log.e(TAG, "error from CameraController: preview failed to start");
applicationInterface.onFailedStartPreview();
}
};
camera_controller_local = new CameraController2(Preview.this.getContext(), cameraId, previewErrorCallback, cameraErrorCallback);
if (applicationInterface.useCamera2FakeFlash()) {
camera_controller_local.setUseCamera2FakeFlash(true);
}
} else
camera_controller_local = new CameraController1(cameraId, cameraErrorCallback);
// throw new CameraControllerException(); // uncomment to test camera not opening
} catch (CameraControllerException e) {
if (MyDebug.LOG)
Log.e(TAG, "Failed to open camera: " + e.getMessage());
e.printStackTrace();
camera_controller_local = null;
}
if (MyDebug.LOG) {
Log.d(TAG, "openCamera: total time for openCameraCore: " + (System.currentTimeMillis() - debug_time));
}
return camera_controller_local;
}
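As the comment at the top of openCameraCore() explains, the new controller is returned to the caller rather than assigned to the camera_controller field from the background thread; the field is only written in onPostExecute(), on the UI thread. A stripped-down sketch of that hand-off pattern, using hypothetical names (Holder, Resource) rather than OpenCamera classes:

import android.os.AsyncTask;

// Illustrative only: "construct on a background thread, publish on the UI thread".
// AsyncTask guarantees that doInBackground() happens-before onPostExecute(), so the field
// is only ever written on the UI thread and needs no extra synchronisation.
class Holder {
    private Resource resource; // written only from onPostExecute(), i.e. on the UI thread

    void open() {
        new AsyncTask<Void, Void, Resource>() {
            @Override
            protected Resource doInBackground(Void... voids) {
                return new Resource(); // slow construction kept off the UI thread
            }

            @Override
            protected void onPostExecute(Resource result) {
                resource = result; // safe publish; mirrors Preview.this.camera_controller = camera_controller above
            }
        }.execute();
    }
}

class Resource { /* hypothetical expensive-to-construct object standing in for a CameraController */ }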
Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.
The class Preview, method closeCamera().
private void closeCamera(boolean async, final CloseCameraCallback closeCameraCallback) {
long debug_time = 0;
if (MyDebug.LOG) {
Log.d(TAG, "closeCamera()");
Log.d(TAG, "async: " + async);
debug_time = System.currentTimeMillis();
}
removePendingContinuousFocusReset();
has_focus_area = false;
focus_success = FOCUS_DONE;
focus_started_time = -1;
synchronized (this) {
// synchronise for consistency (keep FindBugs happy)
take_photo_after_autofocus = false;
// no need to call camera_controller.setCaptureFollowAutofocusHint() as we're closing the camera
}
set_flash_value_after_autofocus = "";
successfully_focused = false;
preview_targetRatio = 0.0;
// n.b., don't reset has_set_location, as we can remember the location when switching camera
if (continuous_focus_move_is_started) {
continuous_focus_move_is_started = false;
applicationInterface.onContinuousFocusMove(false);
}
applicationInterface.cameraClosed();
cancelTimer();
cancelRepeat();
if (camera_controller != null) {
if (MyDebug.LOG) {
Log.d(TAG, "close camera_controller");
}
if (video_recorder != null) {
stopVideo(false);
}
// make sure we're into continuous video mode for closing
// workaround for bug on Samsung Galaxy S5 with UHD, where if the user switches to another (non-continuous-video) focus mode, then goes to Settings, then returns and records video, the preview freezes and the video is corrupted
// so to be safe, we always reset to continuous video mode
this.updateFocusForVideo();
// need to check for camera being non-null again - if an error occurred stopping the video, we will have closed the camera, and may not be able to reopen
if (camera_controller != null) {
// camera.setPreviewCallback(null);
if (MyDebug.LOG) {
Log.d(TAG, "closeCamera: about to pause preview: " + (System.currentTimeMillis() - debug_time));
}
pausePreview(false);
// we set camera_controller to null before starting background thread, so that other callers won't try
// to use it
final CameraController camera_controller_local = camera_controller;
camera_controller = null;
if (async) {
if (MyDebug.LOG)
Log.d(TAG, "close camera on background async");
camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSING;
close_camera_task = new CloseCameraTask(camera_controller_local, closeCameraCallback);
close_camera_task.execute();
} else {
if (MyDebug.LOG) {
Log.d(TAG, "closeCamera: about to release camera controller: " + (System.currentTimeMillis() - debug_time));
}
camera_controller_local.stopPreview();
if (MyDebug.LOG) {
Log.d(TAG, "time to stop preview: " + (System.currentTimeMillis() - debug_time));
}
camera_controller_local.release();
camera_open_state = CameraOpenState.CAMERAOPENSTATE_CLOSED;
}
}
} else {
if (MyDebug.LOG) {
Log.d(TAG, "camera_controller isn't open");
}
if (closeCameraCallback != null) {
// still need to call the callback though! (otherwise if camera fails to open, switch camera button won't work!)
if (MyDebug.LOG)
Log.d(TAG, "calling closeCameraCallback.onClosed");
closeCameraCallback.onClosed();
}
}
if (MyDebug.LOG) {
Log.d(TAG, "closeCamera: total time: " + (System.currentTimeMillis() - debug_time));
}
}
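closeCamera() takes a CloseCameraCallback which, judging from the calls above, is a single-method callback fired once the close has completed - or immediately, if no camera was open. A sketch of the implied interface plus the kind of caller that needs it (the interface shape is inferred from usage; the switch-camera caller is illustrative and not copied from the source):

// Inferred from usage in closeCamera(): invoked exactly once when the close is done, whether
// the controller was released on a background thread or there was never a camera to close.
interface CloseCameraCallback {
    void onClosed();
}

// Illustrative caller (would live inside Preview; not from the source): close the current
// camera asynchronously, then open the newly selected one only after the old controller is released.
private void switchToCamera(final int newCameraId) {
    applicationInterface.setCameraIdPref(newCameraId); // same setter used in openCamera() above
    closeCamera(true, new CloseCameraCallback() {
        @Override
        public void onClosed() {
            openCamera(); // reads the camera id preference set above
        }
    });
}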
Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.
The class DrawPreview, method onDrawInfoLines().
private void onDrawInfoLines(Canvas canvas, final int top_y, long time_ms) {
Preview preview = main_activity.getPreview();
CameraController camera_controller = preview.getCameraController();
int ui_rotation = preview.getUIRotation();
// set up text etc for the multiple lines of "info" (time, free mem, etc)
// convert dps to pixels
p.setTextSize(16 * scale + 0.5f);
p.setTextAlign(Paint.Align.LEFT);
// convert dps to pixels
int location_x = (int) ((show_battery_pref ? 15 : 5) * scale + 0.5f);
int location_y = top_y;
// convert dps to pixels
final int gap_y = (int) (0 * scale + 0.5f);
if (ui_rotation == 90 || ui_rotation == 270) {
int diff = canvas.getWidth() - canvas.getHeight();
location_x += diff / 2;
location_y -= diff / 2;
}
if (ui_rotation == 90) {
location_y = canvas.getHeight() - location_y - (int) (20 * scale + 0.5f);
}
if (ui_rotation == 180) {
location_x = canvas.getWidth() - location_x;
p.setTextAlign(Paint.Align.RIGHT);
}
if (show_time_pref) {
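// only rebuild the formatted time string when the displayed second changes (whole-second comparison below), rather than re-formatting on every frame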
if (current_time_string == null || time_ms / 1000 > last_current_time_time / 1000) {
// avoid creating a new calendar object every time
if (calendar == null)
calendar = Calendar.getInstance();
else
calendar.setTimeInMillis(time_ms);
current_time_string = dateFormatTimeInstance.format(calendar.getTime());
// current_time_string = DateUtils.formatDateTime(getContext(), c.getTimeInMillis(), DateUtils.FORMAT_SHOW_TIME);
last_current_time_time = time_ms;
}
// int height = applicationInterface.drawTextWithBackground(canvas, p, current_time_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP);
if (text_bounds_time == null) {
if (MyDebug.LOG)
Log.d(TAG, "compute text_bounds_time");
text_bounds_time = new Rect();
String bounds_time_string = "00:00:00";
p.getTextBounds(bounds_time_string, 0, bounds_time_string.length(), text_bounds_time);
}
int height = applicationInterface.drawTextWithBackground(canvas, p, current_time_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP, null, true, text_bounds_time);
height += gap_y;
if (ui_rotation == 90) {
location_y -= height;
} else {
location_y += height;
}
}
if (camera_controller != null && show_free_memory_pref) {
if (last_free_memory_time == 0 || time_ms > last_free_memory_time + 10000) {
// don't call this too often, for UI performance
long free_mb = main_activity.freeMemory();
if (free_mb >= 0) {
float new_free_memory_gb = free_mb / 1024.0f;
if (MyDebug.LOG) {
Log.d(TAG, "free_memory_gb: " + free_memory_gb);
Log.d(TAG, "new_free_memory_gb: " + new_free_memory_gb);
}
if (Math.abs(new_free_memory_gb - free_memory_gb) > 0.001f) {
free_memory_gb = new_free_memory_gb;
free_memory_gb_string = decimalFormat.format(free_memory_gb) + getContext().getResources().getString(R.string.gb_abbreviation);
}
}
// always set this, so that in case of free memory not being available, we aren't calling freeMemory() every frame
last_free_memory_time = time_ms;
}
if (free_memory_gb >= 0.0f && free_memory_gb_string != null) {
// int height = applicationInterface.drawTextWithBackground(canvas, p, free_memory_gb_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP);
if (text_bounds_free_memory == null) {
if (MyDebug.LOG)
Log.d(TAG, "compute text_bounds_free_memory");
text_bounds_free_memory = new Rect();
p.getTextBounds(free_memory_gb_string, 0, free_memory_gb_string.length(), text_bounds_free_memory);
}
int height = applicationInterface.drawTextWithBackground(canvas, p, free_memory_gb_string, Color.WHITE, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP, null, true, text_bounds_free_memory);
height += gap_y;
if (ui_rotation == 90) {
location_y -= height;
} else {
location_y += height;
}
}
}
if (camera_controller != null && show_iso_pref) {
if (iso_exposure_string == null || time_ms > last_iso_exposure_time + 500) {
iso_exposure_string = "";
if (camera_controller.captureResultHasIso()) {
int iso = camera_controller.captureResultIso();
if (iso_exposure_string.length() > 0)
iso_exposure_string += " ";
iso_exposure_string += preview.getISOString(iso);
}
if (camera_controller.captureResultHasExposureTime()) {
long exposure_time = camera_controller.captureResultExposureTime();
if (iso_exposure_string.length() > 0)
iso_exposure_string += " ";
iso_exposure_string += preview.getExposureTimeString(exposure_time);
}
/*if( camera_controller.captureResultHasFrameDuration() ) {
long frame_duration = camera_controller.captureResultFrameDuration();
if( iso_exposure_string.length() > 0 )
iso_exposure_string += " ";
iso_exposure_string += preview.getFrameDurationString(frame_duration);
}*/
last_iso_exposure_time = time_ms;
}
if (iso_exposure_string.length() > 0) {
boolean is_scanning = false;
if (camera_controller.captureResultIsAEScanning()) {
// only show as scanning if in auto ISO mode (problem on Nexus 6 at least that if we're in manual ISO mode, after pausing and
// resuming, the camera driver continually reports CONTROL_AE_STATE_SEARCHING)
String value = sharedPreferences.getString(PreferenceKeys.ISOPreferenceKey, CameraController.ISO_DEFAULT);
if (value.equals("auto")) {
is_scanning = true;
}
}
// Yellow 500
int text_color = Color.rgb(255, 235, 59);
if (is_scanning) {
// we only change the color if AE has been scanning for at least a certain time, otherwise the color flickers a lot
if (ae_started_scanning_ms == -1) {
ae_started_scanning_ms = time_ms;
} else if (time_ms - ae_started_scanning_ms > 500) {
// Red 500
text_color = Color.rgb(244, 67, 54);
}
} else {
ae_started_scanning_ms = -1;
}
// can't cache the bounds rect, as the width may change significantly as the ISO or exposure values change
int height = applicationInterface.drawTextWithBackground(canvas, p, iso_exposure_string, text_color, Color.BLACK, location_x, location_y, MyApplicationInterface.Alignment.ALIGNMENT_TOP, ybounds_text, true);
height += gap_y;
// only move location_y if we actually printed something (because on the old Camera API, even if the ISO option has been enabled, we'll never be able to display the on-screen ISO)
if (ui_rotation == 90) {
location_y -= height;
} else {
location_y += height;
}
}
}
if (camera_controller != null) {
// padding to align with earlier text
// convert dps to pixels
final int flash_padding = (int) (1 * scale + 0.5f);
int location_x2 = location_x - flash_padding;
// convert dps to pixels
final int icon_size = (int) (16 * scale + 0.5f);
if (ui_rotation == 180) {
location_x2 = location_x - icon_size + flash_padding;
}
if (store_location_pref) {
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
if (applicationInterface.getLocation() != null) {
canvas.drawBitmap(location_bitmap, null, icon_dest, p);
int location_radius = icon_size / 10;
int indicator_x = location_x2 + icon_size - (int) (location_radius * 1.5);
int indicator_y = location_y + (int) (location_radius * 1.5);
// Green 500 or Yellow 500
p.setColor(applicationInterface.getLocation().getAccuracy() < 25.01f ? Color.rgb(37, 155, 36) : Color.rgb(255, 235, 59));
canvas.drawCircle(indicator_x, indicator_y, location_radius, p);
} else {
canvas.drawBitmap(location_off_bitmap, null, icon_dest, p);
}
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
// RAW not enabled in NR mode (see note in CameraController.takePictureBurst())
if (is_raw_pref && // RAW can be enabled, even if it isn't available for this camera (e.g., user enables RAW for back camera, but then switches to front camera which doesn't support it)
preview.supportsRaw() && photoMode != MyApplicationInterface.PhotoMode.HDR && photoMode != MyApplicationInterface.PhotoMode.ExpoBracketing && photoMode != MyApplicationInterface.PhotoMode.NoiseReduction) {
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
canvas.drawBitmap(raw_bitmap, null, icon_dest, p);
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
if (is_face_detection_pref && preview.supportsFaceDetection()) {
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
canvas.drawBitmap(face_detection_bitmap, null, icon_dest, p);
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
if (auto_stabilise_pref) {
// auto-level is supported for photos taken in video mode
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
canvas.drawBitmap(auto_stabilise_bitmap, null, icon_dest, p);
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
if ((photoMode == MyApplicationInterface.PhotoMode.DRO || photoMode == MyApplicationInterface.PhotoMode.HDR || photoMode == MyApplicationInterface.PhotoMode.ExpoBracketing || photoMode == MyApplicationInterface.PhotoMode.FastBurst || photoMode == MyApplicationInterface.PhotoMode.NoiseReduction) && !applicationInterface.isVideoPref()) {
// these photo modes are not supported in video mode
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
Bitmap bitmap = photoMode == MyApplicationInterface.PhotoMode.DRO ? dro_bitmap : photoMode == MyApplicationInterface.PhotoMode.HDR ? hdr_bitmap : photoMode == MyApplicationInterface.PhotoMode.ExpoBracketing ? expo_bitmap : photoMode == MyApplicationInterface.PhotoMode.FastBurst ? burst_bitmap : photoMode == MyApplicationInterface.PhotoMode.NoiseReduction ? nr_bitmap : null;
if (bitmap != null) {
canvas.drawBitmap(bitmap, null, icon_dest, p);
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
}
// photo-stamp is supported for photos taken in video mode, but it isn't supported in RAW-only mode
if (has_stamp_pref && !(is_raw_only_pref && preview.supportsRaw())) {
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
canvas.drawBitmap(photostamp_bitmap, null, icon_dest, p);
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
if (!is_audio_enabled_pref && applicationInterface.isVideoPref()) {
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
canvas.drawBitmap(audio_disabled_bitmap, null, icon_dest, p);
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
if (is_high_speed && applicationInterface.isVideoPref()) {
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha(64);
canvas.drawRect(icon_dest, p);
p.setAlpha(255);
canvas.drawBitmap(high_speed_fps_bitmap, null, icon_dest, p);
if (ui_rotation == 180) {
location_x2 -= icon_size + flash_padding;
} else {
location_x2 += icon_size + flash_padding;
}
}
String flash_value = preview.getCurrentFlashValue();
// note, flash_frontscreen_auto not yet supported for the flash symbol (as camera_controller.needsFlash() only returns info on the built-in actual flash, not frontscreen flash)
if (flash_value != null && (flash_value.equals("flash_on") || flash_value.equals("flash_red_eye") || (flash_value.equals("flash_auto") && camera_controller.needsFlash())) && !applicationInterface.isVideoPref()) {
// flash-indicator not supported for photos taken in video mode
if (needs_flash_time != -1) {
final long fade_ms = 500;
float alpha = (time_ms - needs_flash_time) / (float) fade_ms;
if (time_ms - needs_flash_time >= fade_ms)
alpha = 1.0f;
icon_dest.set(location_x2, location_y, location_x2 + icon_size, location_y + icon_size);
/*if( MyDebug.LOG )
Log.d(TAG, "alpha: " + alpha);*/
p.setStyle(Paint.Style.FILL);
p.setColor(Color.BLACK);
p.setAlpha((int) (64 * alpha));
canvas.drawRect(icon_dest, p);
p.setAlpha((int) (255 * alpha));
canvas.drawBitmap(flash_bitmap, null, icon_dest, p);
} else {
needs_flash_time = time_ms;
}
} else {
needs_flash_time = -1;
}
}
}
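onDrawInfoLines() is called for every frame, so the expensive strings it displays are recomputed at most every 10 seconds (free storage) or 500 ms (ISO/exposure) and cached in between. The same throttle-and-cache pattern in isolation, with illustrative names:

// Illustrative only: recompute at most once per interval, otherwise reuse the cached value,
// as done above for free_memory_gb_string and iso_exposure_string.
private static final long RECOMPUTE_INTERVAL_MS = 1000;
private long last_compute_time_ms = 0;
private String cached_text = null;

String getInfoText(long time_ms) {
    if (cached_text == null || time_ms > last_compute_time_ms + RECOMPUTE_INTERVAL_MS) {
        cached_text = computeExpensiveText();
        last_compute_time_ms = time_ms; // always update, so a failed/empty result isn't retried every frame
    }
    return cached_text;
}

private String computeExpensiveText() {
    return "..."; // placeholder for the real work, e.g. querying free storage or capture results
}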
Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.
The class DrawPreview, method drawAngleLines().
private void drawAngleLines(Canvas canvas) {
Preview preview = main_activity.getPreview();
CameraController camera_controller = preview.getCameraController();
boolean has_level_angle = preview.hasLevelAngle();
if (camera_controller != null && !preview.isPreviewPaused() && has_level_angle && (show_angle_line_pref || show_pitch_lines_pref || show_geo_direction_lines_pref)) {
int ui_rotation = preview.getUIRotation();
double level_angle = preview.getLevelAngle();
boolean has_pitch_angle = preview.hasPitchAngle();
double pitch_angle = preview.getPitchAngle();
boolean has_geo_direction = preview.hasGeoDirection();
double geo_direction = preview.getGeoDirection();
// n.b., must draw this without the standard canvas rotation
int radius_dps = (ui_rotation == 90 || ui_rotation == 270) ? 60 : 80;
// convert dps to pixels
int radius = (int) (radius_dps * scale + 0.5f);
double angle = -preview.getOrigLevelAngle();
// see http://android-developers.blogspot.co.uk/2010/09/one-screen-turn-deserves-another.html
int rotation = main_activity.getWindowManager().getDefaultDisplay().getRotation();
switch(rotation) {
case Surface.ROTATION_90:
case Surface.ROTATION_270:
angle -= 90.0;
break;
case Surface.ROTATION_0:
case Surface.ROTATION_180:
default:
break;
}
/*if( MyDebug.LOG ) {
Log.d(TAG, "orig_level_angle: " + preview.getOrigLevelAngle());
Log.d(TAG, "angle: " + angle);
}*/
int cx = canvas.getWidth() / 2;
int cy = canvas.getHeight() / 2;
boolean is_level = false;
if (Math.abs(level_angle) <= close_level_angle) {
// n.b., use level_angle, not angle or orig_level_angle
is_level = true;
}
if (is_level) {
radius = (int) (radius * 1.2);
}
canvas.save();
canvas.rotate((float) angle, cx, cy);
final int line_alpha = 96;
// convert dps to pixels
float hthickness = (0.5f * scale + 0.5f);
p.setStyle(Paint.Style.FILL);
if (show_angle_line_pref) {
// draw outline
p.setColor(Color.BLACK);
p.setAlpha(64);
// can't use drawRoundRect(left, top, right, bottom, ...) as that requires API 21
draw_rect.set(cx - radius - hthickness, cy - 2 * hthickness, cx + radius + hthickness, cy + 2 * hthickness);
canvas.drawRoundRect(draw_rect, 2 * hthickness, 2 * hthickness, p);
// draw the vertical crossbar
draw_rect.set(cx - 2 * hthickness, cy - radius / 2 - hthickness, cx + 2 * hthickness, cy + radius / 2 + hthickness);
canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
// draw inner portion
if (is_level) {
p.setColor(angle_highlight_color_pref);
} else {
p.setColor(Color.WHITE);
}
p.setAlpha(line_alpha);
draw_rect.set(cx - radius, cy - hthickness, cx + radius, cy + hthickness);
canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
// draw the vertical crossbar
draw_rect.set(cx - hthickness, cy - radius / 2, cx + hthickness, cy + radius / 2);
canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
if (is_level) {
// draw a second line
p.setColor(Color.BLACK);
p.setAlpha(64);
draw_rect.set(cx - radius - hthickness, cy - 7 * hthickness, cx + radius + hthickness, cy - 3 * hthickness);
canvas.drawRoundRect(draw_rect, 2 * hthickness, 2 * hthickness, p);
p.setColor(angle_highlight_color_pref);
p.setAlpha(line_alpha);
draw_rect.set(cx - radius, cy - 6 * hthickness, cx + radius, cy - 4 * hthickness);
canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
}
}
float camera_angle_x = preview.getViewAngleX();
float camera_angle_y = preview.getViewAngleY();
float angle_scale_x = (float) (canvas.getWidth() / (2.0 * Math.tan(Math.toRadians((camera_angle_x / 2.0)))));
float angle_scale_y = (float) (canvas.getHeight() / (2.0 * Math.tan(Math.toRadians((camera_angle_y / 2.0)))));
/*if( MyDebug.LOG ) {
Log.d(TAG, "camera_angle_x: " + camera_angle_x);
Log.d(TAG, "camera_angle_y: " + camera_angle_y);
Log.d(TAG, "angle_scale_x: " + angle_scale_x);
Log.d(TAG, "angle_scale_y: " + angle_scale_y);
Log.d(TAG, "angle_scale_x/scale: " + angle_scale_x/scale);
Log.d(TAG, "angle_scale_y/scale: " + angle_scale_y/scale);
}*/
/*if( MyDebug.LOG ) {
Log.d(TAG, "has_pitch_angle?: " + has_pitch_angle);
Log.d(TAG, "show_pitch_lines?: " + show_pitch_lines);
}*/
float angle_scale = (float) Math.sqrt(angle_scale_x * angle_scale_x + angle_scale_y * angle_scale_y);
angle_scale *= preview.getZoomRatio();
if (has_pitch_angle && show_pitch_lines_pref) {
int pitch_radius_dps = (ui_rotation == 90 || ui_rotation == 270) ? 100 : 80;
// convert dps to pixels
int pitch_radius = (int) (pitch_radius_dps * scale + 0.5f);
int angle_step = 10;
if (preview.getZoomRatio() >= 2.0f)
angle_step = 5;
for (int latitude_angle = -90; latitude_angle <= 90; latitude_angle += angle_step) {
double this_angle = pitch_angle - latitude_angle;
if (Math.abs(this_angle) < 90.0) {
// angle_scale is already in pixels rather than dps
float pitch_distance = angle_scale * (float) Math.tan(Math.toRadians(this_angle));
/*if( MyDebug.LOG ) {
Log.d(TAG, "pitch_angle: " + pitch_angle);
Log.d(TAG, "pitch_distance_dp: " + pitch_distance_dp);
}*/
// draw outline
p.setColor(Color.BLACK);
p.setAlpha(64);
// can't use drawRoundRect(left, top, right, bottom, ...) as that requires API 21
draw_rect.set(cx - pitch_radius - hthickness, cy + pitch_distance - 2 * hthickness, cx + pitch_radius + hthickness, cy + pitch_distance + 2 * hthickness);
canvas.drawRoundRect(draw_rect, 2 * hthickness, 2 * hthickness, p);
// draw inner portion
p.setColor(Color.WHITE);
p.setTextAlign(Paint.Align.LEFT);
if (latitude_angle == 0 && Math.abs(pitch_angle) < 1.0) {
p.setAlpha(255);
} else {
p.setAlpha(line_alpha);
}
draw_rect.set(cx - pitch_radius, cy + pitch_distance - hthickness, cx + pitch_radius, cy + pitch_distance + hthickness);
canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
// draw pitch angle indicator
applicationInterface.drawTextWithBackground(canvas, p, "" + latitude_angle + "\u00B0", p.getColor(), Color.BLACK, (int) (cx + pitch_radius + 4 * hthickness), (int) (cy + pitch_distance - 2 * hthickness), MyApplicationInterface.Alignment.ALIGNMENT_CENTRE);
}
}
}
if (has_geo_direction && has_pitch_angle && show_geo_direction_lines_pref) {
int geo_radius_dps = (ui_rotation == 90 || ui_rotation == 270) ? 80 : 100;
// convert dps to pixels
int geo_radius = (int) (geo_radius_dps * scale + 0.5f);
float geo_angle = (float) Math.toDegrees(geo_direction);
int angle_step = 10;
if (preview.getZoomRatio() >= 2.0f)
angle_step = 5;
for (int longitude_angle = 0; longitude_angle < 360; longitude_angle += angle_step) {
double this_angle = longitude_angle - geo_angle;
// normalise to be in interval [0, 360)
while (this_angle >= 360.0) this_angle -= 360.0;
while (this_angle < -360.0) this_angle += 360.0;
// pick shortest angle
if (this_angle > 180.0)
this_angle = -(360.0 - this_angle);
if (Math.abs(this_angle) < 90.0) {
/*if( MyDebug.LOG ) {
Log.d(TAG, "this_angle is now: " + this_angle);
}*/
// angle_scale is already in pixels rather than dps
float geo_distance = angle_scale * (float) Math.tan(Math.toRadians(this_angle));
// draw outline
p.setColor(Color.BLACK);
p.setAlpha(64);
// can't use drawRoundRect(left, top, right, bottom, ...) as that requires API 21
draw_rect.set(cx + geo_distance - 2 * hthickness, cy - geo_radius - hthickness, cx + geo_distance + 2 * hthickness, cy + geo_radius + hthickness);
canvas.drawRoundRect(draw_rect, 2 * hthickness, 2 * hthickness, p);
// draw inner portion
p.setColor(Color.WHITE);
p.setTextAlign(Paint.Align.CENTER);
p.setAlpha(line_alpha);
draw_rect.set(cx + geo_distance - hthickness, cy - geo_radius, cx + geo_distance + hthickness, cy + geo_radius);
canvas.drawRoundRect(draw_rect, hthickness, hthickness, p);
// draw geo direction angle indicator
applicationInterface.drawTextWithBackground(canvas, p, "" + longitude_angle + "\u00B0", p.getColor(), Color.BLACK, (int) (cx + geo_distance), (int) (cy - geo_radius - 4 * hthickness), MyApplicationInterface.Alignment.ALIGNMENT_BOTTOM);
}
}
}
p.setAlpha(255);
// reset
p.setStyle(Paint.Style.FILL);
canvas.restore();
}
}
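The pitch and compass lines above are placed with a pinhole-camera projection: angle_scale_x and angle_scale_y are the focal lengths in pixels ((size / 2) / tan(fov / 2) per axis), and an angular offset of theta degrees from the optical axis lands tan(theta) focal lengths away from the centre of the canvas; drawAngleLines() combines the two axes with a root-sum-square and scales by the zoom ratio. A standalone sketch of the per-axis conversion (the method name is illustrative):

// Pinhole projection used by drawAngleLines(): convert an angular offset from the optical
// axis into a pixel offset from the centre of the preview, for a single axis.
static float angleToPixelOffset(double angleOffsetDegrees, int canvasSizePx, float cameraFovDegrees) {
    // focal length in pixels for this axis: (size / 2) / tan(fov / 2)
    double focalLengthPx = (canvasSizePx / 2.0) / Math.tan(Math.toRadians(cameraFovDegrees / 2.0));
    return (float) (focalLengthPx * Math.tan(Math.toRadians(angleOffsetDegrees)));
}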