Example 6 with CameraController

Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.

From the class DrawPreview, the method doFocusAnimation:

private void doFocusAnimation(Canvas canvas, long time_ms) {
    Preview preview = main_activity.getPreview();
    CameraController camera_controller = preview.getCameraController();
    if (camera_controller != null && continuous_focus_moving && !taking_picture) {
        // we don't display the continuous focusing animation when taking a photo - it could also give the impression that
        // the app has frozen if we pause because the image saver queue is full
        long dt = time_ms - continuous_focus_moving_ms;
        final long length = 1000;
        /*if( MyDebug.LOG )
            Log.d(TAG, "continuous focus moving, dt: " + dt);*/
        if (dt <= length) {
            float frac = ((float) dt) / (float) length;
            float pos_x = canvas.getWidth() / 2.0f;
            float pos_y = canvas.getHeight() / 2.0f;
            // convert dps to pixels
            float min_radius = (40 * scale + 0.5f);
            // convert dps to pixels
            float max_radius = (60 * scale + 0.5f);
            float radius;
            if (frac < 0.5f) {
                float alpha = frac * 2.0f;
                radius = (1.0f - alpha) * min_radius + alpha * max_radius;
            } else {
                float alpha = (frac - 0.5f) * 2.0f;
                radius = (1.0f - alpha) * max_radius + alpha * min_radius;
            }
            /*if( MyDebug.LOG ) {
                Log.d(TAG, "dt: " + dt);
                Log.d(TAG, "radius: " + radius);
            }*/
            p.setColor(Color.WHITE);
            p.setStyle(Paint.Style.STROKE);
            p.setStrokeWidth(stroke_width);
            canvas.drawCircle(pos_x, pos_y, radius, p);
            // reset
            p.setStyle(Paint.Style.FILL);
        } else {
            clearContinuousFocusMove();
        }
    }
    if (preview.isFocusWaiting() || preview.isFocusRecentSuccess() || preview.isFocusRecentFailure()) {
        long time_since_focus_started = preview.timeSinceStartedAutoFocus();
        // convert dps to pixels
        float min_radius = (40 * scale + 0.5f);
        // convert dps to pixels
        float max_radius = (45 * scale + 0.5f);
        float radius = min_radius;
        if (time_since_focus_started > 0) {
            final long length = 500;
            float frac = ((float) time_since_focus_started) / (float) length;
            if (frac > 1.0f)
                frac = 1.0f;
            if (frac < 0.5f) {
                float alpha = frac * 2.0f;
                radius = (1.0f - alpha) * min_radius + alpha * max_radius;
            } else {
                float alpha = (frac - 0.5f) * 2.0f;
                radius = (1.0f - alpha) * max_radius + alpha * min_radius;
            }
        }
        int size = (int) radius;
        if (preview.isFocusRecentSuccess())
            // Green A400
            p.setColor(Color.rgb(20, 231, 21));
        else if (preview.isFocusRecentFailure())
            // Red 500
            p.setColor(Color.rgb(244, 67, 54));
        else
            p.setColor(Color.WHITE);
        p.setStyle(Paint.Style.STROKE);
        p.setStrokeWidth(stroke_width);
        int pos_x;
        int pos_y;
        if (preview.hasFocusArea()) {
            Pair<Integer, Integer> focus_pos = preview.getFocusPos();
            pos_x = focus_pos.first;
            pos_y = focus_pos.second;
        } else {
            pos_x = canvas.getWidth() / 2;
            pos_y = canvas.getHeight() / 2;
        }
        float frac = 0.5f;
        // horizontal strokes
        canvas.drawLine(pos_x - size, pos_y - size, pos_x - frac * size, pos_y - size, p);
        canvas.drawLine(pos_x + frac * size, pos_y - size, pos_x + size, pos_y - size, p);
        canvas.drawLine(pos_x - size, pos_y + size, pos_x - frac * size, pos_y + size, p);
        canvas.drawLine(pos_x + frac * size, pos_y + size, pos_x + size, pos_y + size, p);
        // vertical strokes
        canvas.drawLine(pos_x - size, pos_y - size, pos_x - size, pos_y - frac * size, p);
        canvas.drawLine(pos_x - size, pos_y + frac * size, pos_x - size, pos_y + size, p);
        canvas.drawLine(pos_x + size, pos_y - size, pos_x + size, pos_y - frac * size, p);
        canvas.drawLine(pos_x + size, pos_y + frac * size, pos_x + size, pos_y + size, p);
        // reset
        p.setStyle(Paint.Style.FILL);
    }
}
Also used: CameraController(net.sourceforge.opencamera.CameraController.CameraController) Preview(net.sourceforge.opencamera.Preview.Preview) Paint(android.graphics.Paint)
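
The radius in the animation above follows a triangle wave: it grows linearly from min_radius to max_radius over the first half of the animation and shrinks back over the second half. A minimal standalone sketch of that interpolation, with an illustrative helper name that is not part of OpenCamera:

private static float focusAnimationRadius(float frac, float min_radius, float max_radius) {
    // frac is elapsed time / animation length, expected to be in [0, 1]
    if (frac < 0.5f) {
        float alpha = frac * 2.0f; // 0..1 over the first half: grow from min to max
        return (1.0f - alpha) * min_radius + alpha * max_radius;
    } else {
        float alpha = (frac - 0.5f) * 2.0f; // 0..1 over the second half: shrink back to min
        return (1.0f - alpha) * max_radius + alpha * min_radius;
    }
}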

Example 7 with CameraController

Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.

From the class DrawPreview, the method onDrawPreview:

public void onDrawPreview(Canvas canvas) {
    /*if( MyDebug.LOG )
        Log.d(TAG, "onDrawPreview");*/
    if (!has_settings) {
        if (MyDebug.LOG)
            Log.d(TAG, "onDrawPreview: need to update settings");
        updateSettings();
    }
    Preview preview = main_activity.getPreview();
    CameraController camera_controller = preview.getCameraController();
    int ui_rotation = preview.getUIRotation();
    final long time_ms = System.currentTimeMillis();
    // see documentation for CameraController.shouldCoverPreview()
    if (preview.usingCamera2API() && (camera_controller == null || camera_controller.shouldCoverPreview())) {
        p.setColor(Color.BLACK);
        canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p);
    }
    if (camera_controller != null && front_screen_flash) {
        p.setColor(Color.WHITE);
        canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p);
    }
    if (main_activity.getMainUI().inImmersiveMode()) {
        if (immersive_mode_everything_pref) {
            // display nothing at all in immersive_mode_everything mode
            return;
        }
    }
    if (camera_controller != null && taking_picture && !front_screen_flash && take_photo_border_pref) {
        p.setColor(Color.WHITE);
        p.setStyle(Paint.Style.STROKE);
        p.setStrokeWidth(stroke_width);
        // convert dps to pixels
        float this_stroke_width = (5.0f * scale + 0.5f);
        p.setStrokeWidth(this_stroke_width);
        canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p);
        // reset
        p.setStyle(Paint.Style.FILL);
        // reset
        p.setStrokeWidth(stroke_width);
    }
    drawGrids(canvas);
    drawCropGuides(canvas);
    if (show_last_image && last_thumbnail != null) {
        // If changing this code, ensure that pause preview still works when:
        // - Taking a photo in portrait or landscape - and check rotating the device while preview paused
        // - Taking a photo with lock to portrait/landscape options still shows the thumbnail with aspect ratio preserved
        // fill with black, in case the image doesn't cover the canvas (due to different aspect ratios)
        p.setColor(Color.rgb(0, 0, 0));
        canvas.drawRect(0.0f, 0.0f, canvas.getWidth(), canvas.getHeight(), p);
        last_image_src_rect.left = 0;
        last_image_src_rect.top = 0;
        last_image_src_rect.right = last_thumbnail.getWidth();
        last_image_src_rect.bottom = last_thumbnail.getHeight();
        if (ui_rotation == 90 || ui_rotation == 270) {
            last_image_src_rect.right = last_thumbnail.getHeight();
            last_image_src_rect.bottom = last_thumbnail.getWidth();
        }
        last_image_dst_rect.left = 0;
        last_image_dst_rect.top = 0;
        last_image_dst_rect.right = canvas.getWidth();
        last_image_dst_rect.bottom = canvas.getHeight();
        /*if( MyDebug.LOG ) {
            Log.d(TAG, "thumbnail: " + last_thumbnail.getWidth() + " x " + last_thumbnail.getHeight());
            Log.d(TAG, "canvas: " + canvas.getWidth() + " x " + canvas.getHeight());
        }*/
        // use CENTER to preserve aspect ratio
        last_image_matrix.setRectToRect(last_image_src_rect, last_image_dst_rect, Matrix.ScaleToFit.CENTER);
        if (ui_rotation == 90 || ui_rotation == 270) {
            // the rotation maps (0, 0) to (tw/2 - th/2, th/2 - tw/2), so we translate to undo this
            float diff = last_thumbnail.getHeight() - last_thumbnail.getWidth();
            last_image_matrix.preTranslate(diff / 2.0f, -diff / 2.0f);
        }
        last_image_matrix.preRotate(ui_rotation, last_thumbnail.getWidth() / 2.0f, last_thumbnail.getHeight() / 2.0f);
        canvas.drawBitmap(last_thumbnail, last_image_matrix, p);
    }
    doThumbnailAnimation(canvas, time_ms);
    drawUI(canvas, time_ms);
    drawAngleLines(canvas);
    doFocusAnimation(canvas, time_ms);
    CameraController.Face[] faces_detected = preview.getFacesDetected();
    if (faces_detected != null) {
        // Yellow 500
        p.setColor(Color.rgb(255, 235, 59));
        p.setStyle(Paint.Style.STROKE);
        p.setStrokeWidth(stroke_width);
        for (CameraController.Face face : faces_detected) {
            // Android doc recommends filtering out faces with score less than 50 (same for both Camera and Camera2 APIs)
            if (face.score >= 50) {
                canvas.drawRect(face.rect, p);
            }
        }
        // reset
        p.setStyle(Paint.Style.FILL);
    }
    if (enable_gyro_target_spot) {
        GyroSensor gyroSensor = main_activity.getApplicationInterface().getGyroSensor();
        if (gyroSensor.isRecording()) {
            gyroSensor.getRelativeInverseVector(transformed_gyro_direction, gyro_direction);
            // note that although the X component of gyro_direction represents left to right on the device, because we're in
            // landscape mode this corresponds to the y coordinate on the screen
            float angle_x = -(float) Math.asin(transformed_gyro_direction[1]);
            float angle_y = -(float) Math.asin(transformed_gyro_direction[0]);
            if (Math.abs(angle_x) < 0.5f * Math.PI && Math.abs(angle_y) < 0.5f * Math.PI) {
                float camera_angle_x = preview.getViewAngleX();
                float camera_angle_y = preview.getViewAngleY();
                float angle_scale_x = (float) (canvas.getWidth() / (2.0 * Math.tan(Math.toRadians((camera_angle_x / 2.0)))));
                float angle_scale_y = (float) (canvas.getHeight() / (2.0 * Math.tan(Math.toRadians((camera_angle_y / 2.0)))));
                angle_scale_x *= preview.getZoomRatio();
                angle_scale_y *= preview.getZoomRatio();
                // angle_scale is already in pixels rather than dps
                float distance_x = angle_scale_x * (float) Math.tan(angle_x);
                // angle_scale is already in pixels rather than dps
                float distance_y = angle_scale_y * (float) Math.tan(angle_y);
                p.setColor(Color.WHITE);
                // draw spot for the centre of the screen, to help the user orient the device
                drawGyroSpot(canvas, 0.0f, 0.0f);
                p.setColor(Color.BLUE);
                drawGyroSpot(canvas, distance_x, distance_y);
            }
        }
    }
}
Also used: CameraController(net.sourceforge.opencamera.CameraController.CameraController) Preview(net.sourceforge.opencamera.Preview.Preview) GyroSensor(net.sourceforge.opencamera.GyroSensor) Paint(android.graphics.Paint)
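
The gyro target spot in onDrawPreview maps an angular offset to an on-screen pixel offset using the camera's field of view: the scale factor is the canvas extent divided by 2*tan(view_angle/2), multiplied by the zoom ratio, and the offset is then scale * tan(angle). A simplified sketch of that mapping along one axis (the method name and parameters are illustrative, not part of OpenCamera):

private static float angleToPixelOffset(float angle_rad, int canvas_extent_px, float view_angle_deg, float zoom_ratio) {
    // pixels per unit of tan(angle), derived from the camera's field of view
    double angle_scale = canvas_extent_px / (2.0 * Math.tan(Math.toRadians(view_angle_deg / 2.0)));
    // zooming in narrows the effective field of view, so the same angle moves further on screen
    angle_scale *= zoom_ratio;
    // the result is already in pixels rather than dps
    return (float) (angle_scale * Math.tan(angle_rad));
}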

Example 8 with CameraController

Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.

From the class MainActivity, the method showPhotoVideoToast:

/**
 * Displays a toast with information about the current preferences.
 * If always_show is true, the toast is always displayed; otherwise, we only display
 * a toast if it's important to notify the user (i.e., unusual non-default settings are
 * set). We want a balance between not pestering the user too much and reminding
 * them when certain settings are on.
 */
private void showPhotoVideoToast(boolean always_show) {
    if (MyDebug.LOG) {
        Log.d(TAG, "showPhotoVideoToast");
        Log.d(TAG, "always_show? " + always_show);
    }
    CameraController camera_controller = preview.getCameraController();
    if (camera_controller == null || this.camera_in_background) {
        if (MyDebug.LOG)
            Log.d(TAG, "camera not open or in background");
        return;
    }
    String toast_string;
    SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
    boolean simple = true;
    boolean video_high_speed = preview.isVideoHighSpeed();
    if (preview.isVideo()) {
        VideoProfile profile = preview.getVideoProfile();
        String bitrate_string;
        if (profile.videoBitRate >= 10000000)
            bitrate_string = profile.videoBitRate / 1000000 + "Mbps";
        else if (profile.videoBitRate >= 10000)
            bitrate_string = profile.videoBitRate / 1000 + "Kbps";
        else
            bitrate_string = profile.videoBitRate + "bps";
        toast_string = getResources().getString(R.string.video) + ": " + profile.videoFrameWidth + "x" + profile.videoFrameHeight + ", " + profile.videoFrameRate + getResources().getString(R.string.fps) + (video_high_speed ? " [" + getResources().getString(R.string.high_speed) + "]" : "") + ", " + bitrate_string;
        String fps_value = applicationInterface.getVideoFPSPref();
        if (!fps_value.equals("default") || video_high_speed) {
            simple = false;
        }
        boolean record_audio = applicationInterface.getRecordAudioPref();
        if (!record_audio) {
            toast_string += "\n" + getResources().getString(R.string.audio_disabled);
            simple = false;
        }
        String max_duration_value = sharedPreferences.getString(PreferenceKeys.getVideoMaxDurationPreferenceKey(), "0");
        if (max_duration_value.length() > 0 && !max_duration_value.equals("0")) {
            String[] entries_array = getResources().getStringArray(R.array.preference_video_max_duration_entries);
            String[] values_array = getResources().getStringArray(R.array.preference_video_max_duration_values);
            int index = Arrays.asList(values_array).indexOf(max_duration_value);
            if (index != -1) {
                // just in case!
                String entry = entries_array[index];
                toast_string += "\n" + getResources().getString(R.string.max_duration) + ": " + entry;
                simple = false;
            }
        }
        long max_filesize = applicationInterface.getVideoMaxFileSizeUserPref();
        if (max_filesize != 0) {
            long max_filesize_mb = max_filesize / (1024 * 1024);
            toast_string += "\n" + getResources().getString(R.string.max_filesize) + ": " + max_filesize_mb + getResources().getString(R.string.mb_abbreviation);
            simple = false;
        }
        if (applicationInterface.getVideoFlashPref() && preview.supportsFlash()) {
            toast_string += "\n" + getResources().getString(R.string.preference_video_flash);
            simple = false;
        }
    } else {
        toast_string = getResources().getString(R.string.photo);
        CameraController.Size current_size = preview.getCurrentPictureSize();
        toast_string += " " + current_size.width + "x" + current_size.height;
        if (preview.supportsFocus() && preview.getSupportedFocusValues().size() > 1) {
            String focus_value = preview.getCurrentFocusValue();
            if (focus_value != null && !focus_value.equals("focus_mode_auto") && !focus_value.equals("focus_mode_continuous_picture")) {
                String focus_entry = preview.findFocusEntryForValue(focus_value);
                if (focus_entry != null) {
                    toast_string += "\n" + focus_entry;
                }
            }
        }
        if (applicationInterface.getAutoStabilisePref()) {
            // important as users are sometimes confused at the behaviour if they don't realise the option is on
            toast_string += "\n" + getResources().getString(R.string.preference_auto_stabilise);
            simple = false;
        }
        String photo_mode_string = null;
        MyApplicationInterface.PhotoMode photo_mode = applicationInterface.getPhotoMode();
        if (photo_mode == MyApplicationInterface.PhotoMode.DRO) {
            photo_mode_string = getResources().getString(R.string.photo_mode_dro);
        } else if (photo_mode == MyApplicationInterface.PhotoMode.HDR) {
            photo_mode_string = getResources().getString(R.string.photo_mode_hdr);
        } else if (photo_mode == MyApplicationInterface.PhotoMode.ExpoBracketing) {
            photo_mode_string = getResources().getString(R.string.photo_mode_expo_bracketing_full);
        } else if (photo_mode == MyApplicationInterface.PhotoMode.FastBurst) {
            photo_mode_string = getResources().getString(R.string.photo_mode_fast_burst_full);
            int n_images = applicationInterface.getBurstNImages();
            photo_mode_string += " (" + n_images + ")";
        } else if (photo_mode == MyApplicationInterface.PhotoMode.NoiseReduction) {
            photo_mode_string = getResources().getString(R.string.photo_mode_noise_reduction_full);
        }
        if (photo_mode_string != null) {
            toast_string += "\n" + getResources().getString(R.string.photo_mode) + ": " + photo_mode_string;
            simple = false;
        }
    }
    if (applicationInterface.getFaceDetectionPref()) {
        // important so that the user realises why touching for focus/metering areas won't work - easy to forget that face detection has been turned on!
        toast_string += "\n" + getResources().getString(R.string.preference_face_detection);
        simple = false;
    }
    if (!video_high_speed) {
        // manual ISO only supported for high speed video
        String iso_value = applicationInterface.getISOPref();
        if (!iso_value.equals(CameraController.ISO_DEFAULT)) {
            toast_string += "\nISO: " + iso_value;
            if (preview.supportsExposureTime()) {
                long exposure_time_value = applicationInterface.getExposureTimePref();
                toast_string += " " + preview.getExposureTimeString(exposure_time_value);
            }
            simple = false;
        }
        int current_exposure = camera_controller.getExposureCompensation();
        if (current_exposure != 0) {
            toast_string += "\n" + preview.getExposureCompensationString(current_exposure);
            simple = false;
        }
    }
    String scene_mode = camera_controller.getSceneMode();
    if (scene_mode != null && !scene_mode.equals(CameraController.SCENE_MODE_DEFAULT)) {
        toast_string += "\n" + getResources().getString(R.string.scene_mode) + ": " + mainUI.getEntryForSceneMode(scene_mode);
        simple = false;
    }
    String white_balance = camera_controller.getWhiteBalance();
    if (white_balance != null && !white_balance.equals(CameraController.WHITE_BALANCE_DEFAULT)) {
        toast_string += "\n" + getResources().getString(R.string.white_balance) + ": " + mainUI.getEntryForWhiteBalance(white_balance);
        if (white_balance.equals("manual") && preview.supportsWhiteBalanceTemperature()) {
            toast_string += " " + camera_controller.getWhiteBalanceTemperature();
        }
        simple = false;
    }
    String color_effect = camera_controller.getColorEffect();
    if (color_effect != null && !color_effect.equals(CameraController.COLOR_EFFECT_DEFAULT)) {
        toast_string += "\n" + getResources().getString(R.string.color_effect) + ": " + mainUI.getEntryForColorEffect(color_effect);
        simple = false;
    }
    String lock_orientation = applicationInterface.getLockOrientationPref();
    if (!lock_orientation.equals("none")) {
        String[] entries_array = getResources().getStringArray(R.array.preference_lock_orientation_entries);
        String[] values_array = getResources().getStringArray(R.array.preference_lock_orientation_values);
        int index = Arrays.asList(values_array).indexOf(lock_orientation);
        if (index != -1) {
            // just in case!
            String entry = entries_array[index];
            toast_string += "\n" + entry;
            simple = false;
        }
    }
    String timer = sharedPreferences.getString(PreferenceKeys.getTimerPreferenceKey(), "0");
    if (!timer.equals("0")) {
        String[] entries_array = getResources().getStringArray(R.array.preference_timer_entries);
        String[] values_array = getResources().getStringArray(R.array.preference_timer_values);
        int index = Arrays.asList(values_array).indexOf(timer);
        if (index != -1) {
            // just in case!
            String entry = entries_array[index];
            toast_string += "\n" + getResources().getString(R.string.preference_timer) + ": " + entry;
            simple = false;
        }
    }
    String repeat = applicationInterface.getRepeatPref();
    if (!repeat.equals("1")) {
        String[] entries_array = getResources().getStringArray(R.array.preference_burst_mode_entries);
        String[] values_array = getResources().getStringArray(R.array.preference_burst_mode_values);
        int index = Arrays.asList(values_array).indexOf(repeat);
        if (index != -1) {
            // just in case!
            String entry = entries_array[index];
            toast_string += "\n" + getResources().getString(R.string.preference_burst_mode) + ": " + entry;
            simple = false;
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "toast_string: " + toast_string);
        Log.d(TAG, "simple?: " + simple);
    }
    if (!simple || always_show)
        preview.showToast(switch_video_toast, toast_string);
}
Also used: CameraController(net.sourceforge.opencamera.CameraController.CameraController) SharedPreferences(android.content.SharedPreferences) VideoProfile(net.sourceforge.opencamera.Preview.VideoProfile) SuppressLint(android.annotation.SuppressLint)
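
showPhotoVideoToast repeatedly maps a stored preference value to its display entry via parallel entries/values string arrays, skipping the line if the value isn't found (the "just in case!" branches). That lookup can be summarised in a small helper like the following sketch (the helper name is illustrative; java.util.Arrays is referenced by its fully qualified name to keep the snippet self-contained):

private static String entryForValue(String[] entries_array, String[] values_array, String value) {
    int index = java.util.Arrays.asList(values_array).indexOf(value);
    // return null if the value isn't in the list, mirroring the "just in case!" checks above
    return index != -1 ? entries_array[index] : null;
}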

Example 9 with CameraController

Use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.

From the class Preview, the method setupCameraParameters:

private void setupCameraParameters() throws CameraControllerException {
    if (MyDebug.LOG)
        Log.d(TAG, "setupCameraParameters()");
    long debug_time = 0;
    if (MyDebug.LOG) {
        debug_time = System.currentTimeMillis();
    }
    {
        // this doesn't appear to apply to Camera2 API, but we still might as well set scene mode first
        if (MyDebug.LOG)
            Log.d(TAG, "set up scene mode");
        String value = applicationInterface.getSceneModePref();
        if (MyDebug.LOG)
            Log.d(TAG, "saved scene mode: " + value);
        CameraController.SupportedValues supported_values = camera_controller.setSceneMode(value);
        if (supported_values != null) {
            scene_modes = supported_values.values;
            // now save, so it's available for PreferenceActivity
            applicationInterface.setSceneModePref(supported_values.selected_value);
        } else {
            // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
            applicationInterface.clearSceneModePref();
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after setting scene mode: " + (System.currentTimeMillis() - debug_time));
    }
    {
        // grab all read-only info from parameters
        if (MyDebug.LOG)
            Log.d(TAG, "grab info from parameters");
        CameraController.CameraFeatures camera_features = camera_controller.getCameraFeatures();
        this.has_zoom = camera_features.is_zoom_supported;
        if (this.has_zoom) {
            this.max_zoom_factor = camera_features.max_zoom;
            this.zoom_ratios = camera_features.zoom_ratios;
        }
        this.minimum_focus_distance = camera_features.minimum_focus_distance;
        this.supports_face_detection = camera_features.supports_face_detection;
        this.sizes = camera_features.picture_sizes;
        supported_flash_values = camera_features.supported_flash_values;
        supported_focus_values = camera_features.supported_focus_values;
        this.max_num_focus_areas = camera_features.max_num_focus_areas;
        this.is_exposure_lock_supported = camera_features.is_exposure_lock_supported;
        this.supports_video_stabilization = camera_features.is_video_stabilization_supported;
        this.supports_photo_video_recording = camera_features.is_photo_video_recording_supported;
        this.can_disable_shutter_sound = camera_features.can_disable_shutter_sound;
        this.supports_white_balance_temperature = camera_features.supports_white_balance_temperature;
        this.min_temperature = camera_features.min_temperature;
        this.max_temperature = camera_features.max_temperature;
        this.supports_iso_range = camera_features.supports_iso_range;
        this.min_iso = camera_features.min_iso;
        this.max_iso = camera_features.max_iso;
        this.supports_exposure_time = camera_features.supports_exposure_time;
        this.min_exposure_time = camera_features.min_exposure_time;
        this.max_exposure_time = camera_features.max_exposure_time;
        this.min_exposure = camera_features.min_exposure;
        this.max_exposure = camera_features.max_exposure;
        this.exposure_step = camera_features.exposure_step;
        this.supports_expo_bracketing = camera_features.supports_expo_bracketing;
        this.max_expo_bracketing_n_images = camera_features.max_expo_bracketing_n_images;
        this.supports_raw = camera_features.supports_raw;
        this.supports_burst = camera_features.supports_burst;
        this.view_angle_x = camera_features.view_angle_x;
        this.view_angle_y = camera_features.view_angle_y;
        this.supports_video_high_speed = camera_features.video_sizes_high_speed != null && camera_features.video_sizes_high_speed.size() > 0;
        this.video_quality_handler.setVideoSizes(camera_features.video_sizes);
        this.video_quality_handler.setVideoSizesHighSpeed(camera_features.video_sizes_high_speed);
        this.supported_preview_sizes = camera_features.preview_sizes;
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after getting read only info: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG)
            Log.d(TAG, "set up face detection");
        // get face detection supported
        this.faces_detected = null;
        if (this.supports_face_detection) {
            this.using_face_detection = applicationInterface.getFaceDetectionPref();
        } else {
            this.using_face_detection = false;
        }
        if (MyDebug.LOG) {
            Log.d(TAG, "supports_face_detection?: " + supports_face_detection);
            Log.d(TAG, "using_face_detection?: " + using_face_detection);
        }
        if (this.using_face_detection) {
            class MyFaceDetectionListener implements CameraController.FaceDetectionListener {

                final Handler handler = new Handler();

                int last_n_faces = -1;

                FaceLocation last_face_location = FaceLocation.FACELOCATION_UNSET;

                /**
                 * Note, at least for Camera2 API, onFaceDetection() isn't called on UI thread.
                 */
                @Override
                public void onFaceDetection(final CameraController.Face[] faces) {
                    if (MyDebug.LOG)
                        Log.d(TAG, "onFaceDetection: " + faces.length + " : " + Arrays.toString(faces));
                    if (camera_controller == null) {
                        // can get a crash in some cases when switching camera when face detection is on (at least for Camera2)
                        Activity activity = (Activity) Preview.this.getContext();
                        activity.runOnUiThread(new Runnable() {

                            public void run() {
                                faces_detected = null;
                            }
                        });
                        return;
                    }
                    // don't assign to faces_detected yet, as that has to be done on the UI thread
                    // We don't synchronize on faces_detected, as the array may be passed to other
                    // classes via getFacesDetected(). Although that function could copy instead,
                    // that would mean an allocation in every frame in DrawPreview.
                    // Easier to just do the assignment on the UI thread.
                    Activity activity = (Activity) Preview.this.getContext();
                    activity.runOnUiThread(new Runnable() {

                        public void run() {
                            // convert rects to preview screen space - also needs to be done on UI thread
                            // (otherwise can have crashes if camera_controller becomes null in the meantime)
                            final Matrix matrix = getCameraToPreviewMatrix();
                            for (CameraController.Face face : faces) {
                                face_rect.set(face.rect);
                                matrix.mapRect(face_rect);
                                face_rect.round(face.rect);
                            }
                            reportFaces(faces);
                            if (faces_detected == null || faces_detected.length != faces.length) {
                                // avoid unnecessary reallocations
                                if (MyDebug.LOG)
                                    Log.d(TAG, "allocate new faces_detected");
                                faces_detected = new CameraController.Face[faces.length];
                            }
                            System.arraycopy(faces, 0, faces_detected, 0, faces.length);
                        }
                    });
                }

                /**
                 * Accessibility: report number of faces for talkback etc.
                 */
                private void reportFaces(CameraController.Face[] local_faces) {
                    // View.announceForAccessibility requires JELLY_BEAN
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN && accessibility_manager.isEnabled() && accessibility_manager.isTouchExplorationEnabled()) {
                        int n_faces = local_faces.length;
                        FaceLocation face_location = FaceLocation.FACELOCATION_UNKNOWN;
                        if (n_faces > 0) {
                            // set face_location
                            float avg_x = 0, avg_y = 0;
                            final float bdry_frac_c = 0.35f;
                            boolean all_centre = true;
                            for (CameraController.Face face : local_faces) {
                                float face_x = face.rect.centerX();
                                float face_y = face.rect.centerY();
                                face_x /= (float) cameraSurface.getView().getWidth();
                                face_y /= (float) cameraSurface.getView().getHeight();
                                if (all_centre) {
                                    if (face_x < bdry_frac_c || face_x > 1.0f - bdry_frac_c || face_y < bdry_frac_c || face_y > 1.0f - bdry_frac_c)
                                        all_centre = false;
                                }
                                avg_x += face_x;
                                avg_y += face_y;
                            }
                            avg_x /= n_faces;
                            avg_y /= n_faces;
                            if (MyDebug.LOG) {
                                Log.d(TAG, "    avg_x: " + avg_x);
                                Log.d(TAG, "    avg_y: " + avg_y);
                                Log.d(TAG, "    ui_rotation: " + ui_rotation);
                            }
                            if (all_centre) {
                                face_location = FaceLocation.FACELOCATION_CENTRE;
                            } else {
                                switch(ui_rotation) {
                                    case 0:
                                        break;
                                    case 90:
                                        {
                                            float temp = avg_x;
                                            avg_x = avg_y;
                                            avg_y = 1.0f - temp;
                                            break;
                                        }
                                    case 180:
                                        avg_x = 1.0f - avg_x;
                                        avg_y = 1.0f - avg_y;
                                        break;
                                    case 270:
                                        {
                                            float temp = avg_x;
                                            avg_x = 1.0f - avg_y;
                                            avg_y = temp;
                                            break;
                                        }
                                }
                                if (MyDebug.LOG) {
                                    Log.d(TAG, "    avg_x: " + avg_x);
                                    Log.d(TAG, "    avg_y: " + avg_y);
                                }
                                if (avg_x < bdry_frac_c)
                                    face_location = FaceLocation.FACELOCATION_LEFT;
                                else if (avg_x > 1.0f - bdry_frac_c)
                                    face_location = FaceLocation.FACELOCATION_RIGHT;
                                else if (avg_y < bdry_frac_c)
                                    face_location = FaceLocation.FACELOCATION_TOP;
                                else if (avg_y > 1.0f - bdry_frac_c)
                                    face_location = FaceLocation.FACELOCATION_BOTTOM;
                            }
                        }
                        if (n_faces != last_n_faces || face_location != last_face_location) {
                            if (n_faces == 0 && last_n_faces == -1) {
                            // only say 0 faces detected if previously the number was non-zero
                            } else {
                                String string = n_faces + " " + getContext().getResources().getString(n_faces == 1 ? R.string.face_detected : R.string.faces_detected);
                                if (n_faces > 0 && face_location != FaceLocation.FACELOCATION_UNKNOWN) {
                                    switch(face_location) {
                                        case FACELOCATION_CENTRE:
                                            string += " " + getContext().getResources().getString(R.string.centre_of_screen);
                                            break;
                                        case FACELOCATION_LEFT:
                                            string += " " + getContext().getResources().getString(R.string.left_of_screen);
                                            break;
                                        case FACELOCATION_RIGHT:
                                            string += " " + getContext().getResources().getString(R.string.right_of_screen);
                                            break;
                                        case FACELOCATION_TOP:
                                            string += " " + getContext().getResources().getString(R.string.top_of_screen);
                                            break;
                                        case FACELOCATION_BOTTOM:
                                            string += " " + getContext().getResources().getString(R.string.bottom_of_screen);
                                            break;
                                    }
                                }
                                final String string_f = string;
                                if (MyDebug.LOG)
                                    Log.d(TAG, string);
                                // to avoid having a big queue of saying "one face detected, two faces detected" etc, we only report
                                // after a delay, cancelling any that were previously queued
                                handler.removeCallbacksAndMessages(null);
                                handler.postDelayed(new Runnable() {

                                    @Override
                                    public void run() {
                                        if (MyDebug.LOG)
                                            Log.d(TAG, "announceForAccessibility: " + string_f);
                                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
                                            Preview.this.getView().announceForAccessibility(string_f);
                                        }
                                    }
                                }, 500);
                            }
                            last_n_faces = n_faces;
                            last_face_location = face_location;
                        }
                    }
                }
            }
            camera_controller.setFaceDetectionListener(new MyFaceDetectionListener());
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after setting face detection: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG)
            Log.d(TAG, "set up video stabilization");
        if (this.supports_video_stabilization) {
            boolean using_video_stabilization = applicationInterface.getVideoStabilizationPref();
            if (MyDebug.LOG)
                Log.d(TAG, "using_video_stabilization?: " + using_video_stabilization);
            camera_controller.setVideoStabilization(using_video_stabilization);
        }
        if (MyDebug.LOG)
            Log.d(TAG, "supports_video_stabilization?: " + supports_video_stabilization);
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after video stabilization: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG)
            Log.d(TAG, "set up color effect");
        String value = applicationInterface.getColorEffectPref();
        if (MyDebug.LOG)
            Log.d(TAG, "saved color effect: " + value);
        CameraController.SupportedValues supported_values = camera_controller.setColorEffect(value);
        if (supported_values != null) {
            color_effects = supported_values.values;
            // now save, so it's available for PreferenceActivity
            applicationInterface.setColorEffectPref(supported_values.selected_value);
        } else {
            // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
            applicationInterface.clearColorEffectPref();
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after color effect: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG)
            Log.d(TAG, "set up white balance");
        String value = applicationInterface.getWhiteBalancePref();
        if (MyDebug.LOG)
            Log.d(TAG, "saved white balance: " + value);
        CameraController.SupportedValues supported_values = camera_controller.setWhiteBalance(value);
        if (supported_values != null) {
            white_balances = supported_values.values;
            // now save, so it's available for PreferenceActivity
            applicationInterface.setWhiteBalancePref(supported_values.selected_value);
            if (supported_values.selected_value.equals("manual") && this.supports_white_balance_temperature) {
                int temperature = applicationInterface.getWhiteBalanceTemperaturePref();
                camera_controller.setWhiteBalanceTemperature(temperature);
                if (MyDebug.LOG)
                    Log.d(TAG, "saved white balance: " + value);
            }
        } else {
            // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
            applicationInterface.clearWhiteBalancePref();
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after white balance: " + (System.currentTimeMillis() - debug_time));
    }
    // must be done before setting flash modes, as we may remove flash modes if in manual mode
    if (MyDebug.LOG)
        Log.d(TAG, "set up iso");
    String value = applicationInterface.getISOPref();
    if (MyDebug.LOG)
        Log.d(TAG, "saved iso: " + value);
    boolean is_manual_iso = false;
    if (supports_iso_range) {
        // in this mode, we can set any ISO value from min to max
        // if supports_iso_range==true, caller shouldn't be using getSupportedISOs()
        this.isos = null;
        // now set the desired ISO mode/value
        if (value.equals(CameraController.ISO_DEFAULT)) {
            if (MyDebug.LOG)
                Log.d(TAG, "setting auto iso");
            camera_controller.setManualISO(false, 0);
        } else {
            int iso = parseManualISOValue(value);
            if (iso >= 0) {
                is_manual_iso = true;
                if (MyDebug.LOG)
                    Log.d(TAG, "iso: " + iso);
                camera_controller.setManualISO(true, iso);
            } else {
                // failed to parse
                camera_controller.setManualISO(false, 0);
                // so we switch the preferences back to auto mode, rather than the invalid value
                value = CameraController.ISO_DEFAULT;
            }
            // now save, so it's available for PreferenceActivity
            applicationInterface.setISOPref(value);
        }
    } else {
        // in this mode, any support for ISO is only the specific ISOs offered by the CameraController
        CameraController.SupportedValues supported_values = camera_controller.setISO(value);
        if (supported_values != null) {
            isos = supported_values.values;
            if (!supported_values.selected_value.equals(CameraController.ISO_DEFAULT)) {
                if (MyDebug.LOG)
                    Log.d(TAG, "has manual iso");
                is_manual_iso = true;
            }
            // now save, so it's available for PreferenceActivity
            applicationInterface.setISOPref(supported_values.selected_value);
        } else {
            // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
            applicationInterface.clearISOPref();
        }
    }
    if (is_manual_iso) {
        if (supports_exposure_time) {
            long exposure_time_value = applicationInterface.getExposureTimePref();
            if (MyDebug.LOG)
                Log.d(TAG, "saved exposure_time: " + exposure_time_value);
            if (exposure_time_value < getMinimumExposureTime())
                exposure_time_value = getMinimumExposureTime();
            else if (exposure_time_value > getMaximumExposureTime())
                exposure_time_value = getMaximumExposureTime();
            camera_controller.setExposureTime(exposure_time_value);
            // now save
            applicationInterface.setExposureTimePref(exposure_time_value);
        } else {
            // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
            applicationInterface.clearExposureTimePref();
        }
        if (this.using_android_l && supported_flash_values != null) {
            // flash modes not supported when using Camera2 and manual ISO
            // (it's unclear flash is useful - ideally we'd at least offer torch, but ISO seems to reset to 100 when flash/torch is on!)
            supported_flash_values = null;
            if (MyDebug.LOG)
                Log.d(TAG, "flash not supported in Camera2 manual mode");
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after manual iso: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG) {
            Log.d(TAG, "set up exposure compensation");
            Log.d(TAG, "min_exposure: " + min_exposure);
            Log.d(TAG, "max_exposure: " + max_exposure);
        }
        // get min/max exposure
        exposures = null;
        if (min_exposure != 0 || max_exposure != 0) {
            exposures = new ArrayList<>();
            for (int i = min_exposure; i <= max_exposure; i++) {
                exposures.add("" + i);
            }
            // if in manual ISO mode, we still want to get the valid exposure compensations, but shouldn't set exposure compensation
            if (!is_manual_iso) {
                int exposure = applicationInterface.getExposureCompensationPref();
                if (exposure < min_exposure || exposure > max_exposure) {
                    exposure = 0;
                    if (MyDebug.LOG)
                        Log.d(TAG, "saved exposure not supported, reset to 0");
                    if (exposure < min_exposure || exposure > max_exposure) {
                        if (MyDebug.LOG)
                            Log.d(TAG, "zero isn't an allowed exposure?! reset to min " + min_exposure);
                        exposure = min_exposure;
                    }
                }
                camera_controller.setExposureCompensation(exposure);
                // now save, so it's available for PreferenceActivity
                applicationInterface.setExposureCompensationPref(exposure);
            }
        } else {
            // delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
            applicationInterface.clearExposureCompensationPref();
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after exposures: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG)
            Log.d(TAG, "set up picture sizes");
        if (MyDebug.LOG) {
            for (int i = 0; i < sizes.size(); i++) {
                CameraController.Size size = sizes.get(i);
                Log.d(TAG, "supported picture size: " + size.width + " , " + size.height);
            }
        }
        current_size_index = -1;
        Pair<Integer, Integer> resolution = applicationInterface.getCameraResolutionPref();
        if (resolution != null) {
            int resolution_w = resolution.first;
            int resolution_h = resolution.second;
            // now find size in valid list
            for (int i = 0; i < sizes.size() && current_size_index == -1; i++) {
                CameraController.Size size = sizes.get(i);
                if (size.width == resolution_w && size.height == resolution_h) {
                    current_size_index = i;
                    if (MyDebug.LOG)
                        Log.d(TAG, "set current_size_index to: " + current_size_index);
                }
            }
            if (current_size_index == -1) {
                if (MyDebug.LOG)
                    Log.e(TAG, "failed to find valid size");
            }
        }
        if (current_size_index == -1) {
            // set to largest
            CameraController.Size current_size = null;
            for (int i = 0; i < sizes.size(); i++) {
                CameraController.Size size = sizes.get(i);
                if (current_size == null || size.width * size.height > current_size.width * current_size.height) {
                    current_size_index = i;
                    current_size = size;
                }
            }
        }
        if (current_size_index != -1) {
            CameraController.Size current_size = sizes.get(current_size_index);
            if (MyDebug.LOG)
                Log.d(TAG, "Current size index " + current_size_index + ": " + current_size.width + ", " + current_size.height);
            // now save, so it's available for PreferenceActivity
            applicationInterface.setCameraResolutionPref(current_size.width, current_size.height);
        }
    // size set later in setPreviewSize()
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after picture sizes: " + (System.currentTimeMillis() - debug_time));
    }
    {
        int image_quality = applicationInterface.getImageQualityPref();
        if (MyDebug.LOG)
            Log.d(TAG, "set up jpeg quality: " + image_quality);
        camera_controller.setJpegQuality(image_quality);
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after jpeg quality: " + (System.currentTimeMillis() - debug_time));
    }
    // get available sizes
    initialiseVideoSizes();
    initialiseVideoQuality();
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after video sizes: " + (System.currentTimeMillis() - debug_time));
    }
    String video_quality_value_s = applicationInterface.getVideoQualityPref();
    if (MyDebug.LOG)
        Log.d(TAG, "video_quality_value: " + video_quality_value_s);
    video_quality_handler.setCurrentVideoQualityIndex(-1);
    if (video_quality_value_s.length() > 0) {
        // now find value in valid list
        for (int i = 0; i < video_quality_handler.getSupportedVideoQuality().size() && video_quality_handler.getCurrentVideoQualityIndex() == -1; i++) {
            if (video_quality_handler.getSupportedVideoQuality().get(i).equals(video_quality_value_s)) {
                video_quality_handler.setCurrentVideoQualityIndex(i);
                if (MyDebug.LOG)
                    Log.d(TAG, "set current_video_quality to: " + video_quality_handler.getCurrentVideoQualityIndex());
            }
        }
        if (video_quality_handler.getCurrentVideoQualityIndex() == -1) {
            if (MyDebug.LOG)
                Log.e(TAG, "failed to find valid video_quality");
        }
    }
    if (video_quality_handler.getCurrentVideoQualityIndex() == -1 && video_quality_handler.getSupportedVideoQuality().size() > 0) {
        // default to FullHD if available, else pick highest quality
        // (FullHD will give smaller file sizes and generally give better performance than 4K so probably better for most users; also seems to suffer from less problems when using manual ISO in Camera2 API)
        // start with highest quality
        video_quality_handler.setCurrentVideoQualityIndex(0);
        for (int i = 0; i < video_quality_handler.getSupportedVideoQuality().size(); i++) {
            if (MyDebug.LOG)
                Log.d(TAG, "check video quality: " + video_quality_handler.getSupportedVideoQuality().get(i));
            CamcorderProfile profile = getCamcorderProfile(video_quality_handler.getSupportedVideoQuality().get(i));
            if (profile.videoFrameWidth == 1920 && profile.videoFrameHeight == 1080) {
                video_quality_handler.setCurrentVideoQualityIndex(i);
                break;
            }
        }
        if (MyDebug.LOG)
            Log.d(TAG, "set video_quality value to " + video_quality_handler.getCurrentVideoQuality());
    }
    if (video_quality_handler.getCurrentVideoQualityIndex() != -1) {
        // now save, so it's available for PreferenceActivity
        applicationInterface.setVideoQualityPref(video_quality_handler.getCurrentVideoQuality());
    } else {
        // This means video_quality_handler.getSupportedVideoQuality().size() is 0 - this could happen if the camera driver
        // supports no camcorderprofiles? In this case, we shouldn't support video.
        Log.e(TAG, "no video qualities found");
        supports_video = false;
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after handling video quality: " + (System.currentTimeMillis() - debug_time));
    }
    if (supports_video) {
        // set up high speed frame rates
        // should be done after checking the requested video size is available
        video_high_speed = false;
        if (this.supports_video_high_speed) {
            VideoProfile profile = getVideoProfile();
            if (MyDebug.LOG)
                Log.d(TAG, "check if we need high speed video for " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps " + profile.videoCaptureRate);
            CameraController.Size best_video_size = video_quality_handler.findVideoSizeForFrameRate(profile.videoFrameWidth, profile.videoFrameHeight, profile.videoCaptureRate);
            if (best_video_size == null && video_quality_handler.getSupportedVideoSizesHighSpeed() != null) {
                Log.e(TAG, "can't find match for capture rate: " + profile.videoCaptureRate + " and video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps " + profile.videoCaptureRate);
                // try falling back to one of the supported high speed resolutions
                CameraController.Size requested_size = video_quality_handler.getMaxSupportedVideoSizeHighSpeed();
                profile.videoFrameWidth = requested_size.width;
                profile.videoFrameHeight = requested_size.height;
                // now try again
                best_video_size = CameraController.CameraFeatures.findSize(video_quality_handler.getSupportedVideoSizesHighSpeed(), requested_size, profile.videoCaptureRate, false);
                if (best_video_size != null) {
                    if (MyDebug.LOG)
                        Log.d(TAG, "fall back to a supported video size for high speed fps");
                    // need to write back to the application
                    // so find the corresponding quality value
                    video_quality_handler.setCurrentVideoQualityIndex(-1);
                    for (int i = 0; i < video_quality_handler.getSupportedVideoQuality().size(); i++) {
                        if (MyDebug.LOG)
                            Log.d(TAG, "check video quality: " + video_quality_handler.getSupportedVideoQuality().get(i));
                        CamcorderProfile camcorder_profile = getCamcorderProfile(video_quality_handler.getSupportedVideoQuality().get(i));
                        if (camcorder_profile.videoFrameWidth == profile.videoFrameWidth && camcorder_profile.videoFrameHeight == profile.videoFrameHeight) {
                            video_quality_handler.setCurrentVideoQualityIndex(i);
                            break;
                        }
                    }
                    if (video_quality_handler.getCurrentVideoQualityIndex() != -1) {
                        if (MyDebug.LOG)
                            Log.d(TAG, "reset to video quality: " + video_quality_handler.getCurrentVideoQuality());
                        applicationInterface.setVideoQualityPref(video_quality_handler.getCurrentVideoQuality());
                    } else {
                        if (MyDebug.LOG)
                            Log.d(TAG, "but couldn't find a corresponding video quality");
                        best_video_size = null;
                    }
                }
            }
            if (best_video_size == null) {
                Log.e(TAG, "fps not supported for this video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps " + profile.videoCaptureRate);
            // we'll end up trying to record at the requested resolution and fps even though these seem incompatible;
            // the camera driver will either ignore the requested fps, or fail
            } else if (best_video_size.high_speed) {
                video_high_speed = true;
            }
        }
        if (MyDebug.LOG)
            Log.d(TAG, "video_high_speed?: " + video_high_speed);
    }
    if (is_video && video_high_speed && supports_iso_range && is_manual_iso) {
        if (MyDebug.LOG)
            Log.d(TAG, "manual mode not supported for video_high_speed");
        camera_controller.setManualISO(false, 0);
        is_manual_iso = false;
    }
    {
        if (MyDebug.LOG) {
            Log.d(TAG, "set up flash");
            Log.d(TAG, "flash values: " + supported_flash_values);
        }
        current_flash_index = -1;
        if (supported_flash_values != null && supported_flash_values.size() > 1) {
            String flash_value = applicationInterface.getFlashPref();
            if (flash_value.length() > 0) {
                if (MyDebug.LOG)
                    Log.d(TAG, "found existing flash_value: " + flash_value);
                if (!updateFlash(flash_value, false)) {
                    // don't need to save, as this is the value that's already saved
                    if (MyDebug.LOG)
                        Log.d(TAG, "flash value no longer supported!");
                    updateFlash(0, true);
                }
            } else {
                if (MyDebug.LOG)
                    Log.d(TAG, "found no existing flash_value");
                // see testTakePhotoFrontCameraScreenFlash
                if (supported_flash_values.contains("flash_auto"))
                    updateFlash("flash_auto", true);
                else
                    updateFlash("flash_off", true);
            }
        } else {
            if (MyDebug.LOG)
                Log.d(TAG, "flash not supported");
            supported_flash_values = null;
        }
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after setting up flash: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG)
            Log.d(TAG, "set up focus");
        current_focus_index = -1;
        if (supported_focus_values != null && supported_focus_values.size() > 1) {
            if (MyDebug.LOG)
                Log.d(TAG, "focus values: " + supported_focus_values);
            setFocusPref(true);
        } else {
            if (MyDebug.LOG)
                Log.d(TAG, "focus not supported");
            supported_focus_values = null;
        }
        /*supported_focus_values = new ArrayList<>();
        supported_focus_values.add("focus_mode_auto");
        supported_focus_values.add("focus_mode_infinity");
        supported_focus_values.add("focus_mode_macro");
        supported_focus_values.add("focus_mode_locked");
        supported_focus_values.add("focus_mode_manual2");
        supported_focus_values.add("focus_mode_fixed");
        supported_focus_values.add("focus_mode_edof");
        supported_focus_values.add("focus_mode_continuous_video");*/
        /*View focusModeButton = (View) activity.findViewById(R.id.focus_mode);
        focusModeButton.setVisibility(supported_focus_values != null && !immersive_mode ? View.VISIBLE : View.GONE);*/
    }
    {
        float focus_distance_value = applicationInterface.getFocusDistancePref();
        if (MyDebug.LOG)
            Log.d(TAG, "saved focus_distance: " + focus_distance_value);
        if (focus_distance_value < 0.0f)
            focus_distance_value = 0.0f;
        else if (focus_distance_value > minimum_focus_distance)
            focus_distance_value = minimum_focus_distance;
        camera_controller.setFocusDistance(focus_distance_value);
        // now save
        applicationInterface.setFocusDistancePref(focus_distance_value);
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: time after setting up focus: " + (System.currentTimeMillis() - debug_time));
    }
    {
        if (MyDebug.LOG)
            Log.d(TAG, "set up exposure lock");
        // exposure lock should always default to false, as it doesn't make sense to save it - we can't really preserve a "lock" after the camera is reopened
        // also note that it isn't safe to lock the exposure before starting the preview
        is_exposure_locked = false;
    }
    if (MyDebug.LOG) {
        Log.d(TAG, "setupCameraParameters: total time for setting up camera parameters: " + (System.currentTimeMillis() - debug_time));
    }
}
Also used: CameraController (net.sourceforge.opencamera.CameraController.CameraController), ArrayList (java.util.ArrayList), Activity (android.app.Activity), CamcorderProfile (android.media.CamcorderProfile), Matrix (android.graphics.Matrix), Pair (android.util.Pair), Handler (android.os.Handler), Paint (android.graphics.Paint), Point (android.graphics.Point)
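The subtle part of the example above is writing the forced high-speed size back to the saved quality preference: the loop scans getSupportedVideoQuality() for a CamcorderProfile whose frame size matches the size just forced, and discards best_video_size if nothing matches. A minimal standalone sketch of that lookup, using hypothetical names (findQualityIndexForSize, profileForQuality) rather than Open Camera's own API:

import android.media.CamcorderProfile;
import java.util.List;
import java.util.function.Function;

class VideoQualityLookup {
    // Returns the index of the first quality entry whose CamcorderProfile matches the requested
    // frame size, or -1 if none matches (the caller should then treat the size/fps combination
    // as unsupported), mirroring the loop in the example above.
    static int findQualityIndexForSize(List<String> supportedQuality,
                                       Function<String, CamcorderProfile> profileForQuality,
                                       int wantedWidth, int wantedHeight) {
        for (int i = 0; i < supportedQuality.size(); i++) {
            CamcorderProfile profile = profileForQuality.apply(supportedQuality.get(i));
            if (profile.videoFrameWidth == wantedWidth && profile.videoFrameHeight == wantedHeight)
                return i;
        }
        return -1;
    }
}

A caller would pass the example's getCamcorderProfile lookup as profileForQuality and, on a non-negative result, persist the quality string at that index, as the example does with setVideoQualityPref().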

Example 10 with CameraController

use of net.sourceforge.opencamera.CameraController.CameraController in project OpenCamera by ageback.

the class DrawPreview method drawGrids.

private void drawGrids(Canvas canvas) {
    Preview preview = main_activity.getPreview();
    CameraController camera_controller = preview.getCameraController();
    if (camera_controller == null) {
        return;
    }
    switch(preference_grid_pref) {
        case "preference_grid_3x3":
            p.setColor(Color.WHITE);
            canvas.drawLine(canvas.getWidth() / 3.0f, 0.0f, canvas.getWidth() / 3.0f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(2.0f * canvas.getWidth() / 3.0f, 0.0f, 2.0f * canvas.getWidth() / 3.0f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(0.0f, canvas.getHeight() / 3.0f, canvas.getWidth() - 1.0f, canvas.getHeight() / 3.0f, p);
            canvas.drawLine(0.0f, 2.0f * canvas.getHeight() / 3.0f, canvas.getWidth() - 1.0f, 2.0f * canvas.getHeight() / 3.0f, p);
            break;
        case "preference_grid_phi_3x3":
            p.setColor(Color.WHITE);
            canvas.drawLine(canvas.getWidth() / 2.618f, 0.0f, canvas.getWidth() / 2.618f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(1.618f * canvas.getWidth() / 2.618f, 0.0f, 1.618f * canvas.getWidth() / 2.618f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(0.0f, canvas.getHeight() / 2.618f, canvas.getWidth() - 1.0f, canvas.getHeight() / 2.618f, p);
            canvas.drawLine(0.0f, 1.618f * canvas.getHeight() / 2.618f, canvas.getWidth() - 1.0f, 1.618f * canvas.getHeight() / 2.618f, p);
            break;
        case "preference_grid_4x2":
            p.setColor(Color.GRAY);
            canvas.drawLine(canvas.getWidth() / 4.0f, 0.0f, canvas.getWidth() / 4.0f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(canvas.getWidth() / 2.0f, 0.0f, canvas.getWidth() / 2.0f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(3.0f * canvas.getWidth() / 4.0f, 0.0f, 3.0f * canvas.getWidth() / 4.0f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(0.0f, canvas.getHeight() / 2.0f, canvas.getWidth() - 1.0f, canvas.getHeight() / 2.0f, p);
            p.setColor(Color.WHITE);
            // convert dps to pixels
            int crosshairs_radius = (int) (20 * scale + 0.5f);
            canvas.drawLine(canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f - crosshairs_radius, canvas.getWidth() / 2.0f, canvas.getHeight() / 2.0f + crosshairs_radius, p);
            canvas.drawLine(canvas.getWidth() / 2.0f - crosshairs_radius, canvas.getHeight() / 2.0f, canvas.getWidth() / 2.0f + crosshairs_radius, canvas.getHeight() / 2.0f, p);
            break;
        case "preference_grid_crosshair":
            p.setColor(Color.WHITE);
            canvas.drawLine(canvas.getWidth() / 2.0f, 0.0f, canvas.getWidth() / 2.0f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(0.0f, canvas.getHeight() / 2.0f, canvas.getWidth() - 1.0f, canvas.getHeight() / 2.0f, p);
            break;
        case "preference_grid_golden_spiral_right":
        case "preference_grid_golden_spiral_left":
        case "preference_grid_golden_spiral_upside_down_right":
        case "preference_grid_golden_spiral_upside_down_left":
            canvas.save();
            switch(preference_grid_pref) {
                case "preference_grid_golden_spiral_left":
                    canvas.scale(-1.0f, 1.0f, canvas.getWidth() * 0.5f, canvas.getHeight() * 0.5f);
                    break;
                case "preference_grid_golden_spiral_right":
                    // no transformation needed
                    break;
                case "preference_grid_golden_spiral_upside_down_left":
                    canvas.rotate(180.0f, canvas.getWidth() * 0.5f, canvas.getHeight() * 0.5f);
                    break;
                case "preference_grid_golden_spiral_upside_down_right":
                    canvas.scale(1.0f, -1.0f, canvas.getWidth() * 0.5f, canvas.getHeight() * 0.5f);
                    break;
            }
            p.setColor(Color.WHITE);
            p.setStyle(Paint.Style.STROKE);
            p.setStrokeWidth(stroke_width);
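            // fibb and fibb_n are consecutive Fibonacci numbers (34, 21); each step of the loop below
            // peels a fibb_n/fibb-proportioned rectangle off the remaining area and draws a clipped
            // quarter of an ellipse in it, approximating the golden spiral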
            int fibb = 34;
            int fibb_n = 21;
            int left = 0, top = 0;
            int full_width = canvas.getWidth();
            int full_height = canvas.getHeight();
            int width = (int) (full_width * ((double) fibb_n) / (double) (fibb));
            int height = full_height;
            for (int count = 0; count < 2; count++) {
                canvas.save();
                draw_rect.set(left, top, left + width, top + height);
                canvas.clipRect(draw_rect);
                canvas.drawRect(draw_rect, p);
                draw_rect.set(left, top, left + 2 * width, top + 2 * height);
                canvas.drawOval(draw_rect, p);
                canvas.restore();
                int old_fibb = fibb;
                fibb = fibb_n;
                fibb_n = old_fibb - fibb;
                left += width;
                full_width = full_width - width;
                width = full_width;
                height = (int) (height * ((double) fibb_n) / (double) (fibb));
                canvas.save();
                draw_rect.set(left, top, left + width, top + height);
                canvas.clipRect(draw_rect);
                canvas.drawRect(draw_rect, p);
                draw_rect.set(left - width, top, left + width, top + 2 * height);
                canvas.drawOval(draw_rect, p);
                canvas.restore();
                old_fibb = fibb;
                fibb = fibb_n;
                fibb_n = old_fibb - fibb;
                top += height;
                full_height = full_height - height;
                height = full_height;
                width = (int) (width * ((double) fibb_n) / (double) (fibb));
                left += full_width - width;
                canvas.save();
                draw_rect.set(left, top, left + width, top + height);
                canvas.clipRect(draw_rect);
                canvas.drawRect(draw_rect, p);
                draw_rect.set(left - width, top - height, left + width, top + height);
                canvas.drawOval(draw_rect, p);
                canvas.restore();
                old_fibb = fibb;
                fibb = fibb_n;
                fibb_n = old_fibb - fibb;
                full_width = full_width - width;
                width = full_width;
                left -= width;
                height = (int) (height * ((double) fibb_n) / (double) (fibb));
                top += full_height - height;
                canvas.save();
                draw_rect.set(left, top, left + width, top + height);
                canvas.clipRect(draw_rect);
                canvas.drawRect(draw_rect, p);
                draw_rect.set(left, top - height, left + 2 * width, top + height);
                canvas.drawOval(draw_rect, p);
                canvas.restore();
                old_fibb = fibb;
                fibb = fibb_n;
                fibb_n = old_fibb - fibb;
                full_height = full_height - height;
                height = full_height;
                top -= height;
                width = (int) (width * ((double) fibb_n) / (double) (fibb));
            }
            canvas.restore();
            // reset
            p.setStyle(Paint.Style.FILL);
            break;
        case "preference_grid_golden_triangle_1":
        case "preference_grid_golden_triangle_2":
            p.setColor(Color.WHITE);
            double theta = Math.atan2(canvas.getWidth(), canvas.getHeight());
            double dist = canvas.getHeight() * Math.cos(theta);
            float dist_x = (float) (dist * Math.sin(theta));
            float dist_y = (float) (dist * Math.cos(theta));
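            // (dist_x, dist_y) is the offset of the foot of the perpendicular dropped from a corner onto
            // the main diagonal; the two shorter lines below join the off-diagonal corners to those feet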
            if (preference_grid_pref.equals("preference_grid_golden_triangle_1")) {
                canvas.drawLine(0.0f, canvas.getHeight() - 1.0f, canvas.getWidth() - 1.0f, 0.0f, p);
                canvas.drawLine(0.0f, 0.0f, dist_x, canvas.getHeight() - dist_y, p);
                canvas.drawLine(canvas.getWidth() - 1.0f - dist_x, dist_y - 1.0f, canvas.getWidth() - 1.0f, canvas.getHeight() - 1.0f, p);
            } else {
                canvas.drawLine(0.0f, 0.0f, canvas.getWidth() - 1.0f, canvas.getHeight() - 1.0f, p);
                canvas.drawLine(canvas.getWidth() - 1.0f, 0.0f, canvas.getWidth() - 1.0f - dist_x, canvas.getHeight() - dist_y, p);
                canvas.drawLine(dist_x, dist_y - 1.0f, 0.0f, canvas.getHeight() - 1.0f, p);
            }
            break;
        case "preference_grid_diagonals":
            p.setColor(Color.WHITE);
            canvas.drawLine(0.0f, 0.0f, canvas.getHeight() - 1.0f, canvas.getHeight() - 1.0f, p);
            canvas.drawLine(canvas.getHeight() - 1.0f, 0.0f, 0.0f, canvas.getHeight() - 1.0f, p);
            int diff = canvas.getWidth() - canvas.getHeight();
            if (diff > 0) {
                canvas.drawLine(diff, 0.0f, diff + canvas.getHeight() - 1.0f, canvas.getHeight() - 1.0f, p);
                canvas.drawLine(diff + canvas.getHeight() - 1.0f, 0.0f, diff, canvas.getHeight() - 1.0f, p);
            }
            break;
    }
}
Also used: CameraController (net.sourceforge.opencamera.CameraController.CameraController), Preview (net.sourceforge.opencamera.Preview.Preview), Paint (android.graphics.Paint)
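In the "preference_grid_phi_3x3" case above, the constants 1.618f and 2.618f are the golden ratio phi and 1 + phi, so the grid lines divide each axis at extent / (1 + phi) and phi * extent / (1 + phi). A minimal sketch (not part of Open Camera) computing those two positions for one axis:

class PhiGrid {
    private static final float PHI = 1.618034f;

    // Returns the two golden-ratio grid positions along an axis of the given length,
    // equivalent to extent / 2.618f and 1.618f * extent / 2.618f in drawGrids() above.
    static float[] phiGridPositions(float extent) {
        float first = extent / (1.0f + PHI);
        float second = PHI * extent / (1.0f + PHI);
        return new float[] { first, second };
    }
}

Calling it once with canvas.getWidth() and once with canvas.getHeight() would reproduce the four drawLine coordinates used in that case.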

Aggregations

CameraController (net.sourceforge.opencamera.CameraController.CameraController): 13 usages
Paint (android.graphics.Paint): 10 usages
Preview (net.sourceforge.opencamera.Preview.Preview): 8 usages
Point (android.graphics.Point): 2 usages
Rect (android.graphics.Rect): 2 usages
SuppressLint (android.annotation.SuppressLint): 1 usage
Activity (android.app.Activity): 1 usage
Intent (android.content.Intent): 1 usage
SharedPreferences (android.content.SharedPreferences): 1 usage
Bitmap (android.graphics.Bitmap): 1 usage
Matrix (android.graphics.Matrix): 1 usage
CamcorderProfile (android.media.CamcorderProfile): 1 usage
AsyncTask (android.os.AsyncTask): 1 usage
Handler (android.os.Handler): 1 usage
Pair (android.util.Pair): 1 usage
View (android.view.View): 1 usage
ArrayList (java.util.ArrayList): 1 usage
CameraController1 (net.sourceforge.opencamera.CameraController.CameraController1): 1 usage
CameraController2 (net.sourceforge.opencamera.CameraController.CameraController2): 1 usage
CameraControllerException (net.sourceforge.opencamera.CameraController.CameraControllerException): 1 usage