Use of android.media.CamcorderProfile in the project OpenCamera by ageback:
the class Preview, method getCamcorderProfileDescription.
/**
 * Returns a human-readable description of the camcorder profile for the supplied quality
 * string, e.g., "Highest: Full HD 1920x1080 ...". Returns an empty string if no camera
 * is currently open.
 */
public String getCamcorderProfileDescription(String quality) {
    if (camera_controller == null)
        return "";
    CamcorderProfile profile = getCamcorderProfile(quality);
    // prefix indicates whether this is the device's highest-quality profile
    String prefix = profile.quality == CamcorderProfile.QUALITY_HIGH ? "Highest: " : "";
    String type = getResolutionTypeName(profile.videoFrameWidth, profile.videoFrameHeight);
    return prefix + type + profile.videoFrameWidth + "x" + profile.videoFrameHeight + " " + getAspectRatioMPString(profile.videoFrameWidth, profile.videoFrameHeight);
}

/**
 * Maps well-known video resolutions to their common names (with a trailing space),
 * or returns "" when the resolution has no common name.
 */
private static String getResolutionTypeName(int width, int height) {
    if (width == 3840 && height == 2160)
        return "4K Ultra HD ";
    if (width == 1920 && height == 1080)
        return "Full HD ";
    if (width == 1280 && height == 720)
        return "HD ";
    if (width == 720 && height == 480)
        return "SD ";
    if (width == 640 && height == 480)
        return "VGA ";
    if (width == 352 && height == 288)
        return "CIF ";
    if (width == 320 && height == 240)
        return "QVGA ";
    if (width == 176 && height == 144)
        return "QCIF ";
    return "";
}
Use of android.media.CamcorderProfile in the project OpenCamera by ageback:
the class Preview, method getVideoProfile.
/**
 * Returns a profile describing the currently selected video quality. The returned VideoProfile
 * will usually encapsulate a CamcorderProfile (VideoProfile.getCamcorderProfile() will return
 * non-null), but not always (e.g., for slow motion mode).
 *
 * The profile is built in stages: a base CamcorderProfile (or a default VideoProfile),
 * then user overrides for fps and bitrate, then a minimum bitrate for high-speed video,
 * then frame rate/bitrate scaling for slow motion/timelapse, and finally the audio settings.
 */
public VideoProfile getVideoProfile() {
VideoProfile video_profile;
// but it does work if we explicitly set the resolution (at least tested on an S5)
if (camera_controller == null) {
// no camera open: fall back to a default-constructed VideoProfile
video_profile = new VideoProfile();
Log.e(TAG, "camera not opened! returning default video profile for QUALITY_HIGH");
return video_profile;
}
/*if( video_high_speed ) {
// return a video profile for a high speed frame rate - note that if we have a capture rate factor of say 0.25x,
// the actual fps and bitrate of the resultant video would also be scaled by a factor of 0.25x
//return new VideoProfile(MediaRecorder.AudioEncoder.AAC, MediaRecorder.OutputFormat.WEBM, 20000000,
// MediaRecorder.VideoEncoder.VP8, this.video_high_speed_size.height, 120,
// this.video_high_speed_size.width);
return new VideoProfile(MediaRecorder.AudioEncoder.AAC, MediaRecorder.OutputFormat.MPEG_4, 4*14000000,
MediaRecorder.VideoEncoder.H264, this.video_high_speed_size.height, 120,
this.video_high_speed_size.width);
}*/
// Get user settings
boolean record_audio = applicationInterface.getRecordAudioPref();
String channels_value = applicationInterface.getRecordAudioChannelsPref();
String fps_value = applicationInterface.getVideoFPSPref();
String bitrate_value = applicationInterface.getVideoBitratePref();
boolean force4k = applicationInterface.getForce4KPref();
// Use CamcorderProfile just to get the current sizes and defaults.
{
CamcorderProfile cam_profile;
int cameraId = camera_controller.getCameraId();
if (force4k) {
if (MyDebug.LOG)
Log.d(TAG, "force 4K UHD video");
// start from the QUALITY_HIGH profile and override its resolution to 4K UHD
cam_profile = CamcorderProfile.get(cameraId, CamcorderProfile.QUALITY_HIGH);
cam_profile.videoFrameWidth = 3840;
cam_profile.videoFrameHeight = 2160;
// need a higher bitrate for the better quality - this is roughly based on the bitrate used by an S5's native camera app at 4K (47.6 Mbps, compared to 16.9 Mbps which is what's returned by the QUALITY_HIGH profile)
cam_profile.videoBitRate = (int) (cam_profile.videoBitRate * 2.8);
} else if (this.video_quality_handler.getCurrentVideoQualityIndex() != -1) {
// use the profile corresponding to the user's selected quality
cam_profile = getCamcorderProfile(this.video_quality_handler.getCurrentVideoQuality());
} else {
// no quality selected; fall through to a default VideoProfile below
cam_profile = null;
}
video_profile = cam_profile != null ? new VideoProfile(cam_profile) : new VideoProfile();
}
// apply user fps override ("default" means keep the profile's fps)
if (!fps_value.equals("default")) {
try {
int fps = Integer.parseInt(fps_value);
if (MyDebug.LOG)
Log.d(TAG, "fps: " + fps);
// both the encoded frame rate and the capture rate are set to the requested fps
video_profile.videoFrameRate = fps;
video_profile.videoCaptureRate = fps;
} catch (NumberFormatException exception) {
// unparsable preference value: keep the profile's defaults
if (MyDebug.LOG)
Log.d(TAG, "fps invalid format, can't parse to int: " + fps_value);
}
}
// apply user bitrate override ("default" means keep the profile's bitrate)
if (!bitrate_value.equals("default")) {
try {
int bitrate = Integer.parseInt(bitrate_value);
if (MyDebug.LOG)
Log.d(TAG, "bitrate: " + bitrate);
video_profile.videoBitRate = bitrate;
} catch (NumberFormatException exception) {
// unparsable preference value: keep the profile's default bitrate
if (MyDebug.LOG)
Log.d(TAG, "bitrate invalid format, can't parse to int: " + bitrate_value);
}
}
// high-speed video needs at least this bitrate (4 x 14 Mbps)
final int min_high_speed_bitrate_c = 4 * 14000000;
if (video_high_speed && video_profile.videoBitRate < min_high_speed_bitrate_c) {
video_profile.videoBitRate = min_high_speed_bitrate_c;
if (MyDebug.LOG)
Log.d(TAG, "set minimum bitrate for high speed: " + video_profile.videoBitRate);
}
if (has_capture_rate_factor) {
if (MyDebug.LOG)
Log.d(TAG, "set video profile frame rate for slow motion or timelapse");
// capture rate remains the same, and we adjust the frame rate of video
// (+0.5f rounds to nearest int rather than truncating)
video_profile.videoFrameRate = (int) (video_profile.videoFrameRate * capture_rate_factor + 0.5f);
video_profile.videoBitRate = (int) (video_profile.videoBitRate * capture_rate_factor + 0.5f);
// audio not recorded with slow motion video
record_audio = false;
}
// Camera2 requires a SURFACE video source; old API uses CAMERA
video_profile.videoSource = using_android_l ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && record_audio && ContextCompat.checkSelfPermission(getContext(), Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
// we restrict check to Android 6 or later just in case, see note in LocationSupplier.setupLocationListener()
if (MyDebug.LOG)
Log.e(TAG, "don't have RECORD_AUDIO permission");
// don't show a toast here, otherwise we'll keep showing toasts whenever getVideoProfile() is called; we only
// should show a toast when user starts recording video; so we indicate this via the no_audio_permission flag
record_audio = false;
video_profile.no_audio_permission = true;
}
video_profile.record_audio = record_audio;
if (record_audio) {
// map the audio source preference string onto a MediaRecorder.AudioSource constant
String pref_audio_src = applicationInterface.getRecordAudioSourcePref();
if (MyDebug.LOG)
Log.d(TAG, "pref_audio_src: " + pref_audio_src);
switch(pref_audio_src) {
case "audio_src_mic":
video_profile.audioSource = MediaRecorder.AudioSource.MIC;
break;
case "audio_src_default":
video_profile.audioSource = MediaRecorder.AudioSource.DEFAULT;
break;
case "audio_src_voice_communication":
video_profile.audioSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
break;
case "audio_src_camcorder":
default:
// CAMCORDER is the fallback for unrecognised preference values
video_profile.audioSource = MediaRecorder.AudioSource.CAMCORDER;
break;
}
if (MyDebug.LOG)
Log.d(TAG, "audio_source: " + video_profile.audioSource);
if (MyDebug.LOG)
Log.d(TAG, "pref_audio_channels: " + channels_value);
if (channels_value.equals("audio_mono")) {
video_profile.audioChannels = 1;
} else if (channels_value.equals("audio_stereo")) {
video_profile.audioChannels = 2;
}
// else keep with the value already stored in VideoProfile (set from the CamcorderProfile)
}
if (MyDebug.LOG)
Log.d(TAG, "returning video_profile: " + video_profile);
return video_profile;
}
Use of android.media.CamcorderProfile in the project OpenCamera by ageback:
the class Preview, method getSupportedVideoQuality.
/**
 * Returns the supported video "qualities", but unlike
 * getVideoQualityHander().getSupportedVideoQuality(), allows filtering to the supplied
 * fps_value.
 * @param fps_value If not "default", the returned video qualities will be filtered to those that supported the requested
 * frame rate.
 */
public List<String> getSupportedVideoQuality(String fps_value) {
    if (MyDebug.LOG)
        Log.d(TAG, "getSupportedVideoQuality: " + fps_value);
    // only attempt filtering when a specific fps was requested and high-speed video is available
    if (!fps_value.equals("default") && supports_video_high_speed) {
        try {
            final int requested_fps = Integer.parseInt(fps_value);
            if (MyDebug.LOG)
                Log.d(TAG, "fps: " + requested_fps);
            final List<String> matching_qualities = new ArrayList<>();
            for (String candidate : video_quality_handler.getSupportedVideoQuality()) {
                if (MyDebug.LOG)
                    Log.d(TAG, "quality: " + candidate);
                final CamcorderProfile candidate_profile = getCamcorderProfile(candidate);
                if (MyDebug.LOG) {
                    Log.d(TAG, " width: " + candidate_profile.videoFrameWidth);
                    Log.d(TAG, " height: " + candidate_profile.videoFrameHeight);
                }
                // keep this quality only if some video size supports it at the requested frame rate
                final CameraController.Size size_for_fps = video_quality_handler.findVideoSizeForFrameRate(candidate_profile.videoFrameWidth, candidate_profile.videoFrameHeight, requested_fps);
                if (size_for_fps != null) {
                    if (MyDebug.LOG)
                        Log.d(TAG, " requested frame rate is supported");
                    matching_qualities.add(candidate);
                } else {
                    if (MyDebug.LOG)
                        Log.d(TAG, " requested frame rate is NOT supported");
                }
            }
            return matching_qualities;
        } catch (NumberFormatException exception) {
            // unparsable fps preference: fall through and return the unfiltered list
            if (MyDebug.LOG)
                Log.d(TAG, "fps invalid format, can't parse to int: " + fps_value);
        }
    }
    return video_quality_handler.getSupportedVideoQuality();
}
Use of android.media.CamcorderProfile in the project OpenCamera by ageback:
the class Preview, method setupCameraParameters.
private void setupCameraParameters() throws CameraControllerException {
if (MyDebug.LOG)
Log.d(TAG, "setupCameraParameters()");
long debug_time = 0;
if (MyDebug.LOG) {
debug_time = System.currentTimeMillis();
}
{
// this doesn't appear to apply to Camera2 API, but we still might as well set scene mode first
if (MyDebug.LOG)
Log.d(TAG, "set up scene mode");
String value = applicationInterface.getSceneModePref();
if (MyDebug.LOG)
Log.d(TAG, "saved scene mode: " + value);
CameraController.SupportedValues supported_values = camera_controller.setSceneMode(value);
if (supported_values != null) {
scene_modes = supported_values.values;
// now save, so it's available for PreferenceActivity
applicationInterface.setSceneModePref(supported_values.selected_value);
} else {
// delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
applicationInterface.clearSceneModePref();
}
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after setting scene mode: " + (System.currentTimeMillis() - debug_time));
}
{
// grab all read-only info from parameters
if (MyDebug.LOG)
Log.d(TAG, "grab info from parameters");
CameraController.CameraFeatures camera_features = camera_controller.getCameraFeatures();
this.has_zoom = camera_features.is_zoom_supported;
if (this.has_zoom) {
this.max_zoom_factor = camera_features.max_zoom;
this.zoom_ratios = camera_features.zoom_ratios;
}
this.minimum_focus_distance = camera_features.minimum_focus_distance;
this.supports_face_detection = camera_features.supports_face_detection;
this.sizes = camera_features.picture_sizes;
supported_flash_values = camera_features.supported_flash_values;
supported_focus_values = camera_features.supported_focus_values;
this.max_num_focus_areas = camera_features.max_num_focus_areas;
this.is_exposure_lock_supported = camera_features.is_exposure_lock_supported;
this.supports_video_stabilization = camera_features.is_video_stabilization_supported;
this.supports_photo_video_recording = camera_features.is_photo_video_recording_supported;
this.can_disable_shutter_sound = camera_features.can_disable_shutter_sound;
this.supports_white_balance_temperature = camera_features.supports_white_balance_temperature;
this.min_temperature = camera_features.min_temperature;
this.max_temperature = camera_features.max_temperature;
this.supports_iso_range = camera_features.supports_iso_range;
this.min_iso = camera_features.min_iso;
this.max_iso = camera_features.max_iso;
this.supports_exposure_time = camera_features.supports_exposure_time;
this.min_exposure_time = camera_features.min_exposure_time;
this.max_exposure_time = camera_features.max_exposure_time;
this.min_exposure = camera_features.min_exposure;
this.max_exposure = camera_features.max_exposure;
this.exposure_step = camera_features.exposure_step;
this.supports_expo_bracketing = camera_features.supports_expo_bracketing;
this.max_expo_bracketing_n_images = camera_features.max_expo_bracketing_n_images;
this.supports_raw = camera_features.supports_raw;
this.supports_burst = camera_features.supports_burst;
this.view_angle_x = camera_features.view_angle_x;
this.view_angle_y = camera_features.view_angle_y;
this.supports_video_high_speed = camera_features.video_sizes_high_speed != null && camera_features.video_sizes_high_speed.size() > 0;
this.video_quality_handler.setVideoSizes(camera_features.video_sizes);
this.video_quality_handler.setVideoSizesHighSpeed(camera_features.video_sizes_high_speed);
this.supported_preview_sizes = camera_features.preview_sizes;
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after getting read only info: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG)
Log.d(TAG, "set up face detection");
// get face detection supported
this.faces_detected = null;
if (this.supports_face_detection) {
this.using_face_detection = applicationInterface.getFaceDetectionPref();
} else {
this.using_face_detection = false;
}
if (MyDebug.LOG) {
Log.d(TAG, "supports_face_detection?: " + supports_face_detection);
Log.d(TAG, "using_face_detection?: " + using_face_detection);
}
if (this.using_face_detection) {
class MyFaceDetectionListener implements CameraController.FaceDetectionListener {
final Handler handler = new Handler();
int last_n_faces = -1;
FaceLocation last_face_location = FaceLocation.FACELOCATION_UNSET;
/**
* Note, at least for Camera2 API, onFaceDetection() isn't called on UI thread.
*/
@Override
public void onFaceDetection(final CameraController.Face[] faces) {
if (MyDebug.LOG)
Log.d(TAG, "onFaceDetection: " + faces.length + " : " + Arrays.toString(faces));
if (camera_controller == null) {
// can get a crash in some cases when switching camera when face detection is on (at least for Camera2)
Activity activity = (Activity) Preview.this.getContext();
activity.runOnUiThread(new Runnable() {
public void run() {
faces_detected = null;
}
});
return;
}
// don't assign to faces_detected yet, as that has to be done on the UI thread
// We don't synchronize on faces_detected, as the array may be passed to other
// classes via getFacesDetected(). Although that function could copy instead,
// that would mean an allocation in every frame in DrawPreview.
// Easier to just do the assignment on the UI thread.
Activity activity = (Activity) Preview.this.getContext();
activity.runOnUiThread(new Runnable() {
public void run() {
// convert rects to preview screen space - also needs to be done on UI thread
// (otherwise can have crashes if camera_controller becomes null in the meantime)
final Matrix matrix = getCameraToPreviewMatrix();
for (CameraController.Face face : faces) {
face_rect.set(face.rect);
matrix.mapRect(face_rect);
face_rect.round(face.rect);
}
reportFaces(faces);
if (faces_detected == null || faces_detected.length != faces.length) {
// avoid unnecessary reallocations
if (MyDebug.LOG)
Log.d(TAG, "allocate new faces_detected");
faces_detected = new CameraController.Face[faces.length];
}
System.arraycopy(faces, 0, faces_detected, 0, faces.length);
}
});
}
/**
* Accessibility: report number of faces for talkback etc.
*/
private void reportFaces(CameraController.Face[] local_faces) {
// View.announceForAccessibility requires JELLY_BEAN
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN && accessibility_manager.isEnabled() && accessibility_manager.isTouchExplorationEnabled()) {
int n_faces = local_faces.length;
FaceLocation face_location = FaceLocation.FACELOCATION_UNKNOWN;
if (n_faces > 0) {
// set face_location
float avg_x = 0, avg_y = 0;
final float bdry_frac_c = 0.35f;
boolean all_centre = true;
for (CameraController.Face face : local_faces) {
float face_x = face.rect.centerX();
float face_y = face.rect.centerY();
face_x /= (float) cameraSurface.getView().getWidth();
face_y /= (float) cameraSurface.getView().getHeight();
if (all_centre) {
if (face_x < bdry_frac_c || face_x > 1.0f - bdry_frac_c || face_y < bdry_frac_c || face_y > 1.0f - bdry_frac_c)
all_centre = false;
}
avg_x += face_x;
avg_y += face_y;
}
avg_x /= n_faces;
avg_y /= n_faces;
if (MyDebug.LOG) {
Log.d(TAG, " avg_x: " + avg_x);
Log.d(TAG, " avg_y: " + avg_y);
Log.d(TAG, " ui_rotation: " + ui_rotation);
}
if (all_centre) {
face_location = FaceLocation.FACELOCATION_CENTRE;
} else {
switch(ui_rotation) {
case 0:
break;
case 90:
{
float temp = avg_x;
avg_x = avg_y;
avg_y = 1.0f - temp;
break;
}
case 180:
avg_x = 1.0f - avg_x;
avg_y = 1.0f - avg_y;
break;
case 270:
{
float temp = avg_x;
avg_x = 1.0f - avg_y;
avg_y = temp;
break;
}
}
if (MyDebug.LOG) {
Log.d(TAG, " avg_x: " + avg_x);
Log.d(TAG, " avg_y: " + avg_y);
}
if (avg_x < bdry_frac_c)
face_location = FaceLocation.FACELOCATION_LEFT;
else if (avg_x > 1.0f - bdry_frac_c)
face_location = FaceLocation.FACELOCATION_RIGHT;
else if (avg_y < bdry_frac_c)
face_location = FaceLocation.FACELOCATION_TOP;
else if (avg_y > 1.0f - bdry_frac_c)
face_location = FaceLocation.FACELOCATION_BOTTOM;
}
}
if (n_faces != last_n_faces || face_location != last_face_location) {
if (n_faces == 0 && last_n_faces == -1) {
// only say 0 faces detected if previously the number was non-zero
} else {
String string = n_faces + " " + getContext().getResources().getString(n_faces == 1 ? R.string.face_detected : R.string.faces_detected);
if (n_faces > 0 && face_location != FaceLocation.FACELOCATION_UNKNOWN) {
switch(face_location) {
case FACELOCATION_CENTRE:
string += " " + getContext().getResources().getString(R.string.centre_of_screen);
break;
case FACELOCATION_LEFT:
string += " " + getContext().getResources().getString(R.string.left_of_screen);
break;
case FACELOCATION_RIGHT:
string += " " + getContext().getResources().getString(R.string.right_of_screen);
break;
case FACELOCATION_TOP:
string += " " + getContext().getResources().getString(R.string.top_of_screen);
break;
case FACELOCATION_BOTTOM:
string += " " + getContext().getResources().getString(R.string.bottom_of_screen);
break;
}
}
final String string_f = string;
if (MyDebug.LOG)
Log.d(TAG, string);
// to avoid having a big queue of saying "one face detected, two faces detected" etc, we only report
// after a delay, cancelling any that were previously queued
handler.removeCallbacksAndMessages(null);
handler.postDelayed(new Runnable() {
@Override
public void run() {
if (MyDebug.LOG)
Log.d(TAG, "announceForAccessibility: " + string_f);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
Preview.this.getView().announceForAccessibility(string_f);
}
}
}, 500);
}
last_n_faces = n_faces;
last_face_location = face_location;
}
}
}
}
camera_controller.setFaceDetectionListener(new MyFaceDetectionListener());
}
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after setting face detection: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG)
Log.d(TAG, "set up video stabilization");
if (this.supports_video_stabilization) {
boolean using_video_stabilization = applicationInterface.getVideoStabilizationPref();
if (MyDebug.LOG)
Log.d(TAG, "using_video_stabilization?: " + using_video_stabilization);
camera_controller.setVideoStabilization(using_video_stabilization);
}
if (MyDebug.LOG)
Log.d(TAG, "supports_video_stabilization?: " + supports_video_stabilization);
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after video stabilization: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG)
Log.d(TAG, "set up color effect");
String value = applicationInterface.getColorEffectPref();
if (MyDebug.LOG)
Log.d(TAG, "saved color effect: " + value);
CameraController.SupportedValues supported_values = camera_controller.setColorEffect(value);
if (supported_values != null) {
color_effects = supported_values.values;
// now save, so it's available for PreferenceActivity
applicationInterface.setColorEffectPref(supported_values.selected_value);
} else {
// delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
applicationInterface.clearColorEffectPref();
}
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after color effect: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG)
Log.d(TAG, "set up white balance");
String value = applicationInterface.getWhiteBalancePref();
if (MyDebug.LOG)
Log.d(TAG, "saved white balance: " + value);
CameraController.SupportedValues supported_values = camera_controller.setWhiteBalance(value);
if (supported_values != null) {
white_balances = supported_values.values;
// now save, so it's available for PreferenceActivity
applicationInterface.setWhiteBalancePref(supported_values.selected_value);
if (supported_values.selected_value.equals("manual") && this.supports_white_balance_temperature) {
int temperature = applicationInterface.getWhiteBalanceTemperaturePref();
camera_controller.setWhiteBalanceTemperature(temperature);
if (MyDebug.LOG)
Log.d(TAG, "saved white balance: " + value);
}
} else {
// delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
applicationInterface.clearWhiteBalancePref();
}
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after white balance: " + (System.currentTimeMillis() - debug_time));
}
// must be done before setting flash modes, as we may remove flash modes if in manual mode
if (MyDebug.LOG)
Log.d(TAG, "set up iso");
String value = applicationInterface.getISOPref();
if (MyDebug.LOG)
Log.d(TAG, "saved iso: " + value);
boolean is_manual_iso = false;
if (supports_iso_range) {
// in this mode, we can set any ISO value from min to max
// if supports_iso_range==true, caller shouldn't be using getSupportedISOs()
this.isos = null;
// now set the desired ISO mode/value
if (value.equals(CameraController.ISO_DEFAULT)) {
if (MyDebug.LOG)
Log.d(TAG, "setting auto iso");
camera_controller.setManualISO(false, 0);
} else {
int iso = parseManualISOValue(value);
if (iso >= 0) {
is_manual_iso = true;
if (MyDebug.LOG)
Log.d(TAG, "iso: " + iso);
camera_controller.setManualISO(true, iso);
} else {
// failed to parse
camera_controller.setManualISO(false, 0);
// so we switch the preferences back to auto mode, rather than the invalid value
value = CameraController.ISO_DEFAULT;
}
// now save, so it's available for PreferenceActivity
applicationInterface.setISOPref(value);
}
} else {
// in this mode, any support for ISO is only the specific ISOs offered by the CameraController
CameraController.SupportedValues supported_values = camera_controller.setISO(value);
if (supported_values != null) {
isos = supported_values.values;
if (!supported_values.selected_value.equals(CameraController.ISO_DEFAULT)) {
if (MyDebug.LOG)
Log.d(TAG, "has manual iso");
is_manual_iso = true;
}
// now save, so it's available for PreferenceActivity
applicationInterface.setISOPref(supported_values.selected_value);
} else {
// delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
applicationInterface.clearISOPref();
}
}
if (is_manual_iso) {
if (supports_exposure_time) {
long exposure_time_value = applicationInterface.getExposureTimePref();
if (MyDebug.LOG)
Log.d(TAG, "saved exposure_time: " + exposure_time_value);
if (exposure_time_value < getMinimumExposureTime())
exposure_time_value = getMinimumExposureTime();
else if (exposure_time_value > getMaximumExposureTime())
exposure_time_value = getMaximumExposureTime();
camera_controller.setExposureTime(exposure_time_value);
// now save
applicationInterface.setExposureTimePref(exposure_time_value);
} else {
// delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
applicationInterface.clearExposureTimePref();
}
if (this.using_android_l && supported_flash_values != null) {
// flash modes not supported when using Camera2 and manual ISO
// (it's unclear flash is useful - ideally we'd at least offer torch, but ISO seems to reset to 100 when flash/torch is on!)
supported_flash_values = null;
if (MyDebug.LOG)
Log.d(TAG, "flash not supported in Camera2 manual mode");
}
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after manual iso: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG) {
Log.d(TAG, "set up exposure compensation");
Log.d(TAG, "min_exposure: " + min_exposure);
Log.d(TAG, "max_exposure: " + max_exposure);
}
// get min/max exposure
exposures = null;
if (min_exposure != 0 || max_exposure != 0) {
exposures = new ArrayList<>();
for (int i = min_exposure; i <= max_exposure; i++) {
exposures.add("" + i);
}
// if in manual ISO mode, we still want to get the valid exposure compensations, but shouldn't set exposure compensation
if (!is_manual_iso) {
int exposure = applicationInterface.getExposureCompensationPref();
if (exposure < min_exposure || exposure > max_exposure) {
exposure = 0;
if (MyDebug.LOG)
Log.d(TAG, "saved exposure not supported, reset to 0");
if (exposure < min_exposure || exposure > max_exposure) {
if (MyDebug.LOG)
Log.d(TAG, "zero isn't an allowed exposure?! reset to min " + min_exposure);
exposure = min_exposure;
}
}
camera_controller.setExposureCompensation(exposure);
// now save, so it's available for PreferenceActivity
applicationInterface.setExposureCompensationPref(exposure);
}
} else {
// delete key in case it's present (e.g., if feature no longer available due to change in OS, or switching APIs)
applicationInterface.clearExposureCompensationPref();
}
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after exposures: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG)
Log.d(TAG, "set up picture sizes");
if (MyDebug.LOG) {
for (int i = 0; i < sizes.size(); i++) {
CameraController.Size size = sizes.get(i);
Log.d(TAG, "supported picture size: " + size.width + " , " + size.height);
}
}
current_size_index = -1;
Pair<Integer, Integer> resolution = applicationInterface.getCameraResolutionPref();
if (resolution != null) {
int resolution_w = resolution.first;
int resolution_h = resolution.second;
// now find size in valid list
for (int i = 0; i < sizes.size() && current_size_index == -1; i++) {
CameraController.Size size = sizes.get(i);
if (size.width == resolution_w && size.height == resolution_h) {
current_size_index = i;
if (MyDebug.LOG)
Log.d(TAG, "set current_size_index to: " + current_size_index);
}
}
if (current_size_index == -1) {
if (MyDebug.LOG)
Log.e(TAG, "failed to find valid size");
}
}
if (current_size_index == -1) {
// set to largest
CameraController.Size current_size = null;
for (int i = 0; i < sizes.size(); i++) {
CameraController.Size size = sizes.get(i);
if (current_size == null || size.width * size.height > current_size.width * current_size.height) {
current_size_index = i;
current_size = size;
}
}
}
if (current_size_index != -1) {
CameraController.Size current_size = sizes.get(current_size_index);
if (MyDebug.LOG)
Log.d(TAG, "Current size index " + current_size_index + ": " + current_size.width + ", " + current_size.height);
// now save, so it's available for PreferenceActivity
applicationInterface.setCameraResolutionPref(current_size.width, current_size.height);
}
// size set later in setPreviewSize()
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after picture sizes: " + (System.currentTimeMillis() - debug_time));
}
{
int image_quality = applicationInterface.getImageQualityPref();
if (MyDebug.LOG)
Log.d(TAG, "set up jpeg quality: " + image_quality);
camera_controller.setJpegQuality(image_quality);
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after jpeg quality: " + (System.currentTimeMillis() - debug_time));
}
// get available sizes
initialiseVideoSizes();
initialiseVideoQuality();
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after video sizes: " + (System.currentTimeMillis() - debug_time));
}
String video_quality_value_s = applicationInterface.getVideoQualityPref();
if (MyDebug.LOG)
Log.d(TAG, "video_quality_value: " + video_quality_value_s);
video_quality_handler.setCurrentVideoQualityIndex(-1);
if (video_quality_value_s.length() > 0) {
// now find value in valid list
for (int i = 0; i < video_quality_handler.getSupportedVideoQuality().size() && video_quality_handler.getCurrentVideoQualityIndex() == -1; i++) {
if (video_quality_handler.getSupportedVideoQuality().get(i).equals(video_quality_value_s)) {
video_quality_handler.setCurrentVideoQualityIndex(i);
if (MyDebug.LOG)
Log.d(TAG, "set current_video_quality to: " + video_quality_handler.getCurrentVideoQualityIndex());
}
}
if (video_quality_handler.getCurrentVideoQualityIndex() == -1) {
if (MyDebug.LOG)
Log.e(TAG, "failed to find valid video_quality");
}
}
if (video_quality_handler.getCurrentVideoQualityIndex() == -1 && video_quality_handler.getSupportedVideoQuality().size() > 0) {
// default to FullHD if available, else pick highest quality
// (FullHD will give smaller file sizes and generally give better performance than 4K so probably better for most users; also seems to suffer from less problems when using manual ISO in Camera2 API)
// start with highest quality
video_quality_handler.setCurrentVideoQualityIndex(0);
for (int i = 0; i < video_quality_handler.getSupportedVideoQuality().size(); i++) {
if (MyDebug.LOG)
Log.d(TAG, "check video quality: " + video_quality_handler.getSupportedVideoQuality().get(i));
CamcorderProfile profile = getCamcorderProfile(video_quality_handler.getSupportedVideoQuality().get(i));
if (profile.videoFrameWidth == 1920 && profile.videoFrameHeight == 1080) {
video_quality_handler.setCurrentVideoQualityIndex(i);
break;
}
}
if (MyDebug.LOG)
Log.d(TAG, "set video_quality value to " + video_quality_handler.getCurrentVideoQuality());
}
if (video_quality_handler.getCurrentVideoQualityIndex() != -1) {
// now save, so it's available for PreferenceActivity
applicationInterface.setVideoQualityPref(video_quality_handler.getCurrentVideoQuality());
} else {
// This means video_quality_handler.getSupportedVideoQuality().size() is 0 - this could happen if the camera driver
// supports no camcorderprofiles? In this case, we shouldn't support video.
Log.e(TAG, "no video qualities found");
supports_video = false;
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after handling video quality: " + (System.currentTimeMillis() - debug_time));
}
if (supports_video) {
// set up high speed frame rates
// should be done after checking the requested video size is available
video_high_speed = false;
if (this.supports_video_high_speed) {
VideoProfile profile = getVideoProfile();
if (MyDebug.LOG)
Log.d(TAG, "check if we need high speed video for " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps " + profile.videoCaptureRate);
CameraController.Size best_video_size = video_quality_handler.findVideoSizeForFrameRate(profile.videoFrameWidth, profile.videoFrameHeight, profile.videoCaptureRate);
if (best_video_size == null && video_quality_handler.getSupportedVideoSizesHighSpeed() != null) {
Log.e(TAG, "can't find match for capture rate: " + profile.videoCaptureRate + " and video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps " + profile.videoCaptureRate);
// try falling back to one of the supported high speed resolutions
CameraController.Size requested_size = video_quality_handler.getMaxSupportedVideoSizeHighSpeed();
profile.videoFrameWidth = requested_size.width;
profile.videoFrameHeight = requested_size.height;
// now try again
best_video_size = CameraController.CameraFeatures.findSize(video_quality_handler.getSupportedVideoSizesHighSpeed(), requested_size, profile.videoCaptureRate, false);
if (best_video_size != null) {
if (MyDebug.LOG)
Log.d(TAG, "fall back to a supported video size for high speed fps");
// need to write back to the application
// so find the corresponding quality value
video_quality_handler.setCurrentVideoQualityIndex(-1);
for (int i = 0; i < video_quality_handler.getSupportedVideoQuality().size(); i++) {
if (MyDebug.LOG)
Log.d(TAG, "check video quality: " + video_quality_handler.getSupportedVideoQuality().get(i));
CamcorderProfile camcorder_profile = getCamcorderProfile(video_quality_handler.getSupportedVideoQuality().get(i));
if (camcorder_profile.videoFrameWidth == profile.videoFrameWidth && camcorder_profile.videoFrameHeight == profile.videoFrameHeight) {
video_quality_handler.setCurrentVideoQualityIndex(i);
break;
}
}
if (video_quality_handler.getCurrentVideoQualityIndex() != -1) {
if (MyDebug.LOG)
Log.d(TAG, "reset to video quality: " + video_quality_handler.getCurrentVideoQuality());
applicationInterface.setVideoQualityPref(video_quality_handler.getCurrentVideoQuality());
} else {
if (MyDebug.LOG)
Log.d(TAG, "but couldn't find a corresponding video quality");
best_video_size = null;
}
}
}
if (best_video_size == null) {
Log.e(TAG, "fps not supported for this video size: " + profile.videoFrameWidth + " x " + profile.videoFrameHeight + " at fps " + profile.videoCaptureRate);
// we'll end up trying to record at the requested resolution and fps even though these seem incompatible;
// the camera driver will either ignore the requested fps, or fail
} else if (best_video_size.high_speed) {
video_high_speed = true;
}
}
if (MyDebug.LOG)
Log.d(TAG, "video_high_speed?: " + video_high_speed);
}
if (is_video && video_high_speed && supports_iso_range && is_manual_iso) {
if (MyDebug.LOG)
Log.d(TAG, "manual mode not supported for video_high_speed");
camera_controller.setManualISO(false, 0);
is_manual_iso = false;
}
{
if (MyDebug.LOG) {
Log.d(TAG, "set up flash");
Log.d(TAG, "flash values: " + supported_flash_values);
}
current_flash_index = -1;
if (supported_flash_values != null && supported_flash_values.size() > 1) {
String flash_value = applicationInterface.getFlashPref();
if (flash_value.length() > 0) {
if (MyDebug.LOG)
Log.d(TAG, "found existing flash_value: " + flash_value);
if (!updateFlash(flash_value, false)) {
// don't need to save, as this is the value that's already saved
if (MyDebug.LOG)
Log.d(TAG, "flash value no longer supported!");
updateFlash(0, true);
}
} else {
if (MyDebug.LOG)
Log.d(TAG, "found no existing flash_value");
// see testTakePhotoFrontCameraScreenFlash
if (supported_flash_values.contains("flash_auto"))
updateFlash("flash_auto", true);
else
updateFlash("flash_off", true);
}
} else {
if (MyDebug.LOG)
Log.d(TAG, "flash not supported");
supported_flash_values = null;
}
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after setting up flash: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG)
Log.d(TAG, "set up focus");
current_focus_index = -1;
if (supported_focus_values != null && supported_focus_values.size() > 1) {
if (MyDebug.LOG)
Log.d(TAG, "focus values: " + supported_focus_values);
setFocusPref(true);
} else {
if (MyDebug.LOG)
Log.d(TAG, "focus not supported");
supported_focus_values = null;
}
/*supported_focus_values = new ArrayList<>();
supported_focus_values.add("focus_mode_auto");
supported_focus_values.add("focus_mode_infinity");
supported_focus_values.add("focus_mode_macro");
supported_focus_values.add("focus_mode_locked");
supported_focus_values.add("focus_mode_manual2");
supported_focus_values.add("focus_mode_fixed");
supported_focus_values.add("focus_mode_edof");
supported_focus_values.add("focus_mode_continuous_video");*/
/*View focusModeButton = (View) activity.findViewById(R.id.focus_mode);
focusModeButton.setVisibility(supported_focus_values != null && !immersive_mode ? View.VISIBLE : View.GONE);*/
}
{
float focus_distance_value = applicationInterface.getFocusDistancePref();
if (MyDebug.LOG)
Log.d(TAG, "saved focus_distance: " + focus_distance_value);
if (focus_distance_value < 0.0f)
focus_distance_value = 0.0f;
else if (focus_distance_value > minimum_focus_distance)
focus_distance_value = minimum_focus_distance;
camera_controller.setFocusDistance(focus_distance_value);
// now save
applicationInterface.setFocusDistancePref(focus_distance_value);
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: time after setting up focus: " + (System.currentTimeMillis() - debug_time));
}
{
if (MyDebug.LOG)
Log.d(TAG, "set up exposure lock");
// exposure lock should always default to false, as doesn't make sense to save it - we can't really preserve a "lock" after the camera is reopened
// also note that it isn't safe to lock the exposure before starting the preview
is_exposure_locked = false;
}
if (MyDebug.LOG) {
Log.d(TAG, "setupCameraParameters: total time for setting up camera parameters: " + (System.currentTimeMillis() - debug_time));
}
}
Use of android.media.CamcorderProfile in project CameraKit-Android by flurgle,
in class Camera1, method prepareMediaRecorder.
/**
 * Configures {@code mMediaRecorder} for video capture using the CamcorderProfile
 * that corresponds to the currently selected video quality.
 *
 * @param videoFile   destination file for the recording; if {@code null}, a file is
 *                    obtained via {@link #getVideoFile()}
 * @param maxDuration maximum recording duration in milliseconds; values &lt;= 0 mean
 *                    no limit (no max-duration listener is installed)
 * @return {@code true} if the recorder was prepared successfully, {@code false} if no
 *         output file could be determined or {@code MediaRecorder.prepare()} failed
 * @throws IOException declared for compatibility with callers; prepare() failures are
 *         actually caught internally and reported via the {@code false} return value
 */
private boolean prepareMediaRecorder(File videoFile, int maxDuration) throws IOException {
    synchronized (mCameraLock) {
        // The camera must be unlocked before MediaRecorder can take ownership of it.
        mCamera.unlock();
        mMediaRecorder = new MediaRecorder();
        mMediaRecorder.setCamera(mCamera);
        // Audio/video sources must be set before setProfile(); MIC is used here
        // (rather than AudioSource.CAMCORDER) as the audio source.
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        CamcorderProfile profile = getCamcorderProfile(mVideoQuality);
        mMediaRecorder.setProfile(profile);
        if (videoFile == null) {
            videoFile = getVideoFile();
        }
        if (videoFile == null) {
            // NOTE(review): the camera is left unlocked and the recorder unreleased on
            // this early-out — presumably the caller cleans up on a false return; confirm.
            return false;
        }
        mMediaRecorderOutputFile = videoFile;
        mMediaRecorder.setOutputFile(videoFile.getPath());
        mMediaRecorder.setPreviewDisplay(mPreview.getSurface());
        mMediaRecorder.setOrientationHint(calculateCaptureRotation());
        if (maxDuration > 0) {
            mMediaRecorder.setMaxDuration(maxDuration);
            mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
                @Override
                public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
                    // Stop automatically when the configured duration limit is hit.
                    if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
                        stopVideo();
                    }
                }
            });
        }
        try {
            mMediaRecorder.prepare();
        } catch (IllegalStateException | IOException e) {
            // Both failure modes are handled identically: release the recorder
            // (which is expected to relock the camera) and report failure.
            releaseMediaRecorder();
            return false;
        }
        return true;
    }
}
Aggregations