Use of org.havenapp.main.sensors.media.MotionAsyncTask in the project haven by guardianproject.
The example below is the surfaceCreated method of the Preview class.
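Before the full listing, here is the call pattern into MotionAsyncTask that the preview callback below follows: build the task from the previous and the current preview frame, register one or more MotionAsyncTask.MotionListener callbacks, and start it. This is a minimal sketch rather than code from the project; the constructor arguments and the onProcess signature are taken from the listing below, while the class name, the handler, and the sensitivity value are placeholders supplied by the caller.

import android.graphics.Bitmap;
import android.os.Handler;

import org.havenapp.main.sensors.media.MotionAsyncTask;

// Hypothetical helper, not part of Haven: shows the MotionAsyncTask call pattern only.
class MotionTaskSketch {

    // Compares two raw preview frame buffers of the given size.
    void compareFrames(byte[] lastFrame, byte[] currentFrame, int width, int height,
                       Handler updateHandler, int motionSensitivity) {
        MotionAsyncTask task = new MotionAsyncTask(
                lastFrame, currentFrame, width, height, updateHandler, motionSensitivity);
        task.addListener(new MotionAsyncTask.MotionListener() {
            public void onProcess(Bitmap oldBitmap, Bitmap newBitmap,
                                  Bitmap rawBitmap, boolean motionDetected) {
                // React to the comparison result here (save, notify, ...).
            }
        });
        // start() kicks off the comparison; onProcess is invoked when it completes.
        task.start();
    }
}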
/**
 * Called when the surface is created: opens the camera selected in the
 * preferences, configures it for the largest supported preview size and
 * the fastest supported preview frame-rate range, and installs a preview
 * callback that feeds frames into MotionAsyncTask for motion detection.
 */
public void surfaceCreated(SurfaceHolder holder) {
    if (camera != null)
        stopCamera();
    /*
     * The Surface has been created, acquire the camera and tell it where
     * to draw.
     * If the selected camera is the front one we open it
     */
    switch (prefs.getCamera()) {
        case PreferenceManager.FRONT:
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            int cameraCount = Camera.getNumberOfCameras();
            for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
                Camera.getCameraInfo(camIdx, cameraInfo);
                if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    try {
                        camera = Camera.open(camIdx);
                        cameraFacing = Camera.CameraInfo.CAMERA_FACING_FRONT;
                    } catch (RuntimeException e) {
                        Log.e("Preview", "Camera failed to open: " + e.getLocalizedMessage());
                    }
                }
            }
            break;
        case PreferenceManager.BACK:
            camera = Camera.open();
            cameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
            break;
        default:
            camera = null;
            break;
    }
    if (camera != null) {
        final Camera.Parameters parameters = camera.getParameters();
        try {
            // Choose the largest supported preview size.
            List<Size> sizesPreviews = parameters.getSupportedPreviewSizes();
            Size bestSize = sizesPreviews.get(0);
            for (int i = 1; i < sizesPreviews.size(); i++) {
                if ((sizesPreviews.get(i).width * sizesPreviews.get(i).height) > (bestSize.width * bestSize.height)) {
                    bestSize = sizesPreviews.get(i);
                }
            }
            parameters.setPreviewSize(bestSize.width, bestSize.height);
        } catch (Exception e) {
            Log.w("Camera", "Error setting camera preview size", e);
        }
        try {
            // Choose the supported FPS range with the highest upper bound.
            List<int[]> ranges = parameters.getSupportedPreviewFpsRange();
            int[] bestRange = ranges.get(0);
            for (int i = 1; i < ranges.size(); i++) {
                if (ranges.get(i)[1] > bestRange[1]) {
                    bestRange = ranges.get(i);
                }
            }
            parameters.setPreviewFpsRange(bestRange[0], bestRange[1]);
        } catch (Exception e) {
            Log.w("Camera", "Error setting frames per second", e);
        }
        try {
            parameters.setAutoExposureLock(false);
            parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
        } catch (Exception e) {
            // Exposure controls are not supported on every device; keep the defaults.
        }
        /*
         * If the flash is needed
         */
        if (prefs.getFlashActivation()) {
            Log.i("Preview", "Flash activated");
            parameters.setFlashMode(Parameters.FLASH_MODE_TORCH);
        }
        camera.setParameters(parameters);
        try {
            camera.setPreviewDisplay(mHolder);
            camera.setPreviewCallback(new PreviewCallback() {
                public void onPreviewFrame(byte[] data, Camera cam) {
                    Camera.Size size;
                    try {
                        size = cam.getParameters().getPreviewSize();
                    } catch (RuntimeException e) {
                        return;
                    }
                    if (size == null)
                        return;
                    long now = System.currentTimeMillis();
                    if (now < Preview.this.lastTimestamp + PREVIEW_INTERVAL)
                        return;
                    if (!doingProcessing) {
                        Log.i("Preview", "Processing new image");
                        Preview.this.lastTimestamp = now;
                        task = new MotionAsyncTask(lastPic, data, size.width, size.height, updateHandler, motionSensitivity);
                        for (MotionAsyncTask.MotionListener listener : listeners) {
                            Log.i("Preview", "Added listener");
                            task.addListener(listener);
                        }
                        doingProcessing = true;
                        task.addListener(new MotionAsyncTask.MotionListener() {
                            public void onProcess(Bitmap oldBitmap, Bitmap newBitmap, Bitmap rawBitmap, boolean motionDetected) {
                                if (motionDetected) {
                                    Log.i("MotionListener", "Motion detected");
                                    if (serviceMessenger != null) {
                                        Message message = new Message();
                                        message.what = EventTrigger.CAMERA;
                                        try {
                                            File fileImageDir = new File(Environment.getExternalStorageDirectory(), prefs.getImagePath());
                                            fileImageDir.mkdirs();
                                            String ts = new Date().getTime() + ".jpg";
                                            File fileImage = new File(fileImageDir, "detected.original." + ts);
                                            FileOutputStream stream = new FileOutputStream(fileImage);
                                            if (prefs.getCamera().equalsIgnoreCase(PreferenceManager.BACK)) {
                                                Bitmap bmps = ImageCodec.rotate(rawBitmap, 180, false);
                                                bmps.compress(Bitmap.CompressFormat.JPEG, 100, stream);
                                            } else {
                                                rawBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
                                            }
                                            stream.flush();
                                            stream.close();
                                            message.getData().putString("path", fileImage.getAbsolutePath());
                                            if (!doingVideoProcessing && prefs.getVideoMonitoringActive()) {
                                                record(camera, serviceMessenger);
                                            }
                                            /*
                                             * fileImage = new File(fileImageDir, "detected.match." + ts);
                                             * stream = new FileOutputStream(fileImage);
                                             * oldBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
                                             * stream.flush();
                                             * stream.close();
                                             *
                                             * message.getData().putString("path", fileImage.getAbsolutePath());
                                             */
                                            serviceMessenger.send(message);
                                        } catch (Exception e) {
                                            // Saving or sending the detection can fail (storage, messenger); log and continue.
                                            Log.e("Preview", "Error creating image", e);
                                        }
                                    }
                                }
                                Log.i("MotionListener", "Allowing further processing");
                                doingProcessing = false;
                            }
                        });
                        task.start();
                        lastPic = data;
                        try {
                            Camera.Parameters parameters = cam.getParameters();
                            parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
                            cam.setParameters(parameters);
                        } catch (Exception e) {
                            // Exposure compensation is not supported on every device; ignore.
                        }
                    }
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
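The listeners iterated in onPreviewFrame above come from the Preview object's listeners list, which is populated by callers outside this method. As a rough sketch of the caller side, assuming Preview exposes an addListener(MotionAsyncTask.MotionListener) method that appends to that list (that method is not part of this excerpt, so treat it as an assumption):

import android.graphics.Bitmap;
import android.util.Log;

import org.havenapp.main.sensors.media.MotionAsyncTask;

// Hypothetical caller, not part of Haven: registers interest in motion results from Preview.
// Preview is the class shown above; import it according to its actual package.
class PreviewCallerSketch {

    void watchForMotion(Preview preview) {
        preview.addListener(new MotionAsyncTask.MotionListener() {
            public void onProcess(Bitmap oldBitmap, Bitmap newBitmap,
                                  Bitmap rawBitmap, boolean motionDetected) {
                if (motionDetected) {
                    Log.i("PreviewCallerSketch", "Motion reported by Preview");
                }
            }
        });
    }
}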