Use of com.android.camera.CameraActivity in the project android_packages_apps_Camera by CyanogenMod:
class FaceView, method onDraw.
@Override
protected void onDraw(Canvas canvas) {
    // Draws an oval indicator over each detected face. Skips all drawing while
    // face rendering is blocked or no face data is available.
    if (!mBlocked && (mFaces != null) && (mFaces.length > 0)) {
        final CameraScreenNail sn = ((CameraActivity) getContext()).getCameraScreenNail();
        int rw = sn.getUncroppedRenderWidth();
        int rh = sn.getUncroppedRenderHeight();
        // Swap width/height when the render dimensions disagree with the
        // display orientation, so the matrix maps into the correct space.
        if (((rh > rw) && ((mDisplayOrientation == 0) || (mDisplayOrientation == 180)))
                || ((rw > rh) && ((mDisplayOrientation == 90) || (mDisplayOrientation == 270)))) {
            int temp = rw;
            rw = rh;
            rh = temp;
        }
        Util.prepareMatrix(mMatrix, mMirror, mDisplayOrientation, rw, rh);
        // Offsets that center the render area inside this view.
        int dx = (getWidth() - rw) / 2;
        int dy = (getHeight() - rh) / 2;
        // Focus indicator is directional. Rotate the matrix and the canvas
        // so it looks correct in all orientations.
        canvas.save();
        mMatrix.postRotate(mOrientation); // postRotate is clockwise
        canvas.rotate(-mOrientation);     // canvas rotation is counter-clockwise
        // The paint color is loop-invariant; set it once instead of per face.
        mPaint.setColor(mColor);
        for (int i = 0; i < mFaces.length; i++) {
            // Filter out low-confidence detections (false positives).
            if (mFaces[i].score < 50) {
                continue;
            }
            // Transform the driver-supplied coordinates into view coordinates.
            mRect.set(mFaces[i].rect);
            if (LOGV) {
                Util.dumpRect(mRect, "Original rect");
            }
            mMatrix.mapRect(mRect);
            if (LOGV) {
                Util.dumpRect(mRect, "Transformed rect");
            }
            mRect.offset(dx, dy);
            canvas.drawOval(mRect, mPaint);
        }
        canvas.restore();
    }
    super.onDraw(canvas);
}
Use of com.android.camera.CameraActivity in the project android_packages_apps_Camera by CyanogenMod:
class CameraStartUp, method launchCamera.
/**
 * Launches CameraActivity, waits for the preview to come up, and returns the
 * measured startup time in milliseconds (0 if the launch failed).
 */
private long launchCamera() {
    long startupTime = 0;
    try {
        Intent intent = new Intent(Intent.ACTION_MAIN);
        intent.setClass(getInstrumentation().getTargetContext(), CameraActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        long beforeStart = System.currentTimeMillis();
        Instrumentation inst = getInstrumentation();
        // startActivitySync blocks until the activity has started.
        Activity cameraActivity = inst.startActivitySync(intent);
        long cameraStarted = System.currentTimeMillis();
        // Give the preview time to appear before tearing the activity down.
        Thread.sleep(WAIT_TIME_FOR_PREVIEW);
        cameraActivity.finish();
        startupTime = cameraStarted - beforeStart;
        Thread.sleep(1000);
        Log.v(TAG, "camera startup time: " + startupTime);
    } catch (InterruptedException e) {
        // Restore the interrupt status before failing the test.
        Thread.currentThread().interrupt();
        fail("Interrupted while measuring camera startup time");
    } catch (Exception e) {
        Log.v(TAG, "Got exception", e);
        // The original message ("Fails to get the output file") was misleading:
        // no output file is involved here; the failure is in launching the activity.
        fail("Failed to launch the camera activity");
    }
    return startupTime;
}
Use of com.android.camera.CameraActivity in the project android_packages_apps_Camera by CyanogenMod:
class ShotToShotLatency, method testShotToShotLatency.
@LargeTest
public void testShotToShotLatency() {
    // Measures shot-to-shot latency over TOTAL_NUMBER_OF_SNAPSHOTS captures and
    // appends the mean and standard deviation to the camera test output file.
    //
    // The squared-deviation accumulator must be a double: the original long
    // accumulator silently truncated each (dataPoint - mean)^2 term through the
    // compound assignment's implicit narrowing cast, and the subsequent
    // long / int division discarded the fractional part before Math.sqrt.
    double sigmaOfDiffFromMeanSquared = 0;
    double mean = 0;
    double standardDeviation = 0;
    ArrayList<Long> captureTimes = new ArrayList<Long>();
    ArrayList<Long> latencyTimes = new ArrayList<Long>();
    Log.v(TAG, "start testShotToShotLatency test");
    Instrumentation inst = getInstrumentation();
    // Generate data points: trigger a snapshot, wait, record its start time.
    for (int i = 0; i < TOTAL_NUMBER_OF_SNAPSHOTS; i++) {
        inst.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_CENTER);
        sleep(SNAPSHOT_WAIT);
        CameraActivity c = getActivity();
        if (c.getCaptureStartTime() > 0) {
            captureTimes.add(c.getCaptureStartTime());
        }
    }
    // Calculate latencies between consecutive captures.
    for (int j = 1; j < captureTimes.size(); j++) {
        latencyTimes.add(captureTimes.get(j) - captureTimes.get(j - 1));
    }
    // Guard against an empty data set (fewer than two successful captures),
    // which previously produced NaN statistics via division by zero.
    if (latencyTimes.isEmpty()) {
        fail("testShotToShotLatency: no latency data points collected");
        return;
    }
    // Crunch numbers: mean first, then standard deviation.
    for (long dataPoint : latencyTimes) {
        mean += (double) dataPoint;
    }
    mean /= latencyTimes.size();
    for (long dataPoint : latencyTimes) {
        sigmaOfDiffFromMeanSquared += (dataPoint - mean) * (dataPoint - mean);
    }
    standardDeviation = Math.sqrt(sigmaOfDiffFromMeanSquared / latencyTimes.size());
    // Report statistics, appending to the shared camera test output file.
    File outFile = new File(CAMERA_TEST_OUTPUT_FILE);
    BufferedWriter output = null;
    try {
        output = new BufferedWriter(new FileWriter(outFile, true));
        output.write("Shot to shot latency - mean: " + mean + "\n");
        output.write("Shot to shot latency - standard deviation: " + standardDeviation + "\n");
        cleanupLatencyImages();
    } catch (IOException e) {
        Log.e(TAG, "testShotToShotLatency IOException writing to log " + e.toString());
    } finally {
        try {
            if (output != null) {
                output.close();
            }
        } catch (IOException e) {
            Log.e(TAG, "Error closing file: " + e.toString());
        }
    }
}
Use of com.android.camera.CameraActivity in the project android_packages_apps_Camera by CyanogenMod:
class CameraLatency, method testImageCapture.
@LargeTest
public void testImageCapture() {
    // Captures TOTAL_NUMBER_OF_IMAGECAPTURE images, averaging the per-stage
    // latency timers exposed by CameraActivity, then logs and appends the
    // averages to the camera test output file.
    Log.v(TAG, "start testImageCapture test");
    Instrumentation inst = getInstrumentation();
    inst.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_DOWN);
    try {
        for (int i = 0; i < TOTAL_NUMBER_OF_IMAGECAPTURE; i++) {
            Thread.sleep(WAIT_FOR_IMAGE_CAPTURE_TO_BE_TAKEN);
            inst.sendKeyDownUpSync(KeyEvent.KEYCODE_DPAD_CENTER);
            Thread.sleep(WAIT_FOR_IMAGE_CAPTURE_TO_BE_TAKEN);
            // Skip the first measurement (warm-up shot).
            if (i != 0) {
                CameraActivity c = getActivity();
                if (c.getAutoFocusTime() != -1) {
                    mTotalAutoFocusTime += c.getAutoFocusTime();
                    mTotalShutterLag += c.getShutterLag();
                    mTotalShutterToPictureDisplayedTime += c.getShutterToPictureDisplayedTime();
                    mTotalPictureDisplayedToJpegCallbackTime += c.getPictureDisplayedToJpegCallbackTime();
                    mTotalJpegCallbackFinishTime += c.getJpegCallbackFinishTime();
                } else {
                    // Capture did not complete; retry this iteration.
                    // NOTE(review): this retry is unbounded -- the loop never
                    // terminates if the camera keeps reporting -1. Consider a
                    // retry cap; left as-is to preserve existing behavior.
                    i--;
                    continue;
                }
            }
        }
    } catch (Exception e) {
        Log.v(TAG, "Got exception", e);
    }
    //ToDO: yslau
    //1) Need to get the baseline from the cupcake so that we can add the
    //failure condition of the camera latency.
    //2) Only count those number with succesful capture. Set the timer to invalid
    //before capture and ignore them if the value is invalid
    int numberofRun = TOTAL_NUMBER_OF_IMAGECAPTURE - 1;
    mAvgAutoFocusTime = mTotalAutoFocusTime / numberofRun;
    mAvgShutterLag = mTotalShutterLag / numberofRun;
    mAvgShutterToPictureDisplayedTime = mTotalShutterToPictureDisplayedTime / numberofRun;
    mAvgPictureDisplayedToJpegCallbackTime = mTotalPictureDisplayedToJpegCallbackTime / numberofRun;
    mAvgJpegCallbackFinishTime = mTotalJpegCallbackFinishTime / numberofRun;
    // Close the writer in a finally block: the original only closed on the
    // happy path, leaking the file descriptor whenever a write threw. Closing
    // the BufferedWriter also closes the underlying FileWriter.
    BufferedWriter out = null;
    try {
        out = new BufferedWriter(new FileWriter(CAMERA_TEST_OUTPUT_FILE, true));
        out.write("Camera Latency : \n");
        out.write("Number of loop: " + TOTAL_NUMBER_OF_IMAGECAPTURE + "\n");
        out.write("Avg AutoFocus = " + mAvgAutoFocusTime + "\n");
        out.write("Avg mShutterLag = " + mAvgShutterLag + "\n");
        out.write("Avg mShutterToPictureDisplayedTime = " + mAvgShutterToPictureDisplayedTime + "\n");
        out.write("Avg mPictureDisplayedToJpegCallbackTime = " + mAvgPictureDisplayedToJpegCallbackTime + "\n");
        out.write("Avg mJpegCallbackFinishTime = " + mAvgJpegCallbackFinishTime + "\n");
    } catch (Exception e) {
        fail("Camera Latency write output to file");
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (Exception e) {
                Log.e(TAG, "Error closing output file: " + e.toString());
            }
        }
    }
    Log.v(TAG, "The Image capture wait time = " + WAIT_FOR_IMAGE_CAPTURE_TO_BE_TAKEN);
    Log.v(TAG, "Avg AutoFocus = " + mAvgAutoFocusTime);
    Log.v(TAG, "Avg mShutterLag = " + mAvgShutterLag);
    Log.v(TAG, "Avg mShutterToPictureDisplayedTime = " + mAvgShutterToPictureDisplayedTime);
    Log.v(TAG, "Avg mPictureDisplayedToJpegCallbackTime = " + mAvgPictureDisplayedToJpegCallbackTime);
    Log.v(TAG, "Avg mJpegCallbackFinishTime = " + mAvgJpegCallbackFinishTime);
}
End of aggregated usage examples for com.android.camera.CameraActivity.