Search in sources:

Example 1 with Size

use of org.opencv.core.Size in project FaceRecognitionApp by Lauszus.

Source: class FaceRecognitionAppActivity, method onCreate.

/**
 * Sets up the activity UI: the toolbar and navigation drawer, the
 * Eigenfaces/Fisherfaces algorithm radio buttons, the three seek bars
 * (face threshold, distance threshold, maximum images), the clear and
 * take-picture buttons, and finally the OpenCV camera view.
 *
 * @param savedInstanceState previously saved state, forwarded to the superclass
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Keep the screen on while this activity (camera preview) is visible
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_face_recognition_app);
    mToolbar = (Toolbar) findViewById(R.id.toolbar);
    // Sets the Toolbar to act as the ActionBar for this Activity window
    setSupportActionBar(mToolbar);
    DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
    // Hamburger toggle keeps the drawer state in sync with the toolbar icon
    ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(this, drawer, mToolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
    drawer.addDrawerListener(toggle);
    toggle.syncState();
    // Declared final so the anonymous click listeners below can capture them
    final RadioButton mRadioButtonEigenfaces = (RadioButton) findViewById(R.id.eigenfaces);
    final RadioButton mRadioButtonFisherfaces = (RadioButton) findViewById(R.id.fisherfaces);
    mRadioButtonEigenfaces.setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            // Switch to the Eigenfaces algorithm and retrain; if training is
            // already in progress, revert both the flag and the radio buttons
            useEigenfaces = true;
            if (!trainFaces()) {
                // Set variable back
                useEigenfaces = false;
                showToast("Still training...", Toast.LENGTH_SHORT);
                mRadioButtonEigenfaces.setChecked(useEigenfaces);
                mRadioButtonFisherfaces.setChecked(!useEigenfaces);
            }
        }
    });
    mRadioButtonFisherfaces.setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            // Mirror image of the listener above: switch to Fisherfaces,
            // revert on failure
            useEigenfaces = false;
            if (!trainFaces()) {
                // Set variable back
                useEigenfaces = true;
                showToast("Still training...", Toast.LENGTH_SHORT);
                mRadioButtonEigenfaces.setChecked(useEigenfaces);
                mRadioButtonFisherfaces.setChecked(!useEigenfaces);
            }
        }
    });
    // Set radio button based on value stored in shared preferences
    prefs = PreferenceManager.getDefaultSharedPreferences(this);
    useEigenfaces = prefs.getBoolean("useEigenfaces", false);
    mRadioButtonEigenfaces.setChecked(useEigenfaces);
    mRadioButtonFisherfaces.setChecked(!useEigenfaces);
    // Used to store ArrayLists in the shared preferences
    tinydb = new TinyDB(this);
    mThresholdFace = (SeekBarArrows) findViewById(R.id.threshold_face);
    mThresholdFace.setOnSeekBarArrowsChangeListener(new SeekBarArrows.OnSeekBarArrowsChangeListener() {

        @Override
        public void onProgressChanged(float progress) {
            Log.i(TAG, "Face threshold: " + mThresholdFace.progressToString(progress));
            faceThreshold = progress;
        }
    });
    // Get initial value
    faceThreshold = mThresholdFace.getProgress();
    mThresholdDistance = (SeekBarArrows) findViewById(R.id.threshold_distance);
    mThresholdDistance.setOnSeekBarArrowsChangeListener(new SeekBarArrows.OnSeekBarArrowsChangeListener() {

        @Override
        public void onProgressChanged(float progress) {
            Log.i(TAG, "Distance threshold: " + mThresholdDistance.progressToString(progress));
            distanceThreshold = progress;
        }
    });
    // Get initial value
    distanceThreshold = mThresholdDistance.getProgress();
    mMaximumImages = (SeekBarArrows) findViewById(R.id.maximum_images);
    mMaximumImages.setOnSeekBarArrowsChangeListener(new SeekBarArrows.OnSeekBarArrowsChangeListener() {

        @Override
        public void onProgressChanged(float progress) {
            Log.i(TAG, "Maximum number of images: " + mMaximumImages.progressToString(progress));
            maximumImages = (int) progress;
            // If the user lowered the limit below the current training-set
            // size, drop the oldest entries and retrain
            if (images != null && images.size() > maximumImages) {
                int nrRemoveImages = images.size() - maximumImages;
                Log.i(TAG, "Removed " + nrRemoveImages + " images from the list");
                // Remove oldest images
                images.subList(0, nrRemoveImages).clear();
                // Remove oldest labels
                imagesLabels.subList(0, nrRemoveImages).clear();
                // Retrain faces
                trainFaces();
            }
        }
    });
    // Get initial value
    maximumImages = (int) mMaximumImages.getProgress();
    findViewById(R.id.clear_button).setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            Log.i(TAG, "Cleared training set");
            // Clear both arrays, when new instance is created
            images.clear();
            imagesLabels.clear();
            showToast("Training set cleared", Toast.LENGTH_SHORT);
        }
    });
    findViewById(R.id.take_picture_button).setOnClickListener(new View.OnClickListener() {

        // Kept per-listener so repeated taps can detect a still-running task
        NativeMethods.MeasureDistTask mMeasureDistTask;

        @Override
        public void onClick(View v) {
            // Refuse a new capture while the previous distance measurement
            // or a training pass is still running
            if (mMeasureDistTask != null && mMeasureDistTask.getStatus() != AsyncTask.Status.FINISHED) {
                Log.i(TAG, "mMeasureDistTask is still running");
                showToast("Still processing old image...", Toast.LENGTH_SHORT);
                return;
            }
            if (mTrainFacesTask != null && mTrainFacesTask.getStatus() != AsyncTask.Status.FINISHED) {
                Log.i(TAG, "mTrainFacesTask is still running");
                showToast("Still training...", Toast.LENGTH_SHORT);
                return;
            }
            Log.i(TAG, "Gray height: " + mGray.height() + " Width: " + mGray.width() + " total: " + mGray.total());
            // No frame captured yet — nothing to do
            if (mGray.total() == 0)
                return;
            // Scale image in order to decrease computation time
            // (fixed 200 px width, height chosen to keep the aspect ratio)
            Size imageSize = new Size(200, 200.0f / ((float) mGray.width() / (float) mGray.height()));
            Imgproc.resize(mGray, mGray, imageSize);
            Log.i(TAG, "Small gray height: " + mGray.height() + " Width: " + mGray.width() + " total: " + mGray.total());
            // SaveImage(mGray);
            // Create column vector
            Mat image = mGray.reshape(0, (int) mGray.total());
            Log.i(TAG, "Vector height: " + image.height() + " Width: " + image.width() + " total: " + image.total());
            // Add current image to the array
            images.add(image);
            // Keep the training set bounded: drop the oldest image/label pair
            if (images.size() > maximumImages) {
                // Remove first image
                images.remove(0);
                // Remove first label
                imagesLabels.remove(0);
                Log.i(TAG, "The number of images is limited to: " + images.size());
            }
            // Calculate normalized Euclidean distance
            mMeasureDistTask = new NativeMethods.MeasureDistTask(useEigenfaces, measureDistTaskCallback);
            mMeasureDistTask.execute(image);
            showLabelsDialog();
        }
    });
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_java_surface_view);
    // Restore the last used camera, defaulting to the front camera
    mOpenCvCameraView.setCameraIndex(prefs.getInt("mCameraIndex", CameraBridgeViewBase.CAMERA_ID_FRONT));
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Also used : Mat(org.opencv.core.Mat) Size(org.opencv.core.Size) ActionBarDrawerToggle(android.support.v7.app.ActionBarDrawerToggle) RadioButton(android.widget.RadioButton) SurfaceView(android.view.SurfaceView) View(android.view.View) AdapterView(android.widget.AdapterView) TextView(android.widget.TextView) ListView(android.widget.ListView) DrawerLayout(android.support.v4.widget.DrawerLayout)

Example 2 with Size

use of org.opencv.core.Size in project FaceRecognitionApp by Lauszus.

Source: class JavaCameraView, method initializeCamera.

/**
 * Opens the camera selected by {@code mCameraIndex} (or any available camera on
 * emulators / CAMERA_ID_ANY), configures the preview format and size to best fit
 * the requested surface, allocates the frame buffers and starts the preview.
 *
 * <p>Fixes: the "failed to open" log messages were missing a space after the
 * camera index ("Camera #0failed to open").
 *
 * @param width  desired surface width in pixels
 * @param height desired surface height in pixels
 * @return true if the camera was opened and the preview started, false otherwise
 */
protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;
        if (mCameraIndex == CAMERA_ID_ANY || isEmulator()) {
            // Just open any camera on emulators
            Log.d(TAG, "Trying to open camera with old open()");
            try {
                mCamera = Camera.open();
            } catch (Exception e) {
                Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
            }
            // Legacy open() failed: enumerate cameras (API 9+) and take the first that opens
            if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                boolean connected = false;
                for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                    Log.d(TAG, "Trying to open camera with new open(" + camIdx + ")");
                    try {
                        mCamera = Camera.open(camIdx);
                        connected = true;
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + camIdx + " failed to open: " + e.getLocalizedMessage());
                    }
                    if (connected)
                        break;
                }
            }
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                // Resolve the symbolic BACK/FRONT constant to a concrete camera index
                int localCameraIndex = mCameraIndex;
                if (mCameraIndex == CAMERA_ID_BACK) {
                    Log.i(TAG, "Trying to open back camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                } else if (mCameraIndex == CAMERA_ID_FRONT) {
                    Log.i(TAG, "Trying to open front camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                }
                // If the index still equals the symbolic constant, no matching camera was found
                if (localCameraIndex == CAMERA_ID_BACK) {
                    Log.e(TAG, "Back camera not found!");
                } else if (localCameraIndex == CAMERA_ID_FRONT) {
                    Log.e(TAG, "Front camera not found!");
                } else {
                    Log.d(TAG, "Trying to open camera with new open(" + localCameraIndex + ")");
                    try {
                        mCamera = Camera.open(localCameraIndex);
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + localCameraIndex + " failed to open: " + e.getLocalizedMessage());
                    }
                }
            }
        }
        if (mCamera == null)
            return false;
        /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            Log.d(TAG, "getSupportedPreviewSizes()");
            List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
            if (sizes != null) {
                /* Select the size that fits surface considering maximum size allowed */
                Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
                params.setPreviewFormat(ImageFormat.NV21);
                // The Android emulator does not handle NV21 previews correctly; use YV12 there
                // See: http://stackoverflow.com/a/27233595/2175837
                if (isEmulator())
                    params.setPreviewFormat(ImageFormat.YV12);
                Log.d(TAG, "Set preview size to " + Integer.valueOf((int) frameSize.width) + "x" + Integer.valueOf((int) frameSize.height));
                params.setPreviewSize((int) frameSize.width, (int) frameSize.height);
                // Recording hint improves preview performance, but is broken on the GT-I9100
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
                    params.setRecordingHint(true);
                List<String> focusModes = params.getSupportedFocusModes();
                if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }
                mCamera.setParameters(params);
                // Re-read the parameters: the driver may have adjusted our requests
                params = mCamera.getParameters();
                previewFormat = params.getPreviewFormat();
                mFrameWidth = params.getPreviewSize().width;
                mFrameHeight = params.getPreviewSize().height;
                if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
                    mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
                else
                    mScale = 0;
                if (mFpsMeter != null) {
                    mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
                }
                // Size the callback buffer for the negotiated preview format
                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];
                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);
                // Double-buffered frame chain; height * 3/2 holds the planar YUV data
                mFrameChain = new Mat[2];
                mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
                mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
                AllocateCache();
                mCameraFrame = new JavaCameraFrame[2];
                mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
                mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
                // HONEYCOMB+ requires a preview target; an off-screen SurfaceTexture suffices
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                    mCamera.setPreviewTexture(mSurfaceTexture);
                } else
                    mCamera.setPreviewDisplay(null);
                /* Finally we are ready to start the preview */
                Log.d(TAG, "startPreview");
                mCamera.startPreview();
            } else
                result = false;
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }
    return result;
}
Also used : Mat(org.opencv.core.Mat) Size(org.opencv.core.Size) SurfaceTexture(android.graphics.SurfaceTexture) Camera(android.hardware.Camera)

Example 3 with Size

use of org.opencv.core.Size in project Frankenstein by olir.

Source: class CloneLR, method configure.

/**
 * Prepares the output frame for left/right duplication: records the source
 * dimensions and allocates a destination stretched to twice the source width.
 *
 * @param sourceFrame incoming frame whose geometry defines the output
 * @return the newly allocated double-width frame
 */
@Override
public Mat configure(Mat sourceFrame) {
    smallWidth = sourceFrame.cols();
    smallHeight = sourceFrame.rows();
    newFrame = sourceFrame.clone();
    // Output is twice as wide as the input, same height
    Size doubled = new Size(2.0 * smallWidth, (double) smallHeight);
    Imgproc.resize(sourceFrame, newFrame, doubled);
    return newFrame;
}
Also used : Size(org.opencv.core.Size)

Example 4 with Size

use of org.opencv.core.Size in project Frankenstein by olir.

Source: class LR2VR180, method configure.

/**
 * Computes the border sizes and per-step scaling tables needed to convert a
 * left/right stereo frame into VR180 layout, and allocates the (larger)
 * destination frame.
 *
 * @param sourceFrame incoming stereo frame whose geometry drives the layout
 * @return the newly allocated, zero-filled destination frame
 */
@Override
public Mat configure(Mat sourceFrame) {
    float aspect = ((float) sourceFrame.cols()) / (float) sourceFrame.rows();
    int vcut = 0;
    System.out.println("aspect (A): " + aspect);
    if (aspect > 2f)
        // not a half sbs
        aspect = aspect / 2f;
    // NOTE(review): the 1.4/1.3 thresholds look empirically tuned — confirm
    // against the project's documentation before changing them
    if (aspect < 1.4f) {
        if (aspect < 1.3f)
            aspect *= 1.3f;
        vcut = 1;
    }
    System.out.println("aspect (B): " + aspect);
    // aspect heuristic: narrower aspect => larger vertical span, clamped at 0.4
    vrVerticalSpan = 0.7f - (aspect - 1.33333f) / 1.7f;
    if (vrVerticalSpan < 0.4f)
        vrVerticalSpan = 0.4f;
    System.out.println("VerticalSpan: " + vrVerticalSpan + "  vcut: " + vcut);
    // Horizontal border per eye; vcut adds half the source width when set
    borderW = (int) ((1.0f - factor) * (float) (sourceFrame.cols() >> 1) * 0.25f) + vcut * (int) (sourceFrame.cols() * 0.50);
    borderH = (int) (((1.0f - factor) + (1.0f / vrVerticalSpan - 1.0) * convert3DMode) * (float) sourceFrame.rows() * 0.5f);
    // Round borderH up to the next multiple of 2^ALIGMENT_POT
    borderH = ((borderH + (2 << (ALIGMENT_POT - 1)) - 1) >> ALIGMENT_POT) << ALIGMENT_POT;
    System.out.println("borderW: " + borderW + "  borderH: " + borderH);
    newFrame = sourceFrame.clone();
    // Destination gains 2*borderW per side horizontally and borderH top/bottom
    Imgproc.resize(sourceFrame, newFrame, new Size((double) sourceFrame.cols() + 4 * borderW, (double) sourceFrame.rows() + 2 * borderH));
    newFrame.setTo(new Scalar(0, 0, 0));
    smallWidth = sourceFrame.cols() >> 1;
    smallHeight = sourceFrame.rows();
    System.out.println("aspect: " + (((float) sourceFrame.cols()) / 2f / (float) sourceFrame.rows()) + " ==> " + (((float) newFrame.cols()) / 2f / (float) newFrame.rows()));
    if (coneCorrection) {
        // One zeroed scratch frame per correction step, same size as newFrame
        for (int i = 0; i < STEPS_PER_DIRECTION; i++) {
            bufferFrame[i] = sourceFrame.clone();
            Imgproc.resize(sourceFrame, bufferFrame[i], new Size((double) sourceFrame.cols() + 4 * borderW, (double) sourceFrame.rows() + 2 * borderH));
            bufferFrame[i].setTo(new Scalar(0, 0, 0));
        }
        // Linearly increasing raw weights: 1.0 .. 2.0 across the steps
        double[] weight = new double[STEPS_PER_DIRECTION];
        double sum = 0.0;
        for (int i = 0; i < STEPS_PER_DIRECTION; i++) {
            weight[i] = 1.0 + ((double) i) / (double) (STEPS_PER_DIRECTION - 1);
            sum += weight[i];
        }
        int hsumSrc = 0;
        int hsumDest = 0;
        // Build source/destination strip tables: equal-height source strips map
        // to destination strips scaled by the normalized inverse weights
        for (int i = 0; i < STEPS_PER_DIRECTION; i++) {
            weight[i] = sum / ((double) STEPS_PER_DIRECTION) / weight[i];
            srcStepOffset[i] = hsumSrc;
            srcStepHeight[i] = (int) ((((double) (smallHeight >> 1)) / (double) STEPS_PER_DIRECTION));
            hsumSrc += srcStepHeight[i];
            destStepOffset[i] = hsumDest;
            destStepHeight[i] = (int) (weight[i] * (((double) (smallHeight >> 1)) / (double) STEPS_PER_DIRECTION));
            hsumDest += destStepHeight[i];
        }
    // srcStepHeight[STEPS_PER_DIRECTION - 1] += (smallHeight >> 1) - hsumSrc;
    // destStepHeight[STEPS_PER_DIRECTION - 1] += (smallHeight >> 1) - hsumDest;
    }
    return newFrame;
}
Also used : Size(org.opencv.core.Size) Scalar(org.opencv.core.Scalar)

Example 5 with Size

use of org.opencv.core.Size in project Frankenstein by olir.

Source: class OutputSizeLimiter, method configure.

/**
 * Caps the frame width at {@code maximumWidth}, preserving the aspect ratio.
 * Frames already within the limit — or a disabled limit (below 1) — are
 * returned unchanged.
 *
 * @param sourceFrame incoming frame
 * @return the source frame itself, or a downscaled copy
 */
@Override
public Mat configure(Mat sourceFrame) {
    // Pass-through when limiting is disabled or the frame is already small enough
    if (maximumWidth < 1 || sourceFrame.cols() <= maximumWidth)
        return sourceFrame;
    newWidth = maximumWidth;
    // Scale height by the source aspect ratio (float math, as before)
    float aspect = ((float) sourceFrame.rows()) / (float) sourceFrame.cols();
    newHeight = (int) ((float) maximumWidth * aspect);
    newFrame = sourceFrame.clone();
    Imgproc.resize(sourceFrame, newFrame, new Size(newWidth, newHeight));
    return newFrame;
}
Also used : Size(org.opencv.core.Size)

Aggregations

Size (org.opencv.core.Size)28 Mat (org.opencv.core.Mat)17 Scalar (org.opencv.core.Scalar)9 ArrayList (java.util.ArrayList)8 Rect (org.opencv.core.Rect)7 Point (org.opencv.core.Point)6 MatOfPoint (org.opencv.core.MatOfPoint)4 SurfaceTexture (android.graphics.SurfaceTexture)2 Camera (android.hardware.Camera)2 Line (com.disnodeteam.dogecv.math.Line)2 File (java.io.File)2 MatOfPoint2f (org.opencv.core.MatOfPoint2f)2 DrawerLayout (android.support.v4.widget.DrawerLayout)1 ActionBarDrawerToggle (android.support.v7.app.ActionBarDrawerToggle)1 SurfaceView (android.view.SurfaceView)1 View (android.view.View)1 AdapterView (android.widget.AdapterView)1 ListView (android.widget.ListView)1 RadioButton (android.widget.RadioButton)1 TextView (android.widget.TextView)1