
Example 1 with Mat

Use of org.opencv.core.Mat in project FaceRecognitionApp by Lauszus.

In class FaceRecognitionAppActivity, method SaveImage:

@SuppressWarnings("ResultOfMethodCallIgnored")
public void SaveImage(Mat mat) {
    Mat mIntermediateMat = new Mat();
    if (mat.channels() == 1) // Grayscale image
        Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_GRAY2BGR);
    else
        Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_RGBA2BGR);
    // Save pictures in Pictures directory
    File path = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), TAG);
    // Create directory if needed
    path.mkdir();
    String fileName = "IMG_" + new SimpleDateFormat("yyyyMMdd_HHmmss_SSS", Locale.US).format(new Date()) + ".png";
    File file = new File(path, fileName);
    boolean bool = Imgcodecs.imwrite(file.toString(), mIntermediateMat);
    if (bool)
        Log.i(TAG, "SUCCESS writing image to external storage");
    else
        Log.e(TAG, "Failed writing image to external storage");
}
Also used: Mat (org.opencv.core.Mat), File (java.io.File), SimpleDateFormat (java.text.SimpleDateFormat), Date (java.util.Date)
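
The RGBA-to-BGR conversion above is the key step: Imgcodecs.imwrite writes pixels in BGR channel order, while frames coming out of the OpenCV Android camera pipeline are RGBA (or single-channel gray). A minimal standalone sketch of the same save path, assuming the OpenCV native library is available and using a placeholder output path:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class SaveMatSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Dummy 100x100 RGBA image filled with opaque red
        Mat rgba = new Mat(100, 100, CvType.CV_8UC4, new Scalar(255, 0, 0, 255));
        Mat bgr = new Mat();
        Imgproc.cvtColor(rgba, bgr, Imgproc.COLOR_RGBA2BGR); // imwrite expects BGR
        boolean ok = Imgcodecs.imwrite("/tmp/test.png", bgr); // placeholder path
        System.out.println(ok ? "SUCCESS writing image" : "Failed writing image");
    }
}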

Example 2 with Mat

Use of org.opencv.core.Mat in project FaceRecognitionApp by Lauszus.

In class FaceRecognitionAppActivity, method onCameraFrame:

public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat mGrayTmp = inputFrame.gray();
    Mat mRgbaTmp = inputFrame.rgba();
    // Flip image to get mirror effect
    int orientation = mOpenCvCameraView.getScreenOrientation();
    if (mOpenCvCameraView.isEmulator()) // Treat emulators as a special case
        Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
    else {
        switch (orientation) { // RGB image
            case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    // Flip along x-axis
                    Core.flip(mRgbaTmp, mRgbaTmp, 0);
                else
                    // Flip along both axes
                    Core.flip(mRgbaTmp, mRgbaTmp, -1);
                break;
            case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    // Flip along y-axis
                    Core.flip(mRgbaTmp, mRgbaTmp, 1);
                break;
        }
        switch (orientation) { // Grayscale image
            case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
                // Rotate image
                Core.transpose(mGrayTmp, mGrayTmp);
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    // Flip along both axes
                    Core.flip(mGrayTmp, mGrayTmp, -1);
                else
                    // Flip along y-axis
                    Core.flip(mGrayTmp, mGrayTmp, 1);
                break;
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
                // Rotate image
                Core.transpose(mGrayTmp, mGrayTmp);
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK)
                    // Flip along x-axis
                    Core.flip(mGrayTmp, mGrayTmp, 0);
                break;
            case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    // Flip along y-axis
                    Core.flip(mGrayTmp, mGrayTmp, 1);
                break;
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
                // Flip along x-axis
                Core.flip(mGrayTmp, mGrayTmp, 0);
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK)
                    // Flip along y-axis
                    Core.flip(mGrayTmp, mGrayTmp, 1);
                break;
        }
    }
    mGray = mGrayTmp;
    mRgba = mRgbaTmp;
    return mRgba;
}
Also used: Mat (org.opencv.core.Mat)
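
All of the branches above reduce to the third argument of Core.flip: 0 flips around the x-axis, a positive value flips around the y-axis (the mirror effect), and a negative value flips around both. A small sketch verifying those semantics on a 2x2 matrix, assuming the OpenCV native library is loaded:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class FlipSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = new Mat(2, 2, CvType.CV_8UC1);
        src.put(0, 0, 1, 2, 3, 4); // [1 2; 3 4]
        Mat dst = new Mat();
        Core.flip(src, dst, 0);  // x-axis: [3 4; 1 2]
        Core.flip(src, dst, 1);  // y-axis: [2 1; 4 3]
        Core.flip(src, dst, -1); // both:   [4 3; 2 1]
        System.out.println(dst.dump()); // prints the last result
    }
}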

Example 3 with Mat

Use of org.opencv.core.Mat in project FaceRecognitionApp by Lauszus.

In class FaceRecognitionAppActivity, method onCreate:

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.activity_face_recognition_app);
    mToolbar = (Toolbar) findViewById(R.id.toolbar);
    // Sets the Toolbar to act as the ActionBar for this Activity window
    setSupportActionBar(mToolbar);
    DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
    ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(this, drawer, mToolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
    drawer.addDrawerListener(toggle);
    toggle.syncState();
    final RadioButton mRadioButtonEigenfaces = (RadioButton) findViewById(R.id.eigenfaces);
    final RadioButton mRadioButtonFisherfaces = (RadioButton) findViewById(R.id.fisherfaces);
    mRadioButtonEigenfaces.setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            useEigenfaces = true;
            if (!trainFaces()) {
                // Set variable back
                useEigenfaces = false;
                showToast("Still training...", Toast.LENGTH_SHORT);
                mRadioButtonEigenfaces.setChecked(useEigenfaces);
                mRadioButtonFisherfaces.setChecked(!useEigenfaces);
            }
        }
    });
    mRadioButtonFisherfaces.setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            useEigenfaces = false;
            if (!trainFaces()) {
                // Set variable back
                useEigenfaces = true;
                showToast("Still training...", Toast.LENGTH_SHORT);
                mRadioButtonEigenfaces.setChecked(useEigenfaces);
                mRadioButtonFisherfaces.setChecked(!useEigenfaces);
            }
        }
    });
    // Set radio button based on value stored in shared preferences
    prefs = PreferenceManager.getDefaultSharedPreferences(this);
    useEigenfaces = prefs.getBoolean("useEigenfaces", false);
    mRadioButtonEigenfaces.setChecked(useEigenfaces);
    mRadioButtonFisherfaces.setChecked(!useEigenfaces);
    // Used to store ArrayLists in the shared preferences
    tinydb = new TinyDB(this);
    mThresholdFace = (SeekBarArrows) findViewById(R.id.threshold_face);
    mThresholdFace.setOnSeekBarArrowsChangeListener(new SeekBarArrows.OnSeekBarArrowsChangeListener() {

        @Override
        public void onProgressChanged(float progress) {
            Log.i(TAG, "Face threshold: " + mThresholdFace.progressToString(progress));
            faceThreshold = progress;
        }
    });
    // Get initial value
    faceThreshold = mThresholdFace.getProgress();
    mThresholdDistance = (SeekBarArrows) findViewById(R.id.threshold_distance);
    mThresholdDistance.setOnSeekBarArrowsChangeListener(new SeekBarArrows.OnSeekBarArrowsChangeListener() {

        @Override
        public void onProgressChanged(float progress) {
            Log.i(TAG, "Distance threshold: " + mThresholdDistance.progressToString(progress));
            distanceThreshold = progress;
        }
    });
    // Get initial value
    distanceThreshold = mThresholdDistance.getProgress();
    mMaximumImages = (SeekBarArrows) findViewById(R.id.maximum_images);
    mMaximumImages.setOnSeekBarArrowsChangeListener(new SeekBarArrows.OnSeekBarArrowsChangeListener() {

        @Override
        public void onProgressChanged(float progress) {
            Log.i(TAG, "Maximum number of images: " + mMaximumImages.progressToString(progress));
            maximumImages = (int) progress;
            if (images != null && images.size() > maximumImages) {
                int nrRemoveImages = images.size() - maximumImages;
                Log.i(TAG, "Removed " + nrRemoveImages + " images from the list");
                // Remove oldest images
                images.subList(0, nrRemoveImages).clear();
                // Remove oldest labels
                imagesLabels.subList(0, nrRemoveImages).clear();
                // Retrain faces
                trainFaces();
            }
        }
    });
    // Get initial value
    maximumImages = (int) mMaximumImages.getProgress();
    findViewById(R.id.clear_button).setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            Log.i(TAG, "Cleared training set");
            // Clear both lists so a fresh training set can be built
            images.clear();
            imagesLabels.clear();
            showToast("Training set cleared", Toast.LENGTH_SHORT);
        }
    });
    findViewById(R.id.take_picture_button).setOnClickListener(new View.OnClickListener() {

        NativeMethods.MeasureDistTask mMeasureDistTask;

        @Override
        public void onClick(View v) {
            if (mMeasureDistTask != null && mMeasureDistTask.getStatus() != AsyncTask.Status.FINISHED) {
                Log.i(TAG, "mMeasureDistTask is still running");
                showToast("Still processing old image...", Toast.LENGTH_SHORT);
                return;
            }
            if (mTrainFacesTask != null && mTrainFacesTask.getStatus() != AsyncTask.Status.FINISHED) {
                Log.i(TAG, "mTrainFacesTask is still running");
                showToast("Still training...", Toast.LENGTH_SHORT);
                return;
            }
            Log.i(TAG, "Gray height: " + mGray.height() + " Width: " + mGray.width() + " total: " + mGray.total());
            if (mGray.total() == 0)
                return;
            // Scale image in order to decrease computation time
            Size imageSize = new Size(200, 200.0f / ((float) mGray.width() / (float) mGray.height()));
            Imgproc.resize(mGray, mGray, imageSize);
            Log.i(TAG, "Small gray height: " + mGray.height() + " Width: " + mGray.width() + " total: " + mGray.total());
            // SaveImage(mGray);
            // Create column vector
            Mat image = mGray.reshape(0, (int) mGray.total());
            Log.i(TAG, "Vector height: " + image.height() + " Width: " + image.width() + " total: " + image.total());
            // Add current image to the array
            images.add(image);
            if (images.size() > maximumImages) {
                // Remove first image
                images.remove(0);
                // Remove first label
                imagesLabels.remove(0);
                Log.i(TAG, "The number of images is limited to: " + images.size());
            }
            // Calculate normalized Euclidean distance
            mMeasureDistTask = new NativeMethods.MeasureDistTask(useEigenfaces, measureDistTaskCallback);
            mMeasureDistTask.execute(image);
            showLabelsDialog();
        }
    });
    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_java_surface_view);
    mOpenCvCameraView.setCameraIndex(prefs.getInt("mCameraIndex", CameraBridgeViewBase.CAMERA_ID_FRONT));
    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
    mOpenCvCameraView.setCvCameraViewListener(this);
}
Also used: Mat (org.opencv.core.Mat), Size (org.opencv.core.Size), ActionBarDrawerToggle (android.support.v7.app.ActionBarDrawerToggle), RadioButton (android.widget.RadioButton), SurfaceView (android.view.SurfaceView), View (android.view.View), AdapterView (android.widget.AdapterView), TextView (android.widget.TextView), ListView (android.widget.ListView), DrawerLayout (android.support.v4.widget.DrawerLayout)
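
The most subtle step in the take-picture handler is mGray.reshape(0, (int) mGray.total()), which turns the resized grayscale frame into the single-column vector the native Eigenfaces/Fisherfaces code consumes. A short sketch of what that call does, assuming the OpenCV native library is loaded (a channel argument of 0 means "keep the current channel count", and reshape only rewrites the Mat header, copying no pixels):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class ReshapeSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat gray = Mat.zeros(150, 200, CvType.CV_8UC1); // example 200x150 frame
        // 0 keeps the channel count; the row count becomes total() = 30000
        Mat column = gray.reshape(0, (int) gray.total());
        System.out.println("rows=" + column.rows() + " cols=" + column.cols()); // rows=30000 cols=1
    }
}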

Example 4 with Mat

Use of org.opencv.core.Mat in project FaceRecognitionApp by Lauszus.

In class JavaCameraView, method initializeCamera:

protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;
        if (mCameraIndex == CAMERA_ID_ANY || isEmulator()) {
            // Just open any camera on emulators
            Log.d(TAG, "Trying to open camera with old open()");
            try {
                mCamera = Camera.open();
            } catch (Exception e) {
                Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
            }
            if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                boolean connected = false;
                for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                    Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
                    try {
                        mCamera = Camera.open(camIdx);
                        connected = true;
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
                    }
                    if (connected)
                        break;
                }
            }
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                int localCameraIndex = mCameraIndex;
                if (mCameraIndex == CAMERA_ID_BACK) {
                    Log.i(TAG, "Trying to open back camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                } else if (mCameraIndex == CAMERA_ID_FRONT) {
                    Log.i(TAG, "Trying to open front camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                }
                if (localCameraIndex == CAMERA_ID_BACK) {
                    Log.e(TAG, "Back camera not found!");
                } else if (localCameraIndex == CAMERA_ID_FRONT) {
                    Log.e(TAG, "Front camera not found!");
                } else {
                    Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
                    try {
                        mCamera = Camera.open(localCameraIndex);
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
                    }
                }
            }
        }
        if (mCamera == null)
            return false;
        /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            Log.d(TAG, "getSupportedPreviewSizes()");
            List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
            if (sizes != null) {
                /* Select the size that fits surface considering maximum size allowed */
                Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
                params.setPreviewFormat(ImageFormat.NV21);
                // See: http://stackoverflow.com/a/27233595/2175837
                if (isEmulator()) // Check if we are using the Android emulator
                    params.setPreviewFormat(ImageFormat.YV12);
                Log.d(TAG, "Set preview size to " + Integer.valueOf((int) frameSize.width) + "x" + Integer.valueOf((int) frameSize.height));
                params.setPreviewSize((int) frameSize.width, (int) frameSize.height);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
                    params.setRecordingHint(true);
                List<String> FocusModes = params.getSupportedFocusModes();
                if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }
                mCamera.setParameters(params);
                params = mCamera.getParameters();
                previewFormat = params.getPreviewFormat();
                mFrameWidth = params.getPreviewSize().width;
                mFrameHeight = params.getPreviewSize().height;
                if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
                    mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
                else
                    mScale = 0;
                if (mFpsMeter != null) {
                    mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
                }
                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];
                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);
                mFrameChain = new Mat[2];
                mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
                mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
                AllocateCache();
                mCameraFrame = new JavaCameraFrame[2];
                mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
                mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                    mCamera.setPreviewTexture(mSurfaceTexture);
                } else
                    mCamera.setPreviewDisplay(null);
                /* Finally we are ready to start the preview */
                Log.d(TAG, "startPreview");
                mCamera.startPreview();
            } else
                result = false;
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }
    return result;
}
Also used: Mat (org.opencv.core.Mat), Size (org.opencv.core.Size), SurfaceTexture (android.graphics.SurfaceTexture), Camera (android.hardware.Camera)
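
The callback-buffer size computed above follows directly from the preview pixel format: NV21 stores a full-resolution Y plane plus interleaved quarter-resolution VU samples, i.e. 12 bits per pixel, which is also why each Mat in mFrameChain is allocated with height + height/2 rows of single-channel bytes. A plain-Java sketch of the same arithmetic, with an example frame size and the NV21 bits-per-pixel value written out as a constant:

public class BufferSizeSketch {
    public static void main(String[] args) {
        int frameWidth = 640, frameHeight = 480; // example preview size
        // ImageFormat.getBitsPerPixel(ImageFormat.NV21) returns 12
        int bitsPerPixel = 12;
        int bufferSize = frameWidth * frameHeight * bitsPerPixel / 8;
        System.out.println(bufferSize); // 460800 = 640 * 480 * 3 / 2
        // Matching Mat allocation: (height + height/2) rows of CV_8UC1 columns
        System.out.println((frameHeight + frameHeight / 2) * frameWidth); // also 460800
    }
}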

Example 5 with Mat

Use of org.opencv.core.Mat in project FaceRecognitionApp by Lauszus.

In class TinyDB, method getListMat:

public ArrayList<Mat> getListMat(String key) {
    ArrayList<String> objStrings = getListString(key);
    ArrayList<Mat> objects = new ArrayList<Mat>();
    for (String jObjString : objStrings) {
        byte[] data = Base64.decode(jObjString, Base64.DEFAULT);
        Mat mat = new Mat(data.length, 1, CvType.CV_8U);
        mat.put(0, 0, data);
        objects.add(mat);
    }
    return objects;
}
Also used: Mat (org.opencv.core.Mat), ArrayList (java.util.ArrayList)
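
getListMat reverses a serialization in which each Mat was flattened to a CV_8U byte column and Base64-encoded into a string list. A hypothetical counterpart for the write side, sketched under the assumption that TinyDB exposes a putListString helper mirroring the getListString call above (the method name putListMat is illustrative, not confirmed by the source):

public void putListMat(String key, ArrayList<Mat> objects) {
    ArrayList<String> objStrings = new ArrayList<String>();
    for (Mat mat : objects) {
        // Assumes a continuous CV_8U Mat, matching what getListMat rebuilds
        byte[] data = new byte[(int) (mat.total() * mat.channels())];
        mat.get(0, 0, data);
        objStrings.add(Base64.encodeToString(data, Base64.DEFAULT));
    }
    putListString(key, objStrings); // assumed helper, symmetric with getListString
}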

Aggregations

Mat (org.opencv.core.Mat): 239
ArrayList (java.util.ArrayList): 34
Point (org.opencv.core.Point): 33
MatOfPoint (org.opencv.core.MatOfPoint): 27
MatOfKeyPoint (org.opencv.core.MatOfKeyPoint): 19
KeyPoint (org.opencv.core.KeyPoint): 18
Size (org.opencv.core.Size): 17
Rect (org.opencv.core.Rect): 15
Scalar (org.opencv.core.Scalar): 9
File (java.io.File): 7
BufferedImage (java.awt.image.BufferedImage): 5
FilterContext (de.serviceflow.frankenstein.plugin.api.FilterContext): 4
SegmentVideoFilter (de.serviceflow.frankenstein.plugin.api.SegmentVideoFilter): 4
DefaultFilterContext (de.serviceflow.frankenstein.vf.DefaultFilterContext): 4
VideoFilter (de.serviceflow.frankenstein.vf.VideoFilter): 4
DataBufferByte (java.awt.image.DataBufferByte): 4
FilterElement (de.serviceflow.frankenstein.vf.FilterElement): 3
IOException (java.io.IOException): 3
InvocationTargetException (java.lang.reflect.InvocationTargetException): 3
ImageNotFoundException (org.getopentest.exceptions.ImageNotFoundException): 3