Example 26 with Size

Use of org.opencv.core.Size in project Relic_Main by TeamOverdrive.

From the class LeviColorFilter, the method leviBlueFilter:

// BLUE FILTER
public void leviBlueFilter(Mat input, Mat mask) {
    List<Mat> channels = new ArrayList<>();
    // Convert the frame to Lab and blur it in place to suppress noise before thresholding.
    Imgproc.cvtColor(input, input, Imgproc.COLOR_RGB2Lab);
    Imgproc.GaussianBlur(input, input, new Size(3, 3), 0);
    // Split into the L, a and b planes and binary-threshold one chroma plane
    // (channels.get(1), the a channel) into the output mask.
    Core.split(input, channels);
    Imgproc.threshold(channels.get(1), mask, 145, 255, Imgproc.THRESH_BINARY);
    // Release the split planes; the caller owns input and mask.
    for (int i = 0; i < channels.size(); i++) {
        channels.get(i).release();
    }
}
Also used: Mat (org.opencv.core.Mat), Size (org.opencv.core.Size), ArrayList (java.util.ArrayList)
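
A minimal usage sketch for the filter above (a hedged illustration, not code from the project): the direct call to leviBlueFilter, the BLUE preset, and the input file name are assumptions, and it presumes the OpenCV native library is already loaded.

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import com.disnodeteam.dogecv.filters.LeviColorFilter;

public class BlueFilterSketch {
    public static void main(String[] args) {
        // Hypothetical input image; imread() loads it as BGR, and the filter
        // starts with COLOR_RGB2Lab, so convert to RGB first.
        Mat frame = Imgcodecs.imread("frame.png");
        Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2RGB);

        Mat mask = new Mat();
        // Constructor form taken from Example 28 below; the BLUE preset and the
        // public visibility of leviBlueFilter are assumptions.
        LeviColorFilter filter = new LeviColorFilter(LeviColorFilter.ColorPreset.BLUE);
        filter.leviBlueFilter(frame, mask);

        // The mask is a single-channel binary image; pull blob outlines from it.
        List<MatOfPoint> contours = new ArrayList<>();
        Imgproc.findContours(mask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        System.out.println("Filtered regions found: " + contours.size());
    }
}

Because the method converts and blurs input in place, callers that still need the original frame should pass a copy (input.clone()).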

Example 27 with Size

Use of org.opencv.core.Size in project Relic_Main by TeamOverdrive.

From the class JavaCameraView, the method initializeCamera:

protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;
        if (mCameraIndex == CAMERA_ID_ANY) {
            Log.d(TAG, "Trying to open camera with old open()");
            try {
                mCamera = Camera.open();
            } catch (Exception e) {
                Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
            }
            if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                boolean connected = false;
                for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                    Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
                    try {
                        mCamera = Camera.open(camIdx);
                        connected = true;
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
                    }
                    if (connected)
                        break;
                }
            }
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                int localCameraIndex = mCameraIndex;
                if (mCameraIndex == CAMERA_ID_BACK) {
                    Log.i(TAG, "Trying to open back camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                } else if (mCameraIndex == CAMERA_ID_FRONT) {
                    Log.i(TAG, "Trying to open front camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                }
                if (localCameraIndex == CAMERA_ID_BACK) {
                    Log.e(TAG, "Back camera not found!");
                } else if (localCameraIndex == CAMERA_ID_FRONT) {
                    Log.e(TAG, "Front camera not found!");
                } else {
                    Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
                    try {
                        mCamera = Camera.open(localCameraIndex);
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
                    }
                }
            }
        }
        if (mCamera == null)
            return false;
        /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            Log.d(TAG, "getSupportedPreviewSizes()");
            List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
            if (sizes != null) {
                /* Select the size that fits surface considering maximum size allowed */
                Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
                params.setPreviewFormat(ImageFormat.NV21);
                Log.d(TAG, "Set preview size to " + Integer.valueOf((int) frameSize.width) + "x" + Integer.valueOf((int) frameSize.height));
                params.setPreviewSize((int) frameSize.width, (int) frameSize.height);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
                    params.setRecordingHint(true);
                List<String> focusModes = params.getSupportedFocusModes();
                if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }
                mCamera.setParameters(params);
                params = mCamera.getParameters();
                mFrameWidth = params.getPreviewSize().width;
                mFrameHeight = params.getPreviewSize().height;
                if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
                    mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
                else
                    mScale = 0;
                if (mFpsMeter != null) {
                    mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
                }
                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];
                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);
                mFrameChain = new Mat[2];
                mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
                mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
                AllocateCache();
                mCameraFrame = new JavaCameraFrame[2];
                mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
                mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                    mCamera.setPreviewTexture(mSurfaceTexture);
                } else
                    mCamera.setPreviewDisplay(null);
                /* Finally we are ready to start the preview */
                Log.d(TAG, "startPreview");
                mCamera.startPreview();
            } else
                result = false;
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }
    return result;
}
Also used: Mat (org.opencv.core.Mat), Size (org.opencv.core.Size), SurfaceTexture (android.graphics.SurfaceTexture), Camera (android.hardware.Camera)
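
The preview size above is chosen by calculateCameraFrameSize, which is inherited from CameraBridgeViewBase and not shown here. As a rough sketch of that kind of selection (the helper name and the omission of the internal max-size caps are assumptions), it amounts to picking the largest supported size that still fits the surface:

import java.util.List;

import org.opencv.core.Size;

public class FrameSizeSketch {
    // Simplified stand-in for CameraBridgeViewBase.calculateCameraFrameSize():
    // pick the largest supported preview size that still fits the requested
    // surface. The real implementation also applies internal max-width and
    // max-height caps, which are omitted here.
    static Size pickPreviewSize(List<android.hardware.Camera.Size> supported,
                                int surfaceWidth, int surfaceHeight) {
        int bestWidth = 0;
        int bestHeight = 0;
        for (android.hardware.Camera.Size s : supported) {
            if (s.width <= surfaceWidth && s.height <= surfaceHeight
                    && s.width >= bestWidth && s.height >= bestHeight) {
                bestWidth = s.width;
                bestHeight = s.height;
            }
        }
        // org.opencv.core.Size stores width and height as doubles, which is why
        // initializeCamera() casts them back to int before configuring the camera.
        return new Size(bestWidth, bestHeight);
    }
}

The later buffer sizing (mFrameWidth * mFrameHeight * bitsPerPixel / 8) and the mFrameHeight + mFrameHeight / 2 row count in the frame Mats both follow from NV21 being a 12-bit-per-pixel YUV 4:2:0 format: the chroma data adds half an image height below the luma plane.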

Example 28 with Size

Use of org.opencv.core.Size in project Relic_Main by TeamOverdrive.

From the class RelicOpMode, the method init:

/*
 * Code to run ONCE when the driver hits INIT
 */
@Override
public void init() {
    telemetry.addData("Status", "Initialized");
    genericDetector = new GenericDetector();
    genericDetector.init(hardwareMap.appContext, CameraViewDisplay.getInstance());
    genericDetector.colorFilter = new LeviColorFilter(LeviColorFilter.ColorPreset.YELLOW);
    // genericDetector.colorFilter = new HSVColorFilter(new Scalar(30,200,200), new Scalar(15,50,50));
    genericDetector.debugContours = false;
    genericDetector.minArea = 700;
    genericDetector.perfectRatio = 1.8;
    genericDetector.stretch = true;
    genericDetector.stretchKernal = new Size(2, 50);
    genericDetector.enable();
}
Also used: LeviColorFilter (com.disnodeteam.dogecv.filters.LeviColorFilter), Size (org.opencv.core.Size), GenericDetector (com.disnodeteam.dogecv.detectors.GenericDetector)
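
The stretchKernal value is only configured here; how the detector uses it is not shown. One plausible reading, offered as a hedged sketch rather than GenericDetector's actual implementation, is a morphological dilation with a tall, narrow rectangular element so that short blobs are stretched vertically before scoring:

import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class StretchKernelSketch {
    // Sketch only: dilate a binary mask with a rectangular structuring element
    // shaped like the Size(2, 50) configured above. Whether GenericDetector
    // applies its stretchKernal this way is an assumption; only the Size value
    // comes from the example.
    static void stretchMask(Mat mask, Size stretchKernel) {
        Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, stretchKernel);
        Imgproc.dilate(mask, mask, kernel);
        kernel.release();
    }
}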

Aggregations

Size (org.opencv.core.Size): 28
Mat (org.opencv.core.Mat): 17
Scalar (org.opencv.core.Scalar): 9
ArrayList (java.util.ArrayList): 8
Rect (org.opencv.core.Rect): 7
Point (org.opencv.core.Point): 6
MatOfPoint (org.opencv.core.MatOfPoint): 4
SurfaceTexture (android.graphics.SurfaceTexture): 2
Camera (android.hardware.Camera): 2
Line (com.disnodeteam.dogecv.math.Line): 2
File (java.io.File): 2
MatOfPoint2f (org.opencv.core.MatOfPoint2f): 2
DrawerLayout (android.support.v4.widget.DrawerLayout): 1
ActionBarDrawerToggle (android.support.v7.app.ActionBarDrawerToggle): 1
SurfaceView (android.view.SurfaceView): 1
View (android.view.View): 1
AdapterView (android.widget.AdapterView): 1
ListView (android.widget.ListView): 1
RadioButton (android.widget.RadioButton): 1
TextView (android.widget.TextView): 1