Search in sources :

Example 1 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following example is taken from the class TestVisualDepthOps, method depthTo3D_with_rgb.

@Test
public void depthTo3D_with_rgb() {
    // Only two pixels carry a non-zero depth; all others should be skipped.
    GrayU16 depthImage = new GrayU16(width, height);
    depthImage.set(200, 80, 3400);
    depthImage.set(600, 420, 50);

    Planar<GrayU8> colorImage = new Planar<>(GrayU8.class, width, height, 3);
    GImageMiscOps.fillUniform(colorImage, rand, 0, 200);

    FastQueue<Point3D_F64> cloud = new FastQueue<>(Point3D_F64.class, true);
    FastQueueArray_I32 cloudColor = new FastQueueArray_I32(3);

    VisualDepthOps.depthTo3D(param, colorImage, depthImage, cloud, cloudColor);

    // Exactly the two valid depth pixels must have produced 3D points with colors.
    assertEquals(2, cloud.size());
    assertEquals(2, cloudColor.size());

    // Points come out in raster order and must match an independent computation.
    assertEquals(0, compute(200, 80, 3400).distance(cloud.get(0)), 1e-8);
    assertEquals(0, compute(600, 420, 50).distance(cloud.get(1)), 1e-8);

    // Each point's color must match the RGB image at its pixel.
    color(200, 80, colorImage, cloudColor.get(0));
    color(600, 420, colorImage, cloudColor.get(1));
}
Also used : Point3D_F64(georegression.struct.point.Point3D_F64) GrayU16(boofcv.struct.image.GrayU16) FastQueue(org.ddogleg.struct.FastQueue) Planar(boofcv.struct.image.Planar) GrayU8(boofcv.struct.image.GrayU8) FastQueueArray_I32(boofcv.struct.FastQueueArray_I32) Test(org.junit.Test)

Example 2 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following example is taken from the class TestHistogramFeatureOps, method histogram_U16.

@Test
public void histogram_U16() {
    GrayU16 input = new GrayU16(width, height);
    // Four pixels at 40000 in one column, two pixels saturated at the U16 maximum.
    for (int y = 3; y <= 6; y++) {
        input.set(2, y, 40000);
    }
    input.set(5, 6, 65535);
    input.set(5, 7, 65535);

    // Pre-fill with garbage so the test catches histograms that are not reset.
    TupleDesc_F64 found = new TupleDesc_F64(256);
    randomFill(found);

    double[] expected = new double[256];
    expected[0] = width * height - 6;  // all untouched (zero) pixels fall into bin 0
    expected[156] = 4.0;               // bin for value 40000 with range 0..65535 over 256 bins
    expected[255] = 2.0;               // the top bin holds the two 65535 pixels

    HistogramFeatureOps.histogram(input, 65535, found);
    checkEquals(found, expected);
}
Also used : TupleDesc_F64(boofcv.struct.feature.TupleDesc_F64) GrayU16(boofcv.struct.image.GrayU16) Test(org.junit.Test)

Example 3 with GrayU16

use of boofcv.struct.image.GrayU16 in project MAVSlam by ecmnet.

The following example is taken from the class StreamRealSenseTest, method start.

@Override
public void start(Stage primaryStage) {
    // JavaFX entry point: builds the window, opens the RealSense depth camera,
    // configures a KLT-based visual-odometry pipeline, and registers a listener
    // that overlays tracked features and pose/depth readouts on the RGB stream.
    primaryStage.setTitle("BoofCV RealSense Demo");
    FlowPane root = new FlowPane();
    root.getChildren().add(ivrgb);
    // Track the cursor so the depth readout can follow the mouse position.
    ivrgb.setOnMouseMoved(event -> {
        MouseEvent ev = event;
        mouse_x = (int) ev.getX();
        mouse_y = (int) ev.getY();
    });
    // RealSenseInfo info = new RealSenseInfo(320,240, RealSenseInfo.MODE_RGB);
    RealSenseInfo info = new RealSenseInfo(640, 480, RealSenseInfo.MODE_RGB);
    try {
        realsense = new StreamRealSenseVisDepth(0, info);
    } catch (Exception e) {
        // Camera/driver failure: report and bail out before showing the stage.
        System.out.println("REALSENSE:" + e.getMessage());
        return;
    }
    // Start the depth readout at the image center until the mouse moves.
    mouse_x = info.width / 2;
    mouse_y = info.height / 2;
    primaryStage.setScene(new Scene(root, info.width, info.height));
    primaryStage.show();
    // KLT tracker over a 4-level pyramid, up to 900 detected features.
    PkltConfig configKlt = new PkltConfig();
    configKlt.pyramidScaling = new int[] { 1, 2, 4, 8 };
    configKlt.templateRadius = 3;
    PointTrackerTwoPass<GrayU8> tracker = FactoryPointTrackerTwoPass.klt(configKlt, new ConfigGeneralDetector(900, 2, 1), GrayU8.class, GrayS16.class);
    // 1e-3 scale: presumably converts raw depth units (mm) to meters — TODO confirm.
    DepthSparse3D<GrayU16> sparseDepth = new DepthSparse3D.I<GrayU16>(1e-3);
    // declares the algorithm
    MAVDepthVisualOdometry<GrayU8, GrayU16> visualOdometry = FactoryMAVOdometry.depthDepthPnP(1.2, 120, 2, 200, 50, true, sparseDepth, tracker, GrayU8.class, GrayU16.class);
    visualOdometry.setCalibration(realsense.getIntrinsics(), new DoNothingPixelTransform_F32());
    output = new BufferedImage(info.width, info.height, BufferedImage.TYPE_3BYTE_BGR);
    wirgb = new WritableImage(info.width, info.height);
    ivrgb.setImage(wirgb);
    realsense.registerListener(new Listener() {

        // Most recently published frames-per-second estimate.
        int fps;

        // Averaged depth (meters, presumably — see 1e-3 scale above) at the mouse.
        float mouse_depth;

        // Running depth sum and sample count for the 250 ms averaging window.
        float md;

        int mc;

        // Frame count and summed instantaneous fps for the same window.
        int mf = 0;

        int fpm;

        @Override
        public void process(Planar<GrayU8> rgb, GrayU16 depth, long timeRgb, long timeDepth) {
            // Every 250 ms: publish averaged fps and mouse depth, then reset accumulators.
            if ((System.currentTimeMillis() - tms) > 250) {
                tms = System.currentTimeMillis();
                if (mf > 0)
                    fps = fpm / mf;
                if (mc > 0)
                    mouse_depth = md / mc;
                mc = 0;
                md = 0;
                mf = 0;
                fpm = 0;
            }
            mf++;
            // NOTE(review): despite its name, oldTimeDepth holds the previous RGB
            // timestamp here, so this is the inter-RGB-frame rate (+0.5f rounds).
            fpm += (int) (1f / ((timeRgb - oldTimeDepth) / 1000f) + 0.5f);
            oldTimeDepth = timeRgb;
            // Feed the odometry one gray band plus depth; on failure, publish the
            // last position, reset the estimator, and skip rendering this frame.
            if (!visualOdometry.process(rgb.getBand(0), depth)) {
                bus1.writeObject(position);
                System.out.println("VO Failed!");
                visualOdometry.reset();
                return;
            }
            Se3_F64 leftToWorld = visualOdometry.getCameraToWorld();
            Vector3D_F64 T = leftToWorld.getT();
            AccessPointTracks3D points = (AccessPointTracks3D) visualOdometry;
            ConvertBufferedImage.convertTo(rgb, output, false);
            Graphics c = output.getGraphics();
            // count: inlier points closer than 500 depth units; total: all valid-depth inliers.
            int count = 0;
            float total = 0;
            // (dx, dy): inlier track nearest the mouse; dist is its pixel distance.
            int dx = 0, dy = 0;
            int dist = 999;
            int x, y;
            int index = -1;
            for (int i = 0; i < points.getAllTracks().size(); i++) {
                if (points.isInlier(i)) {
                    c.setColor(Color.BLUE);
                    x = (int) points.getAllTracks().get(i).x;
                    y = (int) points.getAllTracks().get(i).y;
                    int d = depth.get(x, y);
                    if (d > 0) {
                        int di = (int) Math.sqrt((x - mouse_x) * (x - mouse_x) + (y - mouse_y) * (y - mouse_y));
                        if (di < dist) {
                            index = i;
                            dx = x;
                            dy = y;
                            dist = di;
                        }
                        total++;
                        // Near points are drawn red and counted for the proximity warning.
                        if (d < 500) {
                            c.setColor(Color.RED);
                            count++;
                        }
                        c.drawRect(x, y, 1, 1);
                    }
                }
            }
            // NOTE(review): depth was already dereferenced above (depth.get(x, y)),
            // so this null check cannot protect anything — confirm whether depth
            // can ever be null and, if so, hoist the check before the loop.
            if (depth != null) {
                if (index > -1)
                    System.out.println(visualOdometry.getTrackLocation(index));
                mc++;
                // Accumulate depth at the nearest track (dx, dy), scaled by 1/1000.
                md = md + depth.get(dx, dy) / 1000f;
                c.setColor(Color.GREEN);
                c.drawOval(dx - 3, dy - 3, 6, 6);
            }
            c.setColor(Color.CYAN);
            c.drawString("Fps:" + fps, 10, 20);
            c.drawString(String.format("Loc: %4.2f %4.2f %4.2f", T.x, T.y, T.z), 10, info.height - 10);
            c.drawString(String.format("Depth: %3.2f", mouse_depth), info.width - 85, info.height - 10);
            // Publish the estimated camera translation stamped with the RGB time.
            position.x = T.x;
            position.y = T.y;
            position.z = T.z;
            position.tms = timeRgb;
            bus1.writeObject(position);
            // total is a float, so 0/0 yields NaN here and the comparison is
            // simply false when no valid-depth inliers were found this frame.
            if ((count / total) > 0.6f) {
                c.setColor(Color.RED);
                c.drawString("WARNING!", info.width - 70, 20);
            }
            c.dispose();
            // Image conversion must happen on the JavaFX application thread.
            Platform.runLater(() -> {
                SwingFXUtils.toFXImage(output, wirgb);
            });
        }
    }).start();
}
Also used : Listener(com.comino.realsense.boofcv.StreamRealSenseVisDepth.Listener) PkltConfig(boofcv.alg.tracker.klt.PkltConfig) ConfigGeneralDetector(boofcv.abst.feature.detect.interest.ConfigGeneralDetector) AccessPointTracks3D(boofcv.abst.sfm.AccessPointTracks3D) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) FlowPane(javafx.scene.layout.FlowPane) Planar(boofcv.struct.image.Planar) GrayU8(boofcv.struct.image.GrayU8) MouseEvent(javafx.scene.input.MouseEvent) GrayU16(boofcv.struct.image.GrayU16) Scene(javafx.scene.Scene) Graphics(java.awt.Graphics) WritableImage(javafx.scene.image.WritableImage) DoNothingPixelTransform_F32(boofcv.alg.distort.DoNothingPixelTransform_F32) Vector3D_F64(georegression.struct.point.Vector3D_F64) Se3_F64(georegression.struct.se.Se3_F64)

Example 4 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following example is taken from the class TestFilterImageReflection, method basicTest2.

/**
 * Verifies that the wrapped static method is actually invoked and that the
 * border sizes and image types supplied at construction are reported back.
 */
@Test
public void basicTest2() {
    FilterImageReflection filter =
            new FilterImageReflection(getClass(), "methodDummy", 2, 3, GrayU8.class, GrayU16.class);

    GrayU8 input = new GrayU8(5, 5);
    GrayU16 found = new GrayU16(5, 5);
    filter.process(input, found);

    // The borders handed to the constructor must be echoed back unchanged.
    assertEquals(2, filter.getHorizontalBorder());
    assertEquals(3, filter.getVerticalBorder());

    // Declared input/output image classes must round-trip through the filter.
    assertTrue(GrayU8.class == filter.getInputType().getImageClass());
    assertTrue(GrayU16.class == filter.getOutputType().getImageClass());

    // The dummy method writes into the output, proving it was invoked.
    assertEquals(1, found.get(0, 0));
}
Also used : GrayU16(boofcv.struct.image.GrayU16) GrayU8(boofcv.struct.image.GrayU8) Test(org.junit.Test)

Example 5 with GrayU16

use of boofcv.struct.image.GrayU16 in project BoofCV by lessthanoptimal.

The following example is taken from the class TestFilterImageReflection, method basicTest3.

/**
 * Same check as basicTest2 but against a dummy whose signature includes the
 * parameter specifying how many times it is invoked.
 */
@Test
public void basicTest3() {
    FilterImageReflection filter =
            new FilterImageReflection(getClass(), "methodDummy2", 2, 3, GrayU8.class, GrayU16.class);

    GrayU8 input = new GrayU8(5, 5);
    GrayU16 found = new GrayU16(5, 5);
    filter.process(input, found);

    // The borders handed to the constructor must be echoed back unchanged.
    assertEquals(2, filter.getHorizontalBorder());
    assertEquals(3, filter.getVerticalBorder());

    // Declared input/output image classes must round-trip through the filter.
    assertTrue(GrayU8.class == filter.getInputType().getImageClass());
    assertTrue(GrayU16.class == filter.getOutputType().getImageClass());

    // The dummy method writes into the output, proving it was invoked.
    assertEquals(1, found.get(0, 0));
}
Also used : GrayU16(boofcv.struct.image.GrayU16) GrayU8(boofcv.struct.image.GrayU8) Test(org.junit.Test)

Aggregations

GrayU16 (boofcv.struct.image.GrayU16)16 GrayU8 (boofcv.struct.image.GrayU8)12 Test (org.junit.Test)8 Point3D_F64 (georegression.struct.point.Point3D_F64)6 FastQueue (org.ddogleg.struct.FastQueue)5 ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage)4 FastQueueArray_I32 (boofcv.struct.FastQueueArray_I32)4 BufferedImage (java.awt.image.BufferedImage)4 ConfigGeneralDetector (boofcv.abst.feature.detect.interest.ConfigGeneralDetector)3 PkltConfig (boofcv.alg.tracker.klt.PkltConfig)3 CameraPinholeRadial (boofcv.struct.calib.CameraPinholeRadial)3 Planar (boofcv.struct.image.Planar)3 File (java.io.File)3 PointCloudViewer (boofcv.gui.d3.PointCloudViewer)2 VisualDepthParameters (boofcv.struct.calib.VisualDepthParameters)2 Vector3D_F64 (georegression.struct.point.Vector3D_F64)2 Se3_F64 (georegression.struct.se.Se3_F64)2 DMatrixRMaj (org.ejml.data.DMatrixRMaj)2 AccessPointTracks3D (boofcv.abst.sfm.AccessPointTracks3D)1 DoNothingPixelTransform_F32 (boofcv.alg.distort.DoNothingPixelTransform_F32)1