Search in sources :

Example 46 with Planar

use of boofcv.struct.image.Planar in project BoofCV by lessthanoptimal.

In the class TestDescribePointSurfPlanar, the method failNumBandMissMatch:

@Test(expected = IllegalArgumentException.class)
public void failNumBandMissMatch() {
    // The image carries 3 bands but the describer is configured for only 2,
    // so setImage() is expected to reject the mismatch with IllegalArgumentException.
    Planar<GrayF32> multiBand = new Planar<>(GrayF32.class, width, height, 3);
    GImageMiscOps.addUniform(multiBand, rand, 0, 200);

    DescribePointSurf<GrayF32> surf = new DescribePointSurf<>(GrayF32.class);
    DescribePointSurfPlanar<GrayF32> describer = new DescribePointSurfPlanar<>(surf, 2);

    GrayF32 grayAverage = ConvertImage.average(multiBand, null);
    describer.setImage(grayAverage, multiBand);
}
Also used : GrayF32(boofcv.struct.image.GrayF32) Planar(boofcv.struct.image.Planar) Test(org.junit.Test)

Example 47 with Planar

use of boofcv.struct.image.Planar in project BoofCV by lessthanoptimal.

In the class TestDetectDescribeSurfPlanar, the method basicTest:

@Test
public void basicTest() {
    // Random 3-band color input
    Planar<GrayF32> colorImage = new Planar<>(GrayF32.class, width, height, 3);
    GImageMiscOps.addUniform(colorImage, rand, 0, 200);

    // Assemble the combined detect+describe pipeline from its components
    DescribePointSurf<GrayF32> bandDescriber = new DescribePointSurf<>(GrayF32.class);
    DescribePointSurfPlanar<GrayF32> planarDescriber = new DescribePointSurfPlanar<>(bandDescriber, 3);
    FastHessianFeatureDetector<GrayF32> hessian = FactoryInterestPointAlgs.fastHessian(null);
    OrientationIntegral<GrayF32> orientationAlg = FactoryOrientationAlgs.sliding_ii(null, GrayF32.class);
    DetectDescribeSurfPlanar<GrayF32> combined = new DetectDescribeSurfPlanar<>(hessian, orientationAlg, planarDescriber);

    GrayF32 grayAverage = ConvertImage.average(colorImage, null);

    // The combined algorithm should report exactly as many features as the
    // raw detector finds on the gray image by itself
    hessian.detect(grayAverage);
    int expected = hessian.getFoundPoints().size();

    combined.detect(grayAverage, colorImage);
    assertEquals(expected, combined.getNumberOfFeatures());
// could improve this unit test by checking scale and orientation
}
Also used : GrayF32(boofcv.struct.image.GrayF32) DescribePointSurf(boofcv.alg.feature.describe.DescribePointSurf) DescribePointSurfPlanar(boofcv.alg.feature.describe.DescribePointSurfPlanar) Planar(boofcv.struct.image.Planar) DescribePointSurfPlanar(boofcv.alg.feature.describe.DescribePointSurfPlanar) Test(org.junit.Test)

Example 48 with Planar

use of boofcv.struct.image.Planar in project MAVSlam by ecmnet.

In the class StreamRealSenseTest, the method start:

@Override
public void start(Stage primaryStage) {
    // JavaFX entry point: builds the window, opens the RealSense stream,
    // configures KLT-based depth visual odometry, and registers a per-frame
    // listener that runs VO and paints a diagnostic overlay onto the image.
    primaryStage.setTitle("BoofCV RealSense Demo");
    FlowPane root = new FlowPane();
    root.getChildren().add(ivrgb);
    // Track the cursor so the listener below can report depth near the mouse
    ivrgb.setOnMouseMoved(event -> {
        MouseEvent ev = event;
        mouse_x = (int) ev.getX();
        mouse_y = (int) ev.getY();
    });
    // RealSenseInfo info = new RealSenseInfo(320,240, RealSenseInfo.MODE_RGB);
    RealSenseInfo info = new RealSenseInfo(640, 480, RealSenseInfo.MODE_RGB);
    try {
        realsense = new StreamRealSenseVisDepth(0, info);
    } catch (Exception e) {
        // Camera unavailable: report and abort startup (window is never shown)
        System.out.println("REALSENSE:" + e.getMessage());
        return;
    }
    // Start the depth readout at the image center
    mouse_x = info.width / 2;
    mouse_y = info.height / 2;
    primaryStage.setScene(new Scene(root, info.width, info.height));
    primaryStage.show();
    // KLT tracker: 4-level pyramid, 3-pixel template radius, up to 900 corners
    PkltConfig configKlt = new PkltConfig();
    configKlt.pyramidScaling = new int[] { 1, 2, 4, 8 };
    configKlt.templateRadius = 3;
    PointTrackerTwoPass<GrayU8> tracker = FactoryPointTrackerTwoPass.klt(configKlt, new ConfigGeneralDetector(900, 2, 1), GrayU8.class, GrayS16.class);
    // 1e-3 scale converts the sensor's millimeter depth values to meters
    DepthSparse3D<GrayU16> sparseDepth = new DepthSparse3D.I<GrayU16>(1e-3);
    // declares the algorithm
    MAVDepthVisualOdometry<GrayU8, GrayU16> visualOdometry = FactoryMAVOdometry.depthDepthPnP(1.2, 120, 2, 200, 50, true, sparseDepth, tracker, GrayU8.class, GrayU16.class);
    visualOdometry.setCalibration(realsense.getIntrinsics(), new DoNothingPixelTransform_F32());
    // Off-screen buffer drawn with AWT, then converted to a JavaFX image
    output = new BufferedImage(info.width, info.height, BufferedImage.TYPE_3BYTE_BGR);
    wirgb = new WritableImage(info.width, info.height);
    ivrgb.setImage(wirgb);
    realsense.registerListener(new Listener() {

        // Frames-per-second value shown in the overlay (updated every 250 ms)
        int fps;

        // Smoothed depth (meters) at the track nearest the mouse cursor
        float mouse_depth;

        // Accumulated depth samples since the last 250 ms window reset
        float md;

        // Number of depth samples accumulated in the current window
        int mc;

        // Number of frames seen in the current window
        int mf = 0;

        // Sum of instantaneous fps estimates in the current window
        int fpm;

        @Override
        public void process(Planar<GrayU8> rgb, GrayU16 depth, long timeRgb, long timeDepth) {
            // Every 250 ms fold the accumulated counters into the displayed
            // averages and reset the window
            if ((System.currentTimeMillis() - tms) > 250) {
                tms = System.currentTimeMillis();
                if (mf > 0)
                    fps = fpm / mf;
                if (mc > 0)
                    mouse_depth = md / mc;
                mc = 0;
                md = 0;
                mf = 0;
                fpm = 0;
            }
            mf++;
            // Instantaneous fps from the RGB timestamp delta, rounded to nearest int
            // NOTE(review): mixes timeRgb with oldTimeDepth — presumably both are on
            // the same clock; confirm against StreamRealSenseVisDepth
            fpm += (int) (1f / ((timeRgb - oldTimeDepth) / 1000f) + 0.5f);
            oldTimeDepth = timeRgb;
            // Run visual odometry on band 0 of the RGB image plus the depth map;
            // on failure publish the last position, reset VO, and skip drawing
            if (!visualOdometry.process(rgb.getBand(0), depth)) {
                bus1.writeObject(position);
                System.out.println("VO Failed!");
                visualOdometry.reset();
                return;
            }
            Se3_F64 leftToWorld = visualOdometry.getCameraToWorld();
            Vector3D_F64 T = leftToWorld.getT();
            AccessPointTracks3D points = (AccessPointTracks3D) visualOdometry;
            ConvertBufferedImage.convertTo(rgb, output, false);
            Graphics c = output.getGraphics();
            // count  = inlier tracks closer than 0.5 m (proximity warning)
            // total  = inlier tracks with valid depth
            // (dx,dy)= pixel of the inlier track nearest the mouse cursor
            int count = 0;
            float total = 0;
            int dx = 0, dy = 0;
            int dist = 999;
            int x, y;
            int index = -1;
            for (int i = 0; i < points.getAllTracks().size(); i++) {
                if (points.isInlier(i)) {
                    c.setColor(Color.BLUE);
                    x = (int) points.getAllTracks().get(i).x;
                    y = (int) points.getAllTracks().get(i).y;
                    int d = depth.get(x, y);
                    if (d > 0) {
                        // Squared-then-rooted pixel distance to the mouse cursor
                        int di = (int) Math.sqrt((x - mouse_x) * (x - mouse_x) + (y - mouse_y) * (y - mouse_y));
                        if (di < dist) {
                            index = i;
                            dx = x;
                            dy = y;
                            dist = di;
                        }
                        total++;
                        // Tracks nearer than 0.5 m are drawn red and counted
                        if (d < 500) {
                            c.setColor(Color.RED);
                            count++;
                        }
                        c.drawRect(x, y, 1, 1);
                    }
                }
            }
            // NOTE(review): depth is dereferenced above, so this null check can
            // never be false here; also when no inlier was found (dx,dy)=(0,0)
            // is sampled — verify that is intended
            if (depth != null) {
                if (index > -1)
                    System.out.println(visualOdometry.getTrackLocation(index));
                mc++;
                md = md + depth.get(dx, dy) / 1000f;
                c.setColor(Color.GREEN);
                c.drawOval(dx - 3, dy - 3, 6, 6);
            }
            // HUD text: fps, camera translation, and depth at the cursor
            c.setColor(Color.CYAN);
            c.drawString("Fps:" + fps, 10, 20);
            c.drawString(String.format("Loc: %4.2f %4.2f %4.2f", T.x, T.y, T.z), 10, info.height - 10);
            c.drawString(String.format("Depth: %3.2f", mouse_depth), info.width - 85, info.height - 10);
            // Publish the current position estimate on the bus
            position.x = T.x;
            position.y = T.y;
            position.z = T.z;
            position.tms = timeRgb;
            bus1.writeObject(position);
            // Warn when more than 60% of valid tracks are closer than 0.5 m
            // NOTE(review): total can be 0, making count/total NaN — the
            // comparison is then false, which happens to be safe
            if ((count / total) > 0.6f) {
                c.setColor(Color.RED);
                c.drawString("WARNING!", info.width - 70, 20);
            }
            c.dispose();
            // Image conversion must happen on the JavaFX application thread
            Platform.runLater(() -> {
                SwingFXUtils.toFXImage(output, wirgb);
            });
        }
    }).start();
}
Also used : Listener(com.comino.realsense.boofcv.StreamRealSenseVisDepth.Listener) PkltConfig(boofcv.alg.tracker.klt.PkltConfig) ConfigGeneralDetector(boofcv.abst.feature.detect.interest.ConfigGeneralDetector) AccessPointTracks3D(boofcv.abst.sfm.AccessPointTracks3D) BufferedImage(java.awt.image.BufferedImage) ConvertBufferedImage(boofcv.io.image.ConvertBufferedImage) FlowPane(javafx.scene.layout.FlowPane) Planar(boofcv.struct.image.Planar) GrayU8(boofcv.struct.image.GrayU8) MouseEvent(javafx.scene.input.MouseEvent) GrayU16(boofcv.struct.image.GrayU16) Scene(javafx.scene.Scene) Graphics(java.awt.Graphics) WritableImage(javafx.scene.image.WritableImage) DoNothingPixelTransform_F32(boofcv.alg.distort.DoNothingPixelTransform_F32) Vector3D_F64(georegression.struct.point.Vector3D_F64) Se3_F64(georegression.struct.se.Se3_F64)

Example 49 with Planar

use of boofcv.struct.image.Planar in project opennars by opennars.

In the class RasterHierachy, the method rasterizeImage:

/**
 * Builds a multi-resolution "raster hierarchy" view of the input image around the
 * current focus point, feeds per-block brightness values into NARS as Narsese
 * statements (on every {@code updaterate}-th call), and uses the per-block color
 * change between frames to vote on where to move the focus next.
 *
 * @param input source image; read through a 3-band {@code Planar<GrayU8>} conversion
 * @return the rasterized (block-averaged) image, same dimensions as the input
 */
public BufferedImage rasterizeImage(BufferedImage input) {
    voter = new HashMap<>();
    // vladimir
    // Only every `updaterate`-th frame feeds NARS and casts movement votes
    boolean putin = false;
    cnt--;
    if (cnt == 0) {
        putin = true;
        cnt = updaterate;
    }
    int red, green, blue;
    int redSum, greenSum, blueSum;
    int x, y, startX, startY;
    int newX, newY;
    int width = input.getWidth();
    int height = input.getHeight();
    int blockXSize = width / divisions;
    int blockYSize = height / divisions;
    Planar<GrayU8> image = ConvertBufferedImage.convertFromMulti(input, null, true, GrayU8.class);
    Planar<GrayU8> output = new Planar<>(GrayU8.class, width, height, 3);
    BufferedImage rasterizedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
    // Set the initial raster region
    int regionWidth = width;
    int regionHeight = height;
    newX = 0;
    newY = 0;
    startX = 0;
    startY = 0;
    // Each step halves (by scalingFactor) the region and block size, zooming
    // in toward the focus point
    for (int step = 1; step <= numberRasters; step++) {
        if (step > 1) {
            newX = startX + (regionWidth - regionWidth / scalingFactor) / scalingFactor;
            newY = startY + (regionHeight - regionHeight / scalingFactor) / scalingFactor;
            if (newX < 0) {
                newX = 0;
            }
            if (newY < 0) {
                newY = 0;
            }
            regionWidth = regionWidth / scalingFactor;
            regionHeight = regionHeight / scalingFactor;
            blockXSize = blockXSize / scalingFactor;
            blockYSize = blockYSize / scalingFactor;
            if (blockXSize < 1) {
                blockXSize = 1;
            }
            if (blockYSize < 1) {
                blockYSize = 1;
            }
        }
        // Set the starting point for the next step
        startX = focusX - ((regionWidth) / 2);
        startY = focusY - ((regionHeight) / 2);
        // Number of pixels per block
        int pixelCount = blockXSize * blockYSize;
        even = !even;
        int h = 0, j = 0;
        for (x = newX; x < ((step == 1 ? 0 : startX) + regionWidth); x += blockXSize) {
            h++;
            j = 0;
            for (y = newY; y < ((step == 1 ? 0 : startY) + regionHeight); y += blockYSize) {
                j++;
                // Average each color channel over the block (clipped to image bounds)
                redSum = 0;
                greenSum = 0;
                blueSum = 0;
                for (int pixelX = 0; (pixelX < blockXSize) && (x + pixelX < width); pixelX++) {
                    for (int pixelY = 0; (pixelY < blockYSize) && (y + pixelY < height); pixelY++) {
                        redSum += image.getBand(0).get(x + pixelX, y + pixelY);
                        greenSum += image.getBand(1).get(x + pixelX, y + pixelY);
                        blueSum += image.getBand(2).get(x + pixelX, y + pixelY);
                    }
                }
                red = redSum / pixelCount;
                green = greenSum / pixelCount;
                blue = blueSum / pixelCount;
                // Normalized [0,1] channel values for the change-detection vote.
                // BUG FIX: fgreen and fblue were previously computed from `red`,
                // so the vote below only ever measured red-channel change.
                float fred = ((float) red) / 255.0f;
                float fgreen = ((float) green) / 255.0f;
                float fblue = ((float) blue) / 255.0f;
                // manage move heuristic
                // maybe not needed
                int brightness = (red + green + blue) / 3;
                // Key uniquely identifies a (step, x, y) block across frames
                int key = step + 10 * x + 10000 * y;
                if (lastvalR.containsKey(key) && putin) {
                    // Vote strength = total color change since last frame, scaled by block area
                    double area = blockXSize * blockYSize;
                    double diff = Math.abs(fred - (lastvalR.get(key))) + Math.abs(fgreen - (lastvalG.get(key))) + Math.abs(fblue - (lastvalB.get(key)));
                    // / area;
                    double vote = diff * area;
                    // vote*=step;
                    voter.put(key, new Value(step, x + blockXSize / 2, y + blockYSize / 2, vote));
                }
                lastvalR.put(key, fred);
                lastvalG.put(key, fgreen);
                lastvalB.put(key, fblue);
                // approx 6 inputs per image
                float inputChance = 0.33f;
                if (putin && step == numberRasters) {
                    // only most finest raster
                    // input Narsese translation //String.valueOf(step)
                    String st = "<r_" + String.valueOf(h) + "_" + String.valueOf(j) + " --> RED>. :|: %" + String.valueOf(fred) + "%";
                    // String st="<(*,r"+ String.valueOf(step)+","+String.valueOf(h)+","+String.valueOf(j)+") --> RED>. :|: %"+String.valueOf(fred)+"%";
                    // Checkerboard sampling alternates with `even` so only a random
                    // subset of blocks is narrated each update
                    if (((h % 2 == 0 && j % 2 == 0 && even) || (h % 2 == 1 && j % 2 == 1 && !even)) && Math.random() < inputChance) {
                        // not every pixel needs to go in
                        nar.addInput(st);
                    }
                    // focal point:
                    if (x == 0 && y == 0) {
                        nar.addInput("<p_" + String.valueOf(this.focusX / 100) + " --> pointX>. :|:");
                        nar.addInput("<p_" + String.valueOf(this.focusY / 100) + " --> pointY>. :|:");
                    }
                    nar.cycles(10);
                }
                // Here we can generate NAL, since we know all of the required values.
                ImageMiscOps.fillRectangle(output.getBand(0), red, x, y, blockXSize, blockYSize);
                ImageMiscOps.fillRectangle(output.getBand(1), green, x, y, blockXSize, blockYSize);
                ImageMiscOps.fillRectangle(output.getBand(2), blue, x, y, blockXSize, blockYSize);
            }
        }
    }
    // search for maximum vote to move heuristic
    if (putin) {
        Value maxvalue = null;
        float threshold = 0.05f;
        for (Integer key : voter.keySet()) {
            Value value = voter.get(key);
            if (maxvalue == null || value.value > maxvalue.value) {
                if (value.value > threshold)
                    maxvalue = value;
            }
        }
        // Move the focus halfway toward the block with the strongest change
        if (maxvalue != null && maxvalue.x != 0 && maxvalue.y != 0) {
            this.setFocus((this.focusX + maxvalue.x) / 2, (this.focusY + maxvalue.y) / 2);
        // this.setFocus(maxvalue.x, maxvalue.y);
        }
    }
    ConvertBufferedImage.convertTo(output, rasterizedImage, true);
    return rasterizedImage;
}
Also used : Planar(boofcv.struct.image.Planar) GrayU8(boofcv.struct.image.GrayU8) BufferedImage(java.awt.image.BufferedImage)

Example 50 with Planar

use of boofcv.struct.image.Planar in project BoofCV by lessthanoptimal.

In the class TestDescribePlanar, the method various:

@Test
public void various() {
    // Three identical per-band stub describers
    DescribeRegionPoint[] bandDescribers = new DummyDesc[3];
    for (int b = 0; b < bandDescribers.length; b++) {
        bandDescribers[b] = new DummyDesc();
    }

    DummyAlg alg = new DummyAlg(bandDescribers);
    alg.setImage(new Planar(GrayS8.class, 1, 1, 3));
    alg.process(0, 1, 2, 3, alg.createDescription());

    // Combined descriptor is 3 bands x 10 elements, combined exactly once,
    // and each band describer was invoked exactly once
    assertEquals(30, alg.createDescription().size());
    assertEquals(1, alg.numCombined);
    for (DescribeRegionPoint describer : bandDescribers) {
        assertEquals(1, ((DummyDesc) describer).numProcess);
    }
}
Also used : GrayS8(boofcv.struct.image.GrayS8) DescribeRegionPoint(boofcv.abst.feature.describe.DescribeRegionPoint) Planar(boofcv.struct.image.Planar) DescribeRegionPoint(boofcv.abst.feature.describe.DescribeRegionPoint) Test(org.junit.Test)

Aggregations

Planar (boofcv.struct.image.Planar)73 Test (org.junit.Test)39 GrayF32 (boofcv.struct.image.GrayF32)34 GrayU8 (boofcv.struct.image.GrayU8)28 BufferedImage (java.awt.image.BufferedImage)21 ConvertBufferedImage (boofcv.io.image.ConvertBufferedImage)20 RectangleLength2D_I32 (georegression.struct.shapes.RectangleLength2D_I32)12 File (java.io.File)9 Bitmap (android.graphics.Bitmap)4 ListDisplayPanel (boofcv.gui.ListDisplayPanel)4 ImageGray (boofcv.struct.image.ImageGray)4 ConfigGeneralDetector (boofcv.abst.feature.detect.interest.ConfigGeneralDetector)3 LensDistortionUniversalOmni (boofcv.alg.distort.universal.LensDistortionUniversalOmni)3 MediaManager (boofcv.io.MediaManager)3 DefaultMediaManager (boofcv.io.wrapper.DefaultMediaManager)3 CameraPinhole (boofcv.struct.calib.CameraPinhole)3 GrayU16 (boofcv.struct.image.GrayU16)3 Homography2D_F64 (georegression.struct.homography.Homography2D_F64)3 Se3_F64 (georegression.struct.se.Se3_F64)3 DescribeRegionPoint (boofcv.abst.feature.describe.DescribeRegionPoint)2