Search in sources:

Example 1 with MetricCameras

use of boofcv.alg.geo.MetricCameras in project BoofCV by lessthanoptimal.

From the class CommonProjectiveToMetricCamerasChecks, method checkScene.

/**
 * @param singleCamera if true then the algorithm will be told all views come from one camera
 */
/**
 * Runs the estimator on the standard three-view scene and verifies the recovered
 * metric cameras and motions against ground truth.
 *
 * @param singleCamera if true then the algorithm will be told all views come from one camera
 */
private void checkScene(boolean singleCamera) {
    ProjectiveToMetricCameras alg = createEstimator(singleCamera);

    // Sanity check the algorithm's declared minimum view count
    assertTrue(alg.getMinimumViews() >= 1);
    assertTrue(alg.getMinimumViews() <= 3);

    // Describe the three views. With a single camera every view shares camera index 0
    List<ElevateViewInfo> views = new ArrayList<>();
    for (int viewIdx = 0; viewIdx < 3; viewIdx++) {
        views.add(new ElevateViewInfo(imageWidth, imageHeight, singleCamera ? 0 : viewIdx));
    }

    List<DMatrixRMaj> inputCameras = new ArrayList<>();
    inputCameras.add(P2);
    inputCameras.add(P3);

    // Compute the model
    MetricCameras results = new MetricCameras();
    assertTrue(alg.process(views, inputCameras, observationsN, results));

    // Two relative motions (view 1 is the origin) and three sets of intrinsics
    assertEquals(2, results.motion_1_to_k.size);
    assertEquals(3, results.intrinsics.size);

    // Check results against the known scene
    checkEquals(cameraA, results.intrinsics.get(0));
    checkEquals(cameraB, results.intrinsics.get(1));
    checkEquals(cameraC, results.intrinsics.get(2));
    BoofTesting.assertEqualsToScaleS(truthView_1_to_i(1), results.motion_1_to_k.get(0), rotationTol, translationTol);
    BoofTesting.assertEqualsToScaleS(truthView_1_to_i(2), results.motion_1_to_k.get(1), rotationTol, translationTol);

    // The largest translation should be close to 1.0
    double largestTranslation = 0;
    for (int i = 0; i < results.motion_1_to_k.size; i++) {
        largestTranslation = Math.max(largestTranslation, results.motion_1_to_k.get(i).T.norm());
    }
    assertEquals(1.0, largestTranslation, 0.001);
}
Also used : ArrayList(java.util.ArrayList) DMatrixRMaj(org.ejml.data.DMatrixRMaj) ElevateViewInfo(boofcv.struct.calib.ElevateViewInfo) MetricCameras(boofcv.alg.geo.MetricCameras) Se3_F64(georegression.struct.se.Se3_F64)

Example 2 with MetricCameras

use of boofcv.alg.geo.MetricCameras in project BoofCV by lessthanoptimal.

From the class CommonProjectiveToMetricCamerasChecks, method unexpected_number_of_dimensions.

/**
 * Incorrect number of input dimensions
 */
@Test
/**
 * Incorrect number of input dimensions
 */
@Test
void unexpected_number_of_dimensions() {
    standardScene();
    simulateScene(0);

    // Only two views are declared even though the observations describe three,
    // so process() is expected to fail fast
    List<ElevateViewInfo> views = new ArrayList<>();
    views.add(new ElevateViewInfo(imageWidth, imageHeight, 0));
    views.add(new ElevateViewInfo(imageWidth, imageHeight, 1));

    List<DMatrixRMaj> inputCameras = new ArrayList<>();
    inputCameras.add(P2);
    inputCameras.add(P3);

    ProjectiveToMetricCameras alg = createEstimator(false);
    assertThrows(RuntimeException.class,
            () -> alg.process(views, inputCameras, observationsN, new MetricCameras()));
}
Also used : ArrayList(java.util.ArrayList) DMatrixRMaj(org.ejml.data.DMatrixRMaj) ElevateViewInfo(boofcv.struct.calib.ElevateViewInfo) MetricCameras(boofcv.alg.geo.MetricCameras) Test(org.junit.jupiter.api.Test)

Example 3 with MetricCameras

use of boofcv.alg.geo.MetricCameras in project BoofCV by lessthanoptimal.

From the class TestMetricSpawnSceneFromView, method saveMetricSeed.

@Test
void saveMetricSeed() {
    var graph = new PairwiseImageGraph();
    List<String> viewIds = BoofMiscOps.asList("A", "B", "C");
    var listInliers = new DogArray<>(DogArray_I32::new, DogArray_I32::reset);
    // specify the seed view
    listInliers.grow().setTo(1, 3, 5, 7, 9);
    int numInliers = listInliers.get(0).size;
    // create distinctive sets of inlier indexes for each view
    for (int otherIdx = 0; otherIdx < viewIds.size() - 1; otherIdx++) {
        DogArray_I32 inliers = listInliers.grow();
        for (int i = 0; i < numInliers; i++) {
            inliers.add(listInliers.get(0).get(i) + 1 + otherIdx);
        }
    }
    // Create some arbitrary metric results that should be saved
    var results = new MetricCameras();
    for (int viewIdx = 0; viewIdx < viewIds.size(); viewIdx++) {
        // skip zero since it's implicit
        if (viewIdx > 0)
            results.motion_1_to_k.grow().T.setTo(1, viewIdx, 0);
        CameraPinhole pinhole = results.intrinsics.grow();
        pinhole.fx = pinhole.fy = 100 + viewIdx;
        graph.createNode(viewIds.get(viewIdx));
    }
    var alg = new MetricSpawnSceneFromView();
    alg.utils.dbCams = new MockLookUpCameraInfo(800, 800);
    var wgraph = new SceneWorkingGraph();
    alg.saveMetricSeed(graph, viewIds, listInliers, results, wgraph);
    // Check the cameras. There should only be one
    assertEquals(1, wgraph.cameras.size());
    assertEquals(0, wgraph.cameras.get(0).indexDB);
    assertEquals(100, wgraph.cameras.get(0).intrinsic.f);
    // See metric view info got saved correctly
    BoofMiscOps.forIdx(viewIds, (idx, viewId) -> {
        PairwiseImageGraph.View pview = graph.lookupNode(viewId);
        assertTrue(wgraph.isKnown(pview));
        SceneWorkingGraph.View wview = wgraph.lookupView(viewId);
        assertEquals(idx == 0 ? 0 : 1, wview.world_to_view.T.x, 1e-8);
        assertEquals(idx, wview.world_to_view.T.y, 1e-8);
    });
    // See if inliers got saved correctly
    BoofMiscOps.forIdx(viewIds, (idx, viewId) -> {
        SceneWorkingGraph.View wview = wgraph.lookupView(viewId);
        assertEquals(1, wview.inliers.size);
        SceneWorkingGraph.InlierInfo inlier = wview.inliers.get(0);
        assertEquals(0.0, inlier.scoreGeometric, "Score should be unassigned");
        assertEquals(viewIds.size(), inlier.views.size);
        assertEquals(viewIds.size(), inlier.observations.size);
        assertEquals(viewId, inlier.views.get(0).id);
        for (int inlierIdx = 0; inlierIdx < viewIds.size(); inlierIdx++) {
            int offset = (idx + inlierIdx) % viewIds.size();
            final int c = offset;
            DogArray_I32 obs = inlier.observations.get(inlierIdx);
            obs.forIdx((i, value) -> assertEquals(i * 2 + 1 + c, value));
        }
    });
}
Also used : DogArray_I32(org.ddogleg.struct.DogArray_I32) DogArray(org.ddogleg.struct.DogArray) CameraPinhole(boofcv.struct.calib.CameraPinhole) MetricCameras(boofcv.alg.geo.MetricCameras) Test(org.junit.jupiter.api.Test)

Example 4 with MetricCameras

use of boofcv.alg.geo.MetricCameras in project BoofCV by lessthanoptimal.

From the class ThreeViewEstimateMetricScene, method projectiveToMetric.

/**
 * Estimates the transform from projective to metric geometry
 *
 * @return true if successful
 */
/**
 * Estimates the transform from projective to metric geometry.
 *
 * <p>First attempts self calibration (unless a manual focal length was provided). If that
 * fails, it falls back to assuming a fixed focal length for all views and solving for the
 * calibrating homography from two views. Finally the arbitrary scale is normalized so the
 * largest translation has norm 1.</p>
 *
 * @return true if successful
 */
boolean projectiveToMetric() {
    // homography from projective to metric
    listPinhole.clear();
    boolean successfulSelfCalibration = false;
    if (manualFocalLength <= 0) {
        // Estimate calibration parameters via self calibration
        var config = new ConfigSelfCalibDualQuadratic();
        ProjectiveToMetricCameras selfcalib = FactoryMultiView.projectiveToMetric(config);

        List<ElevateViewInfo> views = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            views.add(new ElevateViewInfo(width, height, i));
        }
        List<DMatrixRMaj> cameras = new ArrayList<>();
        cameras.add(P2);
        cameras.add(P3);
        DogArray<AssociatedTuple> observations = new DogArray<>(() -> new AssociatedTupleN(3));
        MultiViewOps.convertTr(ransac.getMatchSet(), observations);

        var results = new MetricCameras();
        boolean success = selfcalib.process(views, cameras, observations.toList(), results);
        if (success) {
            successfulSelfCalibration = true;
            listPinhole.addAll(results.intrinsics.toList());
            // View 0 is the world origin; motions are relative to it
            listWorldToView.get(0).reset();
            listWorldToView.get(1).setTo(results.motion_1_to_k.get(0));
            listWorldToView.get(2).setTo(results.motion_1_to_k.get(1));
            if (verbose != null)
                verbose.println("Auto calibration success");
        } else {
            if (verbose != null)
                verbose.println("Auto calibration failed");
        }
    }
    if (!successfulSelfCalibration) {
        // Use provided focal length or guess using an "average" focal length across cameras
        double focalLength = manualFocalLength <= 0 ? (double) (Math.max(width, height) / 2) : manualFocalLength;
        if (verbose != null)
            verbose.println("Assuming fixed focal length for all views. f=" + focalLength);
        final var estimateH = new TwoViewToCalibratingHomography();
        DMatrixRMaj F21 = MultiViewOps.projectiveToFundamental(P2, null);
        estimateH.initialize(F21, P2);
        DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(focalLength, focalLength, 0, 0, 0);
        DogArray<AssociatedPair> pairs = new DogArray<>(AssociatedPair::new);
        MultiViewOps.convertTr(ransac.getMatchSet(), 0, 1, pairs);
        if (!estimateH.process(K, K, pairs.toList()))
            throw new RuntimeException("Failed to estimate H given 'known' intrinsics");
        // Use the found calibration homography to find motion estimates
        DMatrixRMaj H = estimateH.getCalibrationHomography();
        listPinhole.clear();
        // All three views share the same guessed pinhole model
        for (int i = 0; i < 3; i++) {
            listPinhole.add(PerspectiveOps.matrixToPinhole(K, width, height, null));
        }
        listWorldToView.get(0).reset();
        MultiViewOps.projectiveToMetric(P2, H, listWorldToView.get(1), K);
        MultiViewOps.projectiveToMetric(P3, H, listWorldToView.get(2), K);
    }
    if (verbose != null) {
        verbose.println("Initial Intrinsic Estimate:");
        for (int i = 0; i < 3; i++) {
            CameraPinhole r = listPinhole.get(i);
            verbose.printf("  fx = %6.1f, fy = %6.1f, skew = %6.3f\n", r.fx, r.fy, r.skew);
        }
        verbose.println("Initial Motion Estimate:");
    }
    // scale is arbitrary. Set max translation to 1
    double maxT = 0;
    for (int i = 0; i < listWorldToView.size(); i++) {
        Se3_F64 world_to_view = listWorldToView.get(i);
        maxT = Math.max(maxT, world_to_view.T.norm());
    }
    for (int i = 0; i < listWorldToView.size(); i++) {
        Se3_F64 world_to_view = listWorldToView.get(i);
        // Guard against divide-by-zero when every translation is zero (degenerate scene),
        // which previously produced Inf/NaN translations
        if (maxT > 0)
            world_to_view.T.scale(1.0 / maxT);
        if (verbose != null) {
            Rodrigues_F64 rod = ConvertRotation3D_F64.matrixToRodrigues(world_to_view.R, null);
            verbose.println("  T=" + world_to_view.T + "  R=" + rod);
        }
    }
    return true;
}
Also used : ArrayList(java.util.ArrayList) DMatrixRMaj(org.ejml.data.DMatrixRMaj) DogArray(org.ddogleg.struct.DogArray) CameraPinhole(boofcv.struct.calib.CameraPinhole) VerbosePrint(org.ddogleg.struct.VerbosePrint) MetricCameras(boofcv.alg.geo.MetricCameras) ProjectiveToMetricCameras(boofcv.abst.geo.selfcalib.ProjectiveToMetricCameras) TwoViewToCalibratingHomography(boofcv.alg.geo.selfcalib.TwoViewToCalibratingHomography) ProjectiveToMetricCameras(boofcv.abst.geo.selfcalib.ProjectiveToMetricCameras) Rodrigues_F64(georegression.struct.so.Rodrigues_F64) ElevateViewInfo(boofcv.struct.calib.ElevateViewInfo) Se3_F64(georegression.struct.se.Se3_F64)

Example 5 with MetricCameras

use of boofcv.alg.geo.MetricCameras in project BoofCV by lessthanoptimal.

From the class CommonProjectiveToMetricCamerasChecks, method real_world_case0.

/**
 * In this situation a scene was created where points appeared behind the camera. Taken from real data
 */
@Test
/**
 * In this situation a scene was created where points appeared behind the camera. Taken from real data
 */
@Test
void real_world_case0() {
    DMatrixRMaj P2 = new DMatrixRMaj(3, 4, true, 71.2714309, -1.50598476, -354.50553, -.052935998, -1.28683386, 39.1891727, 672.658283, -.994592935, .00056663, -.019338274, 70.0397946, -.000445996);
    DMatrixRMaj P3 = new DMatrixRMaj(3, 4, true, 32.4647875, -1.02054892, -241.805355, -.054715714, -1.8370892, -.061992654, .486096194, -1.00684043, .000185405, -.010046842, 31.8668685, -.000209807);

    // Each row is one point's pixel observation in views 1, 2, and 3: {x1, y1, x2, y2, x3, y3}
    // These are in front of both cameras
    double[][] pointsInFront = {
            {-47.208221435546875, -14.024078369140625, -49.9302978515625, 36.35797119140625, -50.079071044921875, 77.59286499023438},
            {-203.9057159423828, 70.39932250976562, -207.64544677734375, 124.38552856445312, -206.31866455078125, 172.38186645507812},
            {-362.7781524658203, -218.54442596435547, -361.6542053222656, -160.6702880859375, -363.30285263061523, -107.35969543457031},
            {-154.99310302734375, 3.35784912109375, -158.14512634277344, 55.362579345703125, -157.7862548828125, 100.77597045898438},
            {-170.89407348632812, -181.27266693115234, -172.10398864746094, -127.54672241210938, -174.48524475097656, -81.65957641601562},
            {41.3905029296875, 170.15188598632812, 39.365081787109375, 221.3468017578125, 43.634307861328125, 261.2353515625},
            {-350.1354789733887, -229.5992660522461, -349.162899017334, -171.76145935058594, -351.1237335205078, -118.83564758300781},
            {-50.12109375, -14.451873779296875, -52.87139892578125, 35.835052490234375, -53.014801025390625, 77.25506591796875},
            {-250.23069763183594, -212.5504379272461, -250.5589599609375, -156.41912841796875, -252.87100219726562, -107.27978515625}};

    // These are behind at least one camera
    double[][] pointsBehind = {
            {154.89532470703125, -21.821807861328125, 151.21435546875, 41.2327880859375, 151.974365234375, 93.64697265625},
            {226.85003662109375, -95.77021789550781, 221.5345458984375, -35.9564208984375, 219.90155029296875, 12.154052734375},
            {237.870361328125, -46.12437438964844, 232.88519287109375, 13.570709228515625, 232.98577880859375, 61.028564453125},
            {162.7314453125, -165.1600341796875, 156.9556884765625, -99.56578063964844, 154.2447509765625, -45.94012451171875},
            {283.9959716796875, -147.1155242919922, 276.13848876953125, -86.35987854003906, 273.4132080078125, -40.23883056640625},
            {135.57574462890625, -232.8561019897461, 129.67437744140625, -163.39407348632812, 125.60736083984375, -107.20663452148438},
            {-21.8720703125, -162.5299530029297, -24.70025634765625, -101.63801574707031, -27.263427734375, -50.05320739746094},
            {62.40008544921875, -173.78022003173828, 59.92376708984375, -105.06491088867188, 56.91351318359375, -45.15827941894531},
            {-63.860626220703125, -259.0756492614746, -65.89141845703125, -195.2255096435547, -69.55535888671875, -142.1841278076172}};

    DogArray<AssociatedTuple> observations = new DogArray<>(() -> new AssociatedTupleN(3));
    for (double[] p : pointsInFront) {
        add(p[0], p[1], p[2], p[3], p[4], p[5], observations);
    }
    for (double[] p : pointsBehind) {
        add(p[0], p[1], p[2], p[3], p[4], p[5], observations);
    }

    List<ElevateViewInfo> views = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        views.add(new ElevateViewInfo(800, 600, i));
    }
    List<DMatrixRMaj> inputCameras = new ArrayList<>();
    inputCameras.add(P2);
    inputCameras.add(P3);

    var results = new MetricCameras();
    ProjectiveToMetricCameras alg = createEstimator(false);
    assertTrue(alg.process(views, inputCameras, observations.toList(), results));

    // Yes internally most implementations run this function, but the number of invalid was > 0 before
    var checkMatches = new ResolveSignAmbiguityPositiveDepth();
    checkMatches.process(observations.toList(), results);
    assertFalse(checkMatches.signChanged);
    assertEquals(0, checkMatches.bestInvalid);
}
Also used : DMatrixRMaj(org.ejml.data.DMatrixRMaj) AssociatedTupleN(boofcv.struct.geo.AssociatedTupleN) ArrayList(java.util.ArrayList) DogArray(org.ddogleg.struct.DogArray) MetricCameras(boofcv.alg.geo.MetricCameras) AssociatedTuple(boofcv.struct.geo.AssociatedTuple) ResolveSignAmbiguityPositiveDepth(boofcv.alg.geo.selfcalib.ResolveSignAmbiguityPositiveDepth) ElevateViewInfo(boofcv.struct.calib.ElevateViewInfo) Test(org.junit.jupiter.api.Test)

Aggregations

MetricCameras (boofcv.alg.geo.MetricCameras)6 ElevateViewInfo (boofcv.struct.calib.ElevateViewInfo)5 ArrayList (java.util.ArrayList)5 DMatrixRMaj (org.ejml.data.DMatrixRMaj)5 Test (org.junit.jupiter.api.Test)4 DogArray (org.ddogleg.struct.DogArray)3 CameraPinhole (boofcv.struct.calib.CameraPinhole)2 Se3_F64 (georegression.struct.se.Se3_F64)2 ProjectiveToMetricCameras (boofcv.abst.geo.selfcalib.ProjectiveToMetricCameras)1 ResolveSignAmbiguityPositiveDepth (boofcv.alg.geo.selfcalib.ResolveSignAmbiguityPositiveDepth)1 TwoViewToCalibratingHomography (boofcv.alg.geo.selfcalib.TwoViewToCalibratingHomography)1 AssociatedTuple (boofcv.struct.geo.AssociatedTuple)1 AssociatedTupleN (boofcv.struct.geo.AssociatedTupleN)1 Rodrigues_F64 (georegression.struct.so.Rodrigues_F64)1 DogArray_I32 (org.ddogleg.struct.DogArray_I32)1 VerbosePrint (org.ddogleg.struct.VerbosePrint)1