
Example 1 with ProjectiveToMetricCameras

Use of boofcv.abst.geo.selfcalib.ProjectiveToMetricCameras in the project BoofCV by lessthanoptimal.

From the class ThreeViewEstimateMetricScene, method projectiveToMetric:

/**
 * Estimates the transform from projective to metric geometry
 *
 * @return true if successful
 */
boolean projectiveToMetric() {
    // homography from projective to metric
    listPinhole.clear();
    boolean successfulSelfCalibration = false;
    if (manualFocalLength <= 0) {
        // Estimate calibration parameters
        var config = new ConfigSelfCalibDualQuadratic();
        // var config = new ConfigSelfCalibEssentialGuess();
        // config.numberOfSamples = 200;
        // config.fixedFocus = true;
        // config.sampleMin = 0.6;
        // config.sampleMax = 1.5;
        ProjectiveToMetricCameras selfcalib = FactoryMultiView.projectiveToMetric(config);
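        // One ElevateViewInfo per view: image shape plus a camera index; a distinct index per view lets each view have its own intrinsics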
        List<ElevateViewInfo> views = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            views.add(new ElevateViewInfo(width, height, i));
        }
        List<DMatrixRMaj> cameras = new ArrayList<>();
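        // View 1 is implicitly P1 = [I|0], so only the camera matrices for views 2 and 3 are supplied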
        cameras.add(P2);
        cameras.add(P3);
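        // Convert the inlier AssociatedTriple matches into the generic AssociatedTuple format used by the self-calibration interface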
        DogArray<AssociatedTuple> observations = new DogArray<>(() -> new AssociatedTupleN(3));
        MultiViewOps.convertTr(ransac.getMatchSet(), observations);
        var results = new MetricCameras();
        boolean success = selfcalib.process(views, cameras, observations.toList(), results);
        if (success) {
            successfulSelfCalibration = true;
            listPinhole.addAll(results.intrinsics.toList());
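            // View 1 defines the world frame; the estimated motions go from view 1 to views 2 and 3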
            listWorldToView.get(0).reset();
            listWorldToView.get(1).setTo(results.motion_1_to_k.get(0));
            listWorldToView.get(2).setTo(results.motion_1_to_k.get(1));
            if (verbose != null)
                verbose.println("Auto calibration success");
        } else {
            if (verbose != null)
                verbose.println("Auto calibration failed");
        }
    }
    if (!successfulSelfCalibration) {
        // Use provided focal length or guess using an "average" focal length across cameras
        double focalLength = manualFocalLength <= 0 ? (double) (Math.max(width, height) / 2) : manualFocalLength;
        if (verbose != null)
            verbose.println("Assuming fixed focal length for all views. f=" + focalLength);
        final var estimateH = new TwoViewToCalibratingHomography();
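        // The calibrating homography is estimated from the view-1-to-view-2 fundamental matrix and a guessed intrinsic matrix K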
        DMatrixRMaj F21 = MultiViewOps.projectiveToFundamental(P2, null);
        estimateH.initialize(F21, P2);
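        // Guessed pinhole: fx = fy = focalLength, zero skew, principal point at (0,0)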
        DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(focalLength, focalLength, 0, 0, 0);
        DogArray<AssociatedPair> pairs = new DogArray<>(AssociatedPair::new);
        MultiViewOps.convertTr(ransac.getMatchSet(), 0, 1, pairs);
        if (!estimateH.process(K, K, pairs.toList()))
            throw new RuntimeException("Failed to estimate H given 'known' intrinsics");
        // Use the found calibration homography to find motion estimates
        DMatrixRMaj H = estimateH.getCalibrationHomography();
        listPinhole.clear();
        for (int i = 0; i < 3; i++) {
            listPinhole.add(PerspectiveOps.matrixToPinhole(K, width, height, null));
        }
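        // View 1 stays at the origin; P*H = K*[R|T] is decomposed to recover the metric motion of views 2 and 3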
        listWorldToView.get(0).reset();
        MultiViewOps.projectiveToMetric(P2, H, listWorldToView.get(1), K);
        MultiViewOps.projectiveToMetric(P3, H, listWorldToView.get(2), K);
    }
    if (verbose != null) {
        verbose.println("Initial Intrinsic Estimate:");
        for (int i = 0; i < 3; i++) {
            CameraPinhole r = listPinhole.get(i);
            verbose.printf("  fx = %6.1f, fy = %6.1f, skew = %6.3f\n", r.fx, r.fy, r.skew);
        }
        verbose.println("Initial Motion Estimate:");
    }
    // scale is arbitrary. Set max translation to 1
    double maxT = 0;
    for (int i = 0; i < listWorldToView.size(); i++) {
        Se3_F64 world_to_view = listWorldToView.get(i);
        maxT = Math.max(maxT, world_to_view.T.norm());
    }
    for (int i = 0; i < listWorldToView.size(); i++) {
        Se3_F64 world_to_view = listWorldToView.get(i);
        world_to_view.T.scale(1.0 / maxT);
        if (verbose != null) {
            Rodrigues_F64 rod = ConvertRotation3D_F64.matrixToRodrigues(world_to_view.R, null);
            verbose.println("  T=" + world_to_view.T + "  R=" + rod);
        }
    }
    return true;
}
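
For context, the same self-calibration step can be driven directly with ProjectiveToMetricCameras outside of ThreeViewEstimateMetricScene. In the minimal sketch below the projective cameras P2 and P3, the image size, and the inlier matches (here called triples, a hypothetical name for a List<AssociatedTriple>) are assumed to come from an existing projective reconstruction:

ProjectiveToMetricCameras selfcalib = FactoryMultiView.projectiveToMetric(new ConfigSelfCalibDualQuadratic());

// Image shape and a camera index for each of the three views
List<ElevateViewInfo> views = new ArrayList<>();
for (int i = 0; i < 3; i++) {
    views.add(new ElevateViewInfo(width, height, i));
}

// View 1 is implicitly P1 = [I|0]; only the remaining camera matrices are passed in
List<DMatrixRMaj> cameras = new ArrayList<>();
cameras.add(P2);
cameras.add(P3);

// One 3-view tuple of pixel observations per matched feature
DogArray<AssociatedTuple> observations = new DogArray<>(() -> new AssociatedTupleN(3));
MultiViewOps.convertTr(triples, observations);

MetricCameras results = new MetricCameras();
if (selfcalib.process(views, cameras, observations.toList(), results)) {
    CameraPinhole intrinsics1 = results.intrinsics.get(0);    // intrinsics for view 1
    Se3_F64 view1_to_view2 = results.motion_1_to_k.get(0);    // motion from view 1 to view 2
    Se3_F64 view1_to_view3 = results.motion_1_to_k.get(1);    // motion from view 1 to view 3
}
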
Also used: ArrayList (java.util.ArrayList), DMatrixRMaj (org.ejml.data.DMatrixRMaj), DogArray (org.ddogleg.struct.DogArray), CameraPinhole (boofcv.struct.calib.CameraPinhole), VerbosePrint (org.ddogleg.struct.VerbosePrint), MetricCameras (boofcv.alg.geo.MetricCameras), ProjectiveToMetricCameras (boofcv.abst.geo.selfcalib.ProjectiveToMetricCameras), TwoViewToCalibratingHomography (boofcv.alg.geo.selfcalib.TwoViewToCalibratingHomography), Rodrigues_F64 (georegression.struct.so.Rodrigues_F64), ElevateViewInfo (boofcv.struct.calib.ElevateViewInfo), Se3_F64 (georegression.struct.se.Se3_F64)
