Use of boofcv.alg.geo.selfcalib.TwoViewToCalibratingHomography in the project BoofCV by lessthanoptimal.

The example below is the projectiveToMetric method of the class ThreeViewEstimateMetricScene. It first attempts full self-calibration; if that fails, or if a focal length was supplied manually, it falls back on TwoViewToCalibratingHomography with a fixed pinhole model shared by all three views.
/**
* Estimates the transform from projective to metric geometry
*
* @return true if successful
*/
boolean projectiveToMetric() {
	// homography from projective to metric
	listPinhole.clear();

	boolean successfulSelfCalibration = false;
	if (manualFocalLength <= 0) {
		// Estimate calibration parameters
		var config = new ConfigSelfCalibDualQuadratic();
		// var config = new ConfigSelfCalibEssentialGuess();
		// config.numberOfSamples = 200;
		// config.fixedFocus = true;
		// config.sampleMin = 0.6;
		// config.sampleMax = 1.5;
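
		// Creates the projective-to-metric self-calibration algorithm selected by the config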
		ProjectiveToMetricCameras selfcalib = FactoryMultiView.projectiveToMetric(config);

		List<ElevateViewInfo> views = new ArrayList<>();
		for (int i = 0; i < 3; i++) {
			views.add(new ElevateViewInfo(width, height, i));
		}
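
		// Camera matrices for views 1 and 2; view 0's camera is implicitly P1 = [I|0]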
		List<DMatrixRMaj> cameras = new ArrayList<>();
		cameras.add(P2);
		cameras.add(P3);
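
		// Convert the RANSAC inlier set of associated triples into observation tuples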
		DogArray<AssociatedTuple> observations = new DogArray<>(() -> new AssociatedTupleN(3));
		MultiViewOps.convertTr(ransac.getMatchSet(), observations);

		var results = new MetricCameras();
		boolean success = selfcalib.process(views, cameras, observations.toList(), results);

		if (success) {
			successfulSelfCalibration = true;
			listPinhole.addAll(results.intrinsics.toList());
			listWorldToView.get(0).reset();
			listWorldToView.get(1).setTo(results.motion_1_to_k.get(0));
			listWorldToView.get(2).setTo(results.motion_1_to_k.get(1));
			if (verbose != null)
				verbose.println("Auto calibration success");
		} else {
			if (verbose != null)
				verbose.println("Auto calibration failed");
		}
	}
	if (!successfulSelfCalibration) {
		// Use provided focal length or guess using an "average" focal length across cameras
		double focalLength = manualFocalLength <= 0 ? (double) (Math.max(width, height) / 2) : manualFocalLength;

		if (verbose != null)
			verbose.println("Assuming fixed focal length for all views. f=" + focalLength);

		final var estimateH = new TwoViewToCalibratingHomography();
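		// Fundamental matrix between views 0 and 1, implied by the projective camera P2 with P1 = [I|0]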
		DMatrixRMaj F21 = MultiViewOps.projectiveToFundamental(P2, null);
		estimateH.initialize(F21, P2);

		DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(focalLength, focalLength, 0, 0, 0);
		DogArray<AssociatedPair> pairs = new DogArray<>(AssociatedPair::new);
		MultiViewOps.convertTr(ransac.getMatchSet(), 0, 1, pairs);

		if (!estimateH.process(K, K, pairs.toList()))
			throw new RuntimeException("Failed to estimate H given 'known' intrinsics");

		// Use the found calibration homography to find motion estimates
		DMatrixRMaj H = estimateH.getCalibrationHomography();

		listPinhole.clear();
		for (int i = 0; i < 3; i++) {
			listPinhole.add(PerspectiveOps.matrixToPinhole(K, width, height, null));
		}
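
		// Upgrade each projective camera with H and extract the rigid world-to-view motions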
		listWorldToView.get(0).reset();
		MultiViewOps.projectiveToMetric(P2, H, listWorldToView.get(1), K);
		MultiViewOps.projectiveToMetric(P3, H, listWorldToView.get(2), K);
	}

	if (verbose != null) {
		verbose.println("Initial Intrinsic Estimate:");
		for (int i = 0; i < 3; i++) {
			CameraPinhole r = listPinhole.get(i);
			verbose.printf(" fx = %6.1f, fy = %6.1f, skew = %6.3f\n", r.fx, r.fy, r.skew);
		}
		verbose.println("Initial Motion Estimate:");
	}

	// scale is arbitrary. Set max translation to 1
	double maxT = 0;
	for (int i = 0; i < listWorldToView.size(); i++) {
		Se3_F64 world_to_view = listWorldToView.get(i);
		maxT = Math.max(maxT, world_to_view.T.norm());
	}

	for (int i = 0; i < listWorldToView.size(); i++) {
		Se3_F64 world_to_view = listWorldToView.get(i);
		world_to_view.T.scale(1.0 / maxT);
		if (verbose != null) {
			Rodrigues_F64 rod = ConvertRotation3D_F64.matrixToRodrigues(world_to_view.R, null);
			verbose.println(" T=" + world_to_view.T + " R=" + rod);
		}
	}

	return true;
}
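
For context, here is a minimal standalone sketch of the fallback path above, driving TwoViewToCalibratingHomography on its own. The class and method names (UpgradeToMetricExample, upgradeToMetric) and the parameter names are illustrative assumptions, not part of BoofCV; the BoofCV calls themselves mirror the ones in the snippet.

import boofcv.alg.geo.MultiViewOps;
import boofcv.alg.geo.PerspectiveOps;
import boofcv.alg.geo.selfcalib.TwoViewToCalibratingHomography;
import boofcv.struct.geo.AssociatedPair;
import georegression.struct.se.Se3_F64;
import org.ejml.data.DMatrixRMaj;

import java.util.List;

public class UpgradeToMetricExample {
	/**
	 * Upgrades a two-view projective reconstruction to metric, assuming the first camera
	 * is P1 = [I|0] and both views share the guessed focal length. A sketch, not BoofCV's own API.
	 *
	 * @param P2 camera matrix of the second view in the projective frame
	 * @param pairs inlier point associations between the two views
	 * @param focalLength guessed focal length in pixels (an assumption, not estimated here)
	 * @return rigid motion from the first view to the second
	 */
	public static Se3_F64 upgradeToMetric( DMatrixRMaj P2, List<AssociatedPair> pairs, double focalLength ) {
		// Fundamental matrix implied by P2 when P1 = [I|0]
		DMatrixRMaj F21 = MultiViewOps.projectiveToFundamental(P2, null);

		var estimateH = new TwoViewToCalibratingHomography();
		estimateH.initialize(F21, P2);

		// Guessed pinhole: fx = fy = focalLength, zero skew, principal point at (0,0)
		DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(focalLength, focalLength, 0, 0, 0);

		if (!estimateH.process(K, K, pairs))
			throw new RuntimeException("Failed to estimate the calibrating homography");

		// H upgrades the projective cameras to metric, e.g. P2*H decomposes into K*[R|t]
		DMatrixRMaj H = estimateH.getCalibrationHomography();

		var view1_to_view2 = new Se3_F64();
		MultiViewOps.projectiveToMetric(P2, H, view1_to_view2, K);
		return view1_to_view2;
	}
}

Note that the same K is passed for both views, matching the fixed-focal-length assumption made in the fallback branch of the snippet.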