Use of georegression.struct.so.Rodrigues_F64 in the BoofCV project by lessthanoptimal.
From the class CodecBundleAdjustmentInTheLarge, method save:
/**
 * Saves the scene and observations in the "Bundle Adjustment in the Large" (BAL) text format:
 * a header line, one line per observation, a 9-value block per camera, then one 3-value
 * block per 3D point.
 *
 * @param file destination file
 * @throws IOException if the file cannot be created or written
 */
public void save(File file) throws IOException {
	// try-with-resources closes the stream even if a write fails (previously it leaked on error)
	try (PrintStream writer = new PrintStream(file)) {
		writer.println(scene.views.size + " " + scene.points.size + " " + observations.getObservationCount());

		// One observation per line: viewIdx pointIdx pixelX pixelY.
		// Locale.ROOT forces '.' as the decimal separator; with the JVM's default locale,
		// printf("%f") can emit ',' (e.g. German locale), corrupting the file format.
		PointIndex2D_F64 o = new PointIndex2D_F64();
		for (int viewIdx = 0; viewIdx < observations.views.size; viewIdx++) {
			SceneObservations.View view = observations.views.data[viewIdx];
			for (int obsIdx = 0; obsIdx < view.size(); obsIdx++) {
				view.getPixel(obsIdx, o);
				writer.printf(java.util.Locale.ROOT, "%d %d %.8f %.8f\n", viewIdx, o.index, o.p.x, o.p.y);
			}
		}

		// Per camera: rotation encoded as a scaled axis (unit axis * angle), translation,
		// then focal length and two radial distortion coefficients
		Rodrigues_F64 axisAngle = new Rodrigues_F64();
		for (int viewIdx = 0; viewIdx < scene.views.size; viewIdx++) {
			SceneStructureMetric.View view = scene.views.data[viewIdx];
			BundlePinholeSnavely camera = Objects.requireNonNull(scene.cameras.get(view.camera).getModel());
			Se3_F64 parent_to_view = scene.getParentToView(viewIdx);

			ConvertRotation3D_F64.matrixToRodrigues(parent_to_view.R, axisAngle);
			double axisX = axisAngle.unitAxisRotation.x * axisAngle.theta;
			double axisY = axisAngle.unitAxisRotation.y * axisAngle.theta;
			double axisZ = axisAngle.unitAxisRotation.z * axisAngle.theta;

			writer.printf(java.util.Locale.ROOT, "%.10f\n%.10f\n%.10f\n", axisX, axisY, axisZ);
			writer.printf(java.util.Locale.ROOT, "%.10f\n%.10f\n%.10f\n", parent_to_view.T.x, parent_to_view.T.y, parent_to_view.T.z);
			writer.printf(java.util.Locale.ROOT, "%.10f\n%.10f\n%.10f\n", camera.f, camera.k1, camera.k2);
		}

		// 3D point coordinates, one value per line
		for (int pointId = 0; pointId < scene.points.size; pointId++) {
			SceneStructureCommon.Point p = scene.points.data[pointId];
			writer.printf(java.util.Locale.ROOT, "%.10f\n%.10f\n%.10f\n", p.coordinate[0], p.coordinate[1], p.coordinate[2]);
		}
	}
}
Use of georegression.struct.so.Rodrigues_F64 in the BoofCV project by lessthanoptimal.
From the class CodecBundleAdjustmentInTheLarge, method parse:
/**
 * Parses a scene and its observations from the "Bundle Adjustment in the Large" (BAL)
 * text format, the mirror of {@link #save}. Results are stored in {@code scene} and
 * {@code observations}.
 *
 * @param file file to read
 * @throws IOException if the file can't be opened or the format is invalid
 */
public void parse(File file) throws IOException {
	InputStream stream = UtilIO.openStream(file.getPath());
	if (stream == null)
		throw new IOException("Can't open file: " + file.getPath());

	// try-with-resources closes the stream even if parsing throws (previously it leaked on error)
	try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, UTF_8))) {
		// Header: number of cameras/views, points, and observations
		String[] words = reader.readLine().split("\\s+");
		if (words.length != 3)
			throw new IOException("Unexpected number of words on first line");

		int numCameras = Integer.parseInt(words[0]);
		int numPoints = Integer.parseInt(words[1]);
		int numObservations = Integer.parseInt(words[2]);

		// BAL associates exactly one camera with each view
		scene = new SceneStructureMetric(false);
		scene.initialize(numCameras, numCameras, numPoints);
		observations = new SceneObservations();
		observations.initialize(numCameras);

		// Observation lines: cameraID pointID pixelX pixelY
		for (int i = 0; i < numObservations; i++) {
			words = reader.readLine().split("\\s+");
			if (words.length != 4)
				throw new IOException("Unexpected number of words in obs");
			int cameraID = Integer.parseInt(words[0]);
			int pointID = Integer.parseInt(words[1]);
			float pixelX = Float.parseFloat(words[2]);
			float pixelY = Float.parseFloat(words[3]);

			if (pointID >= numPoints) {
				throw new RuntimeException("Out of bounds pointID");
			}
			if (cameraID >= numCameras) {
				throw new RuntimeException("Out of bounds cameraID");
			}

			observations.getView(cameraID).add(pointID, pixelX, pixelY);
		}

		// Camera blocks: rotation stored as a scaled axis (axis * angle), translation,
		// then focal length and two radial distortion coefficients
		Se3_F64 worldToCameraGL = new Se3_F64();
		Rodrigues_F64 rod = new Rodrigues_F64();
		for (int i = 0; i < numCameras; i++) {
			rod.unitAxisRotation.x = Double.parseDouble(reader.readLine());
			rod.unitAxisRotation.y = Double.parseDouble(reader.readLine());
			rod.unitAxisRotation.z = Double.parseDouble(reader.readLine());
			// Recover the angle from the vector's magnitude, then normalize the axis.
			// Guard against theta == 0 to avoid dividing by zero on an identity rotation.
			rod.theta = rod.unitAxisRotation.norm();
			if (rod.theta != 0)
				rod.unitAxisRotation.divide(rod.theta);

			worldToCameraGL.T.x = Double.parseDouble(reader.readLine());
			worldToCameraGL.T.y = Double.parseDouble(reader.readLine());
			worldToCameraGL.T.z = Double.parseDouble(reader.readLine());

			ConvertRotation3D_F64.rodriguesToMatrix(rod, worldToCameraGL.R);

			BundlePinholeSnavely camera = new BundlePinholeSnavely();
			camera.f = Double.parseDouble(reader.readLine());
			camera.k1 = Double.parseDouble(reader.readLine());
			camera.k2 = Double.parseDouble(reader.readLine());

			scene.setCamera(i, false, camera);
			scene.setView(i, i, false, worldToCameraGL);
		}

		// 3D points. Parse as double: the file stores 10 decimal digits of a double and
		// the previous Float.parseFloat silently truncated coordinates to float precision.
		Point3D_F64 P = new Point3D_F64();
		for (int i = 0; i < numPoints; i++) {
			P.x = Double.parseDouble(reader.readLine());
			P.y = Double.parseDouble(reader.readLine());
			P.z = Double.parseDouble(reader.readLine());
			scene.setPoint(i, P.x, P.y, P.z);
		}

		// Record which views observe each point
		for (int i = 0; i < observations.views.size; i++) {
			View v = observations.getView(i);
			for (int j = 0; j < v.point.size; j++) {
				scene.connectPointToView(v.getPointId(j), i);
			}
		}
	}

	observations.checkOneObservationPerView();
}
Use of georegression.struct.so.Rodrigues_F64 in the BoofCV project by lessthanoptimal.
From the class TestMetricSanityChecks, method checkPhysicalConstraints_inliers_ErrorBehind:
@Test
void checkPhysicalConstraints_inliers_ErrorBehind() {
	var alg = new MetricSanityChecks();

	// Point the view backwards so that triangulated points appear to lie behind it.
	// The flip is a 180 degree rotation about the camera's +y axis, which is read
	// from the second row of the world_to_view rotation matrix.
	var halfTurn = new Rodrigues_F64();
	halfTurn.unitAxisRotation.setTo(
			wview.world_to_view.R.get(1, 0),
			wview.world_to_view.R.get(1, 1),
			wview.world_to_view.R.get(1, 2));
	halfTurn.theta = Math.PI;
	DMatrixRMaj flipRotation = ConvertRotation3D_F64.rodriguesToMatrix(halfTurn, null);
	CommonOps_DDRM.mult(flipRotation, wview.world_to_view.R.copy(), wview.world_to_view.R);

	// NOTE: This test isn't going as planned. There should be N points behind the camera, no bound errors
	// and no reprojection errors since all that stuff should not be significantly affected by it being
	// flipped

	// Make sure there are enough inliers for the check to be meaningful
	int numInliers = wview.inliers.get(0).getInlierCount();
	assertTrue(numInliers > 20);

	alg.checkPhysicalConstraints(dbSimilar, scene, wview, 0);
	assertEquals(0, alg.failedTriangulate);
	// Again fewer than expected, but we can see it's checking at least
	assertTrue(alg.failedBehind > numInliers * 0.9);
	// assertEquals(0, alg.failedImageBounds);
	// assertEquals(0, alg.failedReprojection);
}
Use of georegression.struct.so.Rodrigues_F64 in the BoofCV project by lessthanoptimal.
From the class ThreeViewEstimateMetricScene, method projectiveToMetric:
/**
 * Estimates the transform from projective to metric geometry
 *
 * Attempts self calibration first (unless a manual focal length was provided); if that
 * fails, falls back to an assumed fixed focal length for all three views. On success
 * {@code listPinhole} and {@code listWorldToView} hold the metric reconstruction with
 * translations normalized so the largest has unit length.
 *
 * @return true if successful
 */
boolean projectiveToMetric() {
	// homography from projective to metric
	listPinhole.clear();

	boolean successfulSelfCalibration = false;
	if (manualFocalLength <= 0) {
		// Estimate calibration parameters via self calibration
		var config = new ConfigSelfCalibDualQuadratic();

		ProjectiveToMetricCameras selfcalib = FactoryMultiView.projectiveToMetric(config);

		List<ElevateViewInfo> views = new ArrayList<>();
		for (int i = 0; i < 3; i++) {
			views.add(new ElevateViewInfo(width, height, i));
		}
		// View 0's camera is implicit; only P2 and P3 are passed in
		List<DMatrixRMaj> cameras = new ArrayList<>();
		cameras.add(P2);
		cameras.add(P3);
		DogArray<AssociatedTuple> observations = new DogArray<>(() -> new AssociatedTupleN(3));
		MultiViewOps.convertTr(ransac.getMatchSet(), observations);

		var results = new MetricCameras();
		boolean success = selfcalib.process(views, cameras, observations.toList(), results);

		if (success) {
			successfulSelfCalibration = true;
			listPinhole.addAll(results.intrinsics.toList());
			// View 0 is the world origin; views 1 and 2 come from the estimated motions
			listWorldToView.get(0).reset();
			listWorldToView.get(1).setTo(results.motion_1_to_k.get(0));
			listWorldToView.get(2).setTo(results.motion_1_to_k.get(1));
			if (verbose != null)
				verbose.println("Auto calibration success");
		} else {
			if (verbose != null)
				verbose.println("Auto calibration failed");
		}
	}

	if (!successfulSelfCalibration) {
		// Use provided focal length or guess using an "average" focal length across cameras
		double focalLength = manualFocalLength <= 0 ? (double) (Math.max(width, height) / 2) : manualFocalLength;

		if (verbose != null)
			verbose.println("Assuming fixed focal length for all views. f=" + focalLength);

		final var estimateH = new TwoViewToCalibratingHomography();
		DMatrixRMaj F21 = MultiViewOps.projectiveToFundamental(P2, null);
		estimateH.initialize(F21, P2);
		DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(focalLength, focalLength, 0, 0, 0);
		DogArray<AssociatedPair> pairs = new DogArray<>(AssociatedPair::new);
		MultiViewOps.convertTr(ransac.getMatchSet(), 0, 1, pairs);
		if (!estimateH.process(K, K, pairs.toList()))
			throw new RuntimeException("Failed to estimate H given 'known' intrinsics");

		// Use the found calibration homography to find motion estimates
		DMatrixRMaj H = estimateH.getCalibrationHomography();
		listPinhole.clear();
		for (int i = 0; i < 3; i++) {
			listPinhole.add(PerspectiveOps.matrixToPinhole(K, width, height, null));
		}
		listWorldToView.get(0).reset();
		MultiViewOps.projectiveToMetric(P2, H, listWorldToView.get(1), K);
		MultiViewOps.projectiveToMetric(P3, H, listWorldToView.get(2), K);
	}

	if (verbose != null) {
		verbose.println("Initial Intrinsic Estimate:");
		for (int i = 0; i < 3; i++) {
			CameraPinhole r = listPinhole.get(i);
			verbose.printf(" fx = %6.1f, fy = %6.1f, skew = %6.3f\n", r.fx, r.fy, r.skew);
		}
		verbose.println("Initial Motion Estimate:");
	}

	// scale is arbitrary. Set max translation to 1
	double maxT = 0;
	for (int i = 0; i < listWorldToView.size(); i++) {
		Se3_F64 world_to_view = listWorldToView.get(i);
		maxT = Math.max(maxT, world_to_view.T.norm());
	}
	for (int i = 0; i < listWorldToView.size(); i++) {
		Se3_F64 world_to_view = listWorldToView.get(i);
		// Guard against the degenerate all-zero translation case, which previously
		// divided by zero and filled the translations with NaN
		if (maxT > 0)
			world_to_view.T.scale(1.0 / maxT);
		if (verbose != null) {
			Rodrigues_F64 rod = ConvertRotation3D_F64.matrixToRodrigues(world_to_view.R, null);
			verbose.println(" T=" + world_to_view.T + " R=" + rod);
		}
	}

	return true;
}
Use of georegression.struct.so.Rodrigues_F64 in the BoofCV project by lessthanoptimal.
From the class BoofTesting, method assertEquals:
/**
 * Asserts that two rigid-body transforms are equivalent to within the specified tolerances.
 *
 * @param expected expected transform
 * @param found found transform
 * @param tolAngle maximum allowed rotation difference, in radians
 * @param tolT maximum allowed Euclidean distance between translations
 */
public static void assertEquals(Se3_F64 expected, Se3_F64 found, double tolAngle, double tolT) {
	// Residual rotation expected^T * found; equals identity when the rotations match
	var residual = new DMatrixRMaj(3, 3);
	CommonOps_DDRM.multTransA(expected.R, found.R, residual);

	// The axis-angle magnitude of the residual is the angular error
	Rodrigues_F64 axisAngle = ConvertRotation3D_F64.matrixToRodrigues(residual, null);
	assertEquals(0.0, axisAngle.theta, tolAngle);

	double translationError = found.T.distance(expected.T);
	assertEquals(0.0, translationError, tolT);
}
Aggregations