Use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
The class EstimateViewUtils, method configureSbaStructure.
/**
 * Configures the data structures for running SBA. Which observations are used is specified by the provided
 * inliers. By default, all cameras and views are set to known. If these need to be optimized for a specific
 * use case, then 'known' should be set to false.
 *
 * @param inliersThreeView Specifies the observations
 */
public void configureSbaStructure( List<AssociatedTriple> inliersThreeView ) {
    final SceneStructureMetric structure = metricSba.structure;
    final SceneObservations observations = metricSba.observations;

    // Even if the cameras are all the same, we tell it they are different because the bookkeeping
    // is much easier and the results are the same
    structure.initialize(3, 3, usedThreeViewInliers.size);
    observations.initialize(3);

    // All cameras are known
    structure.setCamera(0, true, camera1);
    structure.setCamera(1, true, camera2);
    structure.setCamera(2, true, camera3);

    // All transforms are known but the target
    structure.setView(0, 0, true, view1_to_view1);
    structure.setView(1, 1, true, view1_to_view2);
    structure.setView(2, 2, true, view1_to_target);

    observations.getView(0).resize(usedThreeViewInliers.size());
    observations.getView(1).resize(usedThreeViewInliers.size());
    observations.getView(2).resize(usedThreeViewInliers.size());

    SceneObservations.View viewObs1 = observations.getView(0);
    SceneObservations.View viewObs2 = observations.getView(1);
    SceneObservations.View viewObs3 = observations.getView(2);

    final TriangulateNViewsMetricH triangulator = metricSba.triangulator;
    var foundX = new Point4D_F64();

    // Only use features that were in the inlier set for PnP
    for (int inlierCnt = 0; inlierCnt < usedThreeViewInliers.size(); inlierCnt++) {
        int threeViewInlierIndex = usedThreeViewInliers.get(inlierCnt);
        AssociatedTriple a = inliersThreeView.get(threeViewInlierIndex);

        // Pass in pixel observations for each view
        viewObs1.set(inlierCnt, inlierCnt, (float) a.p1.x, (float) a.p1.y);
        viewObs2.set(inlierCnt, inlierCnt, (float) a.p2.x, (float) a.p2.y);
        viewObs3.set(inlierCnt, inlierCnt, (float) a.p3.x, (float) a.p3.y);

        normalize1.compute(a.p1.x, a.p1.y, pixelNorms.get(0));
        normalize2.compute(a.p2.x, a.p2.y, pixelNorms.get(1));
        normalize3.compute(a.p3.x, a.p3.y, pixelNorms.get(2));

        if (!triangulator.triangulate(pixelNorms, listMotion, foundX)) {
            throw new RuntimeException("Triangulation failed. Possibly bad input. Handle this problem");
        }

        if (structure.isHomogenous())
            structure.setPoint(inlierCnt, foundX.x, foundX.y, foundX.z, foundX.w);
        else
            structure.setPoint(inlierCnt, foundX.x / foundX.w, foundX.y / foundX.w, foundX.z / foundX.w);

        structure.connectPointToView(inlierCnt, 0);
        structure.connectPointToView(inlierCnt, 1);
        structure.connectPointToView(inlierCnt, 2);
    }
}
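For context, the same SceneStructureMetric/SceneObservations bookkeeping can be exercised on a toy problem. Below is a minimal sketch, not taken from the class above, of a two-view scene with one known camera and a single point refined by sparse bundle adjustment. The camera parameters, poses, and pixel values are made up, and FactoryMultiView.bundleSparseMetric() is assumed to be the relevant solver factory.

SceneStructureMetric structure = new SceneStructureMetric(false); // 3D (not homogenous) points
SceneObservations observations = new SceneObservations();

structure.initialize(1, 2, 1); // one camera, two views, one point
observations.initialize(2);

// One known shared pinhole camera (fx, fy, skew, cx, cy, width, height)
structure.setCamera(0, true, new CameraPinhole(500, 500, 0, 320, 240, 640, 480));

// Both world-to-view transforms are known; view 1 is shifted along +x
Se3_F64 world_to_view1 = new Se3_F64();
world_to_view1.T.setTo(-0.3, 0, 0);
structure.setView(0, 0, true, new Se3_F64());
structure.setView(1, 0, true, world_to_view1);

// Initial guess for the point (true location is (0, 0, 2)) and its pixel observation in each view
structure.setPoint(0, 0.0, 0.0, 1.8);
structure.connectPointToView(0, 0);
structure.connectPointToView(0, 1);
observations.getView(0).add(0, 320f, 240f);
observations.getView(1).add(0, 245f, 240f);

// Refine the structure with sparse bundle adjustment
BundleAdjustment<SceneStructureMetric> sba = FactoryMultiView.bundleSparseMetric(null);
sba.configure(1e-6, 1e-6, 50);
sba.setParameters(structure, observations);
sba.optimize(structure);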
Use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
The class MultiViewStereoFromKnownSceneStructure, method process.
/**
 * Computes a point cloud given the known scene and a set of stereo pairs.
 *
 * @param scene (Input) Specifies the scene parameters for each view: extrinsics and intrinsics.
 * @param pairs (Input) Which views are to be used and their relationships to each other
 */
public void process( SceneStructureMetric scene, StereoPairGraph pairs ) {
    initializeListener();

    // Go through each view and compute a score for its use as a common / "left" stereo image
    initializeScores(scene, pairs);
    scoreViewsSelectStereoPairs(scene);

    // Initialize data structures
    disparityCloud.reset();
    listCenters.clear();

    // Sort views based on their score for being the center view. Best views are first
    Collections.sort(arrayScores.toList(), Comparator.comparingDouble(a -> -a.score));

    // Prune centers with redundant information
    pruneViewsThatAreSimilarByNeighbors(scene);

    // Go through the list of views and use unused views as center views when computing the overall 3D cloud
    for (int index = 0; index < arrayScores.size; index++) {
        ViewInfo center = arrayScores.get(index);
        // If it has already been processed, skip over it
        if (center.used)
            continue;
        if (verbose != null)
            verbose.println("Center[" + index + "] View='" + center.relations.id + "'");

        // TODO compute fraction of view area which has already been covered by a previous center
        //      and skip if over a certain value to avoid wasting time. This can happen if a view is very
        //      similar to a "center" view and not used due to lack of 3D information between the two

        // Compute a disparity image for this cluster of stereo pairs
        selectAndLoadConnectedImages(pairs, center.relations);

        // If none of the connected views had enough quality, abort
        if (imagePairIndexesSba.size() < 1) {
            if (verbose != null)
                verbose.println("_ too few connections to use as a center");
            continue;
        }

        // Record that this view was used as a center
        listCenters.add(center);

        // Add image for center view
        indexSbaToViewID.put(center.relations.indexSba, center.relations.id);

        // Compute the fused disparity from all the views, then add points to the point cloud
        if (!computeFusedDisparityAddCloud(scene, center, indexSbaToViewID, imagePairIndexesSba)) {
            // Failed to compute a fused disparity image. Remove it from the lists
            listCenters.remove(listCenters.size() - 1);
            indexSbaToViewID.remove(center.relations.indexSba);
        }
    }
}
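A minimal wiring sketch, not taken from the sources above, of how this method might be driven. The constructor arguments, the StereoPairGraph builder calls (addVertex/connect), and the quality convention are assumptions based on typical BoofCV usage and may differ between versions; a stereo disparity algorithm also has to be configured before process() is called.

LookUpImages imageLookup = /* application-specific image source */ null;
var mvs = new MultiViewStereoFromKnownSceneStructure<>(imageLookup, ImageType.SB_U8);
// mvs.setStereoDisparity(...); // a disparity estimator is required; its configuration is omitted here

var pairs = new StereoPairGraph();
pairs.addVertex("view0", 0); // String ID plus the view's index inside the SceneStructureMetric
pairs.addVertex("view1", 1);
pairs.connect("view0", "view1", 0.9); // quality in [0,1]: how much 3D information the pair carries

mvs.process(scene, pairs); // scene is the SceneStructureMetric with known extrinsics and intrinsics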
Use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
The class ColorizeMultiViewStereoResults, method processScenePoints.
/**
 * Looks up the colors for all the points in the scene by reprojecting them back onto their original images.
 *
 * @param scene (Input) Scene's structure
 * @param indexToId (Input) Converts a view index into a view ID
 * @param indexColor (Output) Consumer that is passed the RGB value of each point
 */
public void processScenePoints( SceneStructureMetric scene,
                                BoofLambdas.IndexToString indexToId,
                                BoofLambdas.IndexRgbConsumer indexColor ) {
    // Loading images is expensive, so when we get the color of each pixel we want to process all features
    // inside the same image at once. Unfortunately there is no fast way to look up all features by image,
    // so a lookup table is constructed below
    List<DogArray_I32> lookupPointsByView = new ArrayList<>();
    for (int i = 0; i < scene.views.size; i++) {
        lookupPointsByView.add(new DogArray_I32());
    }

    // Add each point to the list of the first view it was seen in
    for (int pointIdx = 0; pointIdx < scene.points.size; pointIdx++) {
        SceneStructureCommon.Point p = scene.points.get(pointIdx);
        lookupPointsByView.get(p.views.get(0)).add(pointIdx);
    }

    // TODO in the future generalize this for 3D and 4D points
    var iterator = new ScenePointsSetIterator<>(new PointIndex4D_F64());
    var world_to_view = new Se3_F64();

    for (int viewIdx = 0; viewIdx < lookupPointsByView.size(); viewIdx++) {
        // Load the image
        checkTrue(lookupImages.loadImage(indexToId.process(viewIdx), image), "Failed to load image");

        // Set up the iterator for this image
        iterator.initialize(scene, lookupPointsByView.get(viewIdx));

        // Get the view that is being processed
        SceneStructureMetric.View v = scene.views.get(viewIdx);

        // Set up the camera projection model using the bundle adjustment model directly
        BundleAdjustmentOps.convert(scene.getViewCamera(v).model, image.width, image.height, intrinsic);
        Point2Transform2_F64 norm_to_pixel = new LensDistortionBrown(intrinsic).distort_F64(false, true);

        // Get the transform from world/cloud to this view
        scene.getWorldToView(v, world_to_view, tmp);

        // Grab the colorized points from this view
        colorizer.process4(image, iterator, world_to_view, norm_to_pixel, indexColor);
    }
}
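A short usage sketch follows. The constructor arguments (LookUpColorRgbFormats.PL_U8 plus an application-supplied LookUpImages) and the convention that a view ID is just the view index encoded as a string mirror BoofCV's sparse reconstruction example but are assumptions here; the RGB packing into a plain int array is purely illustrative.

// imageLookup is an application-specific LookUpImages that returns each view's image by its ID
var colorizer = new ColorizeMultiViewStereoResults<>(new LookUpColorRgbFormats.PL_U8(), imageLookup);
int[] rgb = new int[scene.points.size];
colorizer.processScenePoints(scene,
        ( viewIdx ) -> viewIdx + "",                                   // view index -> image ID
        ( pointIdx, r, g, b ) -> rgb[pointIdx] = (r << 16) | (g << 8) | b);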
Use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
The class GenerateStereoPairGraphFromScene, method estimateRadiansToPixels.
/**
 * Using the center of the image, estimate the relationship between angle (radians) and image pixels. This is
 * only an approximation and is typically worse the farther you are from the center. This is especially true
 * for fisheye images. Also, disparity is computed from rectified images, which lack distortion, so an
 * accurate model would be more complex.
 */
private void estimateRadiansToPixels( SceneStructureMetric scene ) {
    // Lateral offset, at unit depth, of a ray rotated 5 degrees away from the optical axis
    double offset = Math.sin(UtilAngle.degreeToRadian(5.0));

    Point2D_F64 pixelA = new Point2D_F64();
    Point2D_F64 pixelB = new Point2D_F64();

    for (int i = 0; i < scene.views.size; i++) {
        SceneStructureMetric.View view = scene.views.get(i);
        BundleAdjustmentCamera camera = Objects.requireNonNull(scene.getViewCamera(view).model);

        // Project the optical axis and the rotated ray, then convert pixel distance into pixels per radian
        camera.project(0.0, 0.0, 1.0, pixelA);
        camera.project(offset, 0.0, 1.0, pixelB);

        views.get(i).radianToPixels = pixelA.distance(pixelB) / UtilAngle.degreeToRadian(5.0);
    }
}
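As a sanity check on the approximation: a distortion-free pinhole camera with focal length fx projects (0, 0, 1) to the principal point and (sin 5 deg, 0, 1) roughly fx * sin(5 deg) pixels away, so radianToPixels comes out close to fx. A small standalone arithmetic sketch with an assumed focal length:

// Not part of BoofCV; fx is an assumed focal length in pixels
double fx = 800.0;
double theta = UtilAngle.degreeToRadian(5.0);
double pixelDistance = fx * Math.sin(theta);   // distance between the two projected rays
double radianToPixels = pixelDistance / theta; // about 799 here, i.e. approximately fx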
Use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
The class RefineMetricWorkingGraph, method refineViews.
/**
 * Refines the scene and updates the graph.
 *
 * @param graph (Output) Where the updated scene parameters are written to.
 * @return true if successful or false if SBA failed
 */
protected boolean refineViews( SceneWorkingGraph graph ) {
    if (!metricSba.process())
        return false;

    final SceneStructureMetric structure = metricSba.structure;

    // Save the results
    for (int cameraIdx = 0; cameraIdx < graph.listCameras.size(); cameraIdx++) {
        BundlePinholeSimplified found = (BundlePinholeSimplified) structure.cameras.get(cameraIdx).model;
        graph.listCameras.get(cameraIdx).intrinsic.setTo(found);
    }

    for (int viewIdx = 0; viewIdx < graph.listViews.size(); viewIdx++) {
        SceneWorkingGraph.View wview = graph.listViews.get(viewIdx);
        wview.world_to_view.setTo(structure.getParentToView(viewIdx));

        if (verbose != null && verboseViewInfo) {
            BundlePinholeSimplified intrinsics = graph.getViewCamera(wview).intrinsic;
            Se3_F64 m = metricSba.structure.getParentToView(viewIdx);
            double theta = ConvertRotation3D_F64.matrixToRodrigues(m.R, null).theta;
            verbose.printf("AFTER view='%s' T=(%.2f %.2f %.2f) R=%.4f, f=%.1f k1=%.1e k2=%.1e\n",
                    wview.pview.id, m.T.x, m.T.y, m.T.z, theta, intrinsics.f, intrinsics.k1, intrinsics.k2);
        }
    }
    return true;
}
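If only the refined geometry is needed, it can also be read straight out of the SceneStructureMetric after optimization. A minimal sketch, assuming every view's parent is the world frame (as it is here, since getParentToView() is copied into world_to_view above):

for (int viewIdx = 0; viewIdx < structure.views.size; viewIdx++) {
    Se3_F64 world_to_view = structure.getParentToView(viewIdx); // parent is assumed to be the world frame
    System.out.printf("view %d T=(%.2f, %.2f, %.2f)%n", viewIdx,
            world_to_view.T.x, world_to_view.T.y, world_to_view.T.z);
}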