Usage example of boofcv.alg.structure.PairwiseImageGraph.View in the BoofCV project (by lessthanoptimal): class ProjectiveInitializeAllCommon, method initializeStructureForAllViews.
/**
 * Sets up the bundle adjustment structure so that it covers every view, not only the initial triplet.
 * Index 0 in the structure is always the seed view; the remaining views appear in the order given
 * by `seedConnIdx`.
 */
private void initializeStructureForAllViews(LookUpCameraInfo db, int numberOfFeatures, View seed, DogArray_I32 seedConnIdx) {
	utils.observations.initialize(1 + seedConnIdx.size);
	utils.structure.initialize(1 + seedConnIdx.size, numberOfFeatures);
	viewsByStructureIndex.resize(utils.structure.views.size, null);

	utils.triangulateFeatures();

	// The seed view occupies slot 0 and its camera matrix is held fixed
	db.lookupViewShape(seed.id, shape);
	utils.structure.setView(0, true, utils.P1, shape.width, shape.height);

	// Structure indices of the other two triplet views are derived from their position in seedConnIdx
	int sbaIndexB = 1 + seedConnIdx.indexOf(selectedTriple[0]);
	int sbaIndexC = 1 + seedConnIdx.indexOf(selectedTriple[1]);
	checkTrue(sbaIndexB > 0 && sbaIndexC > 0, "indexOf() failed");

	// Register view B, then view C, looking up each view's image shape first
	Motion motionToB = seed.connections.get(selectedTriple[0]);
	db.lookupViewShape(motionToB.other(seed).id, shape);
	utils.structure.setView(sbaIndexB, false, utils.P2, shape.width, shape.height);

	Motion motionToC = seed.connections.get(selectedTriple[1]);
	db.lookupViewShape(motionToC.other(seed).id, shape);
	utils.structure.setView(sbaIndexC, false, utils.P3, shape.width, shape.height);

	// Lookup table from structure index back to the pairwise graph's views
	viewsByStructureIndex.set(0, seed);
	viewsByStructureIndex.set(sbaIndexB, utils.viewB);
	viewsByStructureIndex.set(sbaIndexC, utils.viewC);

	// Copy the pixel observations of the three-view inlier set into the SBA observation structure
	SceneObservations.View obsSeed = utils.observations.getView(0);
	SceneObservations.View obsB = utils.observations.getView(sbaIndexB);
	SceneObservations.View obsC = utils.observations.getView(sbaIndexC);
	for (int inlierIdx = 0; inlierIdx < utils.inliersThreeView.size(); inlierIdx++) {
		AssociatedTriple triple = utils.inliersThreeView.get(inlierIdx);
		obsSeed.add(inlierIdx, (float) triple.p1.x, (float) triple.p1.y);
		obsB.add(inlierIdx, (float) triple.p2.x, (float) triple.p2.y);
		obsC.add(inlierIdx, (float) triple.p3.x, (float) triple.p3.y);
	}
}
Usage example of boofcv.alg.structure.PairwiseImageGraph.View in the BoofCV project (by lessthanoptimal): class ProjectiveInitializeAllCommon, method createObservationsForBundleAdjustment.
/**
 * Converts the inlier observations into the data structure which bundle adjustment understands
 *
 * @param seedConnIdx Which edges in seed to use
 */
protected void createObservationsForBundleAdjustment(DogArray_I32 seedConnIdx) {
	DogArray_I32 inlierToSeed = inlierIndexes.get(0);

	// One observation view for the seed plus one per connected motion
	utils.observations.initialize(inlierIndexes.size);

	// The seed view is a special case: its features were loaded into featsA once and never modified
	SceneObservations.View obsSeed = utils.observations.getView(0);
	for (int inlierIdx = 0; inlierIdx < inlierToSeed.size; inlierIdx++) {
		int featureIdx = inlierToSeed.data[inlierIdx];
		Point2D_F64 pixel = utils.featsA.get(featureIdx);
		obsSeed.add(seedToStructure.data[featureIdx], (float) pixel.x, (float) pixel.y);
	}

	// Observations for each view connected to the seed by a selected edge
	for (int motionIdx = 0; motionIdx < seedConnIdx.size(); motionIdx++) {
		SceneObservations.View obsView = utils.observations.getView(motionIdx + 1);
		Motion motion = utils.seed.connections.get(seedConnIdx.get(motionIdx));
		View connected = motion.other(utils.seed);
		boolean seedIsSrc = motion.src == utils.seed;

		// Load this view's pixels and shift them by the camera's principle point
		utils.dbCams.lookupCalibration(utils.dbCams.viewToCamera(connected.id), utils.priorCamB);
		utils.dbSimilar.lookupPixelFeats(connected.id, utils.featsB);
		BoofMiscOps.offsetPixels(utils.featsB.toList(), -utils.priorCamB.cx, -utils.priorCamB.cy);

		// Table recording which observation in this view contributed to which 3D feature
		DogArray_I32 connInlierIndexes = inlierIndexes.get(motionIdx + 1);
		connInlierIndexes.resize(inlierToSeed.size);

		for (int epipolarInlierIdx = 0; epipolarInlierIdx < motion.inliers.size; epipolarInlierIdx++) {
			AssociatedIndex assoc = motion.inliers.get(epipolarInlierIdx);

			// Skip features which were not inliers of the 3-view RANSAC
			int structId = seedToStructure.data[seedIsSrc ? assoc.src : assoc.dst];
			if (structId < 0)
				continue;

			// Record which observation in this view saw feature[structId]
			int obsIdx = seedIsSrc ? assoc.dst : assoc.src;
			connInlierIndexes.set(structId, obsIdx);
			Point2D_F64 pixel = utils.featsB.get(obsIdx);
			obsView.add(structId, (float) pixel.x, (float) pixel.y);
		}
	}
}
Usage example of boofcv.alg.structure.PairwiseImageGraph.View in the BoofCV project (by lessthanoptimal): class MockLookupSimilarImagesCircleAround, method init.
/**
 * Configures the mock scene: random 3D points around the origin, cameras placed on a circle of
 * radius 2 looking inward, rendered pixel observations per view, and a pairwise graph connecting
 * nearby views.
 *
 * NOTE(review): statement order matters — the shared `rand` stream is consumed in a fixed sequence
 * (point cloud, then per-view Gaussian jitter and shuffle), so reordering changes the scene.
 *
 * @param numViews number of views to create
 * @param numViewConnect Specifies 1/2 the number of views each view will be connected to.
 */
public MockLookupSimilarImagesCircleAround init(int numViews, int numViewConnect) {
    // Assign a human readable ID to every view
    for (int viewCnt = 0; viewCnt < numViews; viewCnt++) {
        viewIds.add("View_" + viewCnt);
    }

    // Intrinsic matrix, plus a variant with the principle point zeroed out
    DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(intrinsic, (DMatrixRMaj) null);
    DMatrixRMaj K_zero = K.copy();
    K_zero.set(0, 2, 0.0);
    K_zero.set(1, 2, 0.0);

    // Randomly add points around the coordinate system's origin (cube of half-width 0.5)
    feats3D = UtilPoint3D_F64.random(new Point3D_F64(0, 0, 0), -0.5, 0.5, numFeatures, rand);

    // Radius of the cameras circling the origin
    double pathRadius = 2;

    // render pixel coordinates of all points in every view
    for (int viewCnt = 0; viewCnt < numViews; viewCnt++) {
        Se3_F64 camera_to_world = new Se3_F64();
        Se3_F64 world_to_camera = new Se3_F64();

        // Evenly spaced angles around the circle
        double yaw = 2.0 * Math.PI * viewCnt / numViews;

        // cameras lie on the (X,Z) plane with +y pointed down.
        // This is done to make the camera coordinate system and the world coordinate system have a more close
        // relationship
        camera_to_world.T.x = Math.cos(yaw) * pathRadius;
        // small random Y offset adds geometric diversity for self calibration
        camera_to_world.T.y = rand.nextGaussian() * pathRadius * 0.1;
        camera_to_world.T.z = Math.sin(yaw) * pathRadius;

        // camera is pointing in the opposite direction of its world location (towards the origin)
        ConvertRotation3D_F64.rodriguesToMatrix(new Rodrigues_F64(yaw + Math.PI / 2, 0, -1, 0), camera_to_world.R);
        camera_to_world.invert(world_to_camera);

        // Create the camera matrix P (and the zero-principle-point variant)
        DMatrixRMaj P = PerspectiveOps.createCameraMatrix(world_to_camera.R, world_to_camera.T, K, null);
        DMatrixRMaj P_zero = PerspectiveOps.createCameraMatrix(world_to_camera.R, world_to_camera.T, K_zero, null);

        // save information on the view
        listOriginToView.add(world_to_camera);
        listCameraMatrices.add(P);
        listCameraMatricesZeroPrinciple.add(P_zero);

        // Observed features in the view
        List<Point2D_F64> viewPixels = new ArrayList<>();
        viewObs.add(viewPixels);

        // create look up table from view to feature
        // we don't want features to have same index because that's not realistic and would hide bugs
        int[] v2f = PrimitiveArrays.fillCounting(numFeatures);
        PrimitiveArrays.shuffle(v2f, 0, numFeatures, rand);
        viewToFeat.add(v2f);

        // save reverse table (feature -> view index) for fast lookup later
        int[] f2v = new int[numFeatures];
        for (int j = 0; j < numFeatures; j++) {
            f2v[v2f[j]] = j;
        }
        featToView.add(f2v);

        // render each feature's pixel; featCnt is the index of the feature within this view
        for (int featCnt = 0; featCnt < feats3D.size(); featCnt++) {
            Point3D_F64 X = feats3D.get(v2f[featCnt]);
            Point2D_F64 pixel = PerspectiveOps.renderPixel(world_to_camera, intrinsic, X, null);
            // all points must be visible in all views by construction
            if (pixel == null)
                throw new RuntimeException("Out of FOV");
            viewPixels.add(pixel);
        }
    }

    // Create the pairwise graph: one node per view, all features observed
    for (int i = 0; i < numViews; i++) {
        View v = graph.createNode(viewIds.get(i));
        v.totalObservations = numFeatures;
    }

    // Only connect neighbors to each other
    for (int viewIdxI = 0; viewIdxI < numViews; viewIdxI++) {
        View vi = graph.nodes.get(viewIdxI);
        for (int neighborOffset = 1; neighborOffset <= numViewConnect; neighborOffset++) {
            int viewIdxJ = viewIdxI + neighborOffset;
            // NOTE(review): connections do NOT wrap around the circle — this break means views near
            // the end of the list get fewer connections. Confirm that is intended.
            if (viewIdxJ >= numViews)
                break;
            View vj = graph.nodes.get(viewIdxJ);
            // mix of the src/dst to exercise more code during testing
            boolean standardOrder = (viewIdxI + neighborOffset) % 2 == 0;
            PairwiseImageGraph.Motion m = standardOrder ? graph.connect(vi, vj) : graph.connect(vj, vi);
            m.score3D = 5.0 / 7.0;
            m.is3D = true;

            // every feature is an inlier of every connection; indices are per-view observation indices
            int[] tableI = featToView.get(viewIdxI);
            int[] tableJ = featToView.get(viewIdxJ);
            for (int i = 0; i < numFeatures; i++) {
                if (standardOrder) {
                    m.inliers.grow().setTo(tableI[i], tableJ[i], 0.0);
                } else {
                    m.inliers.grow().setTo(tableJ[i], tableI[i], 0.0);
                }
            }
        }
    }
    return this;
}
Usage example of boofcv.alg.structure.PairwiseImageGraph.View in the BoofCV project (by lessthanoptimal): class ProjectiveInitializeAllCommon, method findRemainingCameraMatrices.
/**
 * Estimates the projective camera matrix for every view not already in the scene, using the
 * triangulated points and the observations in the root view. Assumes outliers were removed earlier.
 *
 * @param seedConnIdx (Input) Specifies which connections in 'seed.connections' are to be used.
 * @return true if successful or false if not
 */
boolean findRemainingCameraMatrices(LookUpSimilarImages dbSimilar, LookUpCameraInfo dbCams, View seed, DogArray_I32 seedConnIdx) {
	int numInliers = inlierIndexes.get(0).size;
	BoofMiscOps.checkTrue(numInliers == utils.inliersThreeView.size());

	// Copy the previously triangulated 3D feature locations into the local point list
	points3D.reset();
	for (int pointIdx = 0; pointIdx < utils.structure.points.size; pointIdx++) {
		utils.structure.points.data[pointIdx].get(points3D.grow());
	}

	// p1 of each association is the observation in the seed view. Reusing the three-view RANSAC
	// inliers avoids another database lookup for the seed's pixels
	assocPixel.resize(numInliers);
	for (int inlierIdx = 0; inlierIdx < numInliers; inlierIdx++) {
		assocPixel.get(inlierIdx).p1.setTo(utils.inliersThreeView.get(inlierIdx).p1);
	}

	var cameraMatrix = new DMatrixRMaj(3, 4);
	for (int motionIdx = 0; motionIdx < seedConnIdx.size; motionIdx++) {
		int connectionIdx = seedConnIdx.get(motionIdx);

		// The two views from the initial triplet already have camera matrices in the structure
		if (connectionIdx == selectedTriple[0] || connectionIdx == selectedTriple[1])
			continue;

		Motion edge = seed.connections.get(connectionIdx);
		View viewI = edge.other(seed);

		// Fetch this view's pixel observations and shift them so the principle point is the origin
		dbCams.lookupCalibration(viewI.id, utils.priorCamB);
		dbSimilar.lookupPixelFeats(viewI.id, utils.featsB);
		BoofMiscOps.offsetPixels(utils.featsB.toList(), -utils.priorCamB.cx, -utils.priorCamB.cy);

		if (!computeCameraMatrix(seed, edge, utils.featsB, cameraMatrix)) {
			if (verbose != null)
				verbose.println("Pose estimator failed! view='" + viewI.id + "'");
			// TODO skip over this view instead
			return false;
		}
		if (verbose != null)
			verbose.println("Expanded initial scene to include view='" + viewI.id + "'");

		// ---------------------------------------------------------------------------
		// Store everything known about this view in the SBA data structures
		int indexSbaView = motionIdx + 1;

		// image shape and the found camera matrix
		dbCams.lookupViewShape(viewI.id, shape);
		utils.structure.setView(indexSbaView, false, cameraMatrix, shape.width, shape.height);

		// pixel observations of the inlier features
		SceneObservations.View sbaObsView = utils.observations.getView(indexSbaView);
		checkTrue(sbaObsView.size() == 0, "Must be reset to initial state first");
		for (int inlierIdx = 0; inlierIdx < numInliers; inlierIdx++) {
			Point2D_F64 pixel = assocPixel.get(inlierIdx).p2;
			sbaObsView.add(inlierIdx, (float) pixel.x, (float) pixel.y);
		}

		viewsByStructureIndex.set(indexSbaView, viewI);
	}
	return true;
}
Usage example of boofcv.alg.structure.PairwiseImageGraph.View in the BoofCV project (by lessthanoptimal): class ProjectiveInitializeAllCommon, method selectInitialTriplet.
/**
 * Exhaustively scores every pair of edges leaving the seed and remembers the best scoring triplet
 *
 * @param edgeIdxs (input) List of edges in seed it will consider
 * @param selected (output) Indexes of the two selected edges going out of `seed`
 * @return true if a triplet with a non-zero score was found
 */
boolean selectInitialTriplet(View seed, DogArray_I32 edgeIdxs, int[] selected) {
	BoofMiscOps.checkTrue(selected.length == 2);

	// a score of zero marks an invalid / unusable triple
	double bestScore = 0;
	for (int idxA = 0; idxA < edgeIdxs.size; idxA++) {
		int edgeA = edgeIdxs.get(idxA);
		View candidateB = seed.connections.get(edgeA).other(seed);

		for (int idxB = idxA + 1; idxB < edgeIdxs.size; idxB++) {
			int edgeB = edgeIdxs.get(idxB);
			View candidateC = seed.connections.get(edgeB).other(seed);

			double score = scoreTripleView(seed, candidateB, candidateC);
			if (score <= bestScore)
				continue;
			bestScore = score;
			selected[0] = edgeA;
			selected[1] = edgeB;
		}
	}
	return bestScore != 0;
}
End of aggregated usage examples.