use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
From the class TestBundleAdjustmentMetricResidualFunction, method createObservations.
static SceneObservations createObservations( Random rand, SceneStructureMetric structure ) {
    SceneObservations obs = new SceneObservations();
    obs.initialize(structure.views.size, structure.hasRigid());

    for (int j = 0; j < structure.points.size; j++) {
        SceneStructureCommon.Point p = structure.points.data[j];

        for (int i = 0; i < p.views.size; i++) {
            SceneObservations.View v = obs.getView(p.views.get(i));
            v.point.add(j);
            v.observations.add(rand.nextInt(300) + 20);
            v.observations.add(rand.nextInt(300) + 20);
        }
    }

    if (structure.hasRigid()) {
        for (int indexRigid = 0; indexRigid < structure.rigids.size; indexRigid++) {
            SceneStructureMetric.Rigid r = structure.rigids.data[indexRigid];

            for (int i = 0; i < r.points.length; i++) {
                SceneStructureCommon.Point p = r.points[i];
                int indexPoint = r.indexFirst + i;

                for (int j = 0; j < p.views.size; j++) {
                    SceneObservations.View v = obs.getViewRigid(p.views.get(j));
                    v.point.add(indexPoint);
                    v.observations.add(rand.nextInt(300) + 20);
                    v.observations.add(rand.nextInt(300) + 20);
                }
            }
        }
    }

    return obs;
}
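The helper above stores pixel observations interleaved, adding an x value and then a y value for every point visible in a view, which is why add is called twice per point. Below is a minimal sanity-check sketch that uses only the fields already shown in the snippet plus the JUnit assertions used by the surrounding tests; the loop itself is illustrative and not part of the project:

SceneObservations obs = createObservations(rand, structure);
for (int viewIdx = 0; viewIdx < structure.views.size; viewIdx++) {
    SceneObservations.View v = obs.getView(viewIdx);
    // two coordinate values (x, y) were added for every point index stored in this view
    assertEquals(v.point.size*2, v.observations.size);
}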
use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
From the class TestBundleAdjustmentMetricResidualFunction, method multipleCalls.
void multipleCalls( boolean homogenous, boolean hasRigid, boolean hasRelative ) {
    SceneStructureMetric structure = createScene(rand, homogenous, hasRigid, hasRelative);
    SceneObservations obs = createObservations(rand, structure);

    double[] param = new double[structure.getParameterCount()];
    new CodecSceneStructureMetric().encode(structure, param);

    BundleAdjustmentMetricResidualFunction alg = new BundleAdjustmentMetricResidualFunction();
    alg.configure(structure, obs);

    double[] expected = new double[alg.getNumOfOutputsM()];
    double[] found = new double[alg.getNumOfOutputsM()];

    alg.process(param, expected);
    alg.process(param, found);

    assertArrayEquals(expected, found, UtilEjml.TEST_F64);
}
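Because multipleCalls is parameterized over the homogenous, rigid, and relative-pose flags, a single body can cover several scene configurations: it encodes the scene and checks that evaluating the residual function twice with the same parameters produces identical output. A hypothetical set of entry points exercising the combinations could look like the following (the method names are illustrative, not taken from the source):

@Test void multipleCalls_basic() { multipleCalls(false, false, false); }
@Test void multipleCalls_homogenous() { multipleCalls(true, false, false); }
@Test void multipleCalls_rigid() { multipleCalls(false, true, false); }
@Test void multipleCalls_relative() { multipleCalls(false, false, true); }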
use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
From the class TestMultiBaselineStereoIndependent, method simulate_constant_disparity.
/**
 * The plane being viewed and the camera's image plane are parallel, causing the disparity to have a constant
 * value and making it easy to check for correctness.
 *
 * @param tolFilled What fraction of the fused image should be filled
 * @param tolCorrect Out of the filled pixels, what fraction need to have the correct disparity
 */
void simulate_constant_disparity( Se3_F64 world_to_view1, Se3_F64 world_to_view2,
                                  double tolFilled, double tolCorrect ) {
    // Each camera is different
    List<CameraPinholeBrown> listIntrinsic = new ArrayList<>();
    listIntrinsic.add(new CameraPinholeBrown().fsetK(150, 140, 0, 105, 100, 210, 200).fsetRadial(0.02, -0.003));
    listIntrinsic.add(new CameraPinholeBrown().fsetK(151, 142, 0, 107.5, 102.5, 215, 205).fsetRadial(0.03, -0.001));
    listIntrinsic.add(new CameraPinholeBrown().fsetK(149, 141, 0, 102.5, 107.5, 205, 215).fsetRadial(0.001, 0.003));

    // Create the scene. This will be used as input into MultiViewToFusedDisparity and in the simulator
    var scene = new SceneStructureMetric(true);
    scene.initialize(3, 3, 0);
    scene.setCamera(0, true, listIntrinsic.get(0));
    scene.setCamera(1, true, listIntrinsic.get(1));
    scene.setCamera(2, true, listIntrinsic.get(2));

    // All views are looking head on at the target. The 2nd and 3rd views have been offset to ensure full
    // coverage and that all the views are being incorporated, otherwise there would be large gaps
    scene.setView(0, 0, true, eulerXyz(0, 0, 0, 0.0, 0, 0, null));
    scene.setView(1, 1, true, world_to_view1);
    scene.setView(2, 2, true, world_to_view2);

    var lookup = new MockLookUp();
    var alg = new MultiBaselineStereoIndependent<>(lookup, ImageType.SB_F32);

    // Not mocking disparity because of how complex that would be to pull off. This makes it a bit of an
    // inexact science to ensure fill in
    var configDisp = new ConfigDisparityBMBest5();
    configDisp.errorType = DisparityError.SAD;
    configDisp.texture = 0.05;
    configDisp.disparityMin = 20;
    configDisp.disparityRange = 80;
    alg.stereoDisparity = FactoryStereoDisparity.blockMatchBest5(configDisp, GrayF32.class, GrayF32.class);

    // Textured target that stereo will work well on
    var texture = new GrayF32(100, 100);
    ImageMiscOps.fillUniform(texture, rand, 50, 255);

    SimulatePlanarWorld sim = new SimulatePlanarWorld();
    sim.addSurface(eulerXyz(0, 0, 2, 0, Math.PI, 0, null), 3, texture);

    List<GrayF32> images = new ArrayList<>();
    TIntObjectMap<String> sbaIndexToViewID = new TIntObjectHashMap<>();
    for (int i = 0; i < listIntrinsic.size(); i++) {
        sbaIndexToViewID.put(i, i + "");
        sim.setCamera(listIntrinsic.get(i));
        sim.setWorldToCamera(scene.motions.get(i).motion);
        images.add(sim.render().clone());

        if (visualize)
            ShowImages.showWindow(images.get(images.size() - 1), "Frame " + i);
    }
    lookup.images = images;

    assertTrue(alg.process(scene, 0, DogArray_I32.array(1, 2), sbaIndexToViewID::get));

    GrayF32 found = alg.getFusedDisparity();
    assertEquals(listIntrinsic.get(0).width, found.width);
    assertEquals(listIntrinsic.get(0).height, found.height);

    if (visualize) {
        ShowImages.showWindow(VisualizeImageData.disparity(found, null, 100, 0x00FF00), "Disparity");
        BoofMiscOps.sleep(60_000);
    }

    DisparityParameters param = alg.getFusedParam();

    // Check the results. Since the target fills the view and is a known constant Z away, we can check it here.
    // However, since a real disparity algorithm is being used, its inputs will not be perfect
    int totalFilled = 0;
    int totalCorrect = 0;
    for (int y = 0; y < found.height; y++) {
        for (int x = 0; x < found.width; x++) {
            float d = found.get(x, y);
            assertTrue(d >= 0);
            if (d >= param.disparityRange)
                continue;
            double Z = param.baseline * param.pinhole.fx / (d + param.disparityMin);
            if (Math.abs(Z - 2.0) <= 0.1)
                totalCorrect++;
            totalFilled++;
        }
    }

    int N = found.width * found.height;
    assertTrue(N * tolFilled <= totalFilled);
    assertTrue(totalFilled * tolCorrect <= totalCorrect);
}
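The correctness check above inverts the standard disparity relation Z = baseline*fx/(d + disparityMin), so a plane held parallel to the image plane at Z = 2 should map to a single constant disparity across the fused image. A hypothetical caller would pass pure translations for the other two views, for example like this (the baselines and tolerances here are illustrative assumptions, not values from the project):

@Test void constantDisparity_offsetViews() {
    // small sideways/vertical baselines keep the plane fully visible in every view
    Se3_F64 world_to_view1 = eulerXyz(0.2, 0, 0, 0, 0, 0, null);
    Se3_F64 world_to_view2 = eulerXyz(0, 0.2, 0, 0, 0, 0, null);
    simulate_constant_disparity(world_to_view1, world_to_view2, 0.9, 0.9);
}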
use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
From the class TestMultiBaselineStereoIndependent, method handleOneCameraManyViews.
/**
* In this scene there is only one camera for several views
*/
@Test
void handleOneCameraManyViews() {
    var scene = new SceneStructureMetric(true);
    scene.initialize(1, 3, 0);
    scene.setCamera(0, true, new CameraPinholeBrown().fsetK(30, 30, 0, 25, 25, 50, 50));
    for (int i = 0; i < 3; i++) {
        scene.setView(i, 0, true, eulerXyz(i, 0, 0, 0, 0, 0, null));
    }

    var alg = new MultiBaselineStereoIndependent<>(ImageType.SB_F32);
    var configDisp = new ConfigDisparityBMBest5();
    configDisp.errorType = DisparityError.SAD;
    configDisp.disparityRange = 5;
    alg.stereoDisparity = FactoryStereoDisparity.blockMatchBest5(configDisp, GrayF32.class, GrayF32.class);

    List<GrayF32> images = new ArrayList<>();
    TIntObjectMap<String> sbaIndexToViewID = new TIntObjectHashMap<>();
    for (int i = 0; i < 3; i++) {
        images.add(new GrayF32(50, 50));
        sbaIndexToViewID.put(i, i + "");
    }
    alg.lookUpImages = new MockLookUp(images);

    // Override so that it will always be happy
    alg.performFusion = new MultiBaselineDisparityMedian() {
        @Override
        public boolean process( GrayF32 disparity ) {
            return true;
        }
    };

    // just see if it blows up
    assertTrue(alg.process(scene, 0, DogArray_I32.array(1, 2), sbaIndexToViewID::get));
}
use of boofcv.abst.geo.bundle.SceneStructureMetric in project BoofCV by lessthanoptimal.
From the class TestColorizeMultiViewStereoResults, method simple_processMvsCloud.
/**
* Two images contribute to the point cloud. Each image has a different color so that the source of the RGB
* value can be easily found.
*/
@Test
void simple_processMvsCloud() {
    // scene with two views that are identical
    var scene = new SceneStructureMetric(false);
    scene.initialize(1, 2, 0);
    scene.setCamera(0, true, new CameraPinhole(200, 200, 0, width / 2, height / 2, 0, 0));
    scene.setView(0, 0, true, SpecialEuclideanOps_F64.eulerXyz(0, 0, 0, 0, 0, 0, null));
    scene.setView(1, 0, true, SpecialEuclideanOps_F64.eulerXyz(0, 0, 0, 0, 0, 0, null));

    // Create two views
    var mvs = new MultiViewStereoFromKnownSceneStructure<>(new MockLookUp(), ImageType.SB_U8);
    mvs.listCenters.add(new MultiViewStereoFromKnownSceneStructure.ViewInfo());
    mvs.listCenters.get(0).metric = scene.views.get(0);
    mvs.listCenters.get(0).relations = new StereoPairGraph.Vertex();
    mvs.listCenters.get(0).relations.id = "10";
    mvs.listCenters.get(0).relations.indexSba = 0;
    mvs.listCenters.add(new MultiViewStereoFromKnownSceneStructure.ViewInfo());
    mvs.listCenters.get(1).metric = scene.views.get(1);
    mvs.listCenters.get(1).relations = new StereoPairGraph.Vertex();
    mvs.listCenters.get(1).relations.id = "15";
    mvs.listCenters.get(1).relations.indexSba = 1;

    // One point for each view. Both points are in the image center
    mvs.disparityCloud.viewPointIdx.setTo(0, 1, 2);
    mvs.disparityCloud.cloud.grow().setTo(0, 0, 1);
    mvs.disparityCloud.cloud.grow().setTo(0, 0, 1);

    var alg = new ColorizeMultiViewStereoResults<>(new LookUpColorRgbFormats.SB_U8(), new MockLookUp());
    alg.processMvsCloud(scene, mvs, ( idx, r, g, b ) -> {
        // we can assume the first view is called first, but that's not strictly required to be correct
        int expected = idx == 0 ? 10 : 15;
        assertEquals(expected, r);
        assertEquals(expected, g);
        assertEquals(expected, b);
        count++;
    });

    // make sure the functions were called
    assertEquals(2, count);
}