Use of org.ddogleg.struct.DogArray in the BoofCV project (by lessthanoptimal).
Example from class TestPointCloudIO, method encode_decode_3D32F.
/**
 * Round-trips a small 3D point cloud through every format under test and
 * verifies the decoded points match the originals. The output array is
 * pre-populated so the test also confirms loading clears previous contents.
 */
@Test
void encode_decode_3D32F() throws IOException {
    // Build a cloud of distinct points
    List<Point3D_F32> original = new ArrayList<>();
    for (int idx = 0; idx < 10; idx++) {
        original.add(new Point3D_F32(idx * 123.45f, idx - 1.01f, idx + 2.34f));
    }
    for (Format format : new Format[]{ Format.PLY }) {
        // Seed the output with a bogus point so a failure to clear is detectable
        DogArray<Point3D_F32> decoded = new DogArray<>(Point3D_F32::new);
        decoded.grow().setTo(1, 1, 1);
        // Encode into memory, then decode it back
        var encoded = new ByteArrayOutputStream();
        PointCloudIO.save3D(format, PointCloudReader.wrapF32(original), false, encoded);
        InputStream input = new ByteArrayInputStream(encoded.toByteArray());
        PointCloudIO.load3D32F(format, input, decoded);
        // make sure it cleared the points
        assertEquals(original.size(), decoded.size);
        for (int idx = 0; idx < original.size(); idx++) {
            assertEquals(0.0, decoded.get(idx).distance(original.get(idx)), UtilEjml.TEST_F32);
        }
    }
}
Use of org.ddogleg.struct.DogArray in the BoofCV project (by lessthanoptimal).
Example from class TestVisualDepthOps, method depthTo3D.
/**
 * Sets two depth pixels in an otherwise empty depth image, converts the image
 * into a 3D point cloud, and checks each produced point against the reference
 * projection from {@code compute}. Exactly two points are expected.
 */
@Test
void depthTo3D() {
    var depth = new GrayU16(width, height);
    depth.set(200, 80, 3400);
    depth.set(600, 420, 50);
    DogArray<Point3D_F64> cloud = new DogArray<>(Point3D_F64::new);
    VisualDepthOps.depthTo3D(param, depth, cloud);
    // Points are expected in scan order of the pixels that were set
    assertEquals(2, cloud.size());
    assertEquals(0, compute(200, 80, 3400).distance(cloud.get(0)), 1e-8);
    assertEquals(0, compute(600, 420, 50).distance(cloud.get(1)), 1e-8);
}
Use of org.ddogleg.struct.DogArray in the BoofCV project (by lessthanoptimal).
Example from class SceneMergingOperations, method getCommonFeaturePixelsViews.
/**
 * Retrieves the pixel coordinates for all the other views in InlierInfo, excludes the first/target.
 *
 * @param db Used to look up image feature pixel coordinates
 * @param workingGraph Used to look up each view's camera model so pixels can be recentered
 * @param inliers List of image features that were part of the inlier set in all the views
 * @return List of observations in each view (except the target) that are part of the inlier and common set
 */
@SuppressWarnings("IntegerDivisionInFloatingPointContext")
List<DogArray<Point2D_F64>> getCommonFeaturePixelsViews(LookUpSimilarImages db, SceneWorkingGraph workingGraph, SceneWorkingGraph.InlierInfo inliers) {
    List<DogArray<Point2D_F64>> listViewPixels = new ArrayList<>();
    // Which features are inliers in view[0] / common view
    DogArray_I32 viewZeroInlierIdx = inliers.observations.get(0);
    // View 0 = common view and is skipped here
    int numViews = inliers.observations.size;
    for (int viewIdx = 1; viewIdx < numViews; viewIdx++) {
        // Retrieve the feature pixel coordinates for this view
        // NOTE(review): dbPixels looks like a reusable class-level workspace — confirm it is safe to overwrite here
        db.lookupPixelFeats(inliers.views.get(viewIdx).id, dbPixels);
        // Which features are part of the inlier set
        DogArray_I32 inlierIdx = inliers.observations.get(viewIdx);
        // Every view's inlier list must be index-aligned with view[0]'s list
        BoofMiscOps.checkEq(viewZeroInlierIdx.size, inlierIdx.size, "Inliers count should be the same");
        // Create storage for all the pixels in each view
        DogArray<Point2D_F64> viewPixels = new DogArray<>(Point2D_F64::new);
        listViewPixels.add(viewPixels);
        // Sized to match the common set so pixels can be written at commonFeatureIdx below
        viewPixels.resize(zeroViewPixels.size);
        // camera model assumes pixels have been recentered
        SceneWorkingGraph.View wview = Objects.requireNonNull(workingGraph.views.get(inliers.views.get(viewIdx).id));
        SceneWorkingGraph.Camera camera = workingGraph.getViewCamera(wview);
        CameraPinholeBrown cameraPrior = camera.prior;
        // Add the inlier pixels from this view to the array in the correct order
        for (int idx = 0; idx < inlierIdx.size; idx++) {
            // feature ID in view[0]
            int viewZeroIdx = viewZeroInlierIdx.get(idx);
            // feature ID in the common list; a negative index means not part of the common set
            int commonFeatureIdx = zeroFeatureToCommonIndex.get(viewZeroIdx);
            if (commonFeatureIdx < 0)
                continue;
            // Copy the pixel from this view into the appropriate location
            Point2D_F64 p = viewPixels.get(commonFeatureIdx);
            p.setTo(dbPixels.get(inlierIdx.get(idx)));
            // Recenter the pixel around the prior principal point
            p.x -= cameraPrior.cx;
            p.y -= cameraPrior.cy;
        }
    }
    return listViewPixels;
}
Use of org.ddogleg.struct.DogArray in the BoofCV project (by lessthanoptimal).
Example from class ThreeViewEstimateMetricScene, method projectiveToMetric.
/**
 * Estimates the transform from projective to metric geometry.
 *
 * <p>When no manual focal length is provided, self-calibration (dual absolute quadratic)
 * is attempted first. If that fails — or a manual focal length was given — it falls back
 * to a fixed focal length for all views and recovers the calibrating homography from the
 * first two views. Results are written into {@code listPinhole} and {@code listWorldToView},
 * with translations rescaled so the largest has unit norm.
 *
 * @return true if successful
 */
boolean projectiveToMetric() {
    // homography from projective to metric
    listPinhole.clear();
    boolean successfulSelfCalibration = false;
    if (manualFocalLength <= 0) {
        // Estimate calibration parameters
        var config = new ConfigSelfCalibDualQuadratic();
        // Alternative self-calibration configuration kept for reference:
        // var config = new ConfigSelfCalibEssentialGuess();
        // config.numberOfSamples = 200;
        // config.fixedFocus = true;
        // config.sampleMin = 0.6;
        // config.sampleMax = 1.5;
        ProjectiveToMetricCameras selfcalib = FactoryMultiView.projectiveToMetric(config);
        // One view descriptor per view; all three share the same image size
        List<ElevateViewInfo> views = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            views.add(new ElevateViewInfo(width, height, i));
        }
        // Only P2 and P3 are passed in; view 0 is the reference view
        List<DMatrixRMaj> cameras = new ArrayList<>();
        cameras.add(P2);
        cameras.add(P3);
        // Convert the RANSAC inlier matches into 3-view observation tuples
        DogArray<AssociatedTuple> observations = new DogArray<>(() -> new AssociatedTupleN(3));
        MultiViewOps.convertTr(ransac.getMatchSet(), observations);
        var results = new MetricCameras();
        boolean success = selfcalib.process(views, cameras, observations.toList(), results);
        if (success) {
            successfulSelfCalibration = true;
            listPinhole.addAll(results.intrinsics.toList());
            // View 0 is the world origin; the two recovered motions are relative to it
            listWorldToView.get(0).reset();
            listWorldToView.get(1).setTo(results.motion_1_to_k.get(0));
            listWorldToView.get(2).setTo(results.motion_1_to_k.get(1));
            if (verbose != null)
                verbose.println("Auto calibration success");
        } else {
            if (verbose != null)
                verbose.println("Auto calibration failed");
        }
    }
    if (!successfulSelfCalibration) {
        // Use provided focal length or guess using an "average" focal length across cameras
        // NOTE(review): the integer division happens before the cast, truncating for odd
        // dimensions; likely acceptable for a rough guess — confirm it is intentional
        double focalLength = manualFocalLength <= 0 ? (double) (Math.max(width, height) / 2) : manualFocalLength;
        if (verbose != null)
            verbose.println("Assuming fixed focal length for all views. f=" + focalLength);
        final var estimateH = new TwoViewToCalibratingHomography();
        // Fundamental matrix between views 1 and 2 derived from the projective camera
        DMatrixRMaj F21 = MultiViewOps.projectiveToFundamental(P2, null);
        estimateH.initialize(F21, P2);
        // Same guessed intrinsics for both cameras; zero skew, principal point at origin
        DMatrixRMaj K = PerspectiveOps.pinholeToMatrix(focalLength, focalLength, 0, 0, 0);
        DogArray<AssociatedPair> pairs = new DogArray<>(AssociatedPair::new);
        MultiViewOps.convertTr(ransac.getMatchSet(), 0, 1, pairs);
        if (!estimateH.process(K, K, pairs.toList()))
            throw new RuntimeException("Failed to estimate H given 'known' intrinsics");
        // Use the found calibration homography to find motion estimates
        DMatrixRMaj H = estimateH.getCalibrationHomography();
        listPinhole.clear();
        for (int i = 0; i < 3; i++) {
            listPinhole.add(PerspectiveOps.matrixToPinhole(K, width, height, null));
        }
        listWorldToView.get(0).reset();
        MultiViewOps.projectiveToMetric(P2, H, listWorldToView.get(1), K);
        MultiViewOps.projectiveToMetric(P3, H, listWorldToView.get(2), K);
    }
    if (verbose != null) {
        verbose.println("Initial Intrinsic Estimate:");
        for (int i = 0; i < 3; i++) {
            CameraPinhole r = listPinhole.get(i);
            verbose.printf("  fx = %6.1f, fy = %6.1f, skew = %6.3f\n", r.fx, r.fy, r.skew);
        }
        verbose.println("Initial Motion Estimate:");
    }
    // scale is arbitrary. Set max translation to 1
    // NOTE(review): if every translation is zero, maxT stays 0 and the scale below divides
    // by zero — presumably impossible after a successful estimate, but worth confirming
    double maxT = 0;
    for (int i = 0; i < listWorldToView.size(); i++) {
        Se3_F64 world_to_view = listWorldToView.get(i);
        maxT = Math.max(maxT, world_to_view.T.norm());
    }
    for (int i = 0; i < listWorldToView.size(); i++) {
        Se3_F64 world_to_view = listWorldToView.get(i);
        world_to_view.T.scale(1.0 / maxT);
        if (verbose != null) {
            Rodrigues_F64 rod = ConvertRotation3D_F64.matrixToRodrigues(world_to_view.R, null);
            verbose.println("  T=" + world_to_view.T + "  R=" + rod);
        }
    }
    return true;
}
Use of org.ddogleg.struct.DogArray in the BoofCV project (by lessthanoptimal).
Example from class TestPointTrackerPerfectCloud, method multipleCallsStatic.
/**
 * The view isn't moving and it's viewing the same objects. Repeated processing of a
 * static scene must keep the same set of tracks at the same pixel locations, with
 * nothing dropped or deactivated.
 */
@Test
void multipleCallsStatic() {
    var tracker = new PointTrackerPerfectCloud<>();
    tracker.setCamera(new CameraPinhole(200, 200, 0, 200, 200, 400, 400));
    // Three points in the cloud; the asserts below expect only two to become tracks
    tracker.cloud.add(new Point3D_F64(0, 0, 2));
    tracker.cloud.add(new Point3D_F64(0.5, 0, 3));
    tracker.cloud.add(new Point3D_F64(0, 0.5, -2));
    // First frame: detect and spawn the initial tracks
    tracker.process(null);
    tracker.spawnTracks();
    // Record where each spawned track landed so later frames can be compared against it
    DogArray<Point2D_F64> firstFramePixels = new DogArray<>(Point2D_F64::new);
    for (PointTrack track : tracker.getNewTracks(null)) {
        firstFramePixels.grow().setTo(track.pixel);
    }
    for (int frame = 0; frame < 5; frame++) {
        tracker.process(null);
        assertEquals(2, tracker.getTotalActive());
        assertEquals(0, tracker.getTotalInactive());
        assertTrue(tracker.getDroppedTracks(null).isEmpty());
        List<PointTrack> active = tracker.getActiveTracks(null);
        for (int idx = 0; idx < active.size(); idx++) {
            assertEquals(0.0, active.get(idx).pixel.distance(firstFramePixels.get(idx)), UtilEjml.TEST_F64);
        }
    }
}
Aggregations