Use of boofcv.struct.calib.CameraPinholeRadial in project BoofCV by lessthanoptimal.
From the class GenericPlanarCalibrationDetectorChecks, method renderEasy:
protected GrayF32 renderEasy(Object layout, List<Point2D_F64> locations2D) {
    // load a pinhole camera model with radial distortion from the test resources
    CameraPinholeRadial model = CalibrationIO.load(getClass().getResource("pinhole_radial.yaml"));
    if (locations2D == null)
        locations2D = new ArrayList<>();
    GrayF32 pattern = new GrayF32(1, 1);
    renderTarget(layout, simulatedTargetWidth, pattern, locations2D);

    // place the rendered target 0.5 m in front of the simulated camera and render the view
    SimulatePlanarWorld simulator = new SimulatePlanarWorld();
    simulator.setCamera(model);
    Se3_F64 markerToWorld = new Se3_F64();
    markerToWorld.T.set(0, 0, 0.5);
    simulator.addTarget(markerToWorld, simulatedTargetWidth, pattern);
    simulator.render();
    return simulator.getOutput();
}
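The model here is read from a YAML file bundled with the tests. If no such file is at hand, a CameraPinholeRadial can also be filled in directly; a minimal sketch, assuming the fsetK/fsetRadial setters of this BoofCV generation and using made-up intrinsic values:

    // made-up intrinsics purely for illustration; fsetK/fsetRadial are assumed to be
    // the setters available on CameraPinholeRadial in this BoofCV generation
    CameraPinholeRadial model = new CameraPinholeRadial();
    model.fsetK(500, 500, 0, 320, 240, 640, 480);   // fx, fy, skew, cx, cy, width, height
    model.fsetRadial(-0.25, 0.07);                  // two radial distortion coefficients

    // hand it to the simulator exactly as in the snippet above
    SimulatePlanarWorld simulator = new SimulatePlanarWorld();
    simulator.setCamera(model);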
Use of boofcv.struct.calib.CameraPinholeRadial in project BoofCV by lessthanoptimal.
From the class CalibratePinholePlanarGuiApp, method process:
public void process(String outputFileName) {
    calibrator.reset();
    final ProcessThread monitor = new ProcessThread();
    monitor.start();

    // detect calibration points in each image and add the observations to the calibrator
    for (int i = 0; i < images.size(); i++) {
        final File file = new File(images.get(i));
        final BufferedImage orig = media.openImage(images.get(i));
        if (orig != null) {
            GrayF32 input = ConvertBufferedImage.convertFrom(orig, (GrayF32) null);
            if (detector.process(input)) {
                calibrator.addImage(detector.getDetectedPoints());
                SwingUtilities.invokeLater(() -> {
                    gui.addImage(file);
                    gui.repaint();
                    monitor.setMessage(0, file.getName());
                });
            } else {
                System.out.println("Failed to detect calibration target in " + file.getName());
            }
        } else {
            System.out.println("Failed to load " + images.get(i));
        }
    }
    SwingUtilities.invokeLater(() -> gui.setObservations(calibrator.getObservations()));
    gui.repaint();

    // estimate the intrinsic parameters from the collected observations
    SwingUtilities.invokeLater(() -> monitor.setMessage(1, "Estimating Parameters"));
    final CameraPinholeRadial param = calibrator.process();
    SwingUtilities.invokeLater(() -> {
        gui.setResults(calibrator.getErrors());
        gui.setCalibration(calibrator.getZhangParam());
    });
    monitor.stopThread();

    if (outputFileName != null)
        CalibrationIO.save(param, outputFileName);

    // tell the GUI how to undistort the image
    SwingUtilities.invokeLater(() -> {
        gui.setCorrection(param);
        gui.repaint();
    });

    // print the output
    calibrator.printStatistics();
    System.out.println();
    System.out.println("--- Intrinsic Parameters ---");
    System.out.println();
    param.print();
}
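Since the estimated param is written out with CalibrationIO.save, it can be read back later exactly as the other snippets in this section do; a small sketch, with a hypothetical file name:

    // hypothetical file name; CalibrationIO.save(param, outputFileName) above wrote the model as YAML
    CameraPinholeRadial loaded = CalibrationIO.load(new File("intrinsic.yaml"));
    loaded.print();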
Use of boofcv.struct.calib.CameraPinholeRadial in project BoofCV by lessthanoptimal.
From the class RemoveLensDistortionApp, method openFile:
@Override
public void openFile(File file) {
    // look for a camera model saved alongside the image; this is a bit of a hack...
    File[] candidates = new File[]{
            new File(file.getParent(), "intrinsic.yaml"),
            new File(file.getParent(), "intrinsicLeft.yaml"),
            new File(file.getParent(), file.getName() + ".yaml")};
    CameraPinholeRadial model = null;
    for (File c : candidates) {
        if (c.exists()) {
            model = CalibrationIO.load(c);
            break;
        }
    }
    if (model == null) {
        System.err.println("Can't find camera model for this image");
        return;
    }
    this.param = model;
    super.openFile(file);
}
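Once a model has been found, the rest of the application can undistort the opened image with it. A hedged sketch of that step, reusing the LensDistortionOps.changeCameraModel call that appears in the VisualizeSquareFiducial snippet below (variable names here are illustrative only):

    // build an undistorter from the loaded radial model; FULL_VIEW is assumed to keep
    // the entire original view inside the corrected image
    CameraPinholeRadial undistModel = new CameraPinholeRadial();
    ImageDistort<GrayF32, GrayF32> undistorter = LensDistortionOps.changeCameraModel(
            AdjustmentType.FULL_VIEW, BorderType.ZERO, param, new CameraPinhole(param),
            undistModel, ImageType.single(GrayF32.class));
    GrayF32 distorted = UtilImageIO.loadImage(file.getPath(), GrayF32.class);
    GrayF32 corrected = distorted.createSameShape();
    undistorter.apply(distorted, corrected);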
Use of boofcv.struct.calib.CameraPinholeRadial in project BoofCV by lessthanoptimal.
From the class VisualizeSquareFiducial, method process:
public void process(String nameImage, String nameIntrinsic) {
    CameraPinholeRadial intrinsic = nameIntrinsic == null ? null :
            (CameraPinholeRadial) CalibrationIO.load(nameIntrinsic);
    GrayF32 input = UtilImageIO.loadImage(nameImage, GrayF32.class);
    GrayF32 undistorted = new GrayF32(input.width, input.height);
    Detector detector = new Detector();

    if (intrinsic != null) {
        // remove lens distortion before detection, keeping track of the undistorted camera model
        CameraPinholeRadial paramUndist = new CameraPinholeRadial();
        ImageDistort<GrayF32, GrayF32> undistorter = LensDistortionOps.changeCameraModel(
                AdjustmentType.EXPAND, BorderType.EXTENDED, intrinsic, new CameraPinhole(intrinsic),
                paramUndist, ImageType.single(GrayF32.class));
        detector.configure(new LensDistortionRadialTangential(paramUndist),
                paramUndist.width, paramUndist.height, false);
        undistorter.apply(input, undistorted);
    } else {
        undistorted.setTo(input);
    }

    detector.process(undistorted);
    System.out.println("Total Found: " + detector.squares.size());
    FastQueue<FoundFiducial> fiducials = detector.getFound();
    int N = Math.min(20, detector.squares.size());
    ListDisplayPanel squares = new ListDisplayPanel();
    for (int i = 0; i < N; i++) {
        squares.addImage(ConvertBufferedImage.convertTo(detector.squares.get(i), null), " " + i);
    }

    // draw the detected quadrilaterals on top of the binary image
    BufferedImage output = new BufferedImage(input.width, input.height, BufferedImage.TYPE_INT_RGB);
    VisualizeBinaryData.renderBinary(detector.getBinary(), false, output);
    Graphics2D g2 = output.createGraphics();
    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    g2.setColor(Color.RED);
    g2.setStroke(new BasicStroke(2));
    if (intrinsic != null) {
        Point2Transform2_F64 add_p_to_p = LensDistortionOps.narrow(intrinsic).distort_F64(true, true);
        for (int i = 0; i < N; i++) {
            // add back in lens distortion so the corners line up with the original image
            Quadrilateral_F64 q = fiducials.get(i).distortedPixels;
            apply(add_p_to_p, q.a, q.a);
            apply(add_p_to_p, q.b, q.b);
            apply(add_p_to_p, q.c, q.c);
            apply(add_p_to_p, q.d, q.d);
            VisualizeShapes.draw(q, g2);
        }
    }

    BufferedImage outputGray = new BufferedImage(input.width, input.height, BufferedImage.TYPE_INT_RGB);
    ConvertBufferedImage.convertTo(undistorted, outputGray);
    g2 = outputGray.createGraphics();
    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    for (int i = 0; i < N; i++) {
        // add back in lens distortion
        Quadrilateral_F64 q = fiducials.get(i).distortedPixels;
        // g2.setStroke(new BasicStroke(2));
        // VisualizeBinaryData.render(detector.getSquareDetector().getUsedContours(),Color.BLUE,outputGray);
        VisualizeShapes.drawArrowSubPixel(q, 3, 1, g2);
    }

    ShowImages.showWindow(output, "Binary");
    ShowImages.showWindow(outputGray, "Gray");
    ShowImages.showWindow(squares, "Candidates");
}
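The distort_F64(true, true) transform above maps undistorted pixel coordinates back into the original (distorted) image. The inverse direction is available from the same lens-distortion object; a minimal sketch, assuming the same LensDistortionOps.narrow() API, with p a hypothetical pixel:

    // forward and inverse pixel transforms for the same radial/tangential model
    Point2Transform2_F64 add_p_to_p    = LensDistortionOps.narrow(intrinsic).distort_F64(true, true);
    Point2Transform2_F64 remove_p_to_p = LensDistortionOps.narrow(intrinsic).undistort_F64(true, true);

    Point2D_F64 p = new Point2D_F64(320, 240);  // hypothetical distorted pixel
    Point2D_F64 u = new Point2D_F64();
    remove_p_to_p.compute(p.x, p.y, u);         // distorted pixel -> undistorted pixel
    add_p_to_p.compute(u.x, u.y, p);            // and back again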
Use of boofcv.struct.calib.CameraPinholeRadial in project BoofCV by lessthanoptimal.
From the class ExampleStereoTwoViewsOneCamera, method main:
public static void main(String[] args) {
    // specify location of images and calibration
    String calibDir = UtilIO.pathExample("calibration/mono/Sony_DSC-HX5V_Chess/");
    String imageDir = UtilIO.pathExample("stereo/");

    // Camera parameters
    CameraPinholeRadial intrinsic = CalibrationIO.load(new File(calibDir, "intrinsic.yaml"));

    // Input images from the camera moving left to right
    BufferedImage origLeft = UtilImageIO.loadImage(imageDir, "mono_wall_01.jpg");
    BufferedImage origRight = UtilImageIO.loadImage(imageDir, "mono_wall_02.jpg");

    // Input images with lens distortion
    GrayU8 distortedLeft = ConvertBufferedImage.convertFrom(origLeft, (GrayU8) null);
    GrayU8 distortedRight = ConvertBufferedImage.convertFrom(origRight, (GrayU8) null);

    // matched features between the two images
    List<AssociatedPair> matchedFeatures = ExampleFundamentalMatrix.computeMatches(origLeft, origRight);

    // convert from pixel coordinates into normalized image coordinates
    List<AssociatedPair> matchedCalibrated = convertToNormalizedCoordinates(matchedFeatures, intrinsic);

    // Robustly estimate camera motion
    List<AssociatedPair> inliers = new ArrayList<>();
    Se3_F64 leftToRight = estimateCameraMotion(intrinsic, matchedCalibrated, inliers);
    drawInliers(origLeft, origRight, intrinsic, inliers);

    // Rectify and remove lens distortion for stereo processing
    DMatrixRMaj rectifiedK = new DMatrixRMaj(3, 3);
    GrayU8 rectifiedLeft = distortedLeft.createSameShape();
    GrayU8 rectifiedRight = distortedRight.createSameShape();
    rectifyImages(distortedLeft, distortedRight, leftToRight, intrinsic, rectifiedLeft, rectifiedRight, rectifiedK);

    // compute disparity
    StereoDisparity<GrayS16, GrayF32> disparityAlg = FactoryStereoDisparity.regionSubpixelWta(
            DisparityAlgorithms.RECT_FIVE, minDisparity, maxDisparity, 5, 5, 20, 1, 0.1, GrayS16.class);

    // Apply the Laplacian across the image to add extra resistance to changes in lighting or camera gain
    GrayS16 derivLeft = new GrayS16(rectifiedLeft.width, rectifiedLeft.height);
    GrayS16 derivRight = new GrayS16(rectifiedLeft.width, rectifiedLeft.height);
    LaplacianEdge.process(rectifiedLeft, derivLeft);
    LaplacianEdge.process(rectifiedRight, derivRight);

    // process and return the results
    disparityAlg.process(derivLeft, derivRight);
    GrayF32 disparity = disparityAlg.getDisparity();

    // show results
    BufferedImage visualized = VisualizeImageData.disparity(disparity, null, minDisparity, maxDisparity, 0);
    BufferedImage outLeft = ConvertBufferedImage.convertTo(rectifiedLeft, null);
    BufferedImage outRight = ConvertBufferedImage.convertTo(rectifiedRight, null);
    ShowImages.showWindow(new RectifiedPairPanel(true, outLeft, outRight), "Rectification");
    ShowImages.showWindow(visualized, "Disparity");
    showPointCloud(disparity, outLeft, leftToRight, rectifiedK, minDisparity, maxDisparity);
    System.out.println("Total found " + matchedCalibrated.size());
    System.out.println("Total Inliers " + inliers.size());
}
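convertToNormalizedCoordinates, estimateCameraMotion, rectifyImages and showPointCloud are helpers defined elsewhere in the example class and are not reproduced here. As an illustration of the first step only, a hedged sketch of one way pixel observations can be mapped to normalized image coordinates while removing radial distortion; this is not necessarily the example's actual implementation:

    // pixel -> normalized image coordinates, removing the distortion described by the
    // CameraPinholeRadial model; normObs is a hypothetical output list
    Point2Transform2_F64 pixelToNorm = LensDistortionOps.narrow(intrinsic).undistort_F64(true, false);
    List<AssociatedPair> normObs = new ArrayList<>();
    for (AssociatedPair pixelPair : matchedFeatures) {
        AssociatedPair n = new AssociatedPair();
        pixelToNorm.compute(pixelPair.p1.x, pixelPair.p1.y, n.p1);
        pixelToNorm.compute(pixelPair.p2.x, pixelPair.p2.y, n.p2);
        normObs.add(n);
    }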