Use of boofcv.alg.distort.PixelTransformAffine_F32 in project BoofCV by lessthanoptimal.
The class TestImplPolynomialPixel_I, method compareToBilinear:
/**
 * Polynomial interpolation of order one is bilinear interpolation
 */
@Test
public void compareToBilinear() {
    GrayU8 img = new GrayU8(width, height);
    GrayU8 expected = new GrayU8(width, height);
    GrayU8 found = new GrayU8(width, height);

    GImageMiscOps.fillUniform(img, rand, 0, 255);

    Affine2D_F32 tran = new Affine2D_F32(1, 0, 0, 1, 0.25f, 0.25f);

    // set it up so that it will be equivalent to bilinear interpolation
    InterpolatePixelS<GrayU8> alg = (InterpolatePixelS) new ImplPolynomialPixel_I(2, 0, 255);
    alg.setBorder(FactoryImageBorder.singleValue(GrayU8.class, 0));
    ImageDistort<GrayU8, GrayU8> distorter = FactoryDistort.distortSB(false, alg, GrayU8.class);
    distorter.setModel(new PixelTransformAffine_F32(tran));
    distorter.apply(img, found);

    InterpolatePixelS<GrayU8> bilinear = FactoryInterpolation.bilinearPixelS(GrayU8.class, BorderType.ZERO);
    distorter = FactoryDistort.distortSB(false, bilinear, GrayU8.class);
    distorter.setModel(new PixelTransformAffine_F32(tran));
    distorter.apply(img, expected);

    BoofTesting.assertEquals(expected, found, 0);
}
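For context on what the shared transform does in the test above, here is a minimal standalone sketch (not taken from the BoofCV sources) of PixelTransformAffine_F32 mapping a single output pixel back into the input image. It assumes the PixelTransform2_F32 contract used by this version of the library, where compute(x, y) writes its result into the public distX/distY fields.

// Hypothetical sketch: apply the same translation model to one pixel coordinate.
Affine2D_F32 model = new Affine2D_F32(1, 0, 0, 1, 0.25f, 0.25f);
PixelTransformAffine_F32 pixelTran = new PixelTransformAffine_F32(model);

pixelTran.compute(10, 20);       // assumed contract: result stored in distX/distY
float srcX = pixelTran.distX;    // expected to be 10.25f
float srcY = pixelTran.distY;    // expected to be 20.25f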
Use of boofcv.alg.distort.PixelTransformAffine_F32 in project BoofCV by lessthanoptimal.
The class FDistort, method affine:
/**
 * Affine transform from input to output
 */
public FDistort affine(double a11, double a12, double a21, double a22, double dx, double dy) {
    PixelTransformAffine_F32 transform;
    if (outputToInput != null && outputToInput instanceof PixelTransformAffine_F32) {
        transform = (PixelTransformAffine_F32) outputToInput;
    } else {
        transform = new PixelTransformAffine_F32();
    }

    Affine2D_F32 m = new Affine2D_F32();
    m.a11 = (float) a11;
    m.a12 = (float) a12;
    m.a21 = (float) a21;
    m.a22 = (float) a22;
    m.tx = (float) dx;
    m.ty = (float) dy;

    m.invert(transform.getModel());

    return transform(transform);
}
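As a hedged usage sketch (not from the BoofCV sources), this is roughly how affine() would be reached through FDistort's fluent API; the image sizes and the border setting are assumptions chosen for illustration.

GrayU8 input = new GrayU8(320, 240);
GrayU8 output = new GrayU8(320, 240);

// Shift the image by (10.5, -4.0) pixels. The arguments describe the input-to-output
// mapping; affine() inverts the model internally before handing it to the distorter.
new FDistort(input, output)
        .affine(1, 0, 0, 1, 10.5, -4.0)
        .border(BorderType.ZERO)
        .apply();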
Use of boofcv.alg.distort.PixelTransformAffine_F32 in project BoofCV by lessthanoptimal.
The class FDistort, method scale:
/**
 * <p>Applies a distortion which will rescale the input image into the output image. You
 * might want to consider using {@link #scaleExt()} instead since it sets the border behavior
 * to extended, which is probably what you want to do.</p>
 *
 * NOTE: Checks to see if it can recycle the previous transform and update it with a new affine model
 * to avoid declaring new memory.
 */
public FDistort scale() {
    if (outputToInput != null && outputToInput instanceof PixelTransformAffine_F32) {
        PixelTransformAffine_F32 affine = (PixelTransformAffine_F32) outputToInput;
        DistortSupport.transformScale(output, input, affine);
        return this;
    } else {
        return transform(DistortSupport.transformScale(output, input, null));
    }
}
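A short usage sketch of scaling through the same fluent API, again with hypothetical images; scaleExt() is used here since, as the javadoc above notes, it also switches the border handling to extended.

GrayF32 small = new GrayF32(160, 120);
GrayF32 big = new GrayF32(640, 480);

// Rescale 'small' to fill 'big'; the affine scale model is derived from the two image shapes.
new FDistort(small, big)
        .scaleExt()
        .apply();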
Use of boofcv.alg.distort.PixelTransformAffine_F32 in project BoofCV by lessthanoptimal.
The class TestRefinePolygonToGrayLine, method fit_perfect_transform:
public void fit_perfect_transform(boolean black, Affine2D_F64 regToDist, Class imageType) {
    this.transform.set(regToDist);
    renderDistortedRectangles(black, imageType);

    RefinePolygonToGrayLine alg = createAlg(4, imageType);

    Polygon2D_F64 input = createFromSquare(null);
    Polygon2D_F64 expected = input.copy();
    Polygon2D_F64 found = new Polygon2D_F64(4);

    alg.setImage(image);
    // fail without the transform
    assertFalse(alg.refine(input, found));

    // work when the transform is applied
    PixelTransformAffine_F32 transform = new PixelTransformAffine_F32();
    Affine2D_F32 regToDist_F32 = new Affine2D_F32();
    ConvertFloatType.convert(regToDist, regToDist_F32);
    transform.set(regToDist_F32);

    alg.setTransform(transform);
    alg.setImage(image);
    assertTrue(alg.refine(input, found));

    // should be close to the expected
    assertTrue(expected.isIdentical(found, 0.3));
}
Use of boofcv.alg.distort.PixelTransformAffine_F32 in project BoofCV by lessthanoptimal.
The class TestBinaryEllipseDetector, method distortedImage:
/**
 * Input image is distorted
 */
@Test
public void distortedImage() {
    List<EllipseRotated_F64> original = new ArrayList<>();
    original.add(new EllipseRotated_F64(50, 65, 20, 10, 0.5));
    original.add(new EllipseRotated_F64(90, 100, 25, 25, 0));

    GrayU8 image = TestBinaryEllipseDetectorPixel.renderEllipses_F64(200, 210, original, 0);
    GrayU8 binary = image.createSameShape();
    ThresholdImageOps.threshold(image, binary, 30, true);

    BinaryEllipseDetector<GrayU8> alg = create();

    PixelTransform2_F32 distToUndist = new PixelTransformAffine_F32(new Affine2D_F32(1, 0, 0, 1, 5, 8));
    PixelTransform2_F32 undistToDist = new PixelTransformAffine_F32(new Affine2D_F32(1, 0, 0, 1, -5, -8));
    alg.setLensDistortion(distToUndist, undistToDist);

    alg.process(image, binary);

    // adjust the ellipses using the transform
    List<EllipseRotated_F64> expected = new ArrayList<>();
    for (EllipseRotated_F64 o : original) {
        EllipseRotated_F64 e = new EllipseRotated_F64(o);
        e.center.x += 5;
        e.center.y += 8;
        expected.add(e);
    }

    List<EllipseRotated_F64> found = alg.getFoundEllipses(null);
    TestBinaryEllipseDetectorPixel.checkEquals_F64(expected, found, 1.0, 0.1);
}
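The two translation models above are written out by hand. As a minimal sketch, assuming georegression's InvertibleTransform.invert() on Affine2D_F32, the inverse model could also be derived from the forward one:

Affine2D_F32 distToUndistModel = new Affine2D_F32(1, 0, 0, 1, 5, 8);
Affine2D_F32 undistToDistModel = distToUndistModel.invert(null);   // (1,0,0,1,-5,-8) for a pure translation

PixelTransform2_F32 distToUndist = new PixelTransformAffine_F32(distToUndistModel);
PixelTransform2_F32 undistToDist = new PixelTransformAffine_F32(undistToDistModel);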