use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.
the class TestSnapToLineEdge method setup.
private void setup(Affine2D_F64 affine, Class imageType) {
    work = GeneralizedImageOps.createSingleBand(imageType, width, height);
    image = GeneralizedImageOps.createSingleBand(imageType, width, height);

    // render a dark rectangle on a light background
    int bg = white;
    int fg = 0;

    GImageMiscOps.fill(work, bg);
    GImageMiscOps.fillRectangle(work, fg, x0, y0, x1 - x0, y1 - y0);

    // optionally warp the rendered image, filling revealed pixels with the background value
    if (affine != null) {
        new FDistort(work, image).border(bg).affine(affine).apply();
    } else {
        image.setTo(work);
    }

    // BufferedImage out = ConvertBufferedImage.convertTo(image, null, true);
    // ShowImages.showWindow(out, "Rendered");
    // try {
    //     Thread.sleep(3000);
    // } catch (InterruptedException e) {
    //     e.printStackTrace();
    // }
}
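The FDistort calls above follow the class's fluent pattern: construct it with the input and output images, chain configuration calls such as border() and affine(), then call apply() to run the warp. Below is a minimal standalone sketch of the same idea; the class name AffineWarpSketch, the image size, the fill value, and the transform coefficients are all chosen purely for illustration and are not from BoofCV.

import boofcv.abst.distort.FDistort;
import boofcv.alg.misc.GImageMiscOps;
import boofcv.struct.image.GrayU8;
import georegression.struct.affine.Affine2D_F64;

public class AffineWarpSketch {
    public static void main(String[] args) {
        // Arbitrary image size for this sketch; the test above uses its own width/height fields
        GrayU8 input = new GrayU8(200, 150);
        GrayU8 output = new GrayU8(200, 150);
        GImageMiscOps.fill(input, 200);

        // A mild rotation plus translation; the exact coefficients are illustrative only
        Affine2D_F64 affine = new Affine2D_F64(0.99, -0.05, 0.05, 0.99, 4, -3);

        // Pixels that map outside the input are filled with the constant border value 200
        new FDistort(input, output).border(200).affine(affine).apply();
    }
}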
use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.
the class DetectFiducialSquareImage method addPattern.
/**
 * Adds a new image pattern to the detector. The input must already be a binary image with
 * pixel values of 0 (black) and 1 (white). All input patterns are rescaled to be square
 * and of the appropriate size, so the original shape of the image doesn't matter. Square
 * shapes are highly recommended since that's what the target looks like.
 *
 * @param inputBinary Binary input image pattern. 0 = black, 1 = white.
 * @param lengthSide How long one of the sides of the target is in world units.
 * @return The ID of the provided image
 */
public int addPattern(GrayU8 inputBinary, double lengthSide) {
    if (inputBinary == null) {
        throw new IllegalArgumentException("Input image is null.");
    } else if (lengthSide <= 0) {
        throw new IllegalArgumentException("Parameter lengthSide must be more than zero");
    } else if (ImageStatistics.max(inputBinary) > 1) {
        throw new IllegalArgumentException("A binary image is composed of 0 and 1 pixels. This isn't binary!");
    }

    // see if it needs to be resized
    if (inputBinary.width != squareLength || inputBinary.height != squareLength) {
        // need to create a new image and rescale it to better handle the resizing
        GrayF32 inputGray = new GrayF32(inputBinary.width, inputBinary.height);
        ConvertImage.convert(inputBinary, inputGray);
        PixelMath.multiply(inputGray, 255, inputGray);
        GrayF32 scaled = new GrayF32(squareLength, squareLength);

        // See if it can use the better algorithm for scaling down the image
        if (inputBinary.width > squareLength && inputBinary.height > squareLength) {
            AverageDownSampleOps.down(inputGray, scaled);
        } else {
            new FDistort(inputGray, scaled).scaleExt().apply();
        }
        GThresholdImageOps.threshold(scaled, binary, 255 / 2.0, false);
    } else {
        binary.setTo(inputBinary);
    }

    // describe it in 4 different orientations
    FiducialDef def = new FiducialDef();
    def.lengthSide = lengthSide;

    // CCW rotation so that the index refers to how many CW rotations it takes to put it into the nominal pose
    binaryToDef(binary, def.desc[0]);
    ImageMiscOps.rotateCCW(binary);
    binaryToDef(binary, def.desc[1]);
    ImageMiscOps.rotateCCW(binary);
    binaryToDef(binary, def.desc[2]);
    ImageMiscOps.rotateCCW(binary);
    binaryToDef(binary, def.desc[3]);

    int index = targets.size();
    targets.add(def);
    return index;
}
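A minimal sketch of calling addPattern(), assuming a DetectFiducialSquareImage<GrayU8> instance has already been constructed elsewhere (its constructor arguments are not part of the snippet above); the helper name, the pattern contents, and the lengthSide value are illustrative only.

import boofcv.alg.fiducial.square.DetectFiducialSquareImage;
import boofcv.alg.misc.ImageMiscOps;
import boofcv.struct.image.GrayU8;

// Hypothetical helper: registers a simple synthetic pattern with an existing detector
static int addSyntheticPattern( DetectFiducialSquareImage<GrayU8> detector ) {
    // 64x64 binary pattern: white (1) background with a black (0) square in the center
    GrayU8 pattern = new GrayU8(64, 64);
    ImageMiscOps.fill(pattern, 1);
    ImageMiscOps.fillRectangle(pattern, 0, 16, 16, 32, 32);

    // lengthSide = 2.0 world units is an arbitrary value chosen for illustration
    return detector.addPattern(pattern, 2.0);
}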
use of boofcv.abst.distort.FDistort in project BoofCV by lessthanoptimal.
the class ExampleDenseOpticalFlow method main.
public static void main(String[] args) {
    MediaManager media = DefaultMediaManager.INSTANCE;

    // String fileName0 = UtilIO.pathExample("denseflow/dogdance07.png");
    // String fileName1 = UtilIO.pathExample("denseflow/dogdance08.png");
    String fileName0 = UtilIO.pathExample("denseflow/Urban2_07.png");
    String fileName1 = UtilIO.pathExample("denseflow/Urban2_08.png");
    // String fileName0 = UtilIO.pathExample("denseflow/Grove2_07.png");
    // String fileName1 = UtilIO.pathExample("denseflow/Grove2_09.png");

    DenseOpticalFlow<GrayF32> denseFlow =
            // FactoryDenseOpticalFlow.hornSchunckPyramid(null, GrayF32.class);
            FactoryDenseOpticalFlow.broxWarping(null, GrayF32.class);

    BufferedImage buff0 = media.openImage(fileName0);
    BufferedImage buff1 = media.openImage(fileName1);

    GrayF32 full = new GrayF32(buff0.getWidth(), buff0.getHeight());

    // Dense optical flow is very computationally expensive. Just process the image at 1/2 resolution
    GrayF32 previous = new GrayF32(full.width / 2, full.height / 2);
    GrayF32 current = previous.createSameShape();
    ImageFlow flow = new ImageFlow(previous.width, previous.height);

    ConvertBufferedImage.convertFrom(buff0, full);
    new FDistort(full, previous).scaleExt().apply();
    ConvertBufferedImage.convertFrom(buff1, full);
    new FDistort(full, current).scaleExt().apply();

    // compute dense motion
    denseFlow.process(previous, current, flow);

    // Visualize the results
    PanelGridPanel gui = new PanelGridPanel(1, 2);

    BufferedImage converted0 = new BufferedImage(current.width, current.height, BufferedImage.TYPE_INT_RGB);
    BufferedImage converted1 = new BufferedImage(current.width, current.height, BufferedImage.TYPE_INT_RGB);
    BufferedImage visualized = new BufferedImage(current.width, current.height, BufferedImage.TYPE_INT_RGB);

    ConvertBufferedImage.convertTo(previous, converted0, true);
    ConvertBufferedImage.convertTo(current, converted1, true);
    VisualizeOpticalFlow.colorized(flow, 10, visualized);

    AnimatePanel animate = new AnimatePanel(150, converted0, converted1);
    gui.add(animate);
    gui.add(visualized);
    animate.start();

    ShowImages.showWindow(gui, "Dense Optical Flow", true);
}
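The two scaleExt() calls above perform the half-resolution downscale described in the comment: scaleExt() resizes the input to the output image's dimensions and handles out-of-bounds samples with an extended border. A small hypothetical helper capturing that pattern, written as a sketch rather than code from the example itself:

import boofcv.abst.distort.FDistort;
import boofcv.struct.image.GrayF32;

// Hypothetical helper: returns a copy of 'full' downscaled by an integer factor,
// e.g. downscale(full, 2) produces the half-resolution images used in the example above
static GrayF32 downscale( GrayF32 full, int factor ) {
    GrayF32 small = new GrayF32(full.width / factor, full.height / factor);
    new FDistort(full, small).scaleExt().apply();
    return small;
}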