Use of boofcv.struct.image.ImageUInt8 in project narchy by automenta.
The class RasterHierarchy, method rasterizeImage.
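As context for the method below, here is a minimal sketch (not taken from the project; the file name and variable names are illustrative, imports omitted as in the listing) of the conversion pattern it relies on, assuming the pre-0.25 BoofCV API in which ImageUInt8 and MultiSpectral are the integer band types:
// Sketch only: split a BufferedImage into per-channel ImageUInt8 bands.
BufferedImage frame = ImageIO.read(new File("frame.png")); // any RGB image
MultiSpectral<ImageUInt8> bands =
        ConvertBufferedImage.convertFromMulti(frame, null, true, ImageUInt8.class); // null dst lets BoofCV allocate the output, as when multiInputImg is first null below
ImageUInt8 r = bands.getBand(0); // red channel, one byte per pixel (0..255)
ImageUInt8 g = bands.getBand(1); // green channel
ImageUInt8 b = bands.getBand(2); // blue channel
int topLeftRed = r.get(0, 0);    // per-pixel access, as in the block-averaging loop below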
// /**
// * Generate the raster hierarchy for a given image.
// *
// * @param input The image to rasterize
// * @return The rasterized image.
// */
//
// public class Value {
// public int x;
// public int y;
// public float r, g, b;
// public float diff;
//
// public Value() {
// }
//
// public void set(float r, float g, float b, int xsize, int ysize, int x, int y) {
// this.x = x;
// this.y = y;
//
// float dr = (r - (this.r));
// float dg = (g - (this.g));
// float db = (b - (this.b));
// float newDiff = (dr * dr + dg * dg + db * db) * xsize * ysize;
// if (this.diff < newDiff) {
// this.diff = newDiff;
// this.r = r;
// this.g = g;
// this.b = b;
// }
// }
//
// @Override
// public String toString() {
// return x + "," + y + ":" + r + "=" + diff;
// }
//
// public void fade() {
// diff *= 0.9f;
// }
// }
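// Note on the disabled Value/voter code above: each block would vote with its squared
// per-channel color change weighted by block area, diff = (dr*dr + dg*dg + db*db) * xsize * ysize,
// decayed by fade() every frame; the strongest vote above a threshold would move the focus
// point (see the commented-out maximum-vote search near the end of rasterizeImage).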
public synchronized BufferedImage rasterizeImage(BufferedImage input) {
if (input == null)
return null;
// voter.clear();
// vladimir
boolean putin = false;
cnt--;
if (cnt == 0) {
putin = true;
cnt = updaterate;
}
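// Throttle: 'putin' is true only once every 'updaterate' calls, so Narsese statements
// are emitted at a fixed fraction of the frame rate rather than on every frame.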
long ntime = nar.time();
float red, green, blue;
int redSum, greenSum, blueSum;
int x, y, startX, startY;
float newX, newY;
int width = input.getWidth();
int height = input.getHeight();
float fblockXSize = (float) width / divisions;
float fblockYSize = (float) height / divisions;
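// At the coarsest raster each block spans (width / divisions) x (height / divisions) pixels;
// the block size shrinks by scalingFactor at every finer step (but never below 1 pixel).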
multiInputImg = ConvertBufferedImage.convertFromMulti(input, multiInputImg, true, ImageUInt8.class);
final ImageUInt8 ib0 = multiInputImg.getBand(0);
final ImageUInt8 ib1 = multiInputImg.getBand(1);
final ImageUInt8 ib2 = multiInputImg.getBand(2);
MultiSpectral<ImageUInt8> output = new MultiSpectral<>(ImageUInt8.class, width, height, 3);
BufferedImage rasterizedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
// Set the initial raster region
float regionWidth = width, regionHeight = height;
newX = 0;
newY = 0;
startX = 0;
startY = 0;
for (int step = 1; step <= numberRasters; step++) {
if (step > 1) {
newX = startX + (regionWidth - regionWidth / scalingFactor) / scalingFactor;
newY = startY + (regionHeight - regionHeight / scalingFactor) / scalingFactor;
if (newX < 0) {
newX = 0;
}
if (newY < 0) {
newY = 0;
}
regionWidth = regionWidth / scalingFactor;
regionHeight = regionHeight / scalingFactor;
fblockXSize = fblockXSize / scalingFactor;
fblockYSize = fblockYSize / scalingFactor;
if (fblockXSize < 1) {
fblockXSize = 1;
}
if (fblockYSize < 1) {
fblockYSize = 1;
}
}
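// Illustrative numbers (assumed, not taken from the project configuration): with a 640x480
// frame, divisions = 8 and scalingFactor = 2, step 1 covers the full frame with 80x60 blocks,
// step 2 a 320x240 region centered on focusPoint with 40x30 blocks, step 3 a 160x120 region
// with 20x15 blocks, and so on for numberRasters steps.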
// Center the raster region for this step on the focus point
startX = Math.round(this.focusPoint.getX() - ((regionWidth) / 2));
startY = Math.round(this.focusPoint.getY() - ((regionHeight) / 2));
int blockXSize = Math.round(fblockXSize);
int blockYSize = Math.round(fblockYSize);
// Number of pixels per block
float pixelCount = blockXSize * blockYSize;
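// pixelCount is the block area; the channel sums accumulated below are divided by it
// to obtain the block's average red/green/blue values.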
int h = 0, j = 0;
// The Narsese translation for each block is generated inside the loops below
for (x = Math.round(newX); x < ((step == 1 ? 0 : startX) + regionWidth); x += blockXSize) {
h++;
for (y = Math.round(newY); y < ((step == 1 ? 0 : startY) + regionHeight); y += blockYSize) {
j++;
redSum = 0;
greenSum = 0;
blueSum = 0;
for (int pixelX = 0; (pixelX < blockXSize) && (x + pixelX < width); pixelX++) {
for (int pixelY = 0; (pixelY < blockYSize) && (y + pixelY < height); pixelY++) {
redSum += ib0.get(x + pixelX, y + pixelY);
greenSum += ib1.get(x + pixelX, y + pixelY);
blueSum += ib2.get(x + pixelX, y + pixelY);
}
}
red = redSum / pixelCount;
green = greenSum / pixelCount;
blue = blueSum / pixelCount;
float fred = red / 256.0f; // was: red / 255f
float fgreen = green / 256.0f; // was: green / 255f
float fblue = blue / 256.0f;
// manage move heuristic
// maybe not needed
float brightness = (red + green + blue) / 3;
// int key=step+10*x+10000*y;
// int key = (step * (int)pixelCount) + y * frameWidth + x;
int key = /*(step * (int)pixelCount) +*/ y * frameWidth + x;
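// The vote key indexes the block by its top-left pixel in row-major order (y * frameWidth + x);
// with the step term commented out, coarse and fine blocks at the same location share a key.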
if (putin) {
// Value value = voter.get(key);
//
// if (value == null) {
// value = new Value();
// voter.put(key, value);
// }
//
// value.set(fred, fgreen, fblue, blockXSize, blockYSize,
// x + blockXSize / 2,
// y + blockYSize / 2
// );
}
if (putin && /* && step == numberRasters)*/ (ntime != lastInputTime)) {
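// Statements are only emitted when NAR time has advanced since the previous rasterized
// frame (lastInputTime is updated once, after the raster loops finish).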
// input Narsese translation, one statement for each band.
// ArrayList<String> nalStrings = new ArrayList<String>();
// nalStrings.add("<(*,r"+ String.valueOf(step)+","+String.valueOf(h)+","+String.valueOf(j)+") --> RED>. :|: %"+String.valueOf(fred)+System.getProperty("line.separator"));
// nalStrings.add("<(*,r" + String.valueOf(step) + "," + String.valueOf(h) + "," + String.valueOf(j) + ") --> GREEN>. :|: %" + String.valueOf(fgreen) + System.getProperty("line.separator"));
// nalStrings.add("<(*,r"+ String.valueOf(step)+","+String.valueOf(h)+","+String.valueOf(j)+") --> BLUE>. :|: %"+String.valueOf(fblue)+System.getProperty("line.separator"));
/* Here we use the gamma corrected, grayscale version of the image. Use CCIR 601 weights to convert.
* If it is desirable to use only one sentence (vs RGB for example) then use this.
* see: https://en.wikipedia.org/wiki/Luma_%28video%29 or http://cadik.posvete.cz/color_to_gray_evaluation */
float dgray = 0.2989f * red + 0.5870f * green + 0.1140f * blue;
dgray /= 256.0f;
// TODO create the Term / Task programmatically
// nar.input("<(*,r" + String.valueOf(step) + "," + String.valueOf(h) + "," + String.valueOf(j) + ") --> GRAY>. :|: %" + String.valueOf(dgray) + System.getProperty("line.separator"));
input(h, j, fblockXSize, fblockYSize, dgray);
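// dgray is the CCIR/Rec. 601 luma scaled to roughly [0,1); it presumably serves as the truth
// frequency of a single per-block GRAY statement, "<(*,r{step},{h},{j}) --> GRAY>. :|: %{dgray}"
// (cf. the commented-out nar.input above), which input(...) above builds programmatically.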
}
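// Paint the block's average color into the output bands, producing the mosaic
// (coarse at the periphery, finer near the focus point).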
ImageMiscOps.fillRectangle(output.getBand(0), Math.round(red), x, y, blockXSize, blockYSize);
ImageMiscOps.fillRectangle(output.getBand(1), Math.round(green), x, y, blockXSize, blockYSize);
ImageMiscOps.fillRectangle(output.getBand(2), Math.round(blue), x, y, blockXSize, blockYSize);
}
}
}
// //search for maximum vote to move heuristic
// if (putin) {
// final Value[] maxvalue = {null};
// float threshold = 0.05f;
// voter.forEachKeyValue((key, value) -> {
// if (maxvalue[0] == null || value.diff > maxvalue[0].diff) {
// if (value.diff > threshold)
// maxvalue[0] = value;
// }
// value.fade();
// });
//
//
// Value maxValue = maxvalue[0];
// if (maxValue != null && maxValue.x != 0 && maxValue.y != 0) {
// this.setFocus(maxValue.x, maxValue.y);
// }
//
// }
lastInputTime = ntime;
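// Convert the MultiSpectral mosaic into the BufferedImage that will be returned (and drawn on below).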
ConvertBufferedImage.convertTo(output, rasterizedImage, true);
// Create a Fast Hessian detector from the SURF paper.
// Other detectors can be used in this example too.
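// Fast Hessian (SURF-style) points are found on each band of the original frame (ib0..ib2)
// and overlaid on the rasterized output; note the band/color pairing below: band 1 (green)
// is drawn in blue and band 2 (blue) in green.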
InterestPointDetector detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(4, 2, 8, 2, 9, 3, 8));
// find interest points in the image
detector.detect(ib0);
displayResults(rasterizedImage, detector, Color.RED);
// find interest points in the image
detector.detect(ib1);
displayResults(rasterizedImage, detector, Color.BLUE);
// find interest points in the image
detector.detect(ib2);
displayResults(rasterizedImage, detector, Color.GREEN);
return rasterizedImage;
}