Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class DetectCytokeratinCV, method updateArea.
private static void updateArea(final MatVector contours, final Mat hierarchy, final Area area, int row, int depth) {
    IntIndexer indexer = hierarchy.createIndexer();
    while (row >= 0) {
        int[] data = new int[4];
        // TODO: Check indexing after switch to JavaCPP!!!
        indexer.get(0, row, data);
        // hierarchy.get(0, row, data);
        Mat contour = contours.get(row);
        // Don't include isolated pixels - otherwise add or remove, as required
        if (contour.rows() > 2) {
            Path2D path = getContour(contour);
            if (depth % 2 == 0)
                area.add(new Area(path));
            else
                area.subtract(new Area(path));
        }
        // Deal with any sub-contours
        if (data[2] >= 0)
            updateArea(contours, hierarchy, area, data[2], depth + 1);
        // Move to next contour in this hierarchy level
        row = data[0];
    }
}
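Each hierarchy entry returned by findContours with RETR_TREE holds four indices per contour ([next sibling, previous sibling, first child, parent]), which is why the recursion above follows data[2] for children and data[0] for siblings. The getContour helper is referenced but not shown on this page; the following is only a hypothetical sketch of such a conversion to java.awt.geom.Path2D, assuming standard CV_32SC2 contour points. The actual QuPath implementation may differ.

// Hypothetical sketch only - not the QuPath source. Converts an OpenCV contour
// (an n x 1 Mat of 2-channel int points) into a closed java.awt.geom.Path2D.
private static Path2D getContour(Mat contour) {
    Path2D path = new Path2D.Float();
    IntIndexer idx = contour.createIndexer();
    int n = contour.rows();
    for (int i = 0; i < n; i++) {
        int x = idx.get(i, 0, 0);
        int y = idx.get(i, 0, 1);
        if (i == 0)
            path.moveTo(x, y);
        else
            path.lineTo(x, y);
    }
    idx.close();
    path.closePath();
    return path;
}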
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class DetectCytokeratinCV, method getArea.
/**
 * Get an Area object corresponding to contours in a binary image from OpenCV.
 * @param mat a binary image; non-zero pixels are treated as foreground
 * @return the combined contour geometry, or null if the image is empty or contains no contours
 */
private static Area getArea(final Mat mat) {
    if (mat.empty())
        return null;
    // Identify all contours
    MatVector contours = new MatVector();
    Mat hierarchy = new Mat();
    opencv_imgproc.findContours(mat, contours, hierarchy, opencv_imgproc.RETR_TREE, opencv_imgproc.CHAIN_APPROX_SIMPLE);
    if (contours.empty()) {
        hierarchy.close();
        return null;
    }
    Area area = new Area();
    updateArea(contours, hierarchy, area, 0, 0);
    hierarchy.close();
    return area;
}
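A minimal usage sketch, not taken from the QuPath source: draw a filled circle into an 8-bit mask, threshold it, and extract the contour geometry as a java.awt.geom.Area.

// Assumed example input - a synthetic 8-bit mask containing one filled circle
Mat matGray = new Mat(256, 256, opencv_core.CV_8UC1, Scalar.all(0));
opencv_imgproc.circle(matGray, new Point(128, 128), 50, Scalar.all(255), -1, opencv_imgproc.LINE_8, 0);
// Threshold to a binary image (findContours treats non-zero pixels as foreground)
Mat matBinary = new Mat();
opencv_imgproc.threshold(matGray, matBinary, 127, 255, opencv_imgproc.THRESH_BINARY);
Area area = getArea(matBinary);
if (area != null)
    System.out.println("Bounding box: " + area.getBounds2D());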
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class WatershedNucleiCV, method watershedDistanceTransformSplit.
private static void watershedDistanceTransformSplit(Mat matBinary, int maxFilterRadius) {
    Mat matWatershedSeedsBinary;
    // Create a background mask
    Mat matBackground = new Mat();
    compare(matBinary, new Mat(1, 1, CV_32FC1, Scalar.WHITE), matBackground, CMP_NE);
    // Separate by shape using the watershed transform
    Mat matDistanceTransform = new Mat();
    opencv_imgproc.distanceTransform(matBinary, matDistanceTransform, opencv_imgproc.CV_DIST_L2, opencv_imgproc.CV_DIST_MASK_PRECISE);
    // Find local maxima
    matWatershedSeedsBinary = new Mat();
    opencv_imgproc.dilate(matDistanceTransform, matWatershedSeedsBinary, OpenCVTools.getCircularStructuringElement(maxFilterRadius));
    compare(matDistanceTransform, matWatershedSeedsBinary, matWatershedSeedsBinary, CMP_EQ);
    matWatershedSeedsBinary.setTo(new Mat(1, 1, matWatershedSeedsBinary.type(), Scalar.ZERO), matBackground);
    // Dilate slightly to merge nearby maxima
    opencv_imgproc.dilate(matWatershedSeedsBinary, matWatershedSeedsBinary, OpenCVTools.getCircularStructuringElement(2));
    // Create labels for watershed
    Mat matLabels = new Mat(matDistanceTransform.size(), CV_32F, Scalar.ZERO);
    OpenCVTools.labelImage(matWatershedSeedsBinary, matLabels, opencv_imgproc.RETR_CCOMP);
    // Remove everything outside the thresholded region
    matLabels.setTo(new Mat(1, 1, matLabels.type(), Scalar.ZERO), matBackground);
    // Do watershed
    // 8-connectivity is essential for the watershed lines to be preserved - otherwise OpenCV's findContours could not be used
    ProcessingCV.doWatershed(matDistanceTransform, matLabels, 0.1, true);
    // Update the binary image to remove the watershed lines
    multiply(matBinary, matLabels, matBinary, 1, matBinary.type());
}
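The seed detection above relies on a standard OpenCV idiom: grayscale dilation acts as a moving maximum filter, so pixels that equal their dilated value are local maxima within the kernel radius. Below is a self-contained sketch of just that step, using an assumed synthetic mask rather than anything from the QuPath source.

// Build a synthetic 8-bit mask and its distance transform
Mat mask8U = new Mat(128, 128, opencv_core.CV_8UC1, Scalar.all(0));
opencv_imgproc.circle(mask8U, new Point(64, 64), 40, Scalar.all(255), -1, opencv_imgproc.LINE_8, 0);
Mat matDist = new Mat();
opencv_imgproc.distanceTransform(mask8U, matDist, opencv_imgproc.CV_DIST_L2, opencv_imgproc.CV_DIST_MASK_PRECISE);
// Dilation = local maximum filter; pixels unchanged by it are local maxima
Mat matMaxima = new Mat();
opencv_imgproc.dilate(matDist, matMaxima, OpenCVTools.getCircularStructuringElement(8));
opencv_core.compare(matDist, matMaxima, matMaxima, opencv_core.CMP_EQ);
// matMaxima is now a CV_8U mask (255 at local maxima) suitable as watershed seeds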
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class DefaultBlobFunction, method toBlob.
@Override
public Mat toBlob(Mat... mats) {
    // Preprocess the images
    var preprocessed = new Mat[mats.length];
    int ind = 0;
    for (var mat : mats) {
        var temp = mat.clone();
        if (preprocessing != null) {
            temp = preprocessing.apply(temp);
        }
        if (inputSize != null) {
            if (crop) {
                int w = inputSize.width();
                int h = inputSize.height();
                double factor = Math.max(w / (double) temp.cols(), h / (double) temp.rows());
                opencv_imgproc.resize(temp, temp, new Size(), factor, factor, opencv_imgproc.INTER_LINEAR);
                int x = (temp.cols() - w) / 2;
                int y = (temp.rows() - h) / 2;
                temp.put(OpenCVTools.crop(temp, x, y, w, h));
            } else {
                opencv_imgproc.resize(temp, temp, inputSize, 0, 0, opencv_imgproc.INTER_LINEAR);
            }
        }
        preprocessed[ind] = temp;
        ind++;
    }
    // Convert images to blob
    return DnnTools.blobFromImages(preprocessed);
}
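A worked example of the crop branch's arithmetic, using hypothetical sizes rather than anything from the source: a 512 x 384 input prepared for a 256 x 256 network input.

// Hypothetical sizes: temp is 512 x 384 (cols x rows), inputSize is 256 x 256
int cols = 512, rows = 384;
int w = 256, h = 256;
// Scale so that both dimensions end up at least as large as the target
double factor = Math.max(w / (double) cols, h / (double) rows);  // max(0.5, 0.667) = 0.667
int newCols = (int) Math.round(cols * factor);                   // 341
int newRows = (int) Math.round(rows * factor);                   // 256
// Center-crop the excess in the larger dimension
int x = (newCols - w) / 2;                                       // 42
int y = (newRows - h) / 2;                                       // 0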
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath: class DnnTools, method blobFromImages.
/**
 * Create an OpenCV blob from a batch of Mats with optional scaling, resizing and cropping.
 * @param mats input images
 * @param scaleFactor scale factor
 * @param size input width and height
 * @param mean mean values for subtraction
 * @param swapRB swap the red and blue channels (e.g. when BGR images are supplied to a network expecting RGB)
 * @param crop center crop after resizing if needed
 * @return a blob with axis order NCHW
 */
public static Mat blobFromImages(Collection<Mat> mats, double scaleFactor, Size size, Scalar mean, boolean swapRB, boolean crop) {
    // if (mat.depth() != opencv_core.CV_32F) {
    //     var mat2 = new Mat();
    //     mat.convertTo(mat2, opencv_core.CV_32F);
    //     mat2 = mat;
    // }
    Mat blob = null;
    Mat first = mats.iterator().next();
    int nChannels = first.channels();
    if (nChannels == 1 || nChannels == 3 || nChannels == 4) {
        if (mats.size() == 1)
            blob = opencv_dnn.blobFromImage(first, scaleFactor, size, mean, swapRB, crop, opencv_core.CV_32F);
        else
            blob = opencv_dnn.blobFromImages(new MatVector(mats.toArray(Mat[]::new)), scaleFactor, size, mean, swapRB, crop, opencv_core.CV_32F);
    } else {
        // TODO: Don't have any net to test this with currently...
        logger.warn("Attempting to reshape an image with " + nChannels + " channels - this may not work! "
                + "Only 1, 3 and 4 fully supported, preprocessing will be ignored.");
        // Blob is a 4D Tensor [NCHW]
        int[] shape = new int[4];
        Arrays.fill(shape, 1);
        int nRows = first.size(0);
        int nCols = first.size(1);
        shape[0] = mats.size();
        shape[1] = nChannels;
        shape[2] = nRows;
        shape[3] = nCols;
        // for (int s = 1; s <= Math.min(nDims, 3); s++) {
        //     shape[s] = mat.size(s-1);
        // }
        blob = new Mat(shape, opencv_core.CV_32F);
        var idxBlob = blob.createIndexer();
        long[] indsBlob = new long[4];
        int n = 0;
        for (var mat : mats) {
            indsBlob[0] = n++;
            long[] indsMat = new long[4];
            var idxMat = mat.createIndexer();
            for (int r = 0; r < nRows; r++) {
                indsMat[0] = r;
                indsBlob[2] = r;
                for (int c = 0; c < nCols; c++) {
                    indsMat[1] = c;
                    indsBlob[3] = c;
                    for (int channel = 0; channel < nChannels; channel++) {
                        indsMat[2] = channel;
                        indsBlob[1] = channel;
                        double val = idxMat.getDouble(indsMat);
                        idxBlob.putDouble(indsBlob, val);
                    }
                }
            }
            idxMat.close();
        }
        idxBlob.close();
    }
    return blob;
}
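A usage sketch with assumed values, not taken from the QuPath source: build an NCHW blob from two 3-channel images, scaling to [0, 1], resizing to 224 x 224 and swapping the first and last channels.

// Two synthetic 3-channel images standing in for real inputs
Mat mat1 = new Mat(384, 512, opencv_core.CV_8UC3, Scalar.all(0));
Mat mat2 = new Mat(384, 512, opencv_core.CV_8UC3, Scalar.all(255));
Mat blob = DnnTools.blobFromImages(
        List.of(mat1, mat2),   // batch of images
        1.0 / 255.0,           // scale 8-bit values to [0, 1]
        new Size(224, 224),    // resize target
        Scalar.all(0),         // no mean subtraction
        true,                  // swapRB
        false);                // no center crop
// blob is a 4D Mat with shape [2, 3, 224, 224] (NCHW)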