Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath.
The class WatershedNucleiCV, method watershedDistanceTransformSplit.
private static void watershedDistanceTransformSplit(Mat matBinary, int maxFilterRadius) {
    Mat matWatershedSeedsBinary;
    // Create a background mask
    Mat matBackground = new Mat();
    compare(matBinary, new Mat(1, 1, CV_32FC1, Scalar.WHITE), matBackground, CMP_NE);
    // Separate by shape using the watershed transform
    Mat matDistanceTransform = new Mat();
    opencv_imgproc.distanceTransform(matBinary, matDistanceTransform, opencv_imgproc.CV_DIST_L2, opencv_imgproc.CV_DIST_MASK_PRECISE);
    // Find local maxima
    matWatershedSeedsBinary = new Mat();
    opencv_imgproc.dilate(matDistanceTransform, matWatershedSeedsBinary, OpenCVTools.getCircularStructuringElement(maxFilterRadius));
    compare(matDistanceTransform, matWatershedSeedsBinary, matWatershedSeedsBinary, CMP_EQ);
    matWatershedSeedsBinary.setTo(new Mat(1, 1, matWatershedSeedsBinary.type(), Scalar.ZERO), matBackground);
    // Dilate slightly to merge nearby maxima
    opencv_imgproc.dilate(matWatershedSeedsBinary, matWatershedSeedsBinary, OpenCVTools.getCircularStructuringElement(2));
    // Create labels for watershed
    Mat matLabels = new Mat(matDistanceTransform.size(), CV_32F, Scalar.ZERO);
    OpenCVTools.labelImage(matWatershedSeedsBinary, matLabels, opencv_imgproc.RETR_CCOMP);
    // Remove everything outside the thresholded region
    matLabels.setTo(new Mat(1, 1, matLabels.type(), Scalar.ZERO), matBackground);
    // Do watershed
    // 8-connectivity is essential for the watershed lines to be preserved - otherwise OpenCV's findContours could not be used
    ProcessingCV.doWatershed(matDistanceTransform, matLabels, 0.1, true);
    // Update the binary image to remove the watershed lines
    multiply(matBinary, matLabels, matBinary, 1, matBinary.type());
}
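For context, here is a minimal standalone sketch of the same seed-finding idea (distance transform, then local maxima kept as watershed seeds), using only the plain org.bytedeco.opencv API rather than QuPath's OpenCVTools and ProcessingCV helpers. The file path, Otsu thresholding and filter radius are placeholder assumptions, not values taken from QuPath.

import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgcodecs;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Size;

public class DistanceTransformSeeds {
    public static void main(String[] args) {
        // Read a grayscale image (the path is a placeholder)
        Mat gray = opencv_imgcodecs.imread("nuclei.png", opencv_imgcodecs.IMREAD_GRAYSCALE);
        // Binary foreground mask
        Mat binary = new Mat();
        opencv_imgproc.threshold(gray, binary, 0, 255, opencv_imgproc.THRESH_BINARY | opencv_imgproc.THRESH_OTSU);
        // Distance to the nearest background pixel
        Mat dist = new Mat();
        opencv_imgproc.distanceTransform(binary, dist, opencv_imgproc.DIST_L2, opencv_imgproc.DIST_MASK_PRECISE);
        // Local maxima: pixels unchanged by a maximum (dilation) filter
        int radius = 4;
        Mat kernel = opencv_imgproc.getStructuringElement(opencv_imgproc.MORPH_ELLIPSE, new Size(radius * 2 + 1, radius * 2 + 1));
        Mat maxima = new Mat();
        opencv_imgproc.dilate(dist, maxima, kernel);
        opencv_core.compare(dist, maxima, maxima, opencv_core.CMP_EQ);
        // Keep only maxima inside the foreground, then label them as seeds
        opencv_core.bitwise_and(maxima, binary, maxima);
        Mat labels = new Mat();
        int nSeeds = opencv_imgproc.connectedComponents(maxima, labels, 8, opencv_core.CV_32S) - 1;
        System.out.println("Found " + nSeeds + " seeds");
    }
}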
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath.
The class DnnTools, method blobFromImages.
/**
 * Create an OpenCV blob from a batch of Mats with optional scaling, resizing and cropping.
 * @param mats input images
 * @param scaleFactor scale factor
 * @param size input width and height
 * @param mean mean values for subtraction
 * @param swapRB swap red and blue of the mean values
 * @param crop center crop after resizing if needed
 * @return a blob with axis order NCHW
 */
public static Mat blobFromImages(Collection<Mat> mats, double scaleFactor, Size size, Scalar mean, boolean swapRB, boolean crop) {
    // if (mat.depth() != opencv_core.CV_32F) {
    //     var mat2 = new Mat();
    //     mat.convertTo(mat2, opencv_core.CV_32F);
    //     mat2 = mat;
    // }
    Mat blob = null;
    Mat first = mats.iterator().next();
    int nChannels = first.channels();
    if (nChannels == 1 || nChannels == 3 || nChannels == 4) {
        if (mats.size() == 1)
            blob = opencv_dnn.blobFromImage(first, scaleFactor, size, mean, swapRB, crop, opencv_core.CV_32F);
        else
            blob = opencv_dnn.blobFromImages(new MatVector(mats.toArray(Mat[]::new)), scaleFactor, size, mean, swapRB, crop, opencv_core.CV_32F);
    } else {
        // TODO: Don't have any net to test this with currently...
        logger.warn("Attempting to reshape an image with " + nChannels + " channels - this may not work! " + "Only 1, 3 and 4 fully supported, preprocessing will be ignored.");
        // Blob is a 4D Tensor [NCHW]
        int[] shape = new int[4];
        Arrays.fill(shape, 1);
        int nRows = first.size(0);
        int nCols = first.size(1);
        shape[0] = mats.size();
        shape[1] = nChannels;
        shape[2] = nRows;
        shape[3] = nCols;
        // for (int s = 1; s <= Math.min(nDims, 3); s++) {
        //     shape[s] = mat.size(s-1);
        // }
        blob = new Mat(shape, opencv_core.CV_32F);
        var idxBlob = blob.createIndexer();
        long[] indsBlob = new long[4];
        int n = 0;
        for (var mat : mats) {
            indsBlob[0] = n++;
            long[] indsMat = new long[4];
            var idxMat = mat.createIndexer();
            for (int r = 0; r < nRows; r++) {
                indsMat[0] = r;
                indsBlob[2] = r;
                for (int c = 0; c < nCols; c++) {
                    indsMat[1] = c;
                    indsBlob[3] = c;
                    for (int channel = 0; channel < nChannels; channel++) {
                        indsMat[2] = channel;
                        indsBlob[1] = channel;
                        double val = idxMat.getDouble(indsMat);
                        idxBlob.putDouble(indsBlob, val);
                    }
                }
            }
            idxMat.close();
        }
        idxBlob.close();
    }
    return blob;
}
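A hypothetical usage sketch for the method above: build a two-image batch of 3-channel inputs and convert it to an NCHW blob. The image size, scale factor and mean are placeholders rather than requirements of any particular model, and the qupath.opencv.dnn.DnnTools import is an assumption about where the class lives.

import java.util.List;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Scalar;
import org.bytedeco.opencv.opencv_core.Size;
import qupath.opencv.dnn.DnnTools;

public class BlobFromImagesSketch {
    public static void main(String[] args) {
        // Two dummy 3-channel images standing in for real image tiles
        Mat mat1 = new Mat(256, 256, opencv_core.CV_32FC3, Scalar.ZERO);
        Mat mat2 = new Mat(256, 256, opencv_core.CV_32FC3, Scalar.ZERO);
        // Scale to [0, 1], resize to 224x224, no mean subtraction, no channel swap, no crop
        Mat blob = DnnTools.blobFromImages(List.of(mat1, mat2), 1.0 / 255.0,
                new Size(224, 224), Scalar.ZERO, false, false);
        // Expect a 4D blob: N=2, C=3, H=224, W=224
        System.out.printf("Blob shape: %d x %d x %d x %d%n",
                blob.size(0), blob.size(1), blob.size(2), blob.size(3));
    }
}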
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath.
The class DefaultBlobFunction, method toBlob.
@Override
public Mat toBlob(Mat... mats) {
    // Preprocess the images
    var preprocessed = new Mat[mats.length];
    int ind = 0;
    for (var mat : mats) {
        var temp = mat.clone();
        if (preprocessing != null) {
            temp = preprocessing.apply(temp);
        }
        if (inputSize != null) {
            if (crop) {
                int w = inputSize.width();
                int h = inputSize.height();
                double factor = Math.max(w / (double) temp.cols(), h / (double) temp.rows());
                opencv_imgproc.resize(temp, temp, new Size(), factor, factor, opencv_imgproc.INTER_LINEAR);
                int x = (temp.cols() - w) / 2;
                int y = (temp.rows() - h) / 2;
                temp.put(OpenCVTools.crop(temp, x, y, w, h));
            } else {
                opencv_imgproc.resize(temp, temp, inputSize, 0, 0, opencv_imgproc.INTER_LINEAR);
            }
        }
        preprocessed[ind] = temp;
        ind++;
    }
    // Convert images to blob
    return DnnTools.blobFromImages(preprocessed);
}
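The crop branch above can be illustrated with a self-contained sketch: scale so that both dimensions cover the target size, then take a center crop. Mat.apply with a Rect stands in for QuPath's OpenCVTools.crop here, and the input and target sizes are made-up values.

import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Rect;
import org.bytedeco.opencv.opencv_core.Scalar;
import org.bytedeco.opencv.opencv_core.Size;

public class CenterCropSketch {
    static Mat resizeAndCenterCrop(Mat input, int w, int h) {
        Mat temp = input.clone();
        // Scale factor chosen so both dimensions cover the target size
        double factor = Math.max(w / (double) temp.cols(), h / (double) temp.rows());
        opencv_imgproc.resize(temp, temp, new Size(), factor, factor, opencv_imgproc.INTER_LINEAR);
        // Center crop to exactly w x h
        int x = (temp.cols() - w) / 2;
        int y = (temp.rows() - h) / 2;
        return temp.apply(new Rect(x, y, w, h)).clone();
    }

    public static void main(String[] args) {
        // Dummy 500x300 image (cols x rows)
        Mat image = new Mat(300, 500, opencv_core.CV_32FC3, Scalar.ZERO);
        Mat cropped = resizeAndCenterCrop(image, 224, 224);
        System.out.println("Cropped size: " + cropped.cols() + " x " + cropped.rows());
    }
}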
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath.
The class OpenCVMLClassifier, method classifyObjects.
static <T> int classifyObjects(FeatureExtractor<T> featureExtractor, OpenCVStatModel classifier, List<PathClass> pathClasses, ImageData<T> imageData, Collection<? extends PathObject> pathObjects, boolean resetExistingClass, boolean requestProbabilityEstimate) {
    if (featureExtractor == null) {
        logger.warn("No feature extractor! Cannot classify {} objects", pathObjects.size());
        return 0;
    }
    int counter = 0;
    List<Reclassifier> reclassifiers = new ArrayList<>();
    // Try not to have more than ~10 million entries per list
    int subListSize = (int) Math.max(1, Math.min(pathObjects.size(), (1024 * 1024 * 10 / featureExtractor.nFeatures())));
    Mat samples = new Mat();
    Mat results = new Mat();
    Mat probabilities = requestProbabilityEstimate ? new Mat() : null;
    // Work through the objects in chunks
    long startTime = System.currentTimeMillis();
    long lastTime = startTime;
    int nComplete = 0;
    for (var tempObjectList : Lists.partition(new ArrayList<>(pathObjects), subListSize)) {
        if (Thread.interrupted()) {
            logger.warn("Classification interrupted - will not be applied");
            return 0;
        }
        samples.create(tempObjectList.size(), featureExtractor.nFeatures(), opencv_core.CV_32FC1);
        FloatBuffer buffer = samples.createBuffer();
        featureExtractor.extractFeatures(imageData, tempObjectList, buffer);
        // Possibly log time taken
        nComplete += tempObjectList.size();
        long intermediateTime = System.currentTimeMillis();
        if (intermediateTime - lastTime > 1000L) {
            logger.debug("Calculated features for {}/{} objects in {} ms ({} ms per object, {}% complete)", nComplete, pathObjects.size(), (intermediateTime - startTime), GeneralTools.formatNumber((intermediateTime - startTime) / (double) nComplete, 2), GeneralTools.formatNumber(nComplete * 100.0 / pathObjects.size(), 1));
            lastTime = startTime;
        }
        boolean doMulticlass = classifier.supportsMulticlass();
        double threshold = 0.5;
        try {
            classifier.predict(samples, results, probabilities);
            IntIndexer idxResults = results.createIndexer();
            FloatIndexer idxProbabilities = null;
            if (probabilities != null && !probabilities.empty())
                idxProbabilities = probabilities.createIndexer();
            if (doMulticlass && idxProbabilities != null) {
                // Use probabilities if we require multiclass outputs
                long row = 0;
                // Previously .cols()
                int nCols = (int) idxProbabilities.size(2);
                List<String> classifications = new ArrayList<>();
                for (var pathObject : tempObjectList) {
                    classifications.clear();
                    for (int col = 0; col < nCols; col++) {
                        double prob = idxProbabilities.get(row, col);
                        if (prob >= threshold) {
                            var pathClass = col >= pathClasses.size() ? null : pathClasses.get(col);
                            if (pathClass != null)
                                classifications.add(pathClass.getName());
                        }
                    }
                    var pathClass = PathClassFactory.getPathClass(classifications);
                    if (PathClassTools.isIgnoredClass(pathClass)) {
                        pathClass = null;
                    }
                    if (!resetExistingClass) {
                        pathClass = PathClassTools.mergeClasses(pathObject.getPathClass(), pathClass);
                    }
                    reclassifiers.add(new Reclassifier(pathObject, pathClass, false));
                    row++;
                }
            } else {
                // Use results (indexed values) if we do not require multiclass outputs
                long row = 0;
                for (var pathObject : tempObjectList) {
                    int prediction = idxResults.get(row);
                    var pathClass = pathClasses.get(prediction);
                    double probability = idxProbabilities == null ? Double.NaN : idxProbabilities.get(row, prediction);
                    if (PathClassTools.isIgnoredClass(pathClass)) {
                        pathClass = null;
                        probability = Double.NaN;
                    }
                    if (!resetExistingClass) {
                        pathClass = PathClassTools.mergeClasses(pathObject.getPathClass(), pathClass);
                        probability = Double.NaN;
                    }
                    reclassifiers.add(new Reclassifier(pathObject, pathClass, true, probability));
                    row++;
                }
            }
            idxResults.release();
            if (idxProbabilities != null)
                idxProbabilities.release();
        } catch (Exception e) {
            logger.warn("Error with samples: {}", samples);
            logger.error(e.getLocalizedMessage(), e);
        }
        counter += tempObjectList.size();
    }
    long predictTime = System.currentTimeMillis() - startTime;
    logger.info("Prediction time: {} ms for {} objects ({} ns per object)", predictTime, pathObjects.size(), GeneralTools.formatNumber((double) predictTime / pathObjects.size() * 1000.0, 2));
    samples.close();
    results.close();
    if (probabilities != null)
        probabilities.close();
    // Apply classifications now
    reclassifiers.stream().forEach(p -> p.apply());
    return counter;
}
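A minimal, self-contained sketch of the samples/results pattern above, using a plain OpenCV RTrees classifier in place of QuPath's OpenCVStatModel wrapper: features are written into a CV_32FC1 Mat through a FloatBuffer and predictions are read back per row. The toy data and class labels are invented, and note that RTrees.predict writes float values, whereas the wrapper above exposes integer results.

import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import org.bytedeco.javacpp.indexer.FloatIndexer;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_ml;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_ml.RTrees;

public class StatModelSketch {
    public static void main(String[] args) {
        int nSamples = 4, nFeatures = 2;
        // Fill the training samples through a FloatBuffer, as in classifyObjects
        Mat samples = new Mat(nSamples, nFeatures, opencv_core.CV_32FC1);
        FloatBuffer buffer = samples.createBuffer();
        buffer.put(new float[] { 0f, 0f, 0.1f, 0.2f, 5f, 5f, 5.2f, 4.8f });
        // Integer class labels, one per row
        Mat responses = new Mat(nSamples, 1, opencv_core.CV_32SC1);
        IntBuffer labels = responses.createBuffer();
        labels.put(new int[] { 0, 0, 1, 1 });
        // Train and predict with a plain OpenCV random forest
        RTrees model = RTrees.create();
        model.train(samples, opencv_ml.ROW_SAMPLE, responses);
        Mat results = new Mat();
        model.predict(samples, results, 0);
        // Read the predicted class for each row, mirroring the per-row loop above
        FloatIndexer idxResults = results.createIndexer();
        for (long row = 0; row < nSamples; row++)
            System.out.println("Row " + row + " -> class " + (int) idxResults.get(row));
        idxResults.release();
    }
}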
Use of org.bytedeco.opencv.opencv_core.Mat in project qupath by qupath.
The class PCAProjectFeatureExtractor, method extractFeatures.
@Override
public void extractFeatures(ImageData<T> imageData, Collection<? extends PathObject> pathObjects, FloatBuffer buffer) {
    Mat mat = new Mat(pathObjects.size(), featureExtractor.nFeatures(), opencv_core.CV_32FC1);
    FloatBuffer temp = mat.createBuffer();
    featureExtractor.extractFeatures(imageData, pathObjects, temp);
    pca.project(mat, mat);
    buffer.put(mat.createBuffer());
    mat.close();
}