Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath (by qupath):
class OpenCVTools, method shrinkLabels.
/**
 * Shrink labels to a single point.
 * For each label, only the labeled pixel closest to the centroid of all pixels
 * sharing that label is retained; every other pixel of the component is set to zero.
 *
 * @param mat label mat (must be CV_32S)
 * @return the labeled image, with only one pixel per label greater than zero
 */
public static Mat shrinkLabels(Mat mat) {
	int nChannels = mat.channels();
	if (nChannels != 1)
		throw new IllegalArgumentException("shrinkLabels requires a single-channel mat, but input has " + nChannels + " channels");
	// One representative point (closest to the centroid) per label
	var labelPoints = labelsToPoints(mat);
	// Start from an all-zero image of the same size and type
	var result = new Mat(mat.rows(), mat.cols(), mat.type(), Scalar.ZERO);
	try (IntIndexer indexer = result.createIndexer()) {
		for (var point : labelPoints)
			indexer.putDouble(point.inds, point.getValue());
	}
	return result;
}
Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath (by qupath):
class TestOpenCVTools, method testScalar.
/**
 * Check that scalar Mats have the expected size, channel count and value for
 * floating point, unsigned integer (clipped) and signed integer (rounded) types.
 * Indexers are closed explicitly: they wrap native memory that is not managed
 * by the surrounding PointerScope, so the original inline
 * {@code mat.createIndexer().getDouble(...)} calls leaked native resources.
 */
@Test
public void testScalar() {
	double[] values = new double[] { -100, 123.4, 0, -0, 12023.423 };
	try (var scope = new PointerScope()) {
		// Test floats - value should be preserved (within tolerance)
		for (int type : new int[] { opencv_core.CV_32F, opencv_core.CV_64F }) {
			for (double value : values) {
				Mat mat = OpenCVTools.scalarMat(value, type);
				assertEquals(1, mat.rows());
				assertEquals(1, mat.cols());
				assertEquals(1, mat.channels());
				try (var idx = mat.createIndexer()) {
					assertEquals(value, idx.getDouble(0L), 1e-3);
				}
			}
		}
		// Test floats with channels - every channel should hold the same value
		for (int c : new int[] { 1, 2, 3, 4, 5, 6, 7, 8 }) {
			for (double value : values) {
				Mat mat = OpenCVTools.scalarMatWithType(value, opencv_core.CV_32FC(c));
				assertEquals(1, mat.rows());
				assertEquals(1, mat.cols());
				assertEquals(c, mat.channels());
				double[] doubles = new double[c];
				Arrays.fill(doubles, value);
				assertArrayEquals(doubles, OpenCVTools.extractDoubles(mat), 1e-3);
			}
		}
		// Test unsigned integers - values are rounded and clipped to [0, 255]
		for (int type : new int[] { opencv_core.CV_8U }) {
			for (double value : values) {
				Mat mat = OpenCVTools.scalarMat(value, type);
				assertEquals(1, mat.rows());
				assertEquals(1, mat.cols());
				assertEquals(1, mat.channels());
				double val = Math.min(255, Math.max(0, Math.round(value)));
				try (var idx = mat.createIndexer()) {
					assertEquals(val, idx.getDouble(0L));
				}
			}
		}
		// Test signed integers - values are rounded only
		for (int type : new int[] { opencv_core.CV_32S }) {
			for (double value : values) {
				Mat mat = OpenCVTools.scalarMat(value, type);
				assertEquals(1, mat.rows());
				assertEquals(1, mat.cols());
				assertEquals(1, mat.channels());
				double val = Math.round(value);
				try (var idx = mat.createIndexer()) {
					assertEquals(val, idx.getDouble(0L));
				}
			}
		}
	}
}
Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath (by qupath):
class TestOpenCVTools, method testPercentiles.
/**
 * Check median, minimum, maximum and percentiles against DescriptiveStatistics
 * for shuffled runs of consecutive integers, with and without NaN padding
 * (NaNs should be ignored by the statistics).
 */
@Test
public void testPercentiles() {
	int[] minValues = { -2, 0, 1 };
	int[] maxValues = { 1, 10, 101 };
	opencv_core.setRNGSeed(100);
	for (int min : minValues) {
		for (int max : maxValues) {
			// Consecutive integers from min to max (inclusive)
			double[] values = IntStream.rangeClosed(min, max).asDoubleStream().toArray();
			var stats = new DescriptiveStatistics(values);
			var mat = new Mat(values);
			opencv_core.randShuffle(mat);
			assertEquals(stats.getPercentile(50), OpenCVTools.median(mat));
			assertEquals((min + max) / 2.0, OpenCVTools.median(mat));
			assertEquals(max, OpenCVTools.maximum(mat));
			assertEquals(min, OpenCVTools.minimum(mat));
			assertArrayEquals(new double[] { min, stats.getPercentile(50), max }, OpenCVTools.percentiles(mat, 1e-9, 50, 100));
			mat.close();
			// Same values padded with 30 trailing NaNs - results should be unchanged
			double[] padded = Arrays.copyOf(values, values.length + 30);
			Arrays.fill(padded, values.length, padded.length, Double.NaN);
			mat = new Mat(padded);
			opencv_core.randShuffle(mat);
			assertEquals(stats.getPercentile(50), OpenCVTools.median(mat));
			assertEquals((min + max) / 2.0, OpenCVTools.median(mat));
			assertEquals(max, OpenCVTools.maximum(mat));
			assertEquals(min, OpenCVTools.minimum(mat));
			assertArrayEquals(new double[] { min, stats.getPercentile(50), max }, OpenCVTools.percentiles(mat, 1e-9, 50, 100));
			mat.close();
		}
	}
}
Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath (by qupath):
class OpenCVTools, method matToImagePlus.
/**
 * Convert one or more OpenCV {@code Mat}s into an ImageJ {@code ImagePlus}.
 * Each Mat becomes one z-slice; multi-channel Mats are split into individual
 * channel slices, and a {@code CompositeImage} is returned when there is more
 * than one channel.
 *
 * @param title title for the resulting image
 * @param mats the Mats to include; all must have the same number of channels
 * @return the ImagePlus, or a CompositeImage if multi-channel
 * @throws IllegalArgumentException if the Mats have differing channel counts
 */
public static ImagePlus matToImagePlus(String title, Mat... mats) {
	ImageStack stack = null;
	int nChannels = 1;
	for (Mat mat : mats) {
		if (stack == null)
			// Size the stack from the first Mat
			stack = new ImageStack(mat.cols(), mat.rows());
		else if (mat.channels() != nChannels)
			throw new IllegalArgumentException("Number of channels must be the same for all Mats!");
		int channels = mat.channels();
		if (channels == 1) {
			stack.addSlice(matToImageProcessor(mat));
		} else {
			// Split channels into separate single-channel slices
			nChannels = channels;
			var split = new MatVector();
			opencv_core.split(mat, split);
			for (int c = 0; c < split.size(); c++)
				stack.addSlice(matToImageProcessor(split.get(c)));
		}
	}
	var imp = new ImagePlus(title, stack);
	imp.setDimensions(nChannels, mats.length, 1);
	if (nChannels == 1)
		return imp;
	return new CompositeImage(imp);
}
Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath (by qupath):
class OpenCVTools, method filterZ.
/**
 * Apply a filter along entries in the input list.
 * <p>
 * If each Mat in the list can be considered a consecutive 2D image plane from a z-stack,
 * this can be considered filtering along the z-dimension.
 *
 * @param mats the image planes to filter
 * @param kernelZ 1D kernel applied across corresponding pixels of consecutive planes
 * @param ind3D if -1, return filtered results for all mats, otherwise only return results for the mat at the specified ind3D
 * @param border border handling strategy (an OpenCV BORDER_* flag, passed through to filterSingleZ)
 * @return the filtered planes (a single-element list when ind3D &gt;= 0)
 */
public static List<Mat> filterZ(List<Mat> mats, Mat kernelZ, int ind3D, int border) {
	/*
	 * We can avoid the rigmarole of applying the full filtering
	 * by instead simply calculating the weighted sum corresponding to the convolution
	 * around each z-slice of interest.
	 * (An earlier reshape-and-filter2D implementation was unreachable dead code,
	 * guarded by a hardcoded doWeightedSums=true flag, and has been removed.)
	 */
	// Extract kernel values into a plain array
	int ks = (int) kernelZ.total();
	double[] kernelArray = new double[ks];
	DoubleIndexer idx = kernelZ.createIndexer();
	try {
		idx.get(0L, kernelArray);
	} finally {
		// Release even if reading throws, so the native indexer never leaks
		idx.release();
	}
	if (ind3D >= 0) {
		// Only the weighted sum for the requested slice is needed
		return Arrays.asList(filterSingleZ(mats, kernelArray, ind3D, border));
	}
	// Compute the weighted sum for every slice
	List<Mat> output = new ArrayList<>(mats.size());
	for (int i = 0; i < mats.size(); i++)
		output.add(filterSingleZ(mats, kernelArray, i, border));
	return output;
}
Aggregations