Usage of org.bytedeco.opencv.opencv_core.Mat in the QuPath project.
From the class OpenCVTools: method filterSingleZ.
// Alternative weighted sum code that converts to 32-bit
// static void weightedSum(List<Mat> mats, double[] weights, Mat dest) {
// boolean isFirst = true;
// for (int i = 0; i < weights.length; i++) {
// double w = weights[i];
// if (w == 0)
// continue;
// var temp = mats.get(i);
// int type = temp.depth();
// if (type != opencv_core.CV_32F && type != opencv_core.CV_64F) {
// var temp2 = new Mat();
// temp.convertTo(temp2, opencv_core.CV_32F);
// temp = temp2;
// }
// if (isFirst) {
// dest.put(opencv_core.multiply(temp, w));
// isFirst = false;
// } else
// opencv_core.scaleAdd(temp, w, dest, dest);
// if (mats.get(i) != temp)
// temp.release();
// }
// // TODO: Check this does something sensible!
// if (isFirst) {
// dest.create(mats.get(0).size(), mats.get(0).type());
// dest.put(Scalar.ZERO);
// }
// }
/**
 * Apply a filter along the 'list' dimension for a list of Mats, computing the value
 * for a single entry. This is effectively computing a weighted sum of images in the list.
 * <p>
 * Note: this method does not change the depth of the input images.
 * If a floating point output is needed, the Mats should be converted before input.
 *
 * @param mats images to filter, treated as consecutive planes along a third dimension
 * @param kernel filter coefficients applied across the list dimension
 * @param ind3D index within the list for which the filtered result is computed
 * @param border border handling strategy for indices falling outside the list
 * @return a new Mat containing the weighted sum for the requested index
 */
public static Mat filterSingleZ(List<Mat> mats, double[] kernel, int ind3D, int border) {
	int nSlices = mats.size();
	// Index of the first slice touched by the kernel (may fall outside the list)
	int offset = ind3D - kernel.length / 2;
	// Accumulate per-image weights; out-of-range indices are remapped according to the border rule,
	// so a single image may receive contributions from several kernel entries
	double[] weights = new double[nSlices];
	for (int k = 0; k < kernel.length; k++) {
		int sliceInd = ensureInRange(offset + k, nSlices, border);
		weights[sliceInd] += kernel[k];
	}
	Mat dest = new Mat();
	weightedSum(mats, weights, dest);
	return dest;
}
Usage of org.bytedeco.opencv.opencv_core.Mat in the QuPath project.
From the class OpenCVTools: method matToImageProcessor.
/**
 * Convert a single-channel OpenCV {@code Mat} into an ImageJ {@code ImageProcessor}.
 * <p>
 * CV_32F, CV_8U and CV_16U depths are converted directly; any other depth is first
 * converted to CV_32F and returned as a {@code FloatProcessor}.
 *
 * @param mat the single-channel Mat to convert
 * @return an ImageProcessor of a type matching the Mat depth where possible
 * @throws IllegalArgumentException if the Mat has more than one channel
 */
public static ImageProcessor matToImageProcessor(Mat mat) {
	if (mat.channels() != 1)
		throw new IllegalArgumentException("Only a single-channel Mat can be converted to an ImageProcessor! Specified Mat has " + mat.channels() + " channels");
	int w = mat.cols();
	int h = mat.rows();
	int depth = mat.depth();
	if (depth == opencv_core.CV_32F) {
		// try-with-resources ensures the native indexer is released promptly
		try (FloatIndexer indexer = mat.createIndexer()) {
			float[] pixels = new float[w * h];
			indexer.get(0L, pixels);
			return new FloatProcessor(w, h, pixels);
		}
	} else if (depth == opencv_core.CV_8U) {
		try (UByteIndexer indexer = mat.createIndexer()) {
			int[] pixels = new int[w * h];
			indexer.get(0L, pixels);
			ByteProcessor bp = new ByteProcessor(w, h);
			for (int i = 0; i < pixels.length; i++)
				bp.set(i, pixels[i]);
			return bp;
		}
	} else if (depth == opencv_core.CV_16U) {
		try (UShortIndexer indexer = mat.createIndexer()) {
			int[] pixels = new int[w * h];
			indexer.get(0L, pixels);
			// Narrowing cast is safe: unsigned 16-bit values round-trip through ImageJ's signed short storage
			short[] shortPixels = new short[pixels.length];
			for (int i = 0; i < pixels.length; i++)
				shortPixels[i] = (short) pixels[i];
			return new ShortProcessor(w, h, shortPixels, null);
		}
	} else {
		// Fallback: convert to 32-bit float and recurse, releasing the temporary Mat
		Mat mat2 = new Mat();
		mat.convertTo(mat2, opencv_core.CV_32F);
		ImageProcessor ip = matToImageProcessor(mat2);
		mat2.close();
		return ip;
	}
}
Usage of org.bytedeco.opencv.opencv_core.Mat in the QuPath project.
From the class OpenCVTools: method imageToMat.
/**
 * Convert a BufferedImage to an OpenCV Mat.
 * <p>
 * An effort will be made to do a sensible conversion based on the BufferedImage type,
 * returning a Mat with a suitable type.
 * <p>
 * BGR and RGB images will remain with the same channel order, and an alpha channel
 * (if present) will be included at the end (i.e. to give BGRA or RGBA).
 * <p>
 * Note: the behavior of this method has changed; in QuPath &lt;= 0.1.2 only
 * RGB images were really supported, and an RGB conversion was *always* made.
 *
 * @see #imageToMatRGB
 * @see #imageToMatBGR
 *
 * @param img the image to convert
 * @return a Mat containing the image data
 */
public static Mat imageToMat(BufferedImage img) {
	// Common (A)RGB / (A)BGR packings are handled directly, preserving channel order
	switch (img.getType()) {
		case BufferedImage.TYPE_INT_BGR:
		case BufferedImage.TYPE_3BYTE_BGR:
			return imageToMatBGR(img, false);
		case BufferedImage.TYPE_4BYTE_ABGR:
		case BufferedImage.TYPE_4BYTE_ABGR_PRE:
			return imageToMatBGR(img, true);
		case BufferedImage.TYPE_INT_ARGB:
		case BufferedImage.TYPE_INT_ARGB_PRE:
			return imageToMatRGB(img, true);
		case BufferedImage.TYPE_USHORT_555_RGB:
		case BufferedImage.TYPE_USHORT_565_RGB:
		case BufferedImage.TYPE_INT_RGB:
			return imageToMatRGB(img, false);
		default:
			// All other types (including TYPE_USHORT_GRAY) use the raster-based path below
			break;
	}
	WritableRaster raster = img.getRaster();
	int nBands = raster.getNumBands();
	// Map the DataBuffer transfer type to the closest OpenCV depth
	int cvType;
	switch (raster.getDataBuffer().getDataType()) {
		case DataBuffer.TYPE_BYTE:
			cvType = opencv_core.CV_8UC(nBands);
			break;
		case DataBuffer.TYPE_USHORT:
			cvType = opencv_core.CV_16UC(nBands);
			break;
		case DataBuffer.TYPE_SHORT:
			cvType = opencv_core.CV_16SC(nBands);
			break;
		case DataBuffer.TYPE_INT:
			// Assuming signed int
			cvType = opencv_core.CV_32SC(nBands);
			break;
		case DataBuffer.TYPE_FLOAT:
			cvType = opencv_core.CV_32FC(nBands);
			break;
		default:
			// Assume 64-bit is as flexible as we can manage
			cvType = opencv_core.CV_64FC(nBands);
	}
	// Create a new zero-initialized Mat & copy in the pixels
	Mat mat = new Mat(img.getHeight(), img.getWidth(), cvType, Scalar.ZERO);
	putPixels(raster, mat);
	return mat;
}
Usage of org.bytedeco.opencv.opencv_core.Mat in the QuPath project.
From the class OpenCVTools: method filterZ.
/**
 * Filter along entries in the input list.
 * <p>
 * If each Mat in the list can be considered a consecutive 2D image plane from a z-stack,
 * this can be considered filtering along the z-dimension.
 *
 * @param mats images to filter, treated as consecutive z-slices
 * @param kernelZ 1D filter kernel applied along the z-dimension
 * @param ind3D if -1, return filtered results for all mats, otherwise only return results for the mat at the specified ind3D
 * @param border border handling strategy for slice indices outside the stack
 * @return list of filtered Mats (a single element if {@code ind3D >= 0})
 */
public static List<Mat> filterZ(List<Mat> mats, Mat kernelZ, int ind3D, int border) {
	/*
	 * We can avoid the rigmarole of applying the full filtering
	 * by instead simply calculating the weighted sum corresponding to the convolution
	 * around the z-slice of interest only.
	 *
	 * (An earlier implementation also contained an unreachable path that reshaped the
	 * stack into column vectors and applied filter2D; it was dead code behind a
	 * constant-true flag and has been removed.)
	 */
	// Extract kernel values into a plain array; try-with-resources releases the native indexer
	int ks = (int) kernelZ.total();
	double[] kernelArray = new double[ks];
	try (DoubleIndexer idx = kernelZ.createIndexer()) {
		idx.get(0L, kernelArray);
	}
	if (ind3D >= 0) {
		// Only the requested slice is needed
		Mat result = filterSingleZ(mats, kernelArray, ind3D, border);
		return Arrays.asList(result);
	}
	// Compute the weighted sum for every slice
	List<Mat> output = new ArrayList<>();
	for (int i = 0; i < mats.size(); i++) {
		output.add(filterSingleZ(mats, kernelArray, i, border));
	}
	return output;
}
Usage of org.bytedeco.opencv.opencv_core.Mat in the QuPath project.
From the class OpenCVTools: method createDisk.
/**
 * Create a disk filter.
 * This is a rasterized approximation of a filled circle with the specified radius.
 *
 * @param radius radius of the disk; must be > 0
 * @param doMean if true, normalize kernel by dividing by the sum of all elements.
 * If false, all 'inside' elements are 1 and all 'outside' elements are 0.
 * @return a Mat of size {@code radius*2+1} that depicts a filled circle
 * @implNote this uses a distance transform, and tends to get more predictable results than {@link #getCircularStructuringElement(int)}.
 * Internally, expensive computations are reduced by caching previously calculated filters and returning only a clone.
 */
public static Mat createDisk(int radius, boolean doMean) {
	if (radius <= 0)
		throw new IllegalArgumentException("Radius must be > 0");
	// Separate caches for mean-normalized and raw (0/1) kernels
	// NOTE(review): cachedMeanDisks/cachedSumDisks are declared elsewhere in this class;
	// presumably thread-safe maps — confirm before relying on concurrent access here
	Map<Integer, Mat> cache = doMean ? cachedMeanDisks : cachedSumDisks;
	Mat kernel = cache.get(radius);
	if (kernel != null) {
		synchronized (kernel) {
			// This may happen if the kernel was created in a PointerScope and deallocate() was called
			if (!kernel.isNull())
				return kernel.clone();
		}
	}
	kernel = new Mat();
	// Build a white square with a single black pixel at the center...
	var kernelCenter = new Mat(radius * 2 + 1, radius * 2 + 1, opencv_core.CV_8UC1, Scalar.WHITE);
	try (UByteIndexer idxKernel = kernelCenter.createIndexer()) {
		idxKernel.put(radius, radius, 0);
	}
	// ...then compute each pixel's distance to that center and threshold at the radius,
	// so pixels within the radius become 1 and the rest 0 (THRESH_BINARY_INV)
	opencv_imgproc.distanceTransform(kernelCenter, kernel, opencv_imgproc.DIST_L2, opencv_imgproc.DIST_MASK_PRECISE);
	opencv_imgproc.threshold(kernel, kernel, radius, 1, opencv_imgproc.THRESH_BINARY_INV);
	if (doMean) {
		// Normalize so elements sum to 1; since the mask is binary the sum equals the number of 'inside' pixels
		double sum = opencv_core.sumElems(kernel).get();
		opencv_core.dividePut(kernel, sum);
	}
	// Try to keep reference, even if called within a PointerScope
	kernel.retainReference();
	cache.put(radius, kernel);
	// Callers always receive a clone, so the cached Mat is never handed out directly
	return kernel.clone();
}
Aggregations