Example 16 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath by qupath.

The class OpenCVMLClassifier, method classifyObjects:

static <T> int classifyObjects(FeatureExtractor<T> featureExtractor, OpenCVStatModel classifier, List<PathClass> pathClasses, ImageData<T> imageData, Collection<? extends PathObject> pathObjects, boolean resetExistingClass, boolean requestProbabilityEstimate) {
    if (featureExtractor == null) {
        logger.warn("No feature extractor! Cannot classify {} objects", pathObjects.size());
        return 0;
    }
    int counter = 0;
    List<Reclassifier> reclassifiers = new ArrayList<>();
    // Try not to have more than ~10 million entries per list
    int subListSize = (int) Math.max(1, Math.min(pathObjects.size(), (1024 * 1024 * 10 / featureExtractor.nFeatures())));
    Mat samples = new Mat();
    Mat results = new Mat();
    Mat probabilities = requestProbabilityEstimate ? new Mat() : null;
    // Work through the objects in chunks
    long startTime = System.currentTimeMillis();
    long lastTime = startTime;
    int nComplete = 0;
    for (var tempObjectList : Lists.partition(new ArrayList<>(pathObjects), subListSize)) {
        if (Thread.interrupted()) {
            logger.warn("Classification interrupted - will not be applied");
            return 0;
        }
        samples.create(tempObjectList.size(), featureExtractor.nFeatures(), opencv_core.CV_32FC1);
        FloatBuffer buffer = samples.createBuffer();
        featureExtractor.extractFeatures(imageData, tempObjectList, buffer);
        // Possibly log time taken
        nComplete += tempObjectList.size();
        long intermediateTime = System.currentTimeMillis();
        if (intermediateTime - lastTime > 1000L) {
            logger.debug("Calculated features for {}/{} objects in {} ms ({} ms per object, {}% complete)", nComplete, pathObjects.size(), (intermediateTime - startTime), GeneralTools.formatNumber((intermediateTime - startTime) / (double) nComplete, 2), GeneralTools.formatNumber(nComplete * 100.0 / pathObjects.size(), 1));
            lastTime = intermediateTime; // update the timestamp so progress is logged at most ~once per second
        }
        boolean doMulticlass = classifier.supportsMulticlass();
        double threshold = 0.5;
        try {
            classifier.predict(samples, results, probabilities);
            IntIndexer idxResults = results.createIndexer();
            FloatIndexer idxProbabilities = null;
            if (probabilities != null && !probabilities.empty())
                idxProbabilities = probabilities.createIndexer();
            if (doMulticlass && idxProbabilities != null) {
                // Use probabilities if we require multiclass outputs
                long row = 0;
                // Previously .cols()
                int nCols = (int) idxProbabilities.size(2);
                List<String> classifications = new ArrayList<>();
                for (var pathObject : tempObjectList) {
                    classifications.clear();
                    for (int col = 0; col < nCols; col++) {
                        double prob = idxProbabilities.get(row, col);
                        if (prob >= threshold) {
                            var pathClass = col >= pathClasses.size() ? null : pathClasses.get(col);
                            if (pathClass != null)
                                classifications.add(pathClass.getName());
                        }
                    }
                    var pathClass = PathClassFactory.getPathClass(classifications);
                    if (PathClassTools.isIgnoredClass(pathClass)) {
                        pathClass = null;
                    }
                    if (!resetExistingClass) {
                        pathClass = PathClassTools.mergeClasses(pathObject.getPathClass(), pathClass);
                    }
                    reclassifiers.add(new Reclassifier(pathObject, pathClass, false));
                    row++;
                }
            } else {
                // Use results (indexed values) if we do not require multiclass outputs
                long row = 0;
                for (var pathObject : tempObjectList) {
                    int prediction = idxResults.get(row);
                    var pathClass = pathClasses.get(prediction);
                    double probability = idxProbabilities == null ? Double.NaN : idxProbabilities.get(row, prediction);
                    if (PathClassTools.isIgnoredClass(pathClass)) {
                        pathClass = null;
                        probability = Double.NaN;
                    }
                    if (!resetExistingClass) {
                        pathClass = PathClassTools.mergeClasses(pathObject.getPathClass(), pathClass);
                        probability = Double.NaN;
                    }
                    reclassifiers.add(new Reclassifier(pathObject, pathClass, true, probability));
                    row++;
                }
            }
            idxResults.release();
            if (idxProbabilities != null)
                idxProbabilities.release();
        } catch (Exception e) {
            logger.warn("Error with samples: {}", samples);
            logger.error(e.getLocalizedMessage(), e);
        }
        counter += tempObjectList.size();
    }
    long predictTime = System.currentTimeMillis() - startTime;
    logger.info("Prediction time: {} ms for {} objects ({} ns per object)", predictTime, pathObjects.size(), GeneralTools.formatNumber((double) predictTime / pathObjects.size() * 1000.0, 2));
    samples.close();
    results.close();
    if (probabilities != null)
        probabilities.close();
    // Apply classifications now
    reclassifiers.stream().forEach(p -> p.apply());
    return counter;
}
Also used : Mat(org.bytedeco.opencv.opencv_core.Mat) ArrayList(java.util.ArrayList) FloatBuffer(java.nio.FloatBuffer) FloatIndexer(org.bytedeco.javacpp.indexer.FloatIndexer) Reclassifier(qupath.lib.objects.classes.Reclassifier) IntIndexer(org.bytedeco.javacpp.indexer.IntIndexer)
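
A minimal caller sketch, assuming a trained OpenCVStatModel and a matching FeatureExtractor are already available (classifier, extractor, imageData and the class names "Tumor"/"Stroma" below are placeholders, not part of the code above; the method is package-private, so such a call would live in the same package):

// Hypothetical caller: classifier, extractor and imageData are assumed to exist already
List<PathClass> pathClasses = Arrays.asList(
        PathClassFactory.getPathClass("Tumor"),
        PathClassFactory.getPathClass("Stroma"));
Collection<PathObject> detections = imageData.getHierarchy().getDetectionObjects();
int nClassified = OpenCVMLClassifier.classifyObjects(
        extractor, classifier, pathClasses, imageData, detections,
        true,   // resetExistingClass
        true);  // requestProbabilityEstimate
logger.info("Classified {} objects", nClassified);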

Example 17 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath by qupath.

The class PCAProjectFeatureExtractor, method extractFeatures:

@Override
public void extractFeatures(ImageData<T> imageData, Collection<? extends PathObject> pathObjects, FloatBuffer buffer) {
    Mat mat = new Mat(pathObjects.size(), featureExtractor.nFeatures(), opencv_core.CV_32FC1);
    FloatBuffer temp = mat.createBuffer();
    featureExtractor.extractFeatures(imageData, pathObjects, temp);
    pca.project(mat, mat);
    buffer.put(mat.createBuffer());
    mat.close();
}
Also used : Mat(org.bytedeco.opencv.opencv_core.Mat) FloatBuffer(java.nio.FloatBuffer)
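
A brief caller sketch (the variable names are assumptions): because the wrapper writes the PCA-projected features into the supplied buffer, the buffer presumably only needs to be sized using the wrapper's own nFeatures():

// Hypothetical usage of a PCAProjectFeatureExtractor instance named pcaExtractor
FloatBuffer features = FloatBuffer.allocate(pathObjects.size() * pcaExtractor.nFeatures());
pcaExtractor.extractFeatures(imageData, pathObjects, features);
features.flip(); // prepare the buffer for reading back the projected features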

Example 18 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath by qupath.

The class OpenCVTools, method matToBufferedImage:

/**
 * Convert a Mat to a BufferedImage.
 * <p>
 * If no ColorModel is specified, a grayscale model will be used for single-channel 8-bit
 * images and RGB/ARGB for 3/4 channel 8-bit images.
 * <p>
 * For all other cases a ColorModel should be specified for meaningful display.
 *
 * @param mat
 * @param colorModel
 * @return
 */
public static BufferedImage matToBufferedImage(final Mat mat, ColorModel colorModel) {
    int type;
    int bpp = 0;
    switch(mat.depth()) {
        case opencv_core.CV_8U:
            type = DataBuffer.TYPE_BYTE;
            bpp = 8;
            break;
        case opencv_core.CV_8S:
            // DataBuffer.TYPE_BYTE is unsigned, so promote signed 8-bit values to short
            type = DataBuffer.TYPE_SHORT;
            bpp = 16;
            break;
        case opencv_core.CV_16U:
            type = DataBuffer.TYPE_USHORT;
            bpp = 16;
            break;
        case opencv_core.CV_16S:
            type = DataBuffer.TYPE_SHORT;
            bpp = 16;
            break;
        case opencv_core.CV_32S:
            type = DataBuffer.TYPE_INT;
            bpp = 32;
            break;
        case opencv_core.CV_32F:
            type = DataBuffer.TYPE_FLOAT;
            bpp = 32;
            break;
        default:
            logger.warn("Unknown Mat depth {}, will default to CV64F ({})", mat.depth(), opencv_core.CV_64F);
        case opencv_core.CV_64F:
            type = DataBuffer.TYPE_DOUBLE;
            bpp = 64;
    }
    // Create a suitable raster
    int width = mat.cols();
    int height = mat.rows();
    int channels = mat.channels();
    // We might generate an image for a special case
    BufferedImage img = null;
    // Handle some special cases
    if (colorModel == null) {
        if (type == DataBuffer.TYPE_BYTE) {
            if (channels == 1) {
                img = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
            // TODO: Set the bytes
            } else if (channels == 3) {
                img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
            } else if (channels == 4) {
                img = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
            }
        }
    } else if (colorModel instanceof IndexColorModel) {
        img = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_INDEXED, (IndexColorModel) colorModel);
    }
    // Create the image
    WritableRaster raster;
    if (img != null) {
        raster = img.getRaster();
    } else if (colorModel != null) {
        raster = colorModel.createCompatibleWritableRaster(width, height);
        img = new BufferedImage(colorModel, raster, false, null);
    } else {
        // Create some kind of raster we can use
        var sampleModel = new BandedSampleModel(type, width, height, channels);
        raster = WritableRaster.createWritableRaster(sampleModel, null);
        // We do need a ColorModel or some description
        colorModel = ColorModelFactory.getDummyColorModel(bpp * channels);
        img = new BufferedImage(colorModel, raster, false, null);
    }
    MatVector matvector = new MatVector();
    opencv_core.split(mat, matvector);
    // We don't know which of the 3 supported array types will be needed yet...
    int[] pixelsInt = null;
    float[] pixelsFloat = null;
    double[] pixelsDouble = null;
    for (int b = 0; b < channels; b++) {
        // Extract pixels for the current channel
        Mat matChannel = matvector.get(b);
        Indexer indexer = matChannel.createIndexer();
        if (indexer instanceof UByteIndexer) {
            if (pixelsInt == null)
                pixelsInt = new int[width * height];
            ((UByteIndexer) indexer).get(0L, pixelsInt);
        } else if (indexer instanceof UShortIndexer) {
            if (pixelsInt == null)
                pixelsInt = new int[width * height];
            ((UShortIndexer) indexer).get(0L, pixelsInt);
        } else if (indexer instanceof FloatIndexer) {
            if (pixelsFloat == null)
                pixelsFloat = new float[width * height];
            ((FloatIndexer) indexer).get(0L, pixelsFloat);
        } else if (indexer instanceof DoubleIndexer) {
            if (pixelsDouble == null)
                pixelsDouble = new double[width * height];
            ((DoubleIndexer) indexer).get(0L, pixelsDouble);
        } else {
            // Fall back to generic double access; this is inefficient, but unlikely to occur too often
            if (pixelsDouble == null)
                pixelsDouble = new double[width * height];
            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x++) {
                    pixelsDouble[y * width + x] = indexer.getDouble(y, x, b);
                }
            }
        }
        // Set the samples
        if (pixelsInt != null)
            raster.setSamples(0, 0, width, height, b, pixelsInt);
        else if (pixelsFloat != null)
            raster.setSamples(0, 0, width, height, b, pixelsFloat);
        else if (pixelsDouble != null)
            raster.setSamples(0, 0, width, height, b, pixelsDouble);
    }
    return img;
}
Also used : Mat(org.bytedeco.opencv.opencv_core.Mat) FloatIndexer(org.bytedeco.javacpp.indexer.FloatIndexer) UByteIndexer(org.bytedeco.javacpp.indexer.UByteIndexer) Point(org.bytedeco.opencv.opencv_core.Point) BufferedImage(java.awt.image.BufferedImage) UShortIndexer(org.bytedeco.javacpp.indexer.UShortIndexer) ByteIndexer(org.bytedeco.javacpp.indexer.ByteIndexer) IntIndexer(org.bytedeco.javacpp.indexer.IntIndexer) ShortIndexer(org.bytedeco.javacpp.indexer.ShortIndexer) DoubleIndexer(org.bytedeco.javacpp.indexer.DoubleIndexer) Indexer(org.bytedeco.javacpp.indexer.Indexer) WritableRaster(java.awt.image.WritableRaster) BandedSampleModel(java.awt.image.BandedSampleModel) MatVector(org.bytedeco.opencv.opencv_core.MatVector) IndexColorModel(java.awt.image.IndexColorModel)
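
As the javadoc notes, a null ColorModel only gives a meaningful image for 8-bit data with 1, 3 or 4 channels; other cases need an explicit model. A small sketch under that assumption, using synthetic data:

// Synthetic 3-channel 8-bit Mat, so passing a null ColorModel falls back to TYPE_INT_RGB
Mat mat = new Mat(100, 200, opencv_core.CV_8UC3, new Scalar(64, 128, 192, 0));
BufferedImage img = OpenCVTools.matToBufferedImage(mat, null);
System.out.println(img.getWidth() + " x " + img.getHeight()); // 200 x 100
mat.close();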

Example 19 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath by qupath.

The class OpenCVTools, method filterSingleZ:

// Alternative weighted sum code that converts to 32-bit
// static void weightedSum(List<Mat> mats, double[] weights, Mat dest) {
// boolean isFirst = true;
// for (int i = 0; i < weights.length; i++) {
// double w = weights[i];
// if (w == 0)
// continue;
// var temp = mats.get(i);
// int type = temp.depth();
// if (type != opencv_core.CV_32F && type != opencv_core.CV_64F) {
// var temp2 = new Mat();
// temp.convertTo(temp2, opencv_core.CV_32F);
// temp = temp2;
// }
// if (isFirst) {
// dest.put(opencv_core.multiply(temp, w));
// isFirst = false;
// } else
// opencv_core.scaleAdd(temp, w, dest, dest);
// if (mats.get(i) != temp)
// temp.release();
// }
// // TODO: Check this does something sensible!
// if (isFirst) {
// dest.create(mats.get(0).size(), mats.get(0).type());
// dest.put(Scalar.ZERO);
// }
// }
/**
 * Apply a filter along the 'list' dimension for a list of Mats, computing the value
 * for a single entry. This is effectively computing a weighted sum of images in the list.
 * <p>
 * Note: this method does not change the depth of the input images.
 * If a floating point output is needed, the Mats should be converted before input.
 *
 * @param mats
 * @param kernel
 * @param ind3D
 * @param border
 * @return
 */
public static Mat filterSingleZ(List<Mat> mats, double[] kernel, int ind3D, int border) {
    // Calculate weights for each image
    int n = mats.size();
    int halfSize = kernel.length / 2;
    int startInd = ind3D - halfSize;
    int endInd = startInd + kernel.length;
    double[] weights = new double[mats.size()];
    int k = 0;
    for (int i = startInd; i < endInd; i++) {
        int ind = ensureInRange(i, n, border);
        weights[ind] += kernel[k];
        k++;
    }
    Mat result = new Mat();
    weightedSum(mats, weights, result);
    return result;
}
Also used : Mat(org.bytedeco.opencv.opencv_core.Mat) Point(org.bytedeco.opencv.opencv_core.Point)
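
For example, smoothing the slice at a given z-index with a 3-tap kernel might look like the following sketch (zStack and the use of an OpenCV border constant are assumptions for illustration; the code above only requires a value understood by ensureInRange):

// Hypothetical caller: zStack is a List<Mat> of same-sized, same-type slices
double[] kernel = new double[] {0.25, 0.5, 0.25};
int ind3D = 2; // index of the slice whose filtered result is being computed
Mat filtered = OpenCVTools.filterSingleZ(zStack, kernel, ind3D, opencv_core.BORDER_REPLICATE);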

Example 20 with Mat

Use of org.bytedeco.opencv.opencv_core.Mat in the project qupath by qupath.

The class OpenCVTools, method imageToMatRGBorBGR:

/**
 * Extract 8-bit unsigned pixels from a BufferedImage, either as RGB(A) (default)
 * or BGR(A) (OpenCV's preferred format).
 *
 * @param img input image
 * @param doBGR if true, request BGR rather than RGB
 * @param includeAlpha if true, return any available alpha data as a 4th channel.
 * @return
 */
private static Mat imageToMatRGBorBGR(final BufferedImage img, final boolean doBGR, final boolean includeAlpha) {
    // We can request the RGB values directly
    int width = img.getWidth();
    int height = img.getHeight();
    int[] data = img.getRGB(0, 0, width, height, null, 0, img.getWidth());
    Mat mat;
    if (includeAlpha)
        mat = new Mat(height, width, opencv_core.CV_8UC4);
    else
        mat = new Mat(height, width, opencv_core.CV_8UC3);
    UByteIndexer indexer = mat.createIndexer();
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int val = data[y * width + x];
            int r = ColorTools.red(val);
            int g = ColorTools.green(val);
            int b = ColorTools.blue(val);
            if (doBGR) {
                indexer.put(y, x, 0, b);
                indexer.put(y, x, 1, g);
                indexer.put(y, x, 2, r);
            } else {
                indexer.put(y, x, 0, r);
                indexer.put(y, x, 1, g);
                indexer.put(y, x, 2, b);
            }
            if (includeAlpha) {
                int a = ColorTools.alpha(val);
                indexer.put(y, x, 3, a);
            }
        }
    }
    indexer.release();
    return mat;
}
Also used : Mat(org.bytedeco.opencv.opencv_core.Mat) UByteIndexer(org.bytedeco.javacpp.indexer.UByteIndexer) Point(org.bytedeco.opencv.opencv_core.Point)
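
Channel ordering matters here because OpenCV conventionally stores color images as BGR. An illustrative check of that ordering with a UByteIndexer (the method above is private, so this sketch assumes it is executed from within OpenCVTools):

// Illustrative only: verify the channel order at pixel (0, 0) of a Mat built with doBGR = true
BufferedImage img = new BufferedImage(4, 4, BufferedImage.TYPE_INT_RGB);
img.setRGB(0, 0, 0xFF0000);                     // pure red
Mat bgr = imageToMatRGBorBGR(img, true, false);
UByteIndexer idx = bgr.createIndexer();
System.out.println(idx.get(0, 0, 0));           // 0   (blue comes first in BGR)
System.out.println(idx.get(0, 0, 2));           // 255 (red is the third channel)
idx.release();
bgr.close();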

Aggregations

Mat (org.bytedeco.opencv.opencv_core.Mat) 76
Point (org.bytedeco.opencv.opencv_core.Point) 23
ArrayList (java.util.ArrayList) 15
MatVector (org.bytedeco.opencv.opencv_core.MatVector) 15
PointerScope (org.bytedeco.javacpp.PointerScope) 14
FloatIndexer (org.bytedeco.javacpp.indexer.FloatIndexer) 14
BufferedImage (java.awt.image.BufferedImage) 12
IntIndexer (org.bytedeco.javacpp.indexer.IntIndexer) 10
Scalar (org.bytedeco.opencv.opencv_core.Scalar) 10
Test (org.junit.jupiter.api.Test) 10
DoubleIndexer (org.bytedeco.javacpp.indexer.DoubleIndexer) 9
Size (org.bytedeco.opencv.opencv_core.Size) 9
UByteIndexer (org.bytedeco.javacpp.indexer.UByteIndexer) 8
List (java.util.List) 7
org.bytedeco.opencv.global.opencv_core (org.bytedeco.opencv.global.opencv_core) 7
WritableRaster (java.awt.image.WritableRaster) 6
IOException (java.io.IOException) 6
Arrays (java.util.Arrays) 6
Collections (java.util.Collections) 6
Collectors (java.util.stream.Collectors) 6