
Example 6 with Feature

use of mpicbg.imagefeatures.Feature in project TrakEM2 by trakem2.

the class AlignmentUtils method extractAndSaveLayerFeatures.

/**
 * Extract SIFT features and save them into the project folder.
 *
 * @param layerRange the list of layers to be aligned
 * @param box a rectangular region of interest that will be used for alignment
 * @param scale scale factor {@code <= 1.0}
 * @param filter a name-based filter for Patches (can be null)
 * @param siftParam SIFT extraction parameters
 * @param clearCache whether cached data should be cleared for each layer before extraction
 * @param numThreads the number of feature extraction tasks to run in parallel
 * @throws ExecutionException if one of the feature extraction tasks fails
 * @throws InterruptedException if the calling thread is interrupted while waiting for the tasks
 */
protected static final void extractAndSaveLayerFeatures(final List<Layer> layerRange, final Rectangle box, final double scale, final Filter<Patch> filter, final FloatArray2DSIFT.Param siftParam, final boolean clearCache, final int numThreads) throws ExecutionException, InterruptedException {
    final long sTime = System.currentTimeMillis();
    final ExecutorService exec = ExecutorProvider.getExecutorService(1.0f / (float) numThreads);
    /* extract features for all slices and store them to disk */
    final AtomicInteger counter = new AtomicInteger(0);
    final ArrayList<Future<ArrayList<Feature>>> siftTasks = new ArrayList<Future<ArrayList<Feature>>>();
    for (final Layer layer : layerRange) {
        siftTasks.add(exec.submit(new LayerFeatureCallable(layer, box, scale, filter, siftParam, clearCache)));
    }
    /* join */
    try {
        for (final Future<ArrayList<Feature>> fu : siftTasks) {
            IJ.showProgress(counter.getAndIncrement(), layerRange.size() - 1);
            fu.get();
        }
    } catch (final InterruptedException e) {
        Utils.log("Feature extraction interrupted.");
        IJError.print(e);
        siftTasks.clear();
        // exec.shutdownNow();
        throw e;
    } catch (final ExecutionException e) {
        Utils.log("Execution exception during feature extraction.");
        IJError.print(e);
        siftTasks.clear();
        // exec.shutdownNow();
        throw e;
    }
    siftTasks.clear();
    IJ.log("Extracted features in " + (System.currentTimeMillis() - sTime) + "ms");
// exec.shutdown();
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ExecutorService(java.util.concurrent.ExecutorService) ArrayList(java.util.ArrayList) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException) Feature(mpicbg.imagefeatures.Feature) Layer(ini.trakem2.display.Layer)
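
For orientation, a caller in the same package (or a subclass of AlignmentUtils) might drive this helper roughly as follows. This is only a hedged sketch: the layerSet reference, the maxOctaveSize value and the thread count are made-up illustrations, not code from TrakEM2.

// Hypothetical invocation sketch (not from the TrakEM2 sources):
final List<Layer> layerRange = layerSet.getLayers(0, layerSet.size() - 1); // assumes some LayerSet named layerSet
final Rectangle box = layerRange.get(0).getMinimalBoundingBox(Patch.class); // region of interest for alignment
final FloatArray2DSIFT.Param siftParam = new FloatArray2DSIFT.Param();
siftParam.maxOctaveSize = 1024; // arbitrary example value
// Downscale so that the larger box dimension fits into maxOctaveSize.
final double scale = Math.min(1.0, (double) siftParam.maxOctaveSize / Math.max(box.width, box.height));
try {
    extractAndSaveLayerFeatures(layerRange, box, scale, null, siftParam, false, Runtime.getRuntime().availableProcessors());
} catch (final InterruptedException e) {
    IJError.print(e);
} catch (final ExecutionException e) {
    IJError.print(e);
}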

Example 7 with Feature

use of mpicbg.imagefeatures.Feature in project TrakEM2 by trakem2.

the class Align method serializeFeatures.

protected static final boolean serializeFeatures(final Param p, final AbstractAffineTile2D<?> t, final Collection<Feature> f) {
    final ArrayList<Feature> list = new ArrayList<Feature>();
    list.addAll(f);
    final Patch patch = t.getPatch();
    final Loader loader = patch.getProject().getLoader();
    final Features fe = new Features(p.sift, list);
    return loader.serialize(fe, new StringBuilder(loader.getUNUIdFolder()).append("features.ser/").append(FSLoader.createIdPath(Long.toString(patch.getId()), "features", ".ser")).toString());
}
Also used : ArrayList(java.util.ArrayList) Loader(ini.trakem2.persistence.Loader) FSLoader(ini.trakem2.persistence.FSLoader) Feature(mpicbg.imagefeatures.Feature) Patch(ini.trakem2.display.Patch)
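
The class also needs a matching read path for these serialized features. Since that method is not part of these search results, the sketch below is only an illustrative reconstruction: the method name deserializeFeatures, the Loader#deserialize(String) call and the Features field names p and features are assumptions, not verified TrakEM2 code.

// Illustrative read path mirroring serializeFeatures above (assumed names, see note):
protected static final ArrayList<Feature> deserializeFeatures(final Param p, final AbstractAffineTile2D<?> t) {
    final Patch patch = t.getPatch();
    final Loader loader = patch.getProject().getLoader();
    final String path = new StringBuilder(loader.getUNUIdFolder()).append("features.ser/").append(FSLoader.createIdPath(Long.toString(patch.getId()), "features", ".ser")).toString();
    // Assumption: Loader exposes a deserialize(String) returning the stored Object, as the counterpart of serialize(...).
    final Object ob = loader.deserialize(path);
    if (ob instanceof Features) {
        final Features fe = (Features) ob;
        // Reuse cached features only if they were extracted with the same SIFT parameters
        // (the fields p and features are assumptions about the Features container).
        if (null != fe.p && fe.p.equals(p.sift)) return fe.features;
    }
    return null;
}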

Example 8 with Feature

use of mpicbg.imagefeatures.Feature in project TrakEM2 by trakem2.

the class Align method alignLayersLinearly.

/**
 * Align a range of layers by accumulating pairwise alignments of contiguous layers.
 *
 * @param layers The range of layers to align pairwise.
 * @param numThreads The number of threads to use.
 * @param filter The {@link Filter} to decide which {@link Patch} instances to use in each {@link Layer}. Can be null.
 */
public static final void alignLayersLinearly(final List<Layer> layers, final int numThreads, final Filter<Patch> filter) {
    param.sift.maxOctaveSize = 1600;
    if (!param.setup("Align layers linearly"))
        return;
    final Rectangle box = layers.get(0).getParent().getMinimalBoundingBox(Patch.class);
    final double scale = Math.min(1.0, Math.min((double) param.sift.maxOctaveSize / box.width, (double) param.sift.maxOctaveSize / box.height));
    final Param p = param.clone();
    p.maxEpsilon *= scale;
    final FloatArray2DSIFT sift = new FloatArray2DSIFT(p.sift);
    final SIFT ijSIFT = new SIFT(sift);
    Rectangle box1 = null;
    Rectangle box2 = null;
    final Collection<Feature> features1 = new ArrayList<Feature>();
    final Collection<Feature> features2 = new ArrayList<Feature>();
    final List<PointMatch> candidates = new ArrayList<PointMatch>();
    final List<PointMatch> inliers = new ArrayList<PointMatch>();
    final AffineTransform a = new AffineTransform();
    int i = 0;
    for (final Layer l : layers) {
        long s = System.currentTimeMillis();
        features1.clear();
        features1.addAll(features2);
        features2.clear();
        final Rectangle box3 = l.getMinimalBoundingBox(Patch.class);
        if (box3 == null)
            continue;
        box1 = box2;
        box2 = box3;
        final List<Patch> patches = l.getAll(Patch.class);
        if (null != filter) {
            for (final Iterator<Patch> it = patches.iterator(); it.hasNext(); ) {
                if (!filter.accept(it.next()))
                    it.remove();
            }
        }
        ijSIFT.extractFeatures(l.getProject().getLoader().getFlatImage(l, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches, true).getProcessor(), features2);
        Utils.log(features2.size() + " features extracted in layer \"" + l.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
        if (features1.size() > 0) {
            s = System.currentTimeMillis();
            candidates.clear();
            FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
            final AbstractAffineModel2D<?> model;
            switch(p.expectedModelIndex) {
                case 0:
                    model = new TranslationModel2D();
                    break;
                case 1:
                    model = new RigidModel2D();
                    break;
                case 2:
                    model = new SimilarityModel2D();
                    break;
                case 3:
                    model = new AffineModel2D();
                    break;
                default:
                    return;
            }
            boolean modelFound;
            boolean again = false;
            try {
                do {
                    again = false;
                    modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                    if (modelFound && p.rejectIdentity) {
                        final ArrayList<Point> points = new ArrayList<Point>();
                        PointMatch.sourcePoints(inliers, points);
                        if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                            Utils.log("Identity transform for " + inliers.size() + " matches rejected.");
                            candidates.removeAll(inliers);
                            inliers.clear();
                            again = true;
                        }
                    }
                } while (again);
            } catch (final NotEnoughDataPointsException e) {
                modelFound = false;
            }
            if (modelFound) {
                Utils.log("Model found for layer \"" + l.getTitle() + "\" and its predecessor:\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - s) + " ms");
                final AffineTransform b = new AffineTransform();
                b.translate(box1.x, box1.y);
                b.scale(1.0f / scale, 1.0f / scale);
                b.concatenate(model.createAffine());
                b.scale(scale, scale);
                b.translate(-box2.x, -box2.y);
                a.concatenate(b);
                l.apply(Displayable.class, a);
                Display.repaint(l);
            } else {
                Utils.log("No model found for layer \"" + l.getTitle() + "\" and its predecessor:\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - s) + " ms");
                a.setToIdentity();
            }
        }
        IJ.showProgress(++i, layers.size());
    }
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) Rectangle(java.awt.Rectangle) ArrayList(java.util.ArrayList) Feature(mpicbg.imagefeatures.Feature) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) InterpolatedAffineModel2D(mpicbg.models.InterpolatedAffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) Point(mpicbg.models.Point) Layer(ini.trakem2.display.Layer) PointMatch(mpicbg.models.PointMatch) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) Patch(ini.trakem2.display.Patch)
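
The six AffineTransform calls inside the model-found branch undo the working-scale downsampling and the two bounding-box offsets around the fitted pairwise model. Purely as a reading aid, that composition can be restated as the helper below; it is an illustrative sketch, not a method of the Align class.

// Illustrative helper: lift a model fitted at working scale inside cropped bounding boxes
// back into full-resolution layer coordinates (same composition as in alignLayersLinearly above).
static AffineTransform toWorldTransform(final AbstractAffineModel2D<?> model, final Rectangle boxPrevious, final Rectangle boxCurrent, final double scale) {
    final AffineTransform b = new AffineTransform();
    b.translate(boxPrevious.x, boxPrevious.y); // restore the previous layer's bounding-box offset
    b.scale(1.0 / scale, 1.0 / scale);         // undo the downsampling used for feature extraction
    b.concatenate(model.createAffine());       // the pairwise model estimated at working scale
    b.scale(scale, scale);                     // map back down to working-scale coordinates
    b.translate(-boxCurrent.x, -boxCurrent.y); // remove the current layer's bounding-box offset
    return b;
}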

Example 9 with Feature

use of mpicbg.imagefeatures.Feature in project TrakEM2 by trakem2.

the class Align method alignTileCollections.

/**
 * Align two collections of tiles by extracting and matching SIFT features from a flattened
 * snapshot of each collection and applying the resulting transform to the patches of the first.
 *
 * @param p the alignment parameters
 * @param a the first collection of tiles; their patches are transformed by the estimated model
 * @param b the second collection of tiles, which is kept fixed
 */
public static final void alignTileCollections(final Param p, final Collection<AbstractAffineTile2D<?>> a, final Collection<AbstractAffineTile2D<?>> b) {
    final ArrayList<Patch> pa = new ArrayList<Patch>();
    final ArrayList<Patch> pb = new ArrayList<Patch>();
    for (final AbstractAffineTile2D<?> t : a) pa.add(t.getPatch());
    for (final AbstractAffineTile2D<?> t : b) pb.add(t.getPatch());
    final Layer la = pa.iterator().next().getLayer();
    final Layer lb = pb.iterator().next().getLayer();
    final Rectangle boxA = Displayable.getBoundingBox(pa, null);
    final Rectangle boxB = Displayable.getBoundingBox(pb, null);
    final double scale = Math.min(1.0, Math.min(Math.min((double) p.sift.maxOctaveSize / boxA.width, (double) p.sift.maxOctaveSize / boxA.height), Math.min((double) p.sift.maxOctaveSize / boxB.width, (double) p.sift.maxOctaveSize / boxB.height)));
    final Param pp = p.clone();
    pp.maxEpsilon *= scale;
    final FloatArray2DSIFT sift = new FloatArray2DSIFT(pp.sift);
    final SIFT ijSIFT = new SIFT(sift);
    final Collection<Feature> featuresA = new ArrayList<Feature>();
    final Collection<Feature> featuresB = new ArrayList<Feature>();
    final List<PointMatch> candidates = new ArrayList<PointMatch>();
    final List<PointMatch> inliers = new ArrayList<PointMatch>();
    long s = System.currentTimeMillis();
    ijSIFT.extractFeatures(la.getProject().getLoader().getFlatImage(la, boxA, scale, 0xffffffff, ImagePlus.GRAY8, null, pa, true, Color.GRAY).getProcessor(), featuresA);
    Utils.log(featuresA.size() + " features extracted in graph A in layer \"" + la.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
    s = System.currentTimeMillis();
    ijSIFT.extractFeatures(lb.getProject().getLoader().getFlatImage(lb, boxB, scale, 0xffffffff, ImagePlus.GRAY8, null, pb, true, Color.GRAY).getProcessor(), featuresB);
    Utils.log(featuresB.size() + " features extracted in graph B in layer \"" + lb.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
    if (featuresA.size() > 0 && featuresB.size() > 0) {
        s = System.currentTimeMillis();
        FeatureTransform.matchFeatures(featuresA, featuresB, candidates, pp.rod);
        final AbstractAffineModel2D<?> model;
        switch(p.expectedModelIndex) {
            case 0:
                model = new TranslationModel2D();
                break;
            case 1:
                model = new RigidModel2D();
                break;
            case 2:
                model = new SimilarityModel2D();
                break;
            case 3:
                model = new AffineModel2D();
                break;
            default:
                return;
        }
        boolean modelFound;
        boolean again = false;
        try {
            do {
                again = false;
                modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                if (modelFound && p.rejectIdentity) {
                    final ArrayList<Point> points = new ArrayList<Point>();
                    PointMatch.sourcePoints(inliers, points);
                    if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                        Utils.log("Identity transform for " + inliers.size() + " matches rejected.");
                        candidates.removeAll(inliers);
                        inliers.clear();
                        again = true;
                    }
                }
            } while (again);
        } catch (final NotEnoughDataPointsException e) {
            modelFound = false;
        }
        if (modelFound) {
            Utils.log("Model found for graph A and B in layers \"" + la.getTitle() + "\" and \"" + lb.getTitle() + "\":\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - s) + " ms");
            final AffineTransform at = new AffineTransform();
            at.translate(boxA.x, boxA.y);
            at.scale(1.0f / scale, 1.0f / scale);
            at.concatenate(model.createAffine());
            at.scale(scale, scale);
            at.translate(-boxB.x, -boxB.y);
            for (final Patch t : pa) t.preTransform(at, false);
            Display.repaint(la);
        } else
            Utils.log("No model found for graph A and B in layers \"" + la.getTitle() + "\" and \"" + lb.getTitle() + "\":\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - s) + " ms");
    }
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) ArrayList(java.util.ArrayList) Rectangle(java.awt.Rectangle) Feature(mpicbg.imagefeatures.Feature) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) InterpolatedAffineModel2D(mpicbg.models.InterpolatedAffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) Point(mpicbg.models.Point) Layer(ini.trakem2.display.Layer) PointMatch(mpicbg.models.PointMatch) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) Patch(ini.trakem2.display.Patch)
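
The do/while RANSAC loop with optional identity rejection is repeated verbatim in alignLayersLinearly, alignTileCollections and alignLayersLinearlyJob. As a reading aid only, it can be condensed into a helper like the one below; this is an illustrative sketch using Align.Param, not a method that exists in the Align class.

// Illustrative sketch of the repeated RANSAC-with-identity-rejection loop (not part of Align):
static boolean filterWithIdentityRejection(final AbstractAffineModel2D<?> model, final List<PointMatch> candidates, final List<PointMatch> inliers, final Align.Param p) {
    try {
        boolean again;
        do {
            again = false;
            final boolean modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
            if (!modelFound) return false;
            if (p.rejectIdentity) {
                final ArrayList<Point> points = new ArrayList<Point>();
                PointMatch.sourcePoints(inliers, points);
                if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                    // Drop the near-identity solution and retry on the remaining candidates.
                    candidates.removeAll(inliers);
                    inliers.clear();
                    again = true;
                }
            }
        } while (again);
        return true;
    } catch (final NotEnoughDataPointsException e) {
        return false;
    }
}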

Example 10 with Feature

use of mpicbg.imagefeatures.Feature in project TrakEM2 by trakem2.

the class AlignLayersTask method alignLayersLinearlyJob.

public static final void alignLayersLinearlyJob(final LayerSet layerSet, final int first, final int last, final boolean propagateTransform, final Rectangle fov, final Filter<Patch> filter) {
    // will reverse order if necessary
    final List<Layer> layerRange = layerSet.getLayers(first, last);
    final Align.Param p = Align.param.clone();
    // find the first non-empty layer, and remove all empty layers
    Rectangle box = fov;
    for (final Iterator<Layer> it = layerRange.iterator(); it.hasNext(); ) {
        final Layer la = it.next();
        if (!la.contains(Patch.class, true)) {
            it.remove();
            continue;
        }
        if (null == box) {
            // The first layer:
            // Only for visible patches
            box = la.getMinimalBoundingBox(Patch.class, true);
        }
    }
    if (0 == layerRange.size()) {
        Utils.log("All layers in range are empty!");
        return;
    }
    /* nothing to do if fewer than two layers remain in the range */
    if (layerRange.size() < 2)
        return;
    final double scale = Math.min(1.0, Math.min((double) p.sift.maxOctaveSize / (double) box.width, (double) p.sift.maxOctaveSize / (double) box.height));
    p.maxEpsilon *= scale;
    p.identityTolerance *= scale;
    // Utils.log2("scale: " + scale + "  maxOctaveSize: " + p.sift.maxOctaveSize + "  box: " + box.width + "," + box.height);
    final FloatArray2DSIFT sift = new FloatArray2DSIFT(p.sift);
    final SIFT ijSIFT = new SIFT(sift);
    Rectangle box1 = fov;
    Rectangle box2 = fov;
    final Collection<Feature> features1 = new ArrayList<Feature>();
    final Collection<Feature> features2 = new ArrayList<Feature>();
    final List<PointMatch> candidates = new ArrayList<PointMatch>();
    final List<PointMatch> inliers = new ArrayList<PointMatch>();
    final AffineTransform a = new AffineTransform();
    int s = 0;
    for (final Layer layer : layerRange) {
        if (Thread.currentThread().isInterrupted())
            return;
        final long t0 = System.currentTimeMillis();
        features1.clear();
        features1.addAll(features2);
        features2.clear();
        final Rectangle box3 = layer.getMinimalBoundingBox(Patch.class, true);
        // skipping empty layer
        if (box3 == null || (box3.width == 0 && box3.height == 0))
            continue;
        box1 = null == fov ? box2 : fov;
        box2 = null == fov ? box3 : fov;
        final List<Patch> patches = layer.getAll(Patch.class);
        if (null != filter) {
            for (final Iterator<Patch> it = patches.iterator(); it.hasNext(); ) {
                if (!filter.accept(it.next()))
                    it.remove();
            }
        }
        final ImageProcessor flatImage = layer.getProject().getLoader().getFlatImage(layer, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches, true).getProcessor();
        ijSIFT.extractFeatures(flatImage, features2);
        IJ.log(features2.size() + " features extracted in layer \"" + layer.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
        if (features1.size() > 0) {
            final long t1 = System.currentTimeMillis();
            candidates.clear();
            FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
            final AbstractAffineModel2D<?> model;
            switch(p.expectedModelIndex) {
                case 0:
                    model = new TranslationModel2D();
                    break;
                case 1:
                    model = new RigidModel2D();
                    break;
                case 2:
                    model = new SimilarityModel2D();
                    break;
                case 3:
                    model = new AffineModel2D();
                    break;
                default:
                    return;
            }
            final AbstractAffineModel2D<?> desiredModel;
            switch(p.desiredModelIndex) {
                case 0:
                    desiredModel = new TranslationModel2D();
                    break;
                case 1:
                    desiredModel = new RigidModel2D();
                    break;
                case 2:
                    desiredModel = new SimilarityModel2D();
                    break;
                case 3:
                    desiredModel = new AffineModel2D();
                    break;
                default:
                    return;
            }
            boolean modelFound;
            boolean again = false;
            try {
                do {
                    again = false;
                    modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                    if (modelFound && p.rejectIdentity) {
                        final ArrayList<Point> points = new ArrayList<Point>();
                        PointMatch.sourcePoints(inliers, points);
                        if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                            IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
                            candidates.removeAll(inliers);
                            inliers.clear();
                            again = true;
                        }
                    }
                } while (again);
                if (modelFound)
                    desiredModel.fit(inliers);
            } catch (final NotEnoughDataPointsException e) {
                modelFound = false;
            } catch (final IllDefinedDataPointsException e) {
                modelFound = false;
            }
            if (Thread.currentThread().isInterrupted())
                return;
            if (modelFound) {
                IJ.log("Model found for layer \"" + layer.getTitle() + "\" and its predecessor:\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - t1) + " ms");
                final AffineTransform b = new AffineTransform();
                b.translate(box1.x, box1.y);
                b.scale(1.0f / scale, 1.0f / scale);
                b.concatenate(desiredModel.createAffine());
                b.scale(scale, scale);
                b.translate(-box2.x, -box2.y);
                a.concatenate(b);
                AlignTask.transformPatchesAndVectorData(patches, a);
                Display.repaint(layer);
            } else {
                IJ.log("No model found for layer \"" + layer.getTitle() + "\" and its predecessor:\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - s) + " ms");
                a.setToIdentity();
            }
        }
        IJ.showProgress(++s, layerRange.size());
    }
    if (Thread.currentThread().isInterrupted())
        return;
    if (propagateTransform) {
        if (last > first && last < layerSet.size() - 2)
            for (final Layer la : layerSet.getLayers(last + 1, layerSet.size() - 1)) {
                if (Thread.currentThread().isInterrupted())
                    return;
                AlignTask.transformPatchesAndVectorData(la, a);
            }
        else if (first > last && last > 0)
            for (final Layer la : layerSet.getLayers(0, last - 1)) {
                if (Thread.currentThread().isInterrupted())
                    return;
                AlignTask.transformPatchesAndVectorData(la, a);
            }
    }
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) Rectangle(java.awt.Rectangle) ArrayList(java.util.ArrayList) Feature(mpicbg.imagefeatures.Feature) ImageProcessor(ij.process.ImageProcessor) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) IllDefinedDataPointsException(mpicbg.models.IllDefinedDataPointsException) Point(mpicbg.models.Point) Layer(ini.trakem2.display.Layer) PointMatch(mpicbg.models.PointMatch) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) Patch(ini.trakem2.display.Patch)
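
To put the job above in context, a caller hands it the LayerSet and an index range. The snippet below is a hypothetical invocation; the someLayer reference and all option values are illustrative, not taken from TrakEM2.

// Hypothetical invocation sketch (values are illustrative only):
final LayerSet layerSet = someLayer.getParent(); // any Layer gives access to its LayerSet
final int first = 0;
final int last = layerSet.size() - 1; // align the whole stack
AlignLayersTask.alignLayersLinearlyJob(layerSet, first, last,
        false, // propagateTransform: do not push the final transform onto the layers beyond the range
        null,  // fov: null means each layer's minimal bounding box is used as the field of view
        null); // filter: null accepts every Patch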

Aggregations

Feature (mpicbg.imagefeatures.Feature): 11
ArrayList (java.util.ArrayList): 10
SIFT (mpicbg.ij.SIFT): 8
FloatArray2DSIFT (mpicbg.imagefeatures.FloatArray2DSIFT): 8
Patch (ini.trakem2.display.Patch): 6
Point (mpicbg.models.Point): 6
Layer (ini.trakem2.display.Layer): 5
Rectangle (java.awt.Rectangle): 5
AffineTransform (java.awt.geom.AffineTransform): 5
AbstractAffineModel2D (mpicbg.models.AbstractAffineModel2D): 5
AffineModel2D (mpicbg.models.AffineModel2D): 5
NotEnoughDataPointsException (mpicbg.models.NotEnoughDataPointsException): 5
PointMatch (mpicbg.models.PointMatch): 5
SimilarityModel2D (mpicbg.models.SimilarityModel2D): 5
RigidModel2D (mpicbg.trakem2.transform.RigidModel2D): 5
TranslationModel2D (mpicbg.trakem2.transform.TranslationModel2D): 5
Loader (ini.trakem2.persistence.Loader): 3
ExecutorService (java.util.concurrent.ExecutorService): 3
Future (java.util.concurrent.Future): 3
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 3