Search in sources :

Example 6 with TranslationModel2D

use of mpicbg.trakem2.transform.TranslationModel2D in project TrakEM2 by trakem2.

Defined in class AlignLayersTask, method alignLayersNonLinearlyJob.

/**
 * Aligns a range of layers non-linearly (elastically): for each pair of contiguous
 * layers, SIFT features are extracted from flat images, an affine model is estimated
 * by RANSAC, and a bUnwarpJ cubic B-spline deformation is computed and appended to
 * every participating patch of the second layer of the pair.
 *
 * @param layerSet the LayerSet whose layers are to be aligned.
 * @param first index of the first layer in the range.
 * @param last index of the last layer in the range; order is reversed when needed.
 * @param propagateTransform whether to propagate the transform beyond the range
 *                           (not implemented for non-linear alignment; only logged).
 * @param fov the field of view to use for all layers, or null to use each layer's
 *            minimal bounding box of visible patches.
 * @param filter decides which Patch instances participate; may be null (all patches).
 */
public static final void alignLayersNonLinearlyJob(final LayerSet layerSet, final int first, final int last, final boolean propagateTransform, final Rectangle fov, final Filter<Patch> filter) {
    // will reverse order if necessary
    final List<Layer> layerRange = layerSet.getLayers(first, last);
    final Align.Param p = Align.param.clone();
    // Remove all empty layers
    for (final Iterator<Layer> it = layerRange.iterator(); it.hasNext(); ) {
        if (!it.next().contains(Patch.class, true)) {
            it.remove();
        }
    }
    if (0 == layerRange.size()) {
        Utils.log("No layers in range show any images!");
        return;
    }
    /* do not work if there is only one layer selected */
    if (layerRange.size() < 2)
        return;
    // Collect every patch that passes the filter, across the whole range
    final List<Patch> all = new ArrayList<Patch>();
    for (final Layer la : layerRange) {
        for (final Patch patch : la.getAll(Patch.class)) {
            if (null != filter && !filter.accept(patch))
                continue;
            all.add(patch);
        }
    }
    AlignTask.transformPatchesAndVectorData(all, new Runnable() {

        @Override
        public void run() {
            // ///
            final Loader loader = layerSet.getProject().getLoader();
            // Not concurrent safe! So two copies, one per layer and Thread:
            final SIFT ijSIFT1 = new SIFT(new FloatArray2DSIFT(p.sift));
            final SIFT ijSIFT2 = new SIFT(new FloatArray2DSIFT(p.sift));
            final Collection<Feature> features1 = new ArrayList<Feature>();
            final Collection<Feature> features2 = new ArrayList<Feature>();
            final List<PointMatch> candidates = new ArrayList<PointMatch>();
            final List<PointMatch> inliers = new ArrayList<PointMatch>();
            final int n_proc = Runtime.getRuntime().availableProcessors() > 1 ? 2 : 1;
            final ExecutorService exec = Utils.newFixedThreadPool(n_proc, "alignLayersNonLinearly");
            // FIX: the executor was previously leaked on the early returns below
            // (feature-extraction failure, unknown model index); shut it down in a finally.
            try {
                List<Patch> previousPatches = null;
                // progress counter over layer pairs
                int s = 0;
                for (int i = 1; i < layerRange.size(); ++i) {
                    if (Thread.currentThread().isInterrupted())
                        break;
                    final Layer layer1 = layerRange.get(i - 1);
                    final Layer layer2 = layerRange.get(i);
                    final long t0 = System.currentTimeMillis();
                    features1.clear();
                    features2.clear();
                    final Rectangle box1 = null == fov ? layer1.getMinimalBoundingBox(Patch.class, true) : fov;
                    final Rectangle box2 = null == fov ? layer2.getMinimalBoundingBox(Patch.class, true) : fov;
                    /* calculate the common scale factor for both flat images */
                    final double scale = Math.min(1.0f, (double) p.sift.maxOctaveSize / (double) Math.max(box1.width, Math.max(box1.height, Math.max(box2.width, box2.height))));
                    // Reuse the filtered patch list of the previous iteration when available
                    final List<Patch> patches1;
                    if (null == previousPatches) {
                        patches1 = layer1.getAll(Patch.class);
                        if (null != filter) {
                            for (final Iterator<Patch> it = patches1.iterator(); it.hasNext(); ) {
                                if (!filter.accept(it.next()))
                                    it.remove();
                            }
                        }
                    } else {
                        patches1 = previousPatches;
                    }
                    final List<Patch> patches2 = layer2.getAll(Patch.class);
                    if (null != filter) {
                        for (final Iterator<Patch> it = patches2.iterator(); it.hasNext(); ) {
                            if (!filter.accept(it.next()))
                                it.remove();
                        }
                    }
                    // Render the two flat images and extract features concurrently,
                    // each task using its own SIFT instance (see note above).
                    final Future<ImageProcessor> fu1 = exec.submit(new Callable<ImageProcessor>() {

                        @Override
                        public ImageProcessor call() {
                            final ImageProcessor ip1 = loader.getFlatImage(layer1, box1, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches1, true).getProcessor();
                            ijSIFT1.extractFeatures(ip1, features1);
                            Utils.log(features1.size() + " features extracted in layer \"" + layer1.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
                            return ip1;
                        }
                    });
                    final Future<ImageProcessor> fu2 = exec.submit(new Callable<ImageProcessor>() {

                        @Override
                        public ImageProcessor call() {
                            final ImageProcessor ip2 = loader.getFlatImage(layer2, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches2, true).getProcessor();
                            ijSIFT2.extractFeatures(ip2, features2);
                            Utils.log(features2.size() + " features extracted in layer \"" + layer2.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
                            return ip2;
                        }
                    });
                    final ImageProcessor ip1, ip2;
                    try {
                        ip1 = fu1.get();
                        ip2 = fu2.get();
                    } catch (final Exception e) {
                        IJError.print(e);
                        return;
                    }
                    if (features1.size() > 0 && features2.size() > 0) {
                        final long t1 = System.currentTimeMillis();
                        candidates.clear();
                        FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
                        final AbstractAffineModel2D<?> model;
                        switch(p.expectedModelIndex) {
                            case 0:
                                model = new TranslationModel2D();
                                break;
                            case 1:
                                model = new RigidModel2D();
                                break;
                            case 2:
                                model = new SimilarityModel2D();
                                break;
                            case 3:
                                model = new AffineModel2D();
                                break;
                            default:
                                return;
                        }
                        boolean modelFound;
                        boolean again = false;
                        try {
                            // RANSAC, optionally re-run after rejecting an identity solution
                            do {
                                again = false;
                                modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                                if (modelFound && p.rejectIdentity) {
                                    final ArrayList<Point> points = new ArrayList<Point>();
                                    PointMatch.sourcePoints(inliers, points);
                                    if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                                        IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
                                        candidates.removeAll(inliers);
                                        inliers.clear();
                                        again = true;
                                    }
                                }
                            } while (again);
                        } catch (final NotEnoughDataPointsException e) {
                            modelFound = false;
                        }
                        if (modelFound) {
                            IJ.log("Model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - t1) + " ms");
                            final ImagePlus imp1 = new ImagePlus("target", ip1);
                            final ImagePlus imp2 = new ImagePlus("source", ip2);
                            final List<Point> sourcePoints = new ArrayList<Point>();
                            final List<Point> targetPoints = new ArrayList<Point>();
                            PointMatch.sourcePoints(inliers, sourcePoints);
                            PointMatch.targetPoints(inliers, targetPoints);
                            // bUnwarpJ uses the point ROIs as landmark correspondences
                            imp2.setRoi(Util.pointsToPointRoi(sourcePoints));
                            imp1.setRoi(Util.pointsToPointRoi(targetPoints));
                            // Masks: non-zero pixels of the flat images
                            final ImageProcessor mask1 = ip1.duplicate();
                            mask1.threshold(1);
                            final ImageProcessor mask2 = ip2.duplicate();
                            mask2.threshold(1);
                            final Transformation warp = bUnwarpJ_.computeTransformationBatch(imp2, imp1, mask2, mask1, elasticParam);
                            final CubicBSplineTransform transf = new CubicBSplineTransform();
                            transf.set(warp.getIntervals(), warp.getDirectDeformationCoefficientsX(), warp.getDirectDeformationCoefficientsY(), imp2.getWidth(), imp2.getHeight());
                            final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
                            // Transform desired patches only
                            for (final Patch patch : patches2) {
                                try {
                                    final Rectangle pbox = patch.getCoordinateTransformBoundingBox();
                                    final AffineTransform at = patch.getAffineTransform();
                                    final AffineTransform pat = new AffineTransform();
                                    pat.scale(scale, scale);
                                    pat.translate(-box2.x, -box2.y);
                                    pat.concatenate(at);
                                    pat.translate(-pbox.x, -pbox.y);
                                    final mpicbg.trakem2.transform.AffineModel2D toWorld = new mpicbg.trakem2.transform.AffineModel2D();
                                    toWorld.set(pat);
                                    final CoordinateTransformList<CoordinateTransform> ctl = new CoordinateTransformList<CoordinateTransform>();
                                    // move the patch into the global space where bUnwarpJ calculated the transformation
                                    ctl.add(toWorld);
                                    // Apply non-linear transformation
                                    ctl.add(transf);
                                    // move it back
                                    ctl.add(toWorld.createInverse());
                                    patch.appendCoordinateTransform(ctl);
                                    fus.add(patch.updateMipMaps());
                                    // Compensate for offset between boxes
                                    final AffineTransform offset = new AffineTransform();
                                    offset.translate(box1.x - box2.x, box1.y - box2.y);
                                    offset.concatenate(at);
                                    patch.setAffineTransform(offset);
                                } catch (final Exception e) {
                                    e.printStackTrace();
                                }
                            }
                            // await regeneration of all mipmaps
                            Utils.wait(fus);
                            Display.repaint(layer2);
                        } else
                            // FIX: elapsed time was computed against the progress counter 's'
                            // (an int index, not a timestamp); use the matching start time 't1'.
                            IJ.log("No model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - t1) + " ms");
                    }
                    IJ.showProgress(++s, layerRange.size());
                    // for next iteration
                    previousPatches = patches2;
                }
            } finally {
                exec.shutdown();
            }
            if (propagateTransform)
                Utils.log("Propagation not implemented yet for non-linear layer alignment.");
        /* // CANNOT be done (at least not trivially:
		 * //an appropriate "scale" cannot be computed, and the box2 is part of the spline computation.
		if ( propagateTransform && null != lastTransform )
		{
			for (final Layer la : l.getParent().getLayers(last > first ? last +1 : first -1, last > first ? l.getParent().size() -1 : 0)) {
				// Transform visible patches only
				final Rectangle box2 = la.getMinimalBoundingBox( Patch.class, true );
				for ( final Displayable disp : la.getDisplayables( Patch.class, true ) )
				{
					// ...
				}
			}
		}
		*/
        }
    });
// end of transformPatchesAndVectorData
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) Transformation(bunwarpj.Transformation) CoordinateTransformList(mpicbg.trakem2.transform.CoordinateTransformList) ArrayList(java.util.ArrayList) Rectangle(java.awt.Rectangle) Loader(ini.trakem2.persistence.Loader) Feature(mpicbg.imagefeatures.Feature) Callable(java.util.concurrent.Callable) ImageProcessor(ij.process.ImageProcessor) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) Iterator(java.util.Iterator) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) CoordinateTransformList(mpicbg.trakem2.transform.CoordinateTransformList) ArrayList(java.util.ArrayList) List(java.util.List) SimilarityModel2D(mpicbg.models.SimilarityModel2D) Point(mpicbg.models.Point) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) Layer(ini.trakem2.display.Layer) ImagePlus(ij.ImagePlus) NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) IllDefinedDataPointsException(mpicbg.models.IllDefinedDataPointsException) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) PointMatch(mpicbg.models.PointMatch) ExecutorService(java.util.concurrent.ExecutorService) Collection(java.util.Collection) Future(java.util.concurrent.Future) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) CubicBSplineTransform(bunwarpj.trakem2.transform.CubicBSplineTransform) Patch(ini.trakem2.display.Patch) CoordinateTransform(mpicbg.trakem2.transform.CoordinateTransform)

Example 7 with TranslationModel2D

use of mpicbg.trakem2.transform.TranslationModel2D in project TrakEM2 by trakem2.

Defined in class StitchingTEM, method montageWithPhaseCorrelation.

/**
 * Perform montage based on phase correlation: builds one translation tile per
 * patch, adds pairwise matches between overlapping (non-diagonal) patches from
 * phase-correlation offsets, then optimizes the tile configuration and writes
 * the resulting affine back into each patch.
 * @param col collection of patches; ignored when null or empty
 * @param param phase correlation parameters
 */
public static void montageWithPhaseCorrelation(final Collection<Patch> col, final PhaseCorrelationParam param) {
    if (null == col || col.size() < 1)
        return;
    final ArrayList<Patch> al = new ArrayList<Patch>(col);
    final ArrayList<AbstractAffineTile2D<?>> tiles = new ArrayList<AbstractAffineTile2D<?>>();
    final ArrayList<AbstractAffineTile2D<?>> fixed_tiles = new ArrayList<AbstractAffineTile2D<?>>();
    for (final Patch p : al) {
        // Pre-check: just a warning for patches whose transform is not a pure translation
        // (FIX: aff_type was computed but unused; the switch re-invoked getType()).
        final int aff_type = p.getAffineTransform().getType();
        switch(aff_type) {
            case AffineTransform.TYPE_IDENTITY:
            case AffineTransform.TYPE_TRANSLATION:
                // ok
                break;
            default:
                Utils.log2("WARNING: patch with a non-translation transform: " + p);
                break;
        }
        // create tiles
        final TranslationTile2D tile = new TranslationTile2D(new TranslationModel2D(), p);
        tiles.add(tile);
        if (p.isLocked2()) {
            Utils.log("Added fixed (locked) tile " + p);
            fixed_tiles.add(tile);
        }
    }
    // Get acceptable values
    double cc_scale = param.cc_scale;
    if (cc_scale < 0 || cc_scale > 1) {
        Utils.log("Unacceptable cc_scale of " + param.cc_scale + ". Using 1 instead.");
        cc_scale = 1;
    }
    float overlap = param.overlap;
    if (overlap < 0 || overlap > 1) {
        Utils.log("Unacceptable overlap of " + param.overlap + ". Using 1 instead.");
        overlap = 1;
    }
    // For every pair of patches whose bounding boxes intersect, correlate and
    // register the offset as a tile-to-tile match.
    for (int i = 0; i < al.size(); i++) {
        final Patch p1 = al.get(i);
        final Rectangle r1 = p1.getBoundingBox();
        // find overlapping, add as connections
        for (int j = i + 1; j < al.size(); j++) {
            if (Thread.currentThread().isInterrupted())
                return;
            final Patch p2 = al.get(j);
            final Rectangle r2 = p2.getBoundingBox();
            if (r1.intersects(r2)) {
                // Skip if it's a diagonal overlap
                final int dx = Math.abs(r1.x - r2.x);
                final int dy = Math.abs(r1.y - r2.y);
                if (dx > r1.width / 2 && dy > r1.height / 2) {
                    // skip diagonal match
                    Utils.log2("Skipping diagonal overlap between " + p1 + " and " + p2);
                    continue;
                }
                // rough memory estimate for the correlation of one pair
                p1.getProject().getLoader().releaseToFit((long) (p1.getWidth() * p1.getHeight() * 25));
                final double[] R;
                if (1 == overlap) {
                    R = correlate(p1, p2, overlap, cc_scale, TOP_BOTTOM, 0, 0, param.min_R);
                    if (SUCCESS == R[2]) {
                        addMatches(tiles.get(i), tiles.get(j), R[0], R[1]);
                    }
                } else {
                    // Correlate along the axis of the closest overlap; the patch
                    // order given to correlate() depends on the direction.
                    switch(getClosestOverlapLocation(p1, p2)) {
                        case // p1 overlaps p2 from the left
                        0:
                            R = correlate(p1, p2, overlap, cc_scale, LEFT_RIGHT, 0, 0, param.min_R);
                            if (SUCCESS == R[2]) {
                                addMatches(tiles.get(i), tiles.get(j), R[0], R[1]);
                            }
                            break;
                        case // p1 overlaps p2 from the top
                        1:
                            R = correlate(p1, p2, overlap, cc_scale, TOP_BOTTOM, 0, 0, param.min_R);
                            if (SUCCESS == R[2]) {
                                addMatches(tiles.get(i), tiles.get(j), R[0], R[1]);
                            }
                            break;
                        case // p1 overlaps p2 from the right
                        2:
                            R = correlate(p2, p1, overlap, cc_scale, LEFT_RIGHT, 0, 0, param.min_R);
                            if (SUCCESS == R[2]) {
                                addMatches(tiles.get(j), tiles.get(i), R[0], R[1]);
                            }
                            break;
                        case // p1 overlaps p2 from the bottom
                        3:
                            R = correlate(p2, p1, overlap, cc_scale, TOP_BOTTOM, 0, 0, param.min_R);
                            if (SUCCESS == R[2]) {
                                addMatches(tiles.get(j), tiles.get(i), R[0], R[1]);
                            }
                            break;
                        default:
                            Utils.log("Unknown overlap direction!");
                            continue;
                    }
                }
            }
        }
    }
    // Optionally hide or delete tiles that ended up with no matches at all
    if (param.remove_disconnected || param.hide_disconnected) {
        for (final Iterator<AbstractAffineTile2D<?>> it = tiles.iterator(); it.hasNext(); ) {
            final AbstractAffineTile2D<?> t = it.next();
            if (null != t.getMatches() && t.getMatches().isEmpty()) {
                if (param.hide_disconnected)
                    t.getPatch().setVisible(false);
                else if (param.remove_disconnected)
                    t.getPatch().remove(false);
                it.remove();
            }
        }
    }
    // Optimize tile configuration by removing bad matches
    optimizeTileConfiguration(tiles, fixed_tiles, param);
    for (final AbstractAffineTile2D<?> t : tiles) t.getPatch().setAffineTransform(t.getModel().createAffine());
    try {
        Display.repaint(al.get(0).getLayer());
    } catch (final Exception ignored) {
        // best-effort repaint: the layer may be gone or tiles removed above
    }
}
Also used : TranslationTile2D(mpicbg.trakem2.align.TranslationTile2D) AbstractAffineTile2D(mpicbg.trakem2.align.AbstractAffineTile2D) ArrayList(java.util.ArrayList) Rectangle(java.awt.Rectangle) Point(mpicbg.models.Point) TranslationModel2D(mpicbg.models.TranslationModel2D) Patch(ini.trakem2.display.Patch)

Example 8 with TranslationModel2D

use of mpicbg.trakem2.transform.TranslationModel2D in project TrakEM2 by trakem2.

Defined in class Align, method alignLayersLinearly.

/**
 * Align a range of layers by accumulating pairwise alignments of contiguous layers.
 *
 * @param layers The range of layers to align pairwise.
 * @param numThreads The number of threads to use.
 * @param filter The {@link Filter} to decide which {@link Patch} instances to use in each {@link Layer}. Can be null.
 */
public static final void alignLayersLinearly(final List<Layer> layers, final int numThreads, final Filter<Patch> filter) {
    // NOTE(review): numThreads is not used anywhere in this method — confirm whether
    // it was meant to drive a thread pool here.
    // Cap the SIFT octave size; note this mutates the shared static Align.param
    // before it is cloned below.
    param.sift.maxOctaveSize = 1600;
    if (!param.setup("Align layers linearly"))
        return;
    // Common scale factor so the flat image of the whole LayerSet fits within maxOctaveSize.
    final Rectangle box = layers.get(0).getParent().getMinimalBoundingBox(Patch.class);
    final double scale = Math.min(1.0, Math.min((double) param.sift.maxOctaveSize / box.width, (double) param.sift.maxOctaveSize / box.height));
    final Param p = param.clone();
    // maxEpsilon is specified in world pixels; scale it to the downsampled images.
    p.maxEpsilon *= scale;
    final FloatArray2DSIFT sift = new FloatArray2DSIFT(p.sift);
    final SIFT ijSIFT = new SIFT(sift);
    Rectangle box1 = null;
    Rectangle box2 = null;
    final Collection<Feature> features1 = new ArrayList<Feature>();
    final Collection<Feature> features2 = new ArrayList<Feature>();
    final List<PointMatch> candidates = new ArrayList<PointMatch>();
    final List<PointMatch> inliers = new ArrayList<PointMatch>();
    // Accumulated affine mapping the current layer into the space of the first aligned one.
    final AffineTransform a = new AffineTransform();
    int i = 0;
    for (final Layer l : layers) {
        long s = System.currentTimeMillis();
        // Rotate the buffers: the previous layer's features/box become the reference.
        features1.clear();
        features1.addAll(features2);
        features2.clear();
        final Rectangle box3 = l.getMinimalBoundingBox(Patch.class);
        // Skip empty layers; their boxes/features are not rotated in.
        if (box3 == null)
            continue;
        box1 = box2;
        box2 = box3;
        final List<Patch> patches = l.getAll(Patch.class);
        if (null != filter) {
            // Keep only the patches accepted by the filter.
            for (final Iterator<Patch> it = patches.iterator(); it.hasNext(); ) {
                if (!filter.accept(it.next()))
                    it.remove();
            }
        }
        // Extract features from a downsampled flat image of this layer.
        ijSIFT.extractFeatures(l.getProject().getLoader().getFlatImage(l, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches, true).getProcessor(), features2);
        Utils.log(features2.size() + " features extracted in layer \"" + l.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
        // Empty features1 means this is the first non-empty layer: nothing to match against yet.
        if (features1.size() > 0) {
            s = System.currentTimeMillis();
            candidates.clear();
            FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
            // Model type chosen by the user in the setup dialog.
            final AbstractAffineModel2D<?> model;
            switch(p.expectedModelIndex) {
                case 0:
                    model = new TranslationModel2D();
                    break;
                case 1:
                    model = new RigidModel2D();
                    break;
                case 2:
                    model = new SimilarityModel2D();
                    break;
                case 3:
                    model = new AffineModel2D();
                    break;
                default:
                    return;
            }
            boolean modelFound;
            boolean again = false;
            try {
                // RANSAC; when rejectIdentity is set, identity solutions are discarded
                // and the search is repeated on the remaining candidates.
                do {
                    again = false;
                    modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                    if (modelFound && p.rejectIdentity) {
                        final ArrayList<Point> points = new ArrayList<Point>();
                        PointMatch.sourcePoints(inliers, points);
                        if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                            Utils.log("Identity transform for " + inliers.size() + " matches rejected.");
                            candidates.removeAll(inliers);
                            inliers.clear();
                            again = true;
                        }
                    }
                } while (again);
            } catch (final NotEnoughDataPointsException e) {
                modelFound = false;
            }
            if (modelFound) {
                Utils.log("Model found for layer \"" + l.getTitle() + "\" and its predecessor:\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - s) + " ms");
                // b maps this layer onto its predecessor in world coordinates:
                // world -> scaled box2 space, apply the model, back to world at box1.
                final AffineTransform b = new AffineTransform();
                b.translate(box1.x, box1.y);
                b.scale(1.0f / scale, 1.0f / scale);
                b.concatenate(model.createAffine());
                b.scale(scale, scale);
                b.translate(-box2.x, -box2.y);
                // Accumulate so each layer is carried into the space of the first one.
                a.concatenate(b);
                l.apply(Displayable.class, a);
                Display.repaint(l);
            } else {
                Utils.log("No model found for layer \"" + l.getTitle() + "\" and its predecessor:\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - s) + " ms");
                // Restart accumulation: subsequent layers align relative to this one.
                a.setToIdentity();
            }
        }
        IJ.showProgress(++i, layers.size());
    }
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) Rectangle(java.awt.Rectangle) ArrayList(java.util.ArrayList) Feature(mpicbg.imagefeatures.Feature) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) InterpolatedAffineModel2D(mpicbg.models.InterpolatedAffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) Point(mpicbg.models.Point) Layer(ini.trakem2.display.Layer) Point(mpicbg.models.Point) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) PointMatch(mpicbg.models.PointMatch) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) Patch(ini.trakem2.display.Patch)

Example 9 with TranslationModel2D

use of mpicbg.trakem2.transform.TranslationModel2D in project TrakEM2 by trakem2.

the class Align method alignTileCollections.

/**
 * Align two collections of tiles with a single affine transform estimated from
 * SIFT correspondences between flat images of both tile graphs; the patches of
 * {@code a} are transformed to register onto {@code b}.
 * @param p the alignment parameters
 * @param a the collection of tiles to be transformed
 * @param b the collection of tiles serving as the fixed reference
 */
/**
 * Align two collections of tiles: extracts SIFT features from flat images of
 * both tile graphs, estimates a single affine model with RANSAC, and applies
 * it to the patches of {@code a} so they register onto {@code b}.
 *
 * @param p the alignment parameters; cloned internally, with maxEpsilon scaled
 *          to the downsampled image space.
 * @param a the collection of tiles whose patches will be pre-transformed.
 * @param b the collection of tiles serving as the fixed reference.
 */
public static final void alignTileCollections(final Param p, final Collection<AbstractAffineTile2D<?>> a, final Collection<AbstractAffineTile2D<?>> b) {
    final ArrayList<Patch> pa = new ArrayList<Patch>();
    final ArrayList<Patch> pb = new ArrayList<Patch>();
    for (final AbstractAffineTile2D<?> t : a) pa.add(t.getPatch());
    for (final AbstractAffineTile2D<?> t : b) pb.add(t.getPatch());
    final Layer la = pa.iterator().next().getLayer();
    final Layer lb = pb.iterator().next().getLayer();
    final Rectangle boxA = Displayable.getBoundingBox(pa, null);
    final Rectangle boxB = Displayable.getBoundingBox(pb, null);
    // Common scale so both flat images fit within maxOctaveSize
    final double scale = Math.min(1.0, Math.min(Math.min((double) p.sift.maxOctaveSize / boxA.width, (double) p.sift.maxOctaveSize / boxA.height), Math.min((double) p.sift.maxOctaveSize / boxB.width, (double) p.sift.maxOctaveSize / boxB.height)));
    final Param pp = p.clone();
    // maxEpsilon is specified in world pixels; scale it to the downsampled images
    pp.maxEpsilon *= scale;
    final FloatArray2DSIFT sift = new FloatArray2DSIFT(pp.sift);
    final SIFT ijSIFT = new SIFT(sift);
    final Collection<Feature> featuresA = new ArrayList<Feature>();
    final Collection<Feature> featuresB = new ArrayList<Feature>();
    final List<PointMatch> candidates = new ArrayList<PointMatch>();
    final List<PointMatch> inliers = new ArrayList<PointMatch>();
    long s = System.currentTimeMillis();
    ijSIFT.extractFeatures(la.getProject().getLoader().getFlatImage(la, boxA, scale, 0xffffffff, ImagePlus.GRAY8, null, pa, true, Color.GRAY).getProcessor(), featuresA);
    Utils.log(featuresA.size() + " features extracted in graph A in layer \"" + la.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
    s = System.currentTimeMillis();
    ijSIFT.extractFeatures(lb.getProject().getLoader().getFlatImage(lb, boxB, scale, 0xffffffff, ImagePlus.GRAY8, null, pb, true, Color.GRAY).getProcessor(), featuresB);
    Utils.log(featuresB.size() + " features extracted in graph B in layer \"" + lb.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
    if (featuresA.size() > 0 && featuresB.size() > 0) {
        s = System.currentTimeMillis();
        FeatureTransform.matchFeatures(featuresA, featuresB, candidates, pp.rod);
        // FIX: the remainder of this method now consistently reads the scaled clone
        // 'pp'; previously filterRansac used the UNscaled p.maxEpsilon, silently
        // discarding the scaling applied to pp above.
        final AbstractAffineModel2D<?> model;
        switch(pp.expectedModelIndex) {
            case 0:
                model = new TranslationModel2D();
                break;
            case 1:
                model = new RigidModel2D();
                break;
            case 2:
                model = new SimilarityModel2D();
                break;
            case 3:
                model = new AffineModel2D();
                break;
            default:
                return;
        }
        boolean modelFound;
        boolean again = false;
        try {
            // RANSAC, optionally re-run after rejecting an identity solution
            do {
                again = false;
                modelFound = model.filterRansac(candidates, inliers, 1000, pp.maxEpsilon, pp.minInlierRatio, pp.minNumInliers, 3);
                if (modelFound && pp.rejectIdentity) {
                    final ArrayList<Point> points = new ArrayList<Point>();
                    PointMatch.sourcePoints(inliers, points);
                    if (Transforms.isIdentity(model, points, pp.identityTolerance)) {
                        Utils.log("Identity transform for " + inliers.size() + " matches rejected.");
                        candidates.removeAll(inliers);
                        inliers.clear();
                        again = true;
                    }
                }
            } while (again);
        } catch (final NotEnoughDataPointsException e) {
            modelFound = false;
        }
        if (modelFound) {
            Utils.log("Model found for graph A and B in layers \"" + la.getTitle() + "\" and \"" + lb.getTitle() + "\":\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - s) + " ms");
            // Map graph A onto graph B in world coordinates:
            // world -> scaled boxB space, apply the model, back to world at boxA.
            final AffineTransform at = new AffineTransform();
            at.translate(boxA.x, boxA.y);
            at.scale(1.0f / scale, 1.0f / scale);
            at.concatenate(model.createAffine());
            at.scale(scale, scale);
            at.translate(-boxB.x, -boxB.y);
            for (final Patch t : pa) t.preTransform(at, false);
            Display.repaint(la);
        } else
            Utils.log("No model found for graph A and B in layers \"" + la.getTitle() + "\" and \"" + lb.getTitle() + "\":\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - s) + " ms");
    }
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) ArrayList(java.util.ArrayList) Rectangle(java.awt.Rectangle) Feature(mpicbg.imagefeatures.Feature) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) InterpolatedAffineModel2D(mpicbg.models.InterpolatedAffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) Point(mpicbg.models.Point) Layer(ini.trakem2.display.Layer) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) PointMatch(mpicbg.models.PointMatch) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) Patch(ini.trakem2.display.Patch)

Example 10 with TranslationModel2D — use of mpicbg.trakem2.transform.TranslationModel2D in project TrakEM2, from class AlignLayersTask, method alignLayersLinearlyJob.

/**
 * Aligns the layers in the range {@code [first, last]} of {@code layerSet} with a linear
 * (affine-family) transform per layer: each layer is rendered as a scaled flat image, SIFT
 * features are extracted and matched against the previous layer's features, a model is
 * estimated with RANSAC, and the resulting per-pair transform is accumulated into {@code a}
 * and applied to the layer's patches.
 *
 * @param layerSet           the LayerSet whose layers are aligned
 * @param first              index of the first layer of the range (order may be reversed)
 * @param last               index of the last layer of the range
 * @param propagateTransform if true, apply the final accumulated transform to the layers
 *                           beyond the range (after {@code last} when ascending, before it
 *                           when descending)
 * @param fov                optional fixed field of view; when null, each layer's minimal
 *                           bounding box of visible patches is used instead
 * @param filter             optional patch filter; patches it rejects are excluded
 */
public static final void alignLayersLinearlyJob(final LayerSet layerSet, final int first, final int last, final boolean propagateTransform, final Rectangle fov, final Filter<Patch> filter) {
    // will reverse order if necessary
    final List<Layer> layerRange = layerSet.getLayers(first, last);
    final Align.Param p = Align.param.clone();
    // find the first non-empty layer, and remove all empty layers
    Rectangle box = fov;
    for (final Iterator<Layer> it = layerRange.iterator(); it.hasNext(); ) {
        final Layer la = it.next();
        if (!la.contains(Patch.class, true)) {
            it.remove();
            continue;
        }
        if (null == box) {
            // The first layer:
            // Only for visible patches
            box = la.getMinimalBoundingBox(Patch.class, true);
        }
    }
    if (0 == layerRange.size()) {
        Utils.log("All layers in range are empty!");
        return;
    }
    /* do not work if there is only one layer selected */
    if (layerRange.size() < 2)
        return;
    // Scale the flat images down so neither dimension exceeds the SIFT max octave size.
    final double scale = Math.min(1.0, Math.min((double) p.sift.maxOctaveSize / (double) box.width, (double) p.sift.maxOctaveSize / (double) box.height));
    // Distance thresholds are expressed in scaled-image pixels from here on.
    p.maxEpsilon *= scale;
    p.identityTolerance *= scale;
    // Utils.log2("scale: " + scale + "  maxOctaveSize: " + p.sift.maxOctaveSize + "  box: " + box.width + "," + box.height);
    final FloatArray2DSIFT sift = new FloatArray2DSIFT(p.sift);
    final SIFT ijSIFT = new SIFT(sift);
    Rectangle box1 = fov;
    Rectangle box2 = fov;
    final Collection<Feature> features1 = new ArrayList<Feature>();
    final Collection<Feature> features2 = new ArrayList<Feature>();
    final List<PointMatch> candidates = new ArrayList<PointMatch>();
    final List<PointMatch> inliers = new ArrayList<PointMatch>();
    // Accumulated transform from the first layer of the range to the current one.
    final AffineTransform a = new AffineTransform();
    // Progress counter (number of layers processed so far).
    int s = 0;
    for (final Layer layer : layerRange) {
        if (Thread.currentThread().isInterrupted())
            return;
        final long t0 = System.currentTimeMillis();
        // Shift the previous layer's features into features1, freeing features2 for this layer.
        features1.clear();
        features1.addAll(features2);
        features2.clear();
        final Rectangle box3 = layer.getMinimalBoundingBox(Patch.class, true);
        // skipping empty layer
        if (box3 == null || (box3.width == 0 && box3.height == 0))
            continue;
        box1 = null == fov ? box2 : fov;
        box2 = null == fov ? box3 : fov;
        final List<Patch> patches = layer.getAll(Patch.class);
        if (null != filter) {
            for (final Iterator<Patch> it = patches.iterator(); it.hasNext(); ) {
                if (!filter.accept(it.next()))
                    it.remove();
            }
        }
        final ImageProcessor flatImage = layer.getProject().getLoader().getFlatImage(layer, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches, true).getProcessor();
        ijSIFT.extractFeatures(flatImage, features2);
        IJ.log(features2.size() + " features extracted in layer \"" + layer.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
        // Only match when there is a predecessor layer (features1 empty on the first pass).
        if (features1.size() > 0) {
            final long t1 = System.currentTimeMillis();
            candidates.clear();
            FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
            // Model used for RANSAC filtering of the candidates.
            final AbstractAffineModel2D<?> model;
            switch(p.expectedModelIndex) {
                case 0:
                    model = new TranslationModel2D();
                    break;
                case 1:
                    model = new RigidModel2D();
                    break;
                case 2:
                    model = new SimilarityModel2D();
                    break;
                case 3:
                    model = new AffineModel2D();
                    break;
                default:
                    return;
            }
            // Model actually fitted to the inliers and applied to the patches.
            final AbstractAffineModel2D<?> desiredModel;
            switch(p.desiredModelIndex) {
                case 0:
                    desiredModel = new TranslationModel2D();
                    break;
                case 1:
                    desiredModel = new RigidModel2D();
                    break;
                case 2:
                    desiredModel = new SimilarityModel2D();
                    break;
                case 3:
                    desiredModel = new AffineModel2D();
                    break;
                default:
                    return;
            }
            boolean modelFound;
            boolean again = false;
            try {
                do {
                    again = false;
                    modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                    if (modelFound && p.rejectIdentity) {
                        // Optionally reject near-identity solutions and retry with the
                        // remaining candidates (guards against matching a layer to itself).
                        final ArrayList<Point> points = new ArrayList<Point>();
                        PointMatch.sourcePoints(inliers, points);
                        if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                            IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
                            candidates.removeAll(inliers);
                            inliers.clear();
                            again = true;
                        }
                    }
                } while (again);
                if (modelFound)
                    desiredModel.fit(inliers);
            } catch (final NotEnoughDataPointsException e) {
                modelFound = false;
            } catch (final IllDefinedDataPointsException e) {
                modelFound = false;
            }
            if (Thread.currentThread().isInterrupted())
                return;
            if (modelFound) {
                IJ.log("Model found for layer \"" + layer.getTitle() + "\" and its predecessor:\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - t1) + " ms");
                // Express the scaled-space model in world coordinates:
                // translate into box1, undo the scaling, apply the model, redo the
                // scaling, translate out of box2.
                final AffineTransform b = new AffineTransform();
                b.translate(box1.x, box1.y);
                b.scale(1.0f / scale, 1.0f / scale);
                b.concatenate(desiredModel.createAffine());
                b.scale(scale, scale);
                b.translate(-box2.x, -box2.y);
                a.concatenate(b);
                AlignTask.transformPatchesAndVectorData(patches, a);
                Display.repaint(layer);
            } else {
                // BUG FIX: elapsed time formerly computed as (System.currentTimeMillis() - s),
                // but here 's' is the int progress counter, not a start timestamp; use t1.
                IJ.log("No model found for layer \"" + layer.getTitle() + "\" and its predecessor:\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - t1) + " ms");
                // Restart the accumulated transform chain from this layer.
                a.setToIdentity();
            }
        }
        IJ.showProgress(++s, layerRange.size());
    }
    if (Thread.currentThread().isInterrupted())
        return;
    if (propagateTransform) {
        // Apply the final accumulated transform to the layers beyond the aligned range,
        // in the direction of traversal.
        if (last > first && last < layerSet.size() - 2)
            for (final Layer la : layerSet.getLayers(last + 1, layerSet.size() - 1)) {
                if (Thread.currentThread().isInterrupted())
                    return;
                AlignTask.transformPatchesAndVectorData(la, a);
            }
        else if (first > last && last > 0)
            for (final Layer la : layerSet.getLayers(0, last - 1)) {
                if (Thread.currentThread().isInterrupted())
                    return;
                AlignTask.transformPatchesAndVectorData(la, a);
            }
    }
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) Rectangle(java.awt.Rectangle) ArrayList(java.util.ArrayList) Feature(mpicbg.imagefeatures.Feature) ImageProcessor(ij.process.ImageProcessor) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) IllDefinedDataPointsException(mpicbg.models.IllDefinedDataPointsException) Point(mpicbg.models.Point) Layer(ini.trakem2.display.Layer) Point(mpicbg.models.Point) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) PointMatch(mpicbg.models.PointMatch) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) Patch(ini.trakem2.display.Patch)

Aggregations

ArrayList (java.util.ArrayList)11 Point (mpicbg.models.Point)10 AffineModel2D (mpicbg.models.AffineModel2D)9 PointMatch (mpicbg.models.PointMatch)9 SimilarityModel2D (mpicbg.models.SimilarityModel2D)9 Patch (ini.trakem2.display.Patch)8 Rectangle (java.awt.Rectangle)8 NotEnoughDataPointsException (mpicbg.models.NotEnoughDataPointsException)8 AbstractAffineModel2D (mpicbg.models.AbstractAffineModel2D)7 Layer (ini.trakem2.display.Layer)6 RigidModel2D (mpicbg.trakem2.transform.RigidModel2D)6 TranslationModel2D (mpicbg.trakem2.transform.TranslationModel2D)6 AffineTransform (java.awt.geom.AffineTransform)5 SIFT (mpicbg.ij.SIFT)5 Feature (mpicbg.imagefeatures.Feature)5 FloatArray2DSIFT (mpicbg.imagefeatures.FloatArray2DSIFT)5 TranslationModel2D (mpicbg.models.TranslationModel2D)5 RigidModel2D (mpicbg.models.RigidModel2D)3 Triple (mpicbg.trakem2.util.Triple)3 ImageProcessor (ij.process.ImageProcessor)2