
Example 11 with Displayable

use of ini.trakem2.display.Displayable in project TrakEM2 by trakem2.

the class AlignTask method alignGraphs.

private static final boolean alignGraphs(final Align.Param p, final Layer layer1, final Layer layer2, final Iterable<Tile<?>> graph1, final Iterable<Tile<?>> graph2) {
    final Align.Param cp = p.clone();
    final Selection selection1 = new Selection(null);
    for (final Tile<?> tile : graph1) selection1.add(((AbstractAffineTile2D<?>) tile).getPatch());
    final Rectangle graph1Box = selection1.getBox();
    final Selection selection2 = new Selection(null);
    for (final Tile<?> tile : graph2) selection2.add(((AbstractAffineTile2D<?>) tile).getPatch());
    final Rectangle graph2Box = selection2.getBox();
    final int maxLength = Math.max(Math.max(Math.max(graph1Box.width, graph1Box.height), graph2Box.width), graph2Box.height);
    // final double scale = ( double )cp.sift.maxOctaveSize / maxLength;
    /* rather ad hoc but we cannot just scale this to maxOctaveSize */
    cp.sift.maxOctaveSize = Math.min(maxLength, 2 * p.sift.maxOctaveSize);
    /* make sure that, despite rounding issues from scale, it is >= image size */
    final double scale = (double) (cp.sift.maxOctaveSize - 1) / maxLength;
    // cp.maxEpsilon *= scale;
    final FloatArray2DSIFT sift = new FloatArray2DSIFT(cp.sift);
    final SIFT ijSIFT = new SIFT(sift);
    final ArrayList<Feature> features1 = new ArrayList<Feature>();
    final ArrayList<Feature> features2 = new ArrayList<Feature>();
    final ArrayList<PointMatch> candidates = new ArrayList<PointMatch>();
    final ArrayList<PointMatch> inliers = new ArrayList<PointMatch>();
    long s = System.currentTimeMillis();
    ijSIFT.extractFeatures(layer1.getProject().getLoader().getFlatImage(layer1, graph1Box, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, selection1.getSelected(Patch.class), false, Color.GRAY).getProcessor(), features1);
    Utils.log(features1.size() + " features extracted for graphs in layer \"" + layer1.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
    ijSIFT.extractFeatures(layer2.getProject().getLoader().getFlatImage(layer2, graph2Box, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, selection2.getSelected(Patch.class), false, Color.GRAY).getProcessor(), features2);
    Utils.log(features2.size() + " features extracted for graphs in layer \"" + layer2.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
    boolean modelFound = false;
    if (features1.size() > 0 && features2.size() > 0) {
        s = System.currentTimeMillis();
        FeatureTransform.matchFeatures(features1, features2, candidates, cp.rod);
        final AbstractAffineModel2D<?> model;
        switch(cp.expectedModelIndex) {
            case 0:
                model = new TranslationModel2D();
                break;
            case 1:
                model = new RigidModel2D();
                break;
            case 2:
                model = new SimilarityModel2D();
                break;
            case 3:
                model = new AffineModel2D();
                break;
            default:
                return false;
        }
        boolean again = false;
        try {
            do {
                again = false;
                modelFound = model.filterRansac(candidates, inliers, 1000, cp.maxEpsilon, cp.minInlierRatio, cp.minNumInliers, 3);
                if (modelFound && cp.rejectIdentity) {
                    final ArrayList<Point> points = new ArrayList<Point>();
                    PointMatch.sourcePoints(inliers, points);
                    if (Transforms.isIdentity(model, points, cp.identityTolerance)) {
                        IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
                        candidates.removeAll(inliers);
                        inliers.clear();
                        again = true;
                    }
                }
            } while (again);
        } catch (final NotEnoughDataPointsException e) {
            modelFound = false;
        }
        if (modelFound) {
            Utils.log("Model found for graphs in layer \"" + layer1.getTitle() + "\" and \"" + layer2.getTitle() + "\":\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - s) + " ms");
            final AffineTransform b = new AffineTransform();
            b.translate(graph2Box.x, graph2Box.y);
            b.scale(1.0f / scale, 1.0f / scale);
            b.concatenate(model.createAffine());
            b.scale(scale, scale);
            b.translate(-graph1Box.x, -graph1Box.y);
            for (final Displayable d : selection1.getSelected(Patch.class)) d.getAffineTransform().preConcatenate(b);
            /* assign patch affine transformation to the tile model */
            for (final Tile<?> t : graph1) ((AbstractAffineTile2D<?>) t).initModel();
            Display.repaint(layer1);
        } else
            IJ.log("No model found for graphs in layer \"" + layer1.getTitle() + "\" and \"" + layer2.getTitle() + "\":\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - s) + " ms");
    }
    return modelFound;
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) Selection(ini.trakem2.display.Selection) Rectangle(java.awt.Rectangle) ArrayList(java.util.ArrayList) Feature(mpicbg.imagefeatures.Feature) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) Displayable(ini.trakem2.display.Displayable) Point(mpicbg.models.Point) Point(mpicbg.models.Point) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) PointMatch(mpicbg.models.PointMatch) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) Patch(ini.trakem2.display.Patch)
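
The box and scale bookkeeping in the final block above can be read as a conjugation: the model is estimated in the downscaled, box-relative space and is then wrapped with the scale factor and the two box offsets so that it acts on world coordinates of graph1's patches. A minimal standalone sketch of that composition (class and method names are illustrative only):

import java.awt.Rectangle;
import java.awt.geom.AffineTransform;

public final class WorldTransformSketch {

    /** Wrap a model fitted in downscaled, box-relative space so it applies to world coordinates. */
    static AffineTransform modelToWorld(final AffineTransform model, final Rectangle graph1Box, final Rectangle graph2Box, final double scale) {
        final AffineTransform b = new AffineTransform();
        // restore graph2's world offset (the model maps graph1 onto graph2)
        b.translate(graph2Box.x, graph2Box.y);
        // undo the downscaling used for feature extraction
        b.scale(1.0 / scale, 1.0 / scale);
        // the fitted model, expressed in the downscaled space
        b.concatenate(model);
        // downscale the incoming world coordinates ...
        b.scale(scale, scale);
        // ... after removing graph1's world offset
        b.translate(-graph1Box.x, -graph1Box.y);
        return b;
    }
}

In the method above, this composed transform is then preConcatenated onto the affine transform of every selected Patch of graph1.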

Example 12 with Displayable

use of ini.trakem2.display.Displayable in project TrakEM2 by trakem2.

the class AbstractAffineTile2D method pairOverlappingTiles.

/**
 * Search a {@link List} of {@link AbstractAffineTile2D Tiles} for
 * overlapping pairs.  Adds the pairs into tilePairs.
 *
 * @param tiles the tiles to be tested against each other
 * @param tilePairs the list into which overlapping pairs are collected
 * @param sloppyOverlapTest if true, bounding-box intersection counts as overlap; otherwise the exact per-tile intersection test is run
 */
public static final <AAT extends AbstractAffineTile2D<?>> void pairOverlappingTiles(final List<AAT> tiles, final List<AbstractAffineTile2D<?>[]> tilePairs, final boolean sloppyOverlapTest) {
    final HashSet<Patch> visited = new HashSet<Patch>();
    final ArrayList<AbstractAffineTile2D<?>[]> tilePairCandidates = new ArrayList<AbstractAffineTile2D<?>[]>();
    /* LUT for tiles */
    final Hashtable<Patch, AAT> lut = new Hashtable<Patch, AAT>();
    for (final AAT tile : tiles) lut.put(tile.patch, tile);
    for (int a = 0; a < tiles.size(); ++a) {
        final AbstractAffineTile2D<?> ta = tiles.get(a);
        final Patch pa = ta.patch;
        visited.add(pa);
        final Layer la = pa.getLayer();
        for (final Displayable d : la.getDisplayables(Patch.class, pa.getBoundingBox())) {
            final Patch pb = (Patch) d;
            if (visited.contains(pb))
                continue;
            final AAT tb = lut.get(pb);
            if (tb == null)
                continue;
            tilePairCandidates.add(new AbstractAffineTile2D<?>[] { ta, tb });
        }
    }
    if (sloppyOverlapTest)
        tilePairs.addAll(tilePairCandidates);
    else {
        // TODO Fix this and use what the user wants to provide
        final ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        final ArrayList<Future<?>> futures = new ArrayList<Future<?>>();
        for (final AbstractAffineTile2D<?>[] tatb : tilePairCandidates) {
            futures.add(exec.submit(new Runnable() {

                @Override
                public void run() {
                    if (tatb[0].intersects(tatb[1]))
                        synchronized (tilePairs) {
                            tilePairs.add(tatb);
                        }
                }
            }));
        }
        try {
            for (final Future<?> f : futures) f.get();
        } catch (final InterruptedException e) {
            e.printStackTrace();
        } catch (final ExecutionException e) {
            e.printStackTrace();
        } finally {
            exec.shutdown();
        }
    }
// // 1. Precompute the Area of each tile's Patch
// final HashMap< Patch, Pair< AAT, Area > > m = new HashMap< Patch, Pair< AAT, Area > >();
// // A lazy collection of pairs, computed in parallel ahead of consumption
// final Iterable< Pair< AAT, Area > > ts =
// new ParallelMapping< AAT, Pair< AAT, Area > >(
// tiles,
// new TaskFactory< AAT, Pair< AAT, Area > >() {
// @Override
// public Pair< AAT, Area > process( final AAT tile ) {
// return new Pair< AAT, Area >( tile, tile.patch.getArea() );
// }
// }
// );
// for ( final Pair< AAT, Area > t : ts) {
// m.put( t.a.patch, t );
// }
// 
// // 2. Obtain the list of tile pairs, at one list per tile
// final Iterable< List<AbstractAffineTile2D< ? >[]> > pairsList =
// new ParallelMapping< AAT, List<AbstractAffineTile2D< ? >[]> >(
// tiles,
// new TaskFactory< AAT, List<AbstractAffineTile2D< ? >[]> >() {
// @Override
// public List<AbstractAffineTile2D< ? >[]> process( final AAT ta ) {
// final Area a;
// synchronized (m) {
// a = m.get( ta.patch ).b;
// }
// final ArrayList<AbstractAffineTile2D< ? >[]> seq = new ArrayList<AbstractAffineTile2D< ? >[]>();
// for ( final Patch p : ta.patch.getLayer().getIntersecting( ta.patch, Patch.class ) ) {
// if ( p == ta.patch )
// continue;
// final Pair< AAT, Area > pair;
// synchronized (m) {
// pair =  m.get(p);
// }
// // Check that the Patch is among those to consider in the alignment
// if ( null != pair ) {
// // Check that the Patch visible pixels overlap -- may not if it has an alpha mask or coordinate transform
// if ( M.intersects( a, pair.b ) ) {
// seq.add( new AbstractAffineTile2D< ? >[]{ ta, pair.a });
// }
// }
// }
// return seq;
// }
// }
// );
// 
// for (final List<AbstractAffineTile2D<?>[]> list: pairsList) {
// tilePairs.addAll(list);
// }
}
Also used : Displayable(ini.trakem2.display.Displayable) Hashtable(java.util.Hashtable) ArrayList(java.util.ArrayList) Layer(ini.trakem2.display.Layer) Point(mpicbg.models.Point) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException) Patch(ini.trakem2.display.Patch) HashSet(java.util.HashSet)
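
A minimal usage sketch of the method above, assuming a list of tiles built beforehand (for instance with Align.tilesFromPatches, as in Example 13); import paths and the helper name are assumptions:

import java.util.ArrayList;
import java.util.List;

import ini.trakem2.utils.Utils;
import mpicbg.trakem2.align.AbstractAffineTile2D;

public final class OverlapPairsSketch {

    static List<AbstractAffineTile2D<?>[]> collectOverlappingPairs(final List<AbstractAffineTile2D<?>> tiles) {
        final List<AbstractAffineTile2D<?>[]> tilePairs = new ArrayList<AbstractAffineTile2D<?>[]>();
        // false: run the exact per-tile intersection test instead of accepting mere bounding-box overlap
        AbstractAffineTile2D.pairOverlappingTiles(tiles, tilePairs, false);
        for (final AbstractAffineTile2D<?>[] pair : tilePairs)
            Utils.log(pair[0].getPatch() + " overlaps " + pair[1].getPatch());
        return tilePairs;
    }
}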

Example 13 with Displayable

use of ini.trakem2.display.Displayable in project TrakEM2 by trakem2.

the class Align method alignSelectedPatches.

/**
 * Align a selection of {@link Patch patches} in a Layer.
 *
 * @param selection the selection whose Patches are to be aligned; its active Patch, if a Patch, is kept fixed
 * @param numThreads the number of threads to use
 */
public static final void alignSelectedPatches(final Selection selection, final int numThreads) {
    final List<Patch> patches = new ArrayList<Patch>();
    for (final Displayable d : selection.getSelected()) if (d instanceof Patch)
        patches.add((Patch) d);
    if (patches.size() < 2)
        return;
    if (!paramOptimize.setup("Align selected patches"))
        return;
    final List<AbstractAffineTile2D<?>> tiles = new ArrayList<AbstractAffineTile2D<?>>();
    final List<AbstractAffineTile2D<?>> fixedTiles = new ArrayList<AbstractAffineTile2D<?>>();
    final List<Patch> fixedPatches = new ArrayList<Patch>();
    final Displayable active = selection.getActive();
    if (active != null && active instanceof Patch)
        fixedPatches.add((Patch) active);
    tilesFromPatches(paramOptimize, patches, fixedPatches, tiles, fixedTiles);
    alignTiles(paramOptimize, tiles, fixedTiles, numThreads);
    for (final AbstractAffineTile2D<?> t : tiles) t.getPatch().setAffineTransform(t.getModel().createAffine());
}
Also used : Displayable(ini.trakem2.display.Displayable) ArrayList(java.util.ArrayList) Patch(ini.trakem2.display.Patch)
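
A hypothetical invocation sketch for the method above, assuming the usual Display accessors (Display.getFront(), getSelection()) and import paths; as the code shows, the selection's active Patch, if any, serves as the fixed tile:

import ini.trakem2.display.Display;
import mpicbg.trakem2.align.Align;

public final class AlignSelectionSketch {

    static void alignCurrentSelection() {
        // the currently active Display, if any (assumed accessor)
        final Display front = Display.getFront();
        if (null == front)
            return;
        // one thread per available processor
        Align.alignSelectedPatches(front.getSelection(), Runtime.getRuntime().availableProcessors());
    }
}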

Example 14 with Displayable

use of ini.trakem2.display.Displayable in project TrakEM2 by trakem2.

the class Align method alignLayer.

/**
 * Align all {@link Patch patches} in a Layer.
 *
 * @param layer the layer whose Patches are to be aligned
 * @param numThreads the number of threads to use
 */
public static final void alignLayer(final Layer layer, final int numThreads) {
    if (!paramOptimize.setup("Align patches in layer"))
        return;
    final List<Displayable> displayables = layer.getDisplayables(Patch.class);
    final List<Patch> patches = new ArrayList<Patch>();
    for (final Displayable d : displayables) patches.add((Patch) d);
    final List<AbstractAffineTile2D<?>> tiles = new ArrayList<AbstractAffineTile2D<?>>();
    final List<AbstractAffineTile2D<?>> fixedTiles = new ArrayList<AbstractAffineTile2D<?>>();
    tilesFromPatches(paramOptimize, patches, null, tiles, fixedTiles);
    alignTiles(paramOptimize, tiles, fixedTiles, numThreads);
    for (final AbstractAffineTile2D<?> t : tiles) t.getPatch().setAffineTransform(t.getModel().createAffine());
}
Also used : Displayable(ini.trakem2.display.Displayable) ArrayList(java.util.ArrayList) Patch(ini.trakem2.display.Patch)
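
A hypothetical driver for the method above that aligns the Patches of every layer of a LayerSet independently; note that each call runs paramOptimize.setup(...), so the parameter dialog is shown once per layer. The getLayers() accessor and import paths are assumptions:

import ini.trakem2.display.Layer;
import ini.trakem2.display.LayerSet;
import mpicbg.trakem2.align.Align;

public final class PerLayerAlignSketch {

    static void alignEachLayer(final LayerSet layerSet, final int numThreads) {
        // assumed to return all layers of the set, in z order
        for (final Layer layer : layerSet.getLayers())
            Align.alignLayer(layer, numThreads);
    }
}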

Example 15 with Displayable

use of ini.trakem2.display.Displayable in project TrakEM2 by trakem2.

the class AlignLayersTask method alignLayersNonLinearlyJob.

public static final void alignLayersNonLinearlyJob(final LayerSet layerSet, final int first, final int last, final boolean propagateTransform, final Rectangle fov, final Filter<Patch> filter) {
    // will reverse order if necessary
    final List<Layer> layerRange = layerSet.getLayers(first, last);
    final Align.Param p = Align.param.clone();
    // Remove all empty layers
    for (final Iterator<Layer> it = layerRange.iterator(); it.hasNext(); ) {
        if (!it.next().contains(Patch.class, true)) {
            it.remove();
        }
    }
    if (0 == layerRange.size()) {
        Utils.log("No layers in range show any images!");
        return;
    }
    /* does not work if there is only one layer selected */
    if (layerRange.size() < 2)
        return;
    final List<Patch> all = new ArrayList<Patch>();
    for (final Layer la : layerRange) {
        for (final Patch patch : la.getAll(Patch.class)) {
            if (null != filter && !filter.accept(patch))
                continue;
            all.add(patch);
        }
    }
    AlignTask.transformPatchesAndVectorData(all, new Runnable() {

        @Override
        public void run() {
            // ///
            final Loader loader = layerSet.getProject().getLoader();
            // SIFT is not thread-safe, so use two copies, one per layer and thread:
            final SIFT ijSIFT1 = new SIFT(new FloatArray2DSIFT(p.sift));
            final SIFT ijSIFT2 = new SIFT(new FloatArray2DSIFT(p.sift));
            final Collection<Feature> features1 = new ArrayList<Feature>();
            final Collection<Feature> features2 = new ArrayList<Feature>();
            final List<PointMatch> candidates = new ArrayList<PointMatch>();
            final List<PointMatch> inliers = new ArrayList<PointMatch>();
            final int n_proc = Runtime.getRuntime().availableProcessors() > 1 ? 2 : 1;
            final ExecutorService exec = Utils.newFixedThreadPool(n_proc, "alignLayersNonLinearly");
            List<Patch> previousPatches = null;
            int s = 0;
            for (int i = 1; i < layerRange.size(); ++i) {
                if (Thread.currentThread().isInterrupted())
                    break;
                final Layer layer1 = layerRange.get(i - 1);
                final Layer layer2 = layerRange.get(i);
                final long t0 = System.currentTimeMillis();
                features1.clear();
                features2.clear();
                final Rectangle box1 = null == fov ? layer1.getMinimalBoundingBox(Patch.class, true) : fov;
                final Rectangle box2 = null == fov ? layer2.getMinimalBoundingBox(Patch.class, true) : fov;
                /* calculate the common scale factor for both flat images */
                final double scale = Math.min(1.0f, (double) p.sift.maxOctaveSize / (double) Math.max(box1.width, Math.max(box1.height, Math.max(box2.width, box2.height))));
                final List<Patch> patches1;
                if (null == previousPatches) {
                    patches1 = layer1.getAll(Patch.class);
                    if (null != filter) {
                        for (final Iterator<Patch> it = patches1.iterator(); it.hasNext(); ) {
                            if (!filter.accept(it.next()))
                                it.remove();
                        }
                    }
                } else {
                    patches1 = previousPatches;
                }
                final List<Patch> patches2 = layer2.getAll(Patch.class);
                if (null != filter) {
                    for (final Iterator<Patch> it = patches2.iterator(); it.hasNext(); ) {
                        if (!filter.accept(it.next()))
                            it.remove();
                    }
                }
                final Future<ImageProcessor> fu1 = exec.submit(new Callable<ImageProcessor>() {

                    @Override
                    public ImageProcessor call() {
                        final ImageProcessor ip1 = loader.getFlatImage(layer1, box1, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches1, true).getProcessor();
                        ijSIFT1.extractFeatures(ip1, features1);
                        Utils.log(features1.size() + " features extracted in layer \"" + layer1.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
                        return ip1;
                    }
                });
                final Future<ImageProcessor> fu2 = exec.submit(new Callable<ImageProcessor>() {

                    @Override
                    public ImageProcessor call() {
                        final ImageProcessor ip2 = loader.getFlatImage(layer2, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches2, true).getProcessor();
                        ijSIFT2.extractFeatures(ip2, features2);
                        Utils.log(features2.size() + " features extracted in layer \"" + layer2.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
                        return ip2;
                    }
                });
                final ImageProcessor ip1, ip2;
                try {
                    ip1 = fu1.get();
                    ip2 = fu2.get();
                } catch (final Exception e) {
                    IJError.print(e);
                    return;
                }
                if (features1.size() > 0 && features2.size() > 0) {
                    final long t1 = System.currentTimeMillis();
                    candidates.clear();
                    FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
                    final AbstractAffineModel2D<?> model;
                    switch(p.expectedModelIndex) {
                        case 0:
                            model = new TranslationModel2D();
                            break;
                        case 1:
                            model = new RigidModel2D();
                            break;
                        case 2:
                            model = new SimilarityModel2D();
                            break;
                        case 3:
                            model = new AffineModel2D();
                            break;
                        default:
                            return;
                    }
                    boolean modelFound;
                    boolean again = false;
                    try {
                        do {
                            again = false;
                            modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                            if (modelFound && p.rejectIdentity) {
                                final ArrayList<Point> points = new ArrayList<Point>();
                                PointMatch.sourcePoints(inliers, points);
                                if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                                    IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
                                    candidates.removeAll(inliers);
                                    inliers.clear();
                                    again = true;
                                }
                            }
                        } while (again);
                    } catch (final NotEnoughDataPointsException e) {
                        modelFound = false;
                    }
                    if (modelFound) {
                        IJ.log("Model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n  correspondences  " + inliers.size() + " of " + candidates.size() + "\n  average residual error  " + (model.getCost() / scale) + " px\n  took " + (System.currentTimeMillis() - t1) + " ms");
                        final ImagePlus imp1 = new ImagePlus("target", ip1);
                        final ImagePlus imp2 = new ImagePlus("source", ip2);
                        final List<Point> sourcePoints = new ArrayList<Point>();
                        final List<Point> targetPoints = new ArrayList<Point>();
                        PointMatch.sourcePoints(inliers, sourcePoints);
                        PointMatch.targetPoints(inliers, targetPoints);
                        imp2.setRoi(Util.pointsToPointRoi(sourcePoints));
                        imp1.setRoi(Util.pointsToPointRoi(targetPoints));
                        final ImageProcessor mask1 = ip1.duplicate();
                        mask1.threshold(1);
                        final ImageProcessor mask2 = ip2.duplicate();
                        mask2.threshold(1);
                        final Transformation warp = bUnwarpJ_.computeTransformationBatch(imp2, imp1, mask2, mask1, elasticParam);
                        final CubicBSplineTransform transf = new CubicBSplineTransform();
                        transf.set(warp.getIntervals(), warp.getDirectDeformationCoefficientsX(), warp.getDirectDeformationCoefficientsY(), imp2.getWidth(), imp2.getHeight());
                        final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
                        // Transform desired patches only
                        for (final Patch patch : patches2) {
                            try {
                                final Rectangle pbox = patch.getCoordinateTransformBoundingBox();
                                final AffineTransform at = patch.getAffineTransform();
                                final AffineTransform pat = new AffineTransform();
                                pat.scale(scale, scale);
                                pat.translate(-box2.x, -box2.y);
                                pat.concatenate(at);
                                pat.translate(-pbox.x, -pbox.y);
                                final mpicbg.trakem2.transform.AffineModel2D toWorld = new mpicbg.trakem2.transform.AffineModel2D();
                                toWorld.set(pat);
                                final CoordinateTransformList<CoordinateTransform> ctl = new CoordinateTransformList<CoordinateTransform>();
                                // move the patch into the global space where bUnwarpJ calculated the transformation
                                ctl.add(toWorld);
                                // Apply non-linear transformation
                                ctl.add(transf);
                                // move it back
                                ctl.add(toWorld.createInverse());
                                patch.appendCoordinateTransform(ctl);
                                fus.add(patch.updateMipMaps());
                                // Compensate for offset between boxes
                                final AffineTransform offset = new AffineTransform();
                                offset.translate(box1.x - box2.x, box1.y - box2.y);
                                offset.concatenate(at);
                                patch.setAffineTransform(offset);
                            } catch (final Exception e) {
                                e.printStackTrace();
                            }
                        }
                        // await regeneration of all mipmaps
                        Utils.wait(fus);
                        Display.repaint(layer2);
                    } else
                        IJ.log("No model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n  correspondence candidates  " + candidates.size() + "\n  took " + (System.currentTimeMillis() - s) + " ms");
                }
                IJ.showProgress(++s, layerRange.size());
                // for next iteration
                previousPatches = patches2;
            }
            exec.shutdown();
            if (propagateTransform)
                Utils.log("Propagation not implemented yet for non-linear layer alignment.");
        /* // CANNOT be done (at least not trivially:
		 * //an appropriate "scale" cannot be computed, and the box2 is part of the spline computation.
		if ( propagateTransform && null != lastTransform )
		{
			for (final Layer la : l.getParent().getLayers(last > first ? last +1 : first -1, last > first ? l.getParent().size() -1 : 0)) {
				// Transform visible patches only
				final Rectangle box2 = la.getMinimalBoundingBox( Patch.class, true );
				for ( final Displayable disp : la.getDisplayables( Patch.class, true ) )
				{
					// ...
				}
			}
		}
		*/
        }
    });
// end of transformPatchesAndVectorData
}
Also used : NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) SIFT(mpicbg.ij.SIFT) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) Transformation(bunwarpj.Transformation) CoordinateTransformList(mpicbg.trakem2.transform.CoordinateTransformList) ArrayList(java.util.ArrayList) Rectangle(java.awt.Rectangle) Loader(ini.trakem2.persistence.Loader) Feature(mpicbg.imagefeatures.Feature) Callable(java.util.concurrent.Callable) ImageProcessor(ij.process.ImageProcessor) RigidModel2D(mpicbg.trakem2.transform.RigidModel2D) Iterator(java.util.Iterator) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) CoordinateTransformList(mpicbg.trakem2.transform.CoordinateTransformList) ArrayList(java.util.ArrayList) List(java.util.List) SimilarityModel2D(mpicbg.models.SimilarityModel2D) Point(mpicbg.models.Point) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) Layer(ini.trakem2.display.Layer) ImagePlus(ij.ImagePlus) NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) IllDefinedDataPointsException(mpicbg.models.IllDefinedDataPointsException) FloatArray2DSIFT(mpicbg.imagefeatures.FloatArray2DSIFT) PointMatch(mpicbg.models.PointMatch) ExecutorService(java.util.concurrent.ExecutorService) Collection(java.util.Collection) Future(java.util.concurrent.Future) AffineTransform(java.awt.geom.AffineTransform) TranslationModel2D(mpicbg.trakem2.transform.TranslationModel2D) CubicBSplineTransform(bunwarpj.trakem2.transform.CubicBSplineTransform) Patch(ini.trakem2.display.Patch) CoordinateTransform(mpicbg.trakem2.transform.CoordinateTransform)
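
A hypothetical invocation sketch for the job above: elastically align every layer of a LayerSet using each layer's own bounding box (fov = null) and no Patch filter, without propagating beyond the range. It assumes first/last are 0-based layer indices, that LayerSet.size() returns the number of layers, and the usual import paths:

import ini.trakem2.display.LayerSet;
import mpicbg.trakem2.align.AlignLayersTask;

public final class NonLinearLayerAlignSketch {

    static void alignAllLayersNonLinearly(final LayerSet layerSet) {
        final int first = 0;
        final int last = layerSet.size() - 1;
        AlignLayersTask.alignLayersNonLinearlyJob(layerSet, first, last,
                false, // propagateTransform: do nothing beyond the aligned range
                null,  // fov: use each layer's minimal bounding box
                null); // filter: accept every Patch
    }
}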

Aggregations

Displayable (ini.trakem2.display.Displayable)50 ArrayList (java.util.ArrayList)36 ZDisplayable (ini.trakem2.display.ZDisplayable)29 Patch (ini.trakem2.display.Patch)27 HashMap (java.util.HashMap)24 HashSet (java.util.HashSet)23 Layer (ini.trakem2.display.Layer)21 Rectangle (java.awt.Rectangle)17 ProjectThing (ini.trakem2.tree.ProjectThing)15 DBObject (ini.trakem2.persistence.DBObject)14 ImagePlus (ij.ImagePlus)13 LayerSet (ini.trakem2.display.LayerSet)12 Point (java.awt.Point)11 AffineTransform (java.awt.geom.AffineTransform)11 Map (java.util.Map)11 GenericDialog (ij.gui.GenericDialog)10 Collection (java.util.Collection)10 Worker (ini.trakem2.utils.Worker)9 Area (java.awt.geom.Area)9 TreeMap (java.util.TreeMap)9