
Example 1 with Triple

use of mpicbg.trakem2.util.Triple in project TrakEM2 by trakem2.
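All of the examples below treat Triple as a plain generic 3-tuple: it is constructed with three arguments and its fields a, b and c are read directly (for instance pair.a, pair.b and pair.c in Example 1). The following is a minimal sketch reconstructed from that usage alone; the real mpicbg.trakem2.util.Triple may differ in details such as mutability or extra helpers.

// Minimal 3-tuple sketch matching how Triple is used in the examples below
// (reconstructed from usage; not the actual mpicbg.trakem2.util.Triple source).
public class Triple<A, B, C> {

    public final A a;
    public final B b;
    public final C c;

    public Triple(final A a, final B b, final C c) {
        this.a = a;
        this.b = b;
        this.c = c;
    }
}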

In the class ElasticLayerAlignment, the method exec:

/**
 * @param param alignment parameters; {@code param.useTps} selects TPS transforms, otherwise MLS
 * @param project
 * @param layerRange
 * @param fixedLayers
 * @param emptyLayers
 * @param box
 * @param propagateTransformBefore
 * @param propagateTransformAfter
 * @param filter
 * @throws Exception
 */
@SuppressWarnings("deprecation")
public final void exec(final Param param, final Project project, final List<Layer> layerRange, final Set<Layer> fixedLayers, final Set<Layer> emptyLayers, final Rectangle box, final boolean propagateTransformBefore, final boolean propagateTransformAfter, final Filter<Patch> filter) throws Exception {
    final ExecutorService service = ExecutorProvider.getExecutorService(1.0f);
    /* create tiles and models for all layers */
    final ArrayList<Tile<?>> tiles = new ArrayList<Tile<?>>();
    for (int i = 0; i < layerRange.size(); ++i) {
        switch(param.desiredModelIndex) {
            case 0:
                tiles.add(new Tile<TranslationModel2D>(new TranslationModel2D()));
                break;
            case 1:
                tiles.add(new Tile<RigidModel2D>(new RigidModel2D()));
                break;
            case 2:
                tiles.add(new Tile<SimilarityModel2D>(new SimilarityModel2D()));
                break;
            case 3:
                tiles.add(new Tile<AffineModel2D>(new AffineModel2D()));
                break;
            case 4:
                tiles.add(new Tile<HomographyModel2D>(new HomographyModel2D()));
                break;
            default:
                return;
        }
    }
    /* collect all pairs of slices for which a model could be found */
    final ArrayList<Triple<Integer, Integer, AbstractModel<?>>> pairs = new ArrayList<Triple<Integer, Integer, AbstractModel<?>>>();
    if (!param.isAligned) {
        preAlignStack(param, project, layerRange, box, filter, pairs);
    } else {
        for (int i = 0; i < layerRange.size(); ++i) {
            final int range = Math.min(layerRange.size(), i + param.maxNumNeighbors + 1);
            for (int j = i + 1; j < range; ++j) {
                pairs.add(new Triple<Integer, Integer, AbstractModel<?>>(i, j, new TranslationModel2D()));
            }
        }
    }
    /* Elastic alignment */
    /* Initialization */
    final TileConfiguration initMeshes = new TileConfiguration();
    final int meshWidth = (int) Math.ceil(box.width * param.layerScale);
    final int meshHeight = (int) Math.ceil(box.height * param.layerScale);
    final ArrayList<SpringMesh> meshes = new ArrayList<SpringMesh>(layerRange.size());
    for (int i = 0; i < layerRange.size(); ++i) {
        meshes.add(new SpringMesh(param.resolutionSpringMesh, meshWidth, meshHeight, param.stiffnessSpringMesh, param.maxStretchSpringMesh * param.layerScale, param.dampSpringMesh));
    }
    // final int blockRadius = Math.max( 32, meshWidth / p.resolutionSpringMesh / 2 );
    final int blockRadius = Math.max(16, mpicbg.util.Util.roundPos(param.layerScale * param.blockRadius));
    Utils.log("effective block radius = " + blockRadius);
    final ArrayList<Future<BlockMatchPairCallable.BlockMatchResults>> futures = new ArrayList<Future<BlockMatchPairCallable.BlockMatchResults>>(pairs.size());
    for (final Triple<Integer, Integer, AbstractModel<?>> pair : pairs) {
        /* free memory */
        project.getLoader().releaseAll();
        final SpringMesh m1 = meshes.get(pair.a);
        final SpringMesh m2 = meshes.get(pair.b);
        final ArrayList<Vertex> v1 = m1.getVertices();
        final ArrayList<Vertex> v2 = m2.getVertices();
        final Layer layer1 = layerRange.get(pair.a);
        final Layer layer2 = layerRange.get(pair.b);
        final boolean layer1Fixed = fixedLayers.contains(layer1);
        final boolean layer2Fixed = fixedLayers.contains(layer2);
        if (!(layer1Fixed && layer2Fixed)) {
            final BlockMatchPairCallable bmpc = new BlockMatchPairCallable(pair, layerRange, layer1Fixed, layer2Fixed, filter, param, v1, v2, box);
            futures.add(service.submit(bmpc));
        }
    }
    for (final Future<BlockMatchPairCallable.BlockMatchResults> future : futures) {
        final BlockMatchPairCallable.BlockMatchResults results = future.get();
        final Collection<PointMatch> pm12 = results.pm12, pm21 = results.pm21;
        final Triple<Integer, Integer, AbstractModel<?>> pair = results.pair;
        final Tile<?> t1 = tiles.get(pair.a);
        final Tile<?> t2 = tiles.get(pair.b);
        final SpringMesh m1 = meshes.get(pair.a);
        final SpringMesh m2 = meshes.get(pair.b);
        final double springConstant = 1.0 / (pair.b - pair.a);
        final boolean layer1Fixed = results.layer1Fixed;
        final boolean layer2Fixed = results.layer2Fixed;
        if (layer1Fixed) {
            initMeshes.fixTile(t1);
        } else {
            if (param.useLocalSmoothnessFilter) {
                Utils.log(pair.a + " > " + pair.b + ": " + pm12.size() + " candidates passed local smoothness filter.");
            } else {
                Utils.log(pair.a + " > " + pair.b + ": found " + pm12.size() + " correspondences.");
            }
            for (final PointMatch pm : pm12) {
                final Vertex p1 = (Vertex) pm.getP1();
                final Vertex p2 = new Vertex(pm.getP2());
                p1.addSpring(p2, new Spring(0, springConstant));
                m2.addPassiveVertex(p2);
            }
            /*
             * adding Tiles to the initializing TileConfiguration; adding a Tile
             * multiple times does no harm because the TileConfiguration is
             * backed by a Set.
             */
            if (pm12.size() > pair.c.getMinNumMatches()) {
                initMeshes.addTile(t1);
                initMeshes.addTile(t2);
                t1.connect(t2, pm12);
            }
        }
        if (layer2Fixed)
            initMeshes.fixTile(t2);
        else {
            if (param.useLocalSmoothnessFilter) {
                Utils.log(pair.a + " < " + pair.b + ": " + pm21.size() + " candidates passed local smoothness filter.");
            } else {
                Utils.log(pair.a + " < " + pair.b + ": found " + pm21.size() + " correspondences.");
            }
            for (final PointMatch pm : pm21) {
                final Vertex p1 = (Vertex) pm.getP1();
                final Vertex p2 = new Vertex(pm.getP2());
                p1.addSpring(p2, new Spring(0, springConstant));
                m1.addPassiveVertex(p2);
            }
            /*
             * adding Tiles to the initializing TileConfiguration; adding a Tile
             * multiple times does no harm because the TileConfiguration is
             * backed by a Set.
             */
            if (pm21.size() > pair.c.getMinNumMatches()) {
                initMeshes.addTile(t1);
                initMeshes.addTile(t2);
                t2.connect(t1, pm21);
            }
        }
        Utils.log(pair.a + " <> " + pair.b + " spring constant = " + springConstant);
    }
    /* pre-align by optimizing a piecewise linear model */
    initMeshes.optimize(param.maxEpsilon * param.layerScale, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh);
    for (int i = 0; i < layerRange.size(); ++i) meshes.get(i).init(tiles.get(i).getModel());
    /* optimize the meshes */
    try {
        final long t0 = System.currentTimeMillis();
        Utils.log("Optimizing spring meshes...");
        if (param.useLegacyOptimizer) {
            Utils.log("  ...using legacy optimizer...");
            SpringMesh.optimizeMeshes2(meshes, param.maxEpsilon * param.layerScale, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
        } else {
            SpringMesh.optimizeMeshes(meshes, param.maxEpsilon * param.layerScale, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
        }
        Utils.log("Done optimizing spring meshes. Took " + (System.currentTimeMillis() - t0) + " ms");
    } catch (final NotEnoughDataPointsException e) {
        Utils.log("There were not enough data points to get the spring mesh optimizing.");
        e.printStackTrace();
        return;
    }
    /* translate relative to bounding box */
    for (final SpringMesh mesh : meshes) {
        for (final PointMatch pm : mesh.getVA().keySet()) {
            final Point p1 = pm.getP1();
            final Point p2 = pm.getP2();
            final double[] l = p1.getL();
            final double[] w = p2.getW();
            l[0] = l[0] / param.layerScale + box.x;
            l[1] = l[1] / param.layerScale + box.y;
            w[0] = w[0] / param.layerScale + box.x;
            w[1] = w[1] / param.layerScale + box.y;
        }
    }
    /* free memory */
    project.getLoader().releaseAll();
    final Layer first = layerRange.get(0);
    final List<Layer> layers = first.getParent().getLayers();
    final LayerSet ls = first.getParent();
    final Area infArea = AreaUtils.infiniteArea();
    final List<VectorData> vectorData = new ArrayList<VectorData>();
    for (final Layer layer : ls.getLayers()) {
        vectorData.addAll(Utils.castCollection(layer.getDisplayables(VectorData.class, false, true), VectorData.class, true));
    }
    vectorData.addAll(Utils.castCollection(ls.getZDisplayables(VectorData.class, true), VectorData.class, true));
    /* transfer layer transform into patch transforms and append to patches */
    if (propagateTransformBefore || propagateTransformAfter) {
        if (propagateTransformBefore) {
            final ThinPlateSplineTransform tps = makeTPS(meshes.get(0).getVA().keySet());
            final int firstLayerIndex = first.getParent().getLayerIndex(first.getId());
            for (int i = 0; i < firstLayerIndex; ++i) {
                applyTransformToLayer(layers.get(i), tps, filter);
                for (final VectorData vd : vectorData) {
                    vd.apply(layers.get(i), infArea, tps);
                }
            }
        }
        if (propagateTransformAfter) {
            final Layer last = layerRange.get(layerRange.size() - 1);
            final CoordinateTransform ct;
            if (param.useTps)
                ct = makeTPS(meshes.get(meshes.size() - 1).getVA().keySet());
            else {
                final MovingLeastSquaresTransform2 mls = new MovingLeastSquaresTransform2();
                mls.setMatches(meshes.get(meshes.size() - 1).getVA().keySet());
                ct = mls;
            }
            final int lastLayerIndex = last.getParent().getLayerIndex(last.getId());
            for (int i = lastLayerIndex + 1; i < layers.size(); ++i) {
                applyTransformToLayer(layers.get(i), ct, filter);
                for (final VectorData vd : vectorData) {
                    vd.apply(layers.get(i), infArea, ct);
                }
            }
        }
    }
    for (int l = 0; l < layerRange.size(); ++l) {
        IJ.showStatus("Applying transformation to patches ...");
        IJ.showProgress(0, layerRange.size());
        final Layer layer = layerRange.get(l);
        final ThinPlateSplineTransform tps = makeTPS(meshes.get(l).getVA().keySet());
        applyTransformToLayer(layer, tps, filter);
        for (final VectorData vd : vectorData) {
            vd.apply(layer, infArea, tps);
        }
        if (Thread.interrupted()) {
            Utils.log("Interrupted during applying transformations to patches.  No all patches have been updated.  Re-generate mipmaps manually.");
        }
        IJ.showProgress(l + 1, layerRange.size());
    }
    /* update patch mipmaps */
    final int firstLayerIndex;
    final int lastLayerIndex;
    if (propagateTransformBefore)
        firstLayerIndex = 0;
    else {
        firstLayerIndex = first.getParent().getLayerIndex(first.getId());
    }
    if (propagateTransformAfter)
        lastLayerIndex = layers.size() - 1;
    else {
        final Layer last = layerRange.get(layerRange.size() - 1);
        lastLayerIndex = last.getParent().getLayerIndex(last.getId());
    }
    for (int i = firstLayerIndex; i <= lastLayerIndex; ++i) {
        final Layer layer = layers.get(i);
        if (!(emptyLayers.contains(layer) || fixedLayers.contains(layer))) {
            for (final Patch patch : AlignmentUtils.filterPatches(layer, filter)) patch.updateMipMaps();
        }
    }
    Utils.log("Done.");
}
Also used : AbstractModel(mpicbg.models.AbstractModel) BlockMatchPairCallable(mpicbg.trakem2.align.concurrent.BlockMatchPairCallable) ArrayList(java.util.ArrayList) RigidModel2D(mpicbg.models.RigidModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) LayerSet(ini.trakem2.display.LayerSet) HomographyModel2D(mpicbg.models.HomographyModel2D) Spring(mpicbg.models.Spring) Triple(mpicbg.trakem2.util.Triple) Area(java.awt.geom.Area) TranslationModel2D(mpicbg.models.TranslationModel2D) TileConfiguration(mpicbg.models.TileConfiguration) CoordinateTransform(mpicbg.trakem2.transform.CoordinateTransform) Patch(ini.trakem2.display.Patch) NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) Vertex(mpicbg.models.Vertex) SpringMesh(mpicbg.models.SpringMesh) ThinPlateSplineTransform(mpicbg.trakem2.transform.ThinPlateSplineTransform) VectorData(ini.trakem2.display.VectorData) AffineModel2D(mpicbg.models.AffineModel2D) Tile(mpicbg.models.Tile) Point(mpicbg.models.Point) Layer(ini.trakem2.display.Layer) Point(mpicbg.models.Point) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) PointMatch(mpicbg.models.PointMatch) MovingLeastSquaresTransform2(mpicbg.trakem2.transform.MovingLeastSquaresTransform2) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future)
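Two details of the method above are easy to miss in the long listing: when the stack is already pre-aligned, every layer i is simply paired with up to param.maxNumNeighbors following layers, and each resulting spring is later weighted by 1.0 / (pair.b - pair.a), so block matches between distant layers pull more softly. Below is a standalone sketch of just that pairing and weighting logic, with hypothetical names (numLayers, maxNumNeighbors) standing in for the fields used above.

import java.util.ArrayList;
import java.util.List;

// Index-only sketch of the neighbor pairing used for an already-aligned stack.
final class NeighborPairingSketch {

    /** Returns {i, j} index pairs for all layer pairs within the neighbor window. */
    static List<int[]> pairIndices(final int numLayers, final int maxNumNeighbors) {
        final List<int[]> pairs = new ArrayList<int[]>();
        for (int i = 0; i < numLayers; ++i) {
            final int range = Math.min(numLayers, i + maxNumNeighbors + 1);
            for (int j = i + 1; j < range; ++j)
                pairs.add(new int[] { i, j });
        }
        return pairs;
    }

    /** Spring constant used above: nearer neighbors are coupled more stiffly. */
    static double springConstant(final int i, final int j) {
        return 1.0 / (j - i);
    }
}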

Example 2 with Triple

use of mpicbg.trakem2.util.Triple in project TrakEM2 by trakem2.

In the class ElasticMontage, the method exec:

@SuppressWarnings("deprecation")
public final void exec(final Param param, final List<Patch> patches, final Set<Patch> fixedPatches) throws Exception {
    /* free memory */
    patches.get(0).getProject().getLoader().releaseAll();
    /* create tiles and models for all patches */
    final ArrayList<AbstractAffineTile2D<?>> tiles = new ArrayList<AbstractAffineTile2D<?>>();
    final ArrayList<AbstractAffineTile2D<?>> fixedTiles = new ArrayList<AbstractAffineTile2D<?>>();
    Align.tilesFromPatches(param.po, patches, fixedPatches, tiles, fixedTiles);
    if (!param.isAligned) {
        Align.alignTiles(param.po, tiles, fixedTiles, param.tilesAreInPlace, param.maxNumThreads);
        /* Apply the estimated affine transform to patches */
        for (final AbstractAffineTile2D<?> t : tiles) t.getPatch().setAffineTransform(t.createAffine());
        Display.update();
    }
    /* generate tile pairs for all by now overlapping tiles */
    final ArrayList<AbstractAffineTile2D<?>[]> tilePairs = new ArrayList<AbstractAffineTile2D<?>[]>();
    AbstractAffineTile2D.pairOverlappingTiles(tiles, tilePairs);
    /* check if there was any pair */
    if (tilePairs.size() == 0) {
        Utils.log("Elastic montage could not find any overlapping patches after pre-montaging.");
        return;
    }
    Utils.log(tilePairs.size() + " pairs of patches will be block-matched...");
    /* make pairwise global models local */
    final ArrayList<Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>> pairs = new ArrayList<Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>>();
    /*
     * The following casting madness is necessary to get this code compiled
     * with Sun/Oracle Java 6 which otherwise generates an inconvertible
     * type exception.
     *
     * TODO Remove as soon as this bug is fixed in Sun/Oracle javac.
     */
    for (final AbstractAffineTile2D<?>[] pair : tilePairs) {
        final AbstractAffineModel2D<?> m;
        switch(param.po.desiredModelIndex) {
            case 0:
                final TranslationModel2D t = (TranslationModel2D) (Object) pair[1].getModel().createInverse();
                t.concatenate((TranslationModel2D) (Object) pair[0].getModel());
                m = t;
                break;
            case 1:
                final RigidModel2D r = (RigidModel2D) (Object) pair[1].getModel().createInverse();
                r.concatenate((RigidModel2D) (Object) pair[0].getModel());
                m = r;
                break;
            case 2:
                final SimilarityModel2D s = (SimilarityModel2D) (Object) pair[1].getModel().createInverse();
                s.concatenate((SimilarityModel2D) (Object) pair[0].getModel());
                m = s;
                break;
            case 3:
                final AffineModel2D a = (AffineModel2D) (Object) pair[1].getModel().createInverse();
                a.concatenate((AffineModel2D) (Object) pair[0].getModel());
                m = a;
                break;
            default:
                m = null;
        }
        pairs.add(new Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>(pair[0], pair[1], m));
    }
    /* Elastic alignment */
    /* Initialization */
    final double springTriangleHeightTwice = 2 * Math.sqrt(0.75 * param.springLengthSpringMesh * param.springLengthSpringMesh);
    final ArrayList<SpringMesh> meshes = new ArrayList<SpringMesh>(tiles.size());
    final HashMap<AbstractAffineTile2D<?>, SpringMesh> tileMeshMap = new HashMap<AbstractAffineTile2D<?>, SpringMesh>();
    for (final AbstractAffineTile2D<?> tile : tiles) {
        final double w = tile.getWidth();
        final double h = tile.getHeight();
        final int numX = Math.max(2, (int) Math.ceil(w / param.springLengthSpringMesh) + 1);
        final int numY = Math.max(2, (int) Math.ceil(h / springTriangleHeightTwice) + 1);
        final double wMesh = (numX - 1) * param.springLengthSpringMesh;
        final double hMesh = (numY - 1) * springTriangleHeightTwice;
        final SpringMesh mesh = new SpringMesh(numX, numY, wMesh, hMesh, param.stiffnessSpringMesh, param.maxStretchSpringMesh * param.bmScale, param.dampSpringMesh);
        meshes.add(mesh);
        tileMeshMap.put(tile, mesh);
    }
    // final int blockRadius = Math.max( 32, Util.roundPos( param.springLengthSpringMesh / 2 ) );
    final int blockRadius = Math.max(Util.roundPos(16 / param.bmScale), param.bmBlockRadius);
    /**
     * TODO set this to something larger than the largest error of the approximate model
     */
    final int searchRadius = param.bmSearchRadius;
    final AbstractModel<?> localSmoothnessFilterModel = mpicbg.trakem2.align.Util.createModel(param.bmLocalModelIndex);
    for (final Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform> pair : pairs) {
        final AbstractAffineTile2D<?> t1 = pair.a;
        final AbstractAffineTile2D<?> t2 = pair.b;
        final SpringMesh m1 = tileMeshMap.get(t1);
        final SpringMesh m2 = tileMeshMap.get(t2);
        final ArrayList<PointMatch> pm12 = new ArrayList<PointMatch>();
        final ArrayList<PointMatch> pm21 = new ArrayList<PointMatch>();
        final ArrayList<Vertex> v1 = m1.getVertices();
        final ArrayList<Vertex> v2 = m2.getVertices();
        final String patchName1 = patchName(t1.getPatch());
        final String patchName2 = patchName(t2.getPatch());
        final PatchImage pi1 = t1.getPatch().createTransformedImage();
        if (pi1 == null) {
            Utils.log("Patch `" + patchName1 + "' failed generating a transformed image.  Skipping...");
            continue;
        }
        final PatchImage pi2 = t2.getPatch().createTransformedImage();
        if (pi2 == null) {
            Utils.log("Patch `" + patchName2 + "' failed generating a transformed image.  Skipping...");
            continue;
        }
        final FloatProcessor fp1 = (FloatProcessor) pi1.target.convertToFloat();
        final ByteProcessor mask1 = pi1.getMask();
        final FloatProcessor fpMask1 = mask1 == null ? null : scaleByte(mask1);
        final FloatProcessor fp2 = (FloatProcessor) pi2.target.convertToFloat();
        final ByteProcessor mask2 = pi2.getMask();
        final FloatProcessor fpMask2 = mask2 == null ? null : scaleByte(mask2);
        if (!fixedTiles.contains(t1)) {
            BlockMatching.matchByMaximalPMCC(fp1, fp2, fpMask1, fpMask2, param.bmScale, pair.c, blockRadius, blockRadius, searchRadius, searchRadius, param.bmMinR, param.bmRodR, param.bmMaxCurvatureR, v1, pm12, new ErrorStatistic(1));
            if (param.bmUseLocalSmoothnessFilter) {
                Utils.log("`" + patchName1 + "' > `" + patchName2 + "': found " + pm12.size() + " correspondence candidates.");
                localSmoothnessFilterModel.localSmoothnessFilter(pm12, pm12, param.bmLocalRegionSigma, param.bmMaxLocalEpsilon, param.bmMaxLocalTrust);
                Utils.log("`" + patchName1 + "' > `" + patchName2 + "': " + pm12.size() + " candidates passed local smoothness filter.");
            } else {
                Utils.log("`" + patchName1 + "' > `" + patchName2 + "': found " + pm12.size() + " correspondences.");
            }
        } else {
            Utils.log("Skipping fixed patch `" + patchName1 + "'.");
        }
        if (!fixedTiles.contains(t2)) {
            BlockMatching.matchByMaximalPMCC(fp2, fp1, fpMask2, fpMask1, param.bmScale, pair.c.createInverse(), blockRadius, blockRadius, searchRadius, searchRadius, param.bmMinR, param.bmRodR, param.bmMaxCurvatureR, v2, pm21, new ErrorStatistic(1));
            if (param.bmUseLocalSmoothnessFilter) {
                Utils.log("`" + patchName1 + "' < `" + patchName2 + "': found " + pm21.size() + " correspondence candidates.");
                localSmoothnessFilterModel.localSmoothnessFilter(pm21, pm21, param.bmLocalRegionSigma, param.bmMaxLocalEpsilon, param.bmMaxLocalTrust);
                Utils.log("`" + patchName1 + "' < `" + patchName2 + "': " + pm21.size() + " candidates passed local smoothness filter.");
            } else {
                Utils.log("`" + patchName1 + "' < `" + patchName2 + "': found " + pm21.size() + " correspondences.");
            }
        } else {
            Utils.log("Skipping fixed patch `" + patchName2 + "'.");
        }
        for (final PointMatch pm : pm12) {
            final Vertex p1 = (Vertex) pm.getP1();
            final Vertex p2 = new Vertex(pm.getP2());
            p1.addSpring(p2, new Spring(0, 1.0f));
            m2.addPassiveVertex(p2);
        }
        for (final PointMatch pm : pm21) {
            final Vertex p1 = (Vertex) pm.getP1();
            final Vertex p2 = new Vertex(pm.getP2());
            p1.addSpring(p2, new Spring(0, 1.0f));
            m1.addPassiveVertex(p2);
        }
    }
    /* initialize */
    for (final Map.Entry<AbstractAffineTile2D<?>, SpringMesh> entry : tileMeshMap.entrySet()) entry.getValue().init(entry.getKey().getModel());
    /* optimize the meshes */
    try {
        final long t0 = System.currentTimeMillis();
        IJ.log("Optimizing spring meshes...");
        if (param.useLegacyOptimizer) {
            Utils.log("  ...using legacy optimizer...");
            SpringMesh.optimizeMeshes2(meshes, param.po.maxEpsilon, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
        } else {
            SpringMesh.optimizeMeshes(meshes, param.po.maxEpsilon, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
        }
        IJ.log("Done optimizing spring meshes. Took " + (System.currentTimeMillis() - t0) + " ms");
    } catch (final NotEnoughDataPointsException e) {
        Utils.log("There were not enough data points to get the spring mesh optimizing.");
        e.printStackTrace();
        return;
    }
    /* apply */
    for (final Map.Entry<AbstractAffineTile2D<?>, SpringMesh> entry : tileMeshMap.entrySet()) {
        final AbstractAffineTile2D<?> tile = entry.getKey();
        if (!fixedTiles.contains(tile)) {
            final Patch patch = tile.getPatch();
            final SpringMesh mesh = entry.getValue();
            final Set<PointMatch> matches = mesh.getVA().keySet();
            Rectangle box = patch.getCoordinateTransformBoundingBox();
            /* compensate for existing coordinate transform bounding box */
            for (final PointMatch pm : matches) {
                final Point p1 = pm.getP1();
                final double[] l = p1.getL();
                l[0] += box.x;
                l[1] += box.y;
            }
            final ThinPlateSplineTransform mlt = ElasticLayerAlignment.makeTPS(matches);
            patch.appendCoordinateTransform(mlt);
            box = patch.getCoordinateTransformBoundingBox();
            patch.getAffineTransform().setToTranslation(box.x, box.y);
            patch.updateInDatabase("transform");
            patch.updateBucket();
            patch.updateMipMaps();
        }
    }
    Utils.log("Done.");
}
Also used : ByteProcessor(ij.process.ByteProcessor) NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) Vertex(mpicbg.models.Vertex) SpringMesh(mpicbg.models.SpringMesh) ThinPlateSplineTransform(mpicbg.trakem2.transform.ThinPlateSplineTransform) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Rectangle(java.awt.Rectangle) RigidModel2D(mpicbg.models.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) FloatProcessor(ij.process.FloatProcessor) Point(mpicbg.models.Point) Spring(mpicbg.models.Spring) Point(mpicbg.models.Point) Triple(mpicbg.trakem2.util.Triple) PointMatch(mpicbg.models.PointMatch) PatchImage(ini.trakem2.display.Patch.PatchImage) InvertibleCoordinateTransform(mpicbg.models.InvertibleCoordinateTransform) ErrorStatistic(mpicbg.models.ErrorStatistic) TranslationModel2D(mpicbg.models.TranslationModel2D) HashMap(java.util.HashMap) Map(java.util.Map) Patch(ini.trakem2.display.Patch)
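The switch in the loop above builds, for every overlapping tile pair, a local transform pair.c from the two tiles' global models by inverting tile B's model and concatenating tile A's model onto it; conceptually this is "B inverse composed with A", a rough mapping from tile A's local coordinates into tile B's local coordinates that seeds BlockMatching.matchByMaximalPMCC. The following is a minimal sketch of that composition for the affine case, using only the createInverse and concatenate calls that appear above; the explicit cast mirrors the casting workaround noted in the source comments.

import mpicbg.models.AffineModel2D;

// Sketch of the pairwise "local model" construction above for the affine case:
// invert tile B's model and concatenate tile A's model onto it.
final class RelativeModelSketch {

    static AffineModel2D relativeModel(final AffineModel2D a, final AffineModel2D b) {
        final AffineModel2D m = (AffineModel2D) b.createInverse(); // B^-1
        m.concatenate(a);                                          // B^-1 composed with A
        return m;
    }
}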

Example 3 with Triple

use of mpicbg.trakem2.util.Triple in project TrakEM2 by trakem2.

In the class Loader, the method importImages:

/**
 * <p>Import images from the given text file, which is expected to contain 4 columns or optionally 9 columns:</p>
 * <ul>
 * <li>column 1: image file path (if base_dir is not null, it will be prepended)</li>
 * <li>column 2: x coord [px]</li>
 * <li>column 3: y coord [px]</li>
 * <li>column 4: z coord [px] (multiplied by layer_thickness if not zero)</li>
 * </ul>
 * <p>optional columns, if a property is not known, it can be set to "-" which makes TrakEM2 open the file and find out by itself</p>
 * <ul>
 * <li>column 5: width [px]</li>
 * <li>column 6: height [px]</li>
 * <li>column 7: min intensity [double] (for screen display)</li>
 * <li>column 8: max intensity [double] (for screen display)</li>
 * <li>column 9: type [integer] (pixel types according to ImagePlus types: 0=8bit int gray, 1=16bit int gray, 2=32bit float gray, 3=8bit indexed color, 4=32-bit RGB color)</li>
 * </ul>
 *
 * <p>This function implements the "Import from text file" command.</p>
 *
 * <p>Layers will be automatically created as needed inside the LayerSet to which the given ref_layer belongs.</p>
 * <p>
 * The text file can contain comments that start with the # sign.
 * </p>
 * <p>
 * Images will be imported in parallel, using as many cores as your machine has.
 * </p>
 * @param calibration_ transforms the read coordinates into pixel coordinates, including x,y,z, and layer thickness.
 * @param scale_ Between 0 and 1. When lower than 1, a preprocessor script is created for the imported images, to scale them down.
 */
public Bureaucrat importImages(Layer ref_layer, String abs_text_file_path_, String column_separator_, double layer_thickness_, double calibration_, boolean homogenize_contrast_, float scale_, int border_width_) {
    // check parameters: ask for good ones if necessary
    if (null == abs_text_file_path_) {
        final String[] file = Utils.selectFile("Select text file");
        // user canceled dialog
        if (null == file)
            return null;
        abs_text_file_path_ = file[0] + file[1];
    }
    if (null == column_separator_ || 0 == column_separator_.length() || Double.isNaN(layer_thickness_) || layer_thickness_ <= 0 || Double.isNaN(calibration_) || calibration_ <= 0) {
        final Calibration cal = ref_layer.getParent().getCalibrationCopy();
        final GenericDialog gdd = new GenericDialog("Options");
        final String[] separators = new String[] { "tab", "space", "comma (,)" };
        gdd.addMessage("Choose a layer to act as the zero for the Z coordinates:");
        Utils.addLayerChoice("Base layer", ref_layer, gdd);
        gdd.addChoice("Column separator: ", separators, separators[0]);
        // default: 60 nm
        gdd.addNumericField("Layer thickness: ", cal.pixelDepth, 2);
        gdd.addNumericField("Calibration (data to pixels): ", 1, 2);
        gdd.addCheckbox("Homogenize contrast layer-wise", homogenize_contrast_);
        gdd.addSlider("Scale:", 0, 100, 100);
        gdd.addNumericField("Hide border with alpha mask", 0, 0, 6, "pixels");
        gdd.showDialog();
        if (gdd.wasCanceled())
            return null;
        layer_thickness_ = gdd.getNextNumber();
        if (layer_thickness_ < 0 || Double.isNaN(layer_thickness_)) {
            Utils.log("Improper layer thickness value.");
            return null;
        }
        calibration_ = gdd.getNextNumber();
        if (0 == calibration_ || Double.isNaN(calibration_)) {
            Utils.log("Improper calibration value.");
            return null;
        }
        // not pixelDepth!
        layer_thickness_ /= cal.pixelWidth;
        ref_layer = ref_layer.getParent().getLayer(gdd.getNextChoiceIndex());
        column_separator_ = "\t";
        switch(gdd.getNextChoiceIndex()) {
            case 1:
                column_separator_ = " ";
                break;
            case 2:
                column_separator_ = ",";
                break;
            default:
                break;
        }
        homogenize_contrast_ = gdd.getNextBoolean();
        final double sc = gdd.getNextNumber();
        if (Double.isNaN(sc))
            scale_ = 1.0f;
        else
            scale_ = ((float) sc) / 100.0f;
        final int border = (int) gdd.getNextNumber();
        if (border < 0) {
            Utils.log("Nonsensical border value: " + border);
            return null;
        }
        border_width_ = border;
    }
    if (Float.isNaN(scale_) || scale_ < 0 || scale_ > 1) {
        Utils.log("Non-sensical scale: " + scale_ + "\nUsing scale of 1 instead.");
        scale_ = 1;
    }
    // make vars accessible from inner threads:
    final Layer base_layer = ref_layer;
    final String abs_text_file_path = abs_text_file_path_;
    final String column_separator = column_separator_;
    final double layer_thickness = layer_thickness_;
    final double calibration = calibration_;
    final boolean homogenize_contrast = homogenize_contrast_;
    final float scale = (float) scale_;
    final int border_width = border_width_;
    return Bureaucrat.createAndStart(new Worker.Task("Importing images", true) {

        @Override
        public void exec() {
            try {
                // 1 - read text file
                final String[] lines = Utils.openTextFileLines(abs_text_file_path);
                if (null == lines || 0 == lines.length) {
                    Utils.log2("No images to import from " + abs_text_file_path);
                    return;
                }
                ContrastEnhancerWrapper cew = null;
                if (homogenize_contrast) {
                    cew = new ContrastEnhancerWrapper();
                    cew.showDialog();
                }
                final String sep2 = column_separator + column_separator;
                // 2 - set a base dir path if necessary
                String base_dir = null;
                // to wait on mipmap regeneration
                final Vector<Future<?>> fus = new Vector<Future<?>>();
                final LayerSet layer_set = base_layer.getParent();
                final double z_zero = base_layer.getZ();
                final AtomicInteger n_imported = new AtomicInteger(0);
                final Set<Layer> touched_layers = new HashSet<Layer>();
                final int NP = Runtime.getRuntime().availableProcessors();
                int np = NP;
                switch(np) {
                    case 1:
                    case 2:
                        break;
                    default:
                        np = np / 2;
                        break;
                }
                final ExecutorService ex = Utils.newFixedThreadPool(np, "import-images");
                final List<Future<?>> imported = new ArrayList<Future<?>>();
                final Worker wo = this;
                final String script_path;
                // If scale is at least 1/100 lower than 1, then:
                if (Math.abs(scale - (int) scale) > 0.01) {
                    // Assume source and target sigma of 0.5
                    final double sigma = Math.sqrt(Math.pow(1 / scale, 2) - 0.25);
                    final String script = new StringBuilder()
                            .append("import ij.ImagePlus;\n")
                            .append("import ij.process.ImageProcessor;\n")
                            .append("import ij.plugin.filter.GaussianBlur;\n")
                            .append("GaussianBlur blur = new GaussianBlur();\n")
                            // accuracy as in ij.plugin.filter.GaussianBlur
                            .append("double accuracy = (imp.getType() == ImagePlus.GRAY8 || imp.getType() == ImagePlus.COLOR_RGB) ? 0.002 : 0.0002;\n")
                            .append("imp.getProcessor().setInterpolationMethod(ImageProcessor.NONE);\n")
                            .append("blur.blurGaussian(imp.getProcessor(),").append(sigma).append(',').append(sigma).append(",accuracy);\n")
                            .append("imp.setProcessor(imp.getTitle(), imp.getProcessor().resize((int)(imp.getWidth() * ").append(scale)
                            .append("), (int)(imp.getHeight() * ").append(scale).append(")));")
                            .toString();
                    File f = new File(getStorageFolder() + "resize-" + scale + ".bsh");
                    int v = 1;
                    while (f.exists()) {
                        f = new File(getStorageFolder() + "resize-" + scale + "." + v + ".bsh");
                        v++;
                    }
                    script_path = Utils.saveToFile(f, script) ? f.getAbsolutePath() : null;
                    if (null == script_path) {
                        Utils.log("Could NOT save a preprocessor script for image scaling\nat path " + f.getAbsolutePath());
                    }
                } else {
                    script_path = null;
                }
                Utils.log("Scaling script path is " + script_path);
                final AtomicReference<Triple<Integer, Integer, ByteProcessor>> last_mask = new AtomicReference<Triple<Integer, Integer, ByteProcessor>>();
                // 3 - parse each line
                for (int i = 0; i < lines.length; i++) {
                    if (Thread.currentThread().isInterrupted() || hasQuitted()) {
                        this.quit();
                        return;
                    }
                    // process line
                    // first thing is the backslash removal, before they get processed at all
                    String line = lines[i].replace('\\', '/').trim();
                    final int ic = line.indexOf('#');
                    // remove comment at end of line if any
                    if (-1 != ic)
                        line = line.substring(0, ic);
                    if (0 == line.length() || '#' == line.charAt(0))
                        continue;
                    // reduce line, so that separators are really unique
                    while (-1 != line.indexOf(sep2)) {
                        line = line.replaceAll(sep2, column_separator);
                    }
                    final String[] column = line.split(column_separator);
                    if (column.length < 4) {
                        Utils.log("Less than 4 columns: can't import from line " + i + " : " + line);
                        continue;
                    }
                    // obtain coordinates
                    double x = 0, y = 0, z = 0;
                    try {
                        x = Double.parseDouble(column[1].trim());
                        y = Double.parseDouble(column[2].trim());
                        z = Double.parseDouble(column[3].trim());
                    } catch (final NumberFormatException nfe) {
                        Utils.log("Non-numeric value in a numeric column at line " + i + " : " + line);
                        continue;
                    }
                    x *= calibration;
                    y *= calibration;
                    z = z * calibration + z_zero;
                    // obtain path
                    String path = column[0].trim();
                    if (0 == path.length())
                        continue;
                    // check if path is relative
                    if ((!IJ.isWindows() && '/' != path.charAt(0)) || (IJ.isWindows() && 1 != path.indexOf(":/"))) {
                        // path is relative.
                        if (null == base_dir) {
                            // may not be null if another thread that got the lock first set it to non-null
                            // Ask for source directory
                            final DirectoryChooser dc = new DirectoryChooser("Choose source directory");
                            final String dir = dc.getDirectory();
                            if (null == dir) {
                                // quit all threads
                                return;
                            }
                            base_dir = Utils.fixDir(dir);
                        }
                    }
                    if (null != base_dir)
                        path = base_dir + path;
                    final File f = new File(path);
                    if (!f.exists()) {
                        Utils.log("No file found for path " + path);
                        continue;
                    }
                    // will create a new Layer if necessary
                    final Layer layer = layer_set.getLayer(z, layer_thickness, true);
                    touched_layers.add(layer);
                    final String imagefilepath = path;
                    final double xx = x * scale;
                    final double yy = y * scale;
                    final Callable<Patch> creator;
                    if (column.length >= 9) {
                        creator = new Callable<Patch>() {

                            private final int parseInt(final String t) {
                                if (t.equals("-"))
                                    return -1;
                                return Integer.parseInt(t);
                            }

                            private final double parseDouble(final String t) {
                                if (t.equals("-"))
                                    return Double.NaN;
                                return Double.parseDouble(t);
                            }

                            @Override
                            public Patch call() throws Exception {
                                int o_width = parseInt(column[4].trim());
                                int o_height = parseInt(column[5].trim());
                                double min = parseDouble(column[6].trim());
                                double max = parseDouble(column[7].trim());
                                int type = parseInt(column[8].trim());
                                if (-1 == type || -1 == o_width || -1 == o_height) {
                                    // Read them from the file header
                                    final ImageFileHeader ifh = new ImageFileHeader(imagefilepath);
                                    o_width = ifh.width;
                                    o_height = ifh.height;
                                    type = ifh.type;
                                    if (!ifh.isSupportedType()) {
                                        Utils.log("Incompatible image type: " + imagefilepath);
                                        return null;
                                    }
                                }
                                ImagePlus imp = null;
                                if (Double.isNaN(min) || Double.isNaN(max)) {
                                    imp = openImagePlus(imagefilepath);
                                    min = imp.getProcessor().getMin();
                                    max = imp.getProcessor().getMax();
                                }
                                final Patch patch = new Patch(layer.getProject(), new File(imagefilepath).getName(), o_width, o_height, o_width, o_height, type, 1.0f, Color.yellow, false, min, max, new AffineTransform(1, 0, 0, 1, xx, yy), imagefilepath);
                                if (null != script_path && null != imp) {
                                    // For use in setting the preprocessor script
                                    cacheImagePlus(patch.getId(), imp);
                                }
                                return patch;
                            }
                        };
                    } else {
                        creator = new Callable<Patch>() {

                            @Override
                            public Patch call() throws Exception {
                                IJ.redirectErrorMessages();
                                final ImageFileHeader ifh = new ImageFileHeader(imagefilepath);
                                final int o_width = ifh.width;
                                final int o_height = ifh.height;
                                final int type = ifh.type;
                                if (!ifh.isSupportedType()) {
                                    Utils.log("Incompatible image type: " + imagefilepath);
                                    return null;
                                }
                                double min = 0;
                                double max = 255;
                                switch(type) {
                                    case ImagePlus.GRAY16:
                                    case ImagePlus.GRAY32:
                                        // Determine suitable min and max
                                        // TODO Stream through the image, do not load it!
                                        final ImagePlus imp = openImagePlus(imagefilepath);
                                        if (null == imp) {
                                            Utils.log("Ignoring unopenable image from " + imagefilepath);
                                            return null;
                                        }
                                        min = imp.getProcessor().getMin();
                                        max = imp.getProcessor().getMax();
                                        break;
                                }
                                // add Patch
                                final Patch patch = new Patch(layer.getProject(), new File(imagefilepath).getName(), o_width, o_height, o_width, o_height, type, 1.0f, Color.yellow, false, min, max, new AffineTransform(1, 0, 0, 1, xx, yy), imagefilepath);
                                return patch;
                            }
                        };
                    }
                    // Otherwise, images would end up loaded twice for no reason
                    if (0 == (i % (NP + NP))) {
                        final ArrayList<Future<?>> a = new ArrayList<Future<?>>(NP + NP);
                        synchronized (fus) {
                            // .add is also synchronized, fus is a Vector
                            int k = 0;
                            while (!fus.isEmpty() && k < NP) {
                                a.add(fus.remove(0));
                                k++;
                            }
                        }
                        for (final Future<?> fu : a) {
                            try {
                                if (wo.hasQuitted())
                                    return;
                                fu.get();
                            } catch (final Throwable t) {
                                t.printStackTrace();
                            }
                        }
                    }
                    imported.add(ex.submit(new Runnable() {

                        @Override
                        public void run() {
                            if (wo.hasQuitted())
                                return;
                            /* */
                            IJ.redirectErrorMessages();
                            Patch patch;
                            try {
                                patch = creator.call();
                            } catch (final Exception e) {
                                e.printStackTrace();
                                Utils.log("Could not load patch from " + imagefilepath);
                                return;
                            }
                            // Set the script if any
                            if (null != script_path) {
                                try {
                                    patch.setPreprocessorScriptPath(script_path);
                                } catch (final Throwable t) {
                                    Utils.log("FAILED to set a scaling preprocessor script to patch " + patch);
                                    IJError.print(t);
                                }
                            }
                            // Set an alpha mask to crop away the borders
                            if (border_width > 0) {
                                final Triple<Integer, Integer, ByteProcessor> m = last_mask.get();
                                if (null != m && m.a == patch.getOWidth() && m.b == patch.getOHeight()) {
                                    // Reuse
                                    patch.setAlphaMask(m.c);
                                } else {
                                    // Create new mask
                                    final ByteProcessor mask = new ByteProcessor(patch.getOWidth(), patch.getOHeight());
                                    mask.setValue(255);
                                    mask.setRoi(new Roi(border_width, border_width, mask.getWidth() - 2 * border_width, mask.getHeight() - 2 * border_width));
                                    mask.fill();
                                    patch.setAlphaMask(mask);
                                    // Store as last
                                    last_mask.set(new Triple<Integer, Integer, ByteProcessor>(mask.getWidth(), mask.getHeight(), mask));
                                }
                            }
                            if (!homogenize_contrast) {
                                fus.add(regenerateMipMaps(patch));
                            }
                            synchronized (layer) {
                                layer.add(patch, true);
                            }
                            wo.setTaskName("Imported " + (n_imported.incrementAndGet() + 1) + "/" + lines.length);
                        }
                    }));
                }
                Utils.wait(imported);
                ex.shutdown();
                if (0 == n_imported.get()) {
                    Utils.log("No images imported.");
                    return;
                }
                base_layer.getParent().setMinimumDimensions();
                Display.repaint(base_layer.getParent());
                recreateBuckets(touched_layers);
                if (homogenize_contrast) {
                    setTaskName("Enhance contrast");
                    // layer-wise (layer order is irrelevant):
                    cew.applyLayerWise(touched_layers);
                    cew.shutdown();
                }
                Utils.wait(fus);
            } catch (final Exception e) {
                IJError.print(e);
            }
        }
    }, base_layer.getProject());
}
Also used : ByteProcessor(ij.process.ByteProcessor) Set(java.util.Set) HashSet(java.util.HashSet) LayerSet(ini.trakem2.display.LayerSet) ArrayList(java.util.ArrayList) Callable(java.util.concurrent.Callable) GenericDialog(ij.gui.GenericDialog) Worker(ini.trakem2.utils.Worker) AreaList(ini.trakem2.display.AreaList) ArrayList(java.util.ArrayList) List(java.util.List) Vector(java.util.Vector) LayerSet(ini.trakem2.display.LayerSet) AtomicReference(java.util.concurrent.atomic.AtomicReference) ImageFileHeader(ini.trakem2.io.ImageFileHeader) Calibration(ij.measure.Calibration) Layer(ini.trakem2.display.Layer) ImagePlus(ij.ImagePlus) Roi(ij.gui.Roi) IOException(java.io.IOException) FormatException(loci.formats.FormatException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Triple(mpicbg.trakem2.util.Triple) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) AffineTransform(java.awt.geom.AffineTransform) ContrastEnhancerWrapper(ini.trakem2.imaging.ContrastEnhancerWrapper) File(java.io.File) Patch(ini.trakem2.display.Patch) DirectoryChooser(ij.io.DirectoryChooser)
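As the javadoc above notes, importImages reads a plain-text table whose first four columns are image path, x, y and z, with optional width/height/min/max/type columns, # comments, and a configurable separator (tab by default). Purely as an illustration of that input format, here is a hedged sketch that writes such a 4-column, tab-separated file; the file name, image paths and coordinates are all made up.

import java.io.FileWriter;
import java.io.IOException;

// Writes an example 4-column import table of the kind importImages(...) expects.
// Everything here (file name, paths, coordinates) is hypothetical.
final class WriteImportTableExample {

    public static void main(final String[] args) throws IOException {
        final FileWriter w = new FileWriter("import_example.txt");
        try {
            w.write("# path\tx\ty\tz\n");              // comment lines start with #
            w.write("tile_000.tif\t0\t0\t0\n");
            w.write("tile_001.tif\t2048\t0\t0\n");     // same layer, shifted in x
            w.write("tile_002.tif\t0\t0\t1\n");        // next z -> a new Layer is created
        } finally {
            w.close();
        }
    }
}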

Example 4 with Triple

use of mpicbg.trakem2.util.Triple in project TrakEM2 by trakem2.

In the class ElasticLayerAlignment, the method preAlignStack:

private void preAlignStack(final Param param, final Project project, final List<Layer> layerRange, final Rectangle box, final Filter<Patch> filter, final ArrayList<Triple<Integer, Integer, AbstractModel<?>>> pairs) {
    final double scale = Math.min(1.0, Math.min((double) param.ppm.sift.maxOctaveSize / (double) box.width, (double) param.ppm.sift.maxOctaveSize / (double) box.height));
    /* extract and save features, overwrite cached files if requested */
    try {
        AlignmentUtils.extractAndSaveLayerFeatures(layerRange, box, scale, filter, param.ppm.sift, param.ppm.clearCache, param.ppm.maxNumThreadsSift);
    } catch (final Exception e) {
        return;
    }
    /* match and filter feature correspondences */
    int numFailures = 0;
    final double pointMatchScale = param.layerScale / scale;
    for (int i = 0; i < layerRange.size(); ++i) {
        final ArrayList<Thread> threads = new ArrayList<Thread>(param.maxNumThreads);
        final int sliceA = i;
        final Layer layerA = layerRange.get(i);
        final int range = Math.min(layerRange.size(), i + param.maxNumNeighbors + 1);
        final String layerNameA = layerName(layerA);
        J: for (int j = i + 1; j < range; ) {
            final int numThreads = Math.min(param.maxNumThreads, range - j);
            final ArrayList<Triple<Integer, Integer, AbstractModel<?>>> models = new ArrayList<Triple<Integer, Integer, AbstractModel<?>>>(numThreads);
            for (int k = 0; k < numThreads; ++k) models.add(null);
            for (int t = 0; t < numThreads && j < range; ++t, ++j) {
                final int ti = t;
                final int sliceB = j;
                final Layer layerB = layerRange.get(j);
                final String layerNameB = layerName(layerB);
                final Thread thread = new Thread() {

                    @Override
                    public void run() {
                        IJ.showProgress(sliceA, layerRange.size() - 1);
                        Utils.log("matching " + layerNameB + " -> " + layerNameA + "...");
                        ArrayList<PointMatch> candidates = null;
                        if (!param.ppm.clearCache)
                            candidates = mpicbg.trakem2.align.Util.deserializePointMatches(project, param.ppm, "layer", layerB.getId(), layerA.getId());
                        if (null == candidates) {
                            final ArrayList<Feature> fs1 = mpicbg.trakem2.align.Util.deserializeFeatures(project, param.ppm.sift, "layer", layerA.getId());
                            final ArrayList<Feature> fs2 = mpicbg.trakem2.align.Util.deserializeFeatures(project, param.ppm.sift, "layer", layerB.getId());
                            candidates = new ArrayList<PointMatch>(FloatArray2DSIFT.createMatches(fs2, fs1, param.ppm.rod));
                            /* scale the candidates */
                            for (final PointMatch pm : candidates) {
                                final Point p1 = pm.getP1();
                                final Point p2 = pm.getP2();
                                final double[] l1 = p1.getL();
                                final double[] w1 = p1.getW();
                                final double[] l2 = p2.getL();
                                final double[] w2 = p2.getW();
                                l1[0] *= pointMatchScale;
                                l1[1] *= pointMatchScale;
                                w1[0] *= pointMatchScale;
                                w1[1] *= pointMatchScale;
                                l2[0] *= pointMatchScale;
                                l2[1] *= pointMatchScale;
                                w2[0] *= pointMatchScale;
                                w2[1] *= pointMatchScale;
                            }
                            if (!mpicbg.trakem2.align.Util.serializePointMatches(project, param.ppm, "layer", layerB.getId(), layerA.getId(), candidates))
                                Utils.log("Could not store point match candidates for layers " + layerNameB + " and " + layerNameA + ".");
                        }
                        AbstractModel<?> model;
                        switch(param.expectedModelIndex) {
                            case 0:
                                model = new TranslationModel2D();
                                break;
                            case 1:
                                model = new RigidModel2D();
                                break;
                            case 2:
                                model = new SimilarityModel2D();
                                break;
                            case 3:
                                model = new AffineModel2D();
                                break;
                            case 4:
                                model = new HomographyModel2D();
                                break;
                            default:
                                return;
                        }
                        final ArrayList<PointMatch> inliers = new ArrayList<PointMatch>();
                        boolean modelFound;
                        boolean again = false;
                        try {
                            do {
                                again = false;
                                modelFound = model.filterRansac(candidates, inliers, 1000, param.maxEpsilon * param.layerScale, param.minInlierRatio, param.minNumInliers, 3);
                                if (modelFound && param.rejectIdentity) {
                                    final ArrayList<Point> points = new ArrayList<Point>();
                                    PointMatch.sourcePoints(inliers, points);
                                    if (Transforms.isIdentity(model, points, param.identityTolerance * param.layerScale)) {
                                        IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
                                        candidates.removeAll(inliers);
                                        inliers.clear();
                                        again = true;
                                    }
                                }
                            } while (again);
                        } catch (final NotEnoughDataPointsException e) {
                            modelFound = false;
                        }
                        if (modelFound) {
                            Utils.log(layerNameB + " -> " + layerNameA + ": " + inliers.size() + " corresponding features with an average displacement of " + (PointMatch.meanDistance(inliers) / param.layerScale) + "px identified.");
                            Utils.log("Estimated transformation model: " + model);
                            models.set(ti, new Triple<Integer, Integer, AbstractModel<?>>(sliceA, sliceB, model));
                        } else {
                            Utils.log(layerNameB + " -> " + layerNameA + ": no correspondences found.");
                            return;
                        }
                    }
                };
                threads.add(thread);
                thread.start();
            }
            try {
                for (final Thread thread : threads) thread.join();
            } catch (final InterruptedException e) {
                Utils.log("Establishing feature correspondences interrupted.");
                for (final Thread thread : threads) thread.interrupt();
                try {
                    for (final Thread thread : threads) thread.join();
                } catch (final InterruptedException f) {
                }
                return;
            }
            threads.clear();
            /* collect successfully matched pairs and break the search on gaps */
            for (int t = 0; t < models.size(); ++t) {
                final Triple<Integer, Integer, AbstractModel<?>> pair = models.get(t);
                if (pair == null) {
                    if (++numFailures > param.maxNumFailures) {
                        break J;
                    }
                } else {
                    numFailures = 0;
                    pairs.add(pair);
                }
            }
        }
    }
}
Also used : NotEnoughDataPointsException (mpicbg.models.NotEnoughDataPointsException), AbstractModel (mpicbg.models.AbstractModel), ArrayList (java.util.ArrayList), HomographyModel2D (mpicbg.models.HomographyModel2D), Point (mpicbg.models.Point), Layer (ini.trakem2.display.Layer), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), Triple (mpicbg.trakem2.util.Triple), PointMatch (mpicbg.models.PointMatch), RigidModel2D (mpicbg.models.RigidModel2D), AffineModel2D (mpicbg.models.AffineModel2D), TranslationModel2D (mpicbg.models.TranslationModel2D), SimilarityModel2D (mpicbg.models.SimilarityModel2D)
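The per-pair loop above follows the usual mpicbg recipe: estimate a model from the feature correspondences with filterRansac and keep only the inliers. The following minimal sketch isolates that step; the class and method names are illustrative, the 1000 iterations and trust factor of 3 are copied from the call above, and the remaining thresholds are placeholders to be taken from the alignment parameters.

import java.util.List;

import mpicbg.models.AbstractModel;
import mpicbg.models.NotEnoughDataPointsException;
import mpicbg.models.PointMatch;

public class PairwiseModelSketch {

    /**
     * Fit 'model' to the candidate correspondences of one layer pair and collect the
     * RANSAC inliers; returns false when no model could be found (illustrative helper).
     */
    public static boolean estimateModel(
            final AbstractModel<?> model,          // e.g. new RigidModel2D()
            final List<PointMatch> candidates,     // feature correspondences of the layer pair
            final List<PointMatch> inliers,        // output: consistent subset of candidates
            final float maxEpsilon,                // maximal transfer error in (scaled) pixels
            final float minInlierRatio,
            final int minNumInliers) {
        try {
            /* 1000 iterations and a trust factor of 3, as in the call above */
            return model.filterRansac(candidates, inliers, 1000,
                    maxEpsilon, minInlierRatio, minNumInliers, 3);
        } catch (final NotEnoughDataPointsException e) {
            return false;
        }
    }
}

Note that in the code above a successful fit may still be discarded: with rejectIdentity enabled, inlier sets whose model is effectively the identity are removed and RANSAC is rerun on the remaining candidates.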

Example 5 with Triple

use of mpicbg.trakem2.util.Triple in project TrakEM2 by trakem2.

the class RegularizedAffineLayerAlignment method exec.

/**
 * @param param
 * @param layerRange
 * @param fixedLayers
 * @param emptyLayers
 * @param box
 * @param propagateTransformBefore
 * @param propagateTransformAfter
 * @param filter
 * @throws Exception
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public final void exec(final Param param, final List<Layer> layerRange, final Set<Layer> fixedLayers, final Set<Layer> emptyLayers, final Rectangle box, final boolean propagateTransformBefore, final boolean propagateTransformAfter, final Filter<Patch> filter) throws Exception {
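    /* Scale the layer bounding box down so that its larger side fits into the SIFT maxOctaveSize. */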
    final double scale = Math.min(1.0, Math.min((double) param.ppm.sift.maxOctaveSize / (double) box.width, (double) param.ppm.sift.maxOctaveSize / (double) box.height));
    final ExecutorService exec = ExecutorProvider.getExecutorService(1.0f / (float) param.maxNumThreads);
    /* create tiles and models for all layers */
    final ArrayList<Tile<?>> tiles = new ArrayList<Tile<?>>();
    final AbstractAffineModel2D<?> m = (AbstractAffineModel2D<?>) Util.createModel(param.desiredModelIndex);
    final AbstractAffineModel2D<?> r = (AbstractAffineModel2D<?>) Util.createModel(param.regularizerIndex);
    for (int i = 0; i < layerRange.size(); ++i) {
        if (param.regularize)
            tiles.add(new Tile(new InterpolatedAffineModel2D(m.copy(), r.copy(), param.lambda)));
        else
            tiles.add(new Tile(m.copy()));
    }
    /* collect all pairs of slices for which a model could be found */
    final ArrayList<Triple<Integer, Integer, Collection<PointMatch>>> pairs = new ArrayList<Triple<Integer, Integer, Collection<PointMatch>>>();
    /* extract and save features, overwrite cached files if requested */
    try {
        AlignmentUtils.extractAndSaveLayerFeatures(layerRange, box, scale, filter, param.ppm.sift, param.ppm.clearCache, param.ppm.maxNumThreadsSift);
    } catch (final Exception e) {
        e.printStackTrace();
        IJError.print(e);
        return;
    }
    /* match and filter feature correspondences */
    int numFailures = 0, lastA = 0;
    final double pointMatchScale = 1.0 / scale;
    final ArrayList<Future<Triple<Integer, Integer, Collection<PointMatch>>>> modelFutures = new ArrayList<Future<Triple<Integer, Integer, Collection<PointMatch>>>>();
    for (int i = 0; i < layerRange.size(); ++i) {
        final int range = Math.min(layerRange.size(), i + param.maxNumNeighbors + 1);
        for (int j = i + 1; j < range; ++j) {
            modelFutures.add(exec.submit(new CorrespondenceCallable(param, layerRange.get(i), layerRange.get(j), pointMatchScale, i, j)));
        }
    }
    // Assume that futures are ordered by Triple.a (the index of the first layer of each pair)
    try {
        for (final Future<Triple<Integer, Integer, Collection<PointMatch>>> future : modelFutures) {
            final Triple<Integer, Integer, Collection<PointMatch>> pair = future.get();
            if (lastA != pair.a) {
                numFailures = 0;
                lastA = pair.a;
            }
            if (pair.c == null) {
                numFailures++;
            // TODO: Cancel futures associated with pair.a
            } else if (numFailures < param.maxNumFailures) {
                pairs.add(pair);
            }
        }
    } catch (final InterruptedException ie) {
        Utils.log("Establishing feature correspondences interrupted.");
        for (final Future<Triple<Integer, Integer, Collection<PointMatch>>> future : modelFutures) {
            future.cancel(true);
        }
        return;
    }
    /* collect successfully matched pairs and break the search on gaps */
    /*
        for ( int t = 0; t < models.size(); ++t )
        {
            final Triple< Integer, Integer, Collection< PointMatch > > pair = models.get( t );
            if ( pair == null )
            {
                if ( ++numFailures > param.maxNumFailures )
                    break J;
            }
            else
            {
                numFailures = 0;
                pairs.add( pair );
            }
        }
*/
    /* Optimization */
    final TileConfiguration tileConfiguration = new TileConfiguration();
    for (final Triple<Integer, Integer, Collection<PointMatch>> pair : pairs) {
        final Tile<?> t1 = tiles.get(pair.a);
        final Tile<?> t2 = tiles.get(pair.b);
        tileConfiguration.addTile(t1);
        tileConfiguration.addTile(t2);
        t2.connect(t1, pair.c);
    }
    for (int i = 0; i < layerRange.size(); ++i) {
        final Layer layer = layerRange.get(i);
        if (fixedLayers.contains(layer))
            tileConfiguration.fixTile(tiles.get(i));
    }
    final List<Tile<?>> nonPreAlignedTiles = tileConfiguration.preAlign();
    IJ.log("pre-aligned all but " + nonPreAlignedTiles.size() + " tiles");
    tileConfiguration.optimize(param.maxEpsilon, param.maxIterationsOptimize, param.maxPlateauwidthOptimize);
    Utils.log(new StringBuffer("Successfully optimized configuration of ").append(tiles.size()).append(" tiles:").toString());
    Utils.log("  average displacement: " + String.format("%.3f", tileConfiguration.getError()) + "px");
    Utils.log("  minimal displacement: " + String.format("%.3f", tileConfiguration.getMinError()) + "px");
    Utils.log("  maximal displacement: " + String.format("%.3f", tileConfiguration.getMaxError()) + "px");
    if (propagateTransformBefore || propagateTransformAfter) {
        final Layer first = layerRange.get(0);
        final List<Layer> layers = first.getParent().getLayers();
        if (propagateTransformBefore) {
            final AffineTransform b = translateAffine(box, ((Affine2D<?>) tiles.get(0).getModel()).createAffine());
            final int firstLayerIndex = first.getParent().getLayerIndex(first.getId());
            for (int i = 0; i < firstLayerIndex; ++i) applyTransformToLayer(layers.get(i), b, filter);
        }
        if (propagateTransformAfter) {
            final Layer last = layerRange.get(layerRange.size() - 1);
            final AffineTransform b = translateAffine(box, ((Affine2D<?>) tiles.get(tiles.size() - 1).getModel()).createAffine());
            final int lastLayerIndex = last.getParent().getLayerIndex(last.getId());
            for (int i = lastLayerIndex + 1; i < layers.size(); ++i) applyTransformToLayer(layers.get(i), b, filter);
        }
    }
    for (int i = 0; i < layerRange.size(); ++i) {
        final AffineTransform b = translateAffine(box, ((Affine2D<?>) tiles.get(i).getModel()).createAffine());
        applyTransformToLayer(layerRange.get(i), b, filter);
    }
    Utils.log("Done.");
}
Also used : ArrayList (java.util.ArrayList), InterpolatedAffineModel2D (mpicbg.models.InterpolatedAffineModel2D), Tile (mpicbg.models.Tile), AbstractAffineModel2D (mpicbg.models.AbstractAffineModel2D), Layer (ini.trakem2.display.Layer), Point (mpicbg.models.Point), NotEnoughDataPointsException (mpicbg.models.NotEnoughDataPointsException), IllDefinedDataPointsException (mpicbg.models.IllDefinedDataPointsException), Triple (mpicbg.trakem2.util.Triple), PointMatch (mpicbg.models.PointMatch), ExecutorService (java.util.concurrent.ExecutorService), Collection (java.util.Collection), Future (java.util.concurrent.Future), AffineTransform (java.awt.geom.AffineTransform), TileConfiguration (mpicbg.models.TileConfiguration)
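At its core the affine pass is the standard mpicbg tile optimization: one Tile per layer, connected by the point matches of every layer pair, with the fixed layers pinned, then preAlign followed by optimize. The sketch below shows that flow in isolation, assuming plain AffineModel2D tiles and a hard-coded fixed first layer; in the method above the tiles may instead wrap an InterpolatedAffineModel2D for regularization, and the fixed tiles come from the fixedLayers set. Class, method, and parameter names are illustrative.

import java.util.ArrayList;
import java.util.List;

import mpicbg.models.AffineModel2D;
import mpicbg.models.PointMatch;
import mpicbg.models.Tile;
import mpicbg.models.TileConfiguration;

public class LayerTileOptimizationSketch {

    /**
     * pairIndices.get(k) = {i, j} and pairMatches.get(k) are the point matches of that
     * layer pair, mirroring the 'pairs' list built in the method above (illustrative input).
     */
    public static List<AffineModel2D> optimizeLayers(
            final int numLayers,
            final List<int[]> pairIndices,
            final List<List<PointMatch>> pairMatches,
            final float maxAllowedError,
            final int maxIterations,
            final int maxPlateauWidth) throws Exception {

        /* one tile per layer, each carrying its own affine model */
        final List<Tile<AffineModel2D>> tiles = new ArrayList<Tile<AffineModel2D>>();
        for (int i = 0; i < numLayers; ++i)
            tiles.add(new Tile<AffineModel2D>(new AffineModel2D()));

        /* connect matched layer pairs through their point correspondences */
        final TileConfiguration tc = new TileConfiguration();
        for (int k = 0; k < pairIndices.size(); ++k) {
            final Tile<AffineModel2D> t1 = tiles.get(pairIndices.get(k)[0]);
            final Tile<AffineModel2D> t2 = tiles.get(pairIndices.get(k)[1]);
            tc.addTile(t1);
            tc.addTile(t2);
            t2.connect(t1, pairMatches.get(k));
        }

        /* pin one layer (stand-in for the fixedLayers set), initialize, then relax the springs */
        tc.fixTile(tiles.get(0));
        tc.preAlign();
        tc.optimize(maxAllowedError, maxIterations, maxPlateauWidth);

        /* the optimized per-layer transforms */
        final List<AffineModel2D> models = new ArrayList<AffineModel2D>();
        for (final Tile<AffineModel2D> tile : tiles)
            models.add(tile.getModel());
        return models;
    }
}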

Aggregations

ArrayList (java.util.ArrayList): 5 usages
Triple (mpicbg.trakem2.util.Triple): 5 usages
Layer (ini.trakem2.display.Layer): 4 usages
NotEnoughDataPointsException (mpicbg.models.NotEnoughDataPointsException): 4 usages
Point (mpicbg.models.Point): 4 usages
PointMatch (mpicbg.models.PointMatch): 4 usages
Patch (ini.trakem2.display.Patch): 3 usages
ExecutorService (java.util.concurrent.ExecutorService): 3 usages
Future (java.util.concurrent.Future): 3 usages
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 3 usages
AffineModel2D (mpicbg.models.AffineModel2D): 3 usages
RigidModel2D (mpicbg.models.RigidModel2D): 3 usages
SimilarityModel2D (mpicbg.models.SimilarityModel2D): 3 usages
TranslationModel2D (mpicbg.models.TranslationModel2D): 3 usages
ByteProcessor (ij.process.ByteProcessor): 2 usages
LayerSet (ini.trakem2.display.LayerSet): 2 usages
AffineTransform (java.awt.geom.AffineTransform): 2 usages
Spring (mpicbg.models.Spring): 2 usages
SpringMesh (mpicbg.models.SpringMesh): 2 usages
Vertex (mpicbg.models.Vertex): 2 usages