use of mpicbg.models.AbstractAffineModel2D in project TrakEM2 by trakem2.
the class AlignTask method alignGraphs.
private static final boolean alignGraphs(final Align.Param p, final Layer layer1, final Layer layer2, final Iterable<Tile<?>> graph1, final Iterable<Tile<?>> graph2) {
final Align.Param cp = p.clone();
final Selection selection1 = new Selection(null);
for (final Tile<?> tile : graph1) selection1.add(((AbstractAffineTile2D<?>) tile).getPatch());
final Rectangle graph1Box = selection1.getBox();
final Selection selection2 = new Selection(null);
for (final Tile<?> tile : graph2) selection2.add(((AbstractAffineTile2D<?>) tile).getPatch());
final Rectangle graph2Box = selection2.getBox();
final int maxLength = Math.max(Math.max(Math.max(graph1Box.width, graph1Box.height), graph2Box.width), graph2Box.height);
// final double scale = ( double )cp.sift.maxOctaveSize / maxLength;
/* rather ad hoc but we cannot just scale this to maxOctaveSize */
cp.sift.maxOctaveSize = Math.min(maxLength, 2 * p.sift.maxOctaveSize);
/* make sure that, despite rounding issues from scale, it is >= image size */
final double scale = (double) (cp.sift.maxOctaveSize - 1) / maxLength;
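/* e.g. (hypothetical values) maxLength = 8000 px and p.sift.maxOctaveSize = 1024 give cp.sift.maxOctaveSize = 2048 and scale = 2047 / 8000 ~ 0.256 */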
// cp.maxEpsilon *= scale;
final FloatArray2DSIFT sift = new FloatArray2DSIFT(cp.sift);
final SIFT ijSIFT = new SIFT(sift);
final ArrayList<Feature> features1 = new ArrayList<Feature>();
final ArrayList<Feature> features2 = new ArrayList<Feature>();
final ArrayList<PointMatch> candidates = new ArrayList<PointMatch>();
final ArrayList<PointMatch> inliers = new ArrayList<PointMatch>();
long s = System.currentTimeMillis();
ijSIFT.extractFeatures(layer1.getProject().getLoader().getFlatImage(layer1, graph1Box, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, selection1.getSelected(Patch.class), false, Color.GRAY).getProcessor(), features1);
Utils.log(features1.size() + " features extracted for graphs in layer \"" + layer1.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
s = System.currentTimeMillis();
ijSIFT.extractFeatures(layer2.getProject().getLoader().getFlatImage(layer2, graph2Box, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, selection2.getSelected(Patch.class), false, Color.GRAY).getProcessor(), features2);
Utils.log(features2.size() + " features extracted for graphs in layer \"" + layer2.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
boolean modelFound = false;
if (features1.size() > 0 && features2.size() > 0) {
s = System.currentTimeMillis();
FeatureTransform.matchFeatures(features1, features2, candidates, cp.rod);
final AbstractAffineModel2D<?> model;
switch(cp.expectedModelIndex) {
case 0:
model = new TranslationModel2D();
break;
case 1:
model = new RigidModel2D();
break;
case 2:
model = new SimilarityModel2D();
break;
case 3:
model = new AffineModel2D();
break;
default:
return false;
}
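/* RANSAC with an optional retry: if the best model is essentially the identity and rejectIdentity is enabled, its inliers are removed from the candidate pool and the search is repeated on the remaining candidates. */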
boolean again = false;
try {
do {
again = false;
modelFound = model.filterRansac(candidates, inliers, 1000, cp.maxEpsilon, cp.minInlierRatio, cp.minNumInliers, 3);
if (modelFound && cp.rejectIdentity) {
final ArrayList<Point> points = new ArrayList<Point>();
PointMatch.sourcePoints(inliers, points);
if (Transforms.isIdentity(model, points, cp.identityTolerance)) {
IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
candidates.removeAll(inliers);
inliers.clear();
again = true;
}
}
} while (again);
} catch (final NotEnoughDataPointsException e) {
modelFound = false;
}
if (modelFound) {
Utils.log("Model found for graphs in layer \"" + layer1.getTitle() + "\" and \"" + layer2.getTitle() + "\":\n correspondences " + inliers.size() + " of " + candidates.size() + "\n average residual error " + (model.getCost() / scale) + " px\n took " + (System.currentTimeMillis() - s) + " ms");
final AffineTransform b = new AffineTransform();
b.translate(graph2Box.x, graph2Box.y);
b.scale(1.0f / scale, 1.0f / scale);
b.concatenate(model.createAffine());
b.scale(scale, scale);
b.translate(-graph1Box.x, -graph1Box.y);
for (final Displayable d : selection1.getSelected(Patch.class)) d.getAffineTransform().preConcatenate(b);
/* assign patch affine transformation to the tile model */
for (final Tile<?> t : graph1) ((AbstractAffineTile2D<?>) t).initModel();
Display.repaint(layer1);
} else
IJ.log("No model found for graphs in layer \"" + layer1.getTitle() + "\" and \"" + layer2.getTitle() + "\":\n correspondence candidates " + candidates.size() + "\n took " + (System.currentTimeMillis() - s) + " ms");
}
return modelFound;
}
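The expectedModelIndex switch above reappears below in fetchPointMatches and alignLayersNonLinearlyJob. As a minimal sketch (not part of TrakEM2), the mapping could be factored into a small helper; the index convention 0 = translation, 1 = rigid, 2 = similarity, 3 = affine is taken directly from those switches, and TrakEM2's own mpicbg.trakem2.align.Util.createModel serves a similar purpose for the local smoothness filter model in ElasticMontage.

import mpicbg.models.AbstractAffineModel2D;
import mpicbg.models.AffineModel2D;
import mpicbg.models.RigidModel2D;
import mpicbg.models.SimilarityModel2D;
import mpicbg.models.TranslationModel2D;

final class ExpectedModelFactory {

    /** Returns a fresh model instance for the given index, or null for an unknown index. */
    static AbstractAffineModel2D<?> create(final int expectedModelIndex) {
        switch (expectedModelIndex) {
            case 0:
                return new TranslationModel2D();
            case 1:
                return new RigidModel2D();
            case 2:
                return new SimilarityModel2D();
            case 3:
                return new AffineModel2D();
            default:
                return null;
        }
    }
}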
use of mpicbg.models.AbstractAffineModel2D in project TrakEM2 by trakem2.
the class ElasticMontage method exec.
@SuppressWarnings("deprecation")
public final void exec(final Param param, final List<Patch> patches, final Set<Patch> fixedPatches) throws Exception {
/* free memory */
patches.get(0).getProject().getLoader().releaseAll();
/* create tiles and models for all patches */
final ArrayList<AbstractAffineTile2D<?>> tiles = new ArrayList<AbstractAffineTile2D<?>>();
final ArrayList<AbstractAffineTile2D<?>> fixedTiles = new ArrayList<AbstractAffineTile2D<?>>();
Align.tilesFromPatches(param.po, patches, fixedPatches, tiles, fixedTiles);
if (!param.isAligned) {
Align.alignTiles(param.po, tiles, fixedTiles, param.tilesAreInPlace, param.maxNumThreads);
/* Apply the estimated affine transform to patches */
for (final AbstractAffineTile2D<?> t : tiles) t.getPatch().setAffineTransform(t.createAffine());
Display.update();
}
/* generate tile pairs for all tiles that by now overlap */
final ArrayList<AbstractAffineTile2D<?>[]> tilePairs = new ArrayList<AbstractAffineTile2D<?>[]>();
AbstractAffineTile2D.pairOverlappingTiles(tiles, tilePairs);
/* check if there was any pair */
if (tilePairs.size() == 0) {
Utils.log("Elastic montage could not find any overlapping patches after pre-montaging.");
return;
}
Utils.log(tilePairs.size() + " pairs of patches will be block-matched...");
/* make pairwise global models local */
final ArrayList<Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>> pairs = new ArrayList<Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>>();
/*
* The following casting madness is necessary to get this code compiled
* with Sun/Oracle Java 6 which otherwise generates an inconvertible
* type exception.
*
* TODO Remove as soon as this bug is fixed in Sun/Oracle javac.
*/
for (final AbstractAffineTile2D<?>[] pair : tilePairs) {
final AbstractAffineModel2D<?> m;
switch(param.po.desiredModelIndex) {
case 0:
final TranslationModel2D t = (TranslationModel2D) (Object) pair[1].getModel().createInverse();
t.concatenate((TranslationModel2D) (Object) pair[0].getModel());
m = t;
break;
case 1:
final RigidModel2D r = (RigidModel2D) (Object) pair[1].getModel().createInverse();
r.concatenate((RigidModel2D) (Object) pair[0].getModel());
m = r;
break;
case 2:
final SimilarityModel2D s = (SimilarityModel2D) (Object) pair[1].getModel().createInverse();
s.concatenate((SimilarityModel2D) (Object) pair[0].getModel());
m = s;
break;
case 3:
final AffineModel2D a = (AffineModel2D) (Object) pair[1].getModel().createInverse();
a.concatenate((AffineModel2D) (Object) pair[0].getModel());
m = a;
break;
default:
m = null;
}
pairs.add(new Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>(pair[0], pair[1], m));
}
/* Elastic alignment */
/* Initialization */
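/* springTriangleHeightTwice = springLengthSpringMesh * sqrt(3), i.e. twice the height of an equilateral triangle with side springLengthSpringMesh; it is used as the vertical step when sizing the mesh grid below. */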
final double springTriangleHeightTwice = 2 * Math.sqrt(0.75 * param.springLengthSpringMesh * param.springLengthSpringMesh);
final ArrayList<SpringMesh> meshes = new ArrayList<SpringMesh>(tiles.size());
final HashMap<AbstractAffineTile2D<?>, SpringMesh> tileMeshMap = new HashMap<AbstractAffineTile2D<?>, SpringMesh>();
for (final AbstractAffineTile2D<?> tile : tiles) {
final double w = tile.getWidth();
final double h = tile.getHeight();
final int numX = Math.max(2, (int) Math.ceil(w / param.springLengthSpringMesh) + 1);
final int numY = Math.max(2, (int) Math.ceil(h / springTriangleHeightTwice) + 1);
final double wMesh = (numX - 1) * param.springLengthSpringMesh;
final double hMesh = (numY - 1) * springTriangleHeightTwice;
final SpringMesh mesh = new SpringMesh(numX, numY, wMesh, hMesh, param.stiffnessSpringMesh, param.maxStretchSpringMesh * param.bmScale, param.dampSpringMesh);
meshes.add(mesh);
tileMeshMap.put(tile, mesh);
}
// final int blockRadius = Math.max( 32, Util.roundPos( param.springLengthSpringMesh / 2 ) );
final int blockRadius = Math.max(Util.roundPos(16 / param.bmScale), param.bmBlockRadius);
/* TODO set this to something larger than the largest error of the approximate model */
final int searchRadius = param.bmSearchRadius;
final AbstractModel<?> localSmoothnessFilterModel = mpicbg.trakem2.align.Util.createModel(param.bmLocalModelIndex);
for (final Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform> pair : pairs) {
final AbstractAffineTile2D<?> t1 = pair.a;
final AbstractAffineTile2D<?> t2 = pair.b;
final SpringMesh m1 = tileMeshMap.get(t1);
final SpringMesh m2 = tileMeshMap.get(t2);
final ArrayList<PointMatch> pm12 = new ArrayList<PointMatch>();
final ArrayList<PointMatch> pm21 = new ArrayList<PointMatch>();
final ArrayList<Vertex> v1 = m1.getVertices();
final ArrayList<Vertex> v2 = m2.getVertices();
final String patchName1 = patchName(t1.getPatch());
final String patchName2 = patchName(t2.getPatch());
final PatchImage pi1 = t1.getPatch().createTransformedImage();
if (pi1 == null) {
Utils.log("Patch `" + patchName1 + "' failed generating a transformed image. Skipping...");
continue;
}
final PatchImage pi2 = t2.getPatch().createTransformedImage();
if (pi2 == null) {
Utils.log("Patch `" + patchName2 + "' failed generating a transformed image. Skipping...");
continue;
}
final FloatProcessor fp1 = (FloatProcessor) pi1.target.convertToFloat();
final ByteProcessor mask1 = pi1.getMask();
final FloatProcessor fpMask1 = mask1 == null ? null : scaleByte(mask1);
final FloatProcessor fp2 = (FloatProcessor) pi2.target.convertToFloat();
final ByteProcessor mask2 = pi2.getMask();
final FloatProcessor fpMask2 = mask2 == null ? null : scaleByte(mask2);
if (!fixedTiles.contains(t1)) {
BlockMatching.matchByMaximalPMCC(fp1, fp2, fpMask1, fpMask2, param.bmScale, pair.c, blockRadius, blockRadius, searchRadius, searchRadius, param.bmMinR, param.bmRodR, param.bmMaxCurvatureR, v1, pm12, new ErrorStatistic(1));
if (param.bmUseLocalSmoothnessFilter) {
Utils.log("`" + patchName1 + "' > `" + patchName2 + "': found " + pm12.size() + " correspondence candidates.");
localSmoothnessFilterModel.localSmoothnessFilter(pm12, pm12, param.bmLocalRegionSigma, param.bmMaxLocalEpsilon, param.bmMaxLocalTrust);
Utils.log("`" + patchName1 + "' > `" + patchName2 + "': " + pm12.size() + " candidates passed local smoothness filter.");
} else {
Utils.log("`" + patchName1 + "' > `" + patchName2 + "': found " + pm12.size() + " correspondences.");
}
} else {
Utils.log("Skipping fixed patch `" + patchName1 + "'.");
}
if (!fixedTiles.contains(t2)) {
BlockMatching.matchByMaximalPMCC(fp2, fp1, fpMask2, fpMask1, param.bmScale, pair.c.createInverse(), blockRadius, blockRadius, searchRadius, searchRadius, param.bmMinR, param.bmRodR, param.bmMaxCurvatureR, v2, pm21, new ErrorStatistic(1));
if (param.bmUseLocalSmoothnessFilter) {
Utils.log("`" + patchName1 + "' < `" + patchName2 + "': found " + pm21.size() + " correspondence candidates.");
localSmoothnessFilterModel.localSmoothnessFilter(pm21, pm21, param.bmLocalRegionSigma, param.bmMaxLocalEpsilon, param.bmMaxLocalTrust);
Utils.log("`" + patchName1 + "' < `" + patchName2 + "': " + pm21.size() + " candidates passed local smoothness filter.");
} else {
Utils.log("`" + patchName1 + "' < `" + patchName2 + "': found " + pm21.size() + " correspondences.");
}
} else {
Utils.log("Skipping fixed patch `" + patchName2 + "'.");
}
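/* Wire the block matches into the meshes: each match adds a zero-length spring from the mesh-1 vertex to a passive copy of its target location in mesh 2 (and vice versa for pm21 below), so the two meshes pull on each other during optimization. */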
for (final PointMatch pm : pm12) {
final Vertex p1 = (Vertex) pm.getP1();
final Vertex p2 = new Vertex(pm.getP2());
p1.addSpring(p2, new Spring(0, 1.0f));
m2.addPassiveVertex(p2);
}
for (final PointMatch pm : pm21) {
final Vertex p1 = (Vertex) pm.getP1();
final Vertex p2 = new Vertex(pm.getP2());
p1.addSpring(p2, new Spring(0, 1.0f));
m1.addPassiveVertex(p2);
}
}
/* initialize */
for (final Map.Entry<AbstractAffineTile2D<?>, SpringMesh> entry : tileMeshMap.entrySet()) entry.getValue().init(entry.getKey().getModel());
/* optimize the meshes */
try {
final long t0 = System.currentTimeMillis();
IJ.log("Optimizing spring meshes...");
if (param.useLegacyOptimizer) {
Utils.log(" ...using legacy optimizer...");
SpringMesh.optimizeMeshes2(meshes, param.po.maxEpsilon, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
} else {
SpringMesh.optimizeMeshes(meshes, param.po.maxEpsilon, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
}
IJ.log("Done optimizing spring meshes. Took " + (System.currentTimeMillis() - t0) + " ms");
} catch (final NotEnoughDataPointsException e) {
Utils.log("There were not enough data points to get the spring mesh optimizing.");
e.printStackTrace();
return;
}
/* apply */
for (final Map.Entry<AbstractAffineTile2D<?>, SpringMesh> entry : tileMeshMap.entrySet()) {
final AbstractAffineTile2D<?> tile = entry.getKey();
if (!fixedTiles.contains(tile)) {
final Patch patch = tile.getPatch();
final SpringMesh mesh = entry.getValue();
final Set<PointMatch> matches = mesh.getVA().keySet();
Rectangle box = patch.getCoordinateTransformBoundingBox();
/* compensate for existing coordinate transform bounding box */
for (final PointMatch pm : matches) {
final Point p1 = pm.getP1();
final double[] l = p1.getL();
l[0] += box.x;
l[1] += box.y;
}
final ThinPlateSplineTransform mlt = ElasticLayerAlignment.makeTPS(matches);
patch.appendCoordinateTransform(mlt);
box = patch.getCoordinateTransformBoundingBox();
patch.getAffineTransform().setToTranslation(box.x, box.y);
patch.updateInDatabase("transform");
patch.updateBucket();
patch.updateMipMaps();
}
}
Utils.log("Done.");
}
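As a worked example of the mesh sizing arithmetic in exec() above, the following minimal sketch (hypothetical values, not part of TrakEM2) computes the grid for a 4096 x 4096 px tile with springLengthSpringMesh = 100:

public class MeshSizingSketch {

    public static void main(final String[] args) {
        final double springLength = 100;   // hypothetical param.springLengthSpringMesh
        final double w = 4096, h = 4096;   // hypothetical tile size in px
        final double springTriangleHeightTwice = 2 * Math.sqrt(0.75 * springLength * springLength);  // ~173.2
        final int numX = Math.max(2, (int) Math.ceil(w / springLength) + 1);               // 42 columns
        final int numY = Math.max(2, (int) Math.ceil(h / springTriangleHeightTwice) + 1);  // 25 rows
        final double wMesh = (numX - 1) * springLength;                 // 4100.0, slightly larger than the tile
        final double hMesh = (numY - 1) * springTriangleHeightTwice;    // ~4156.9
        System.out.println(numX + " x " + numY + " vertices, mesh extent " + wMesh + " x " + hMesh);
    }
}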
use of mpicbg.models.AbstractAffineModel2D in project TrakEM2 by trakem2.
the class Align method createMLST.
/**
* Temporary helper method that creates a {@link MovingLeastSquaresTransform2}
* whose model class is chosen from the number of matches: one match uses a
* {@link TranslationModel2D}, two a {@link SimilarityModel2D}, and three or
* more an {@link AffineModel2D}.
*
* @param matches the {@link PointMatch PointMatches} that define the transform
* @param alpha the moving-least-squares alpha parameter
* @return the configured {@link MovingLeastSquaresTransform2}
* @throws Exception if the matches cannot be set on the transform
*/
public static final MovingLeastSquaresTransform2 createMLST(final Collection<PointMatch> matches, final double alpha) throws Exception {
final MovingLeastSquaresTransform2 mlst = new MovingLeastSquaresTransform2();
mlst.setAlpha(alpha);
Class<? extends AbstractAffineModel2D<?>> c = AffineModel2D.class;
switch(matches.size()) {
case 1:
c = TranslationModel2D.class;
break;
case 2:
c = SimilarityModel2D.class;
break;
default:
break;
}
mlst.setModel(c);
mlst.setMatches(matches);
return mlst;
}
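A minimal usage sketch for createMLST (hypothetical coordinates, assuming the double[]-based mpicbg Point API used elsewhere on this page; with exactly two matches the switch above selects SimilarityModel2D). The returned transform is used only through the mpicbg.models.CoordinateTransform interface:

import java.util.ArrayList;

import mpicbg.models.CoordinateTransform;
import mpicbg.models.Point;
import mpicbg.models.PointMatch;
import mpicbg.trakem2.align.Align;

public class CreateMLSTSketch {

    public static void main(final String[] args) throws Exception {
        final ArrayList<PointMatch> matches = new ArrayList<PointMatch>();
        // two source -> target correspondences, both shifted by (10, 5)
        matches.add(new PointMatch(new Point(new double[]{0, 0}), new Point(new double[]{10, 5})));
        matches.add(new PointMatch(new Point(new double[]{100, 0}), new Point(new double[]{110, 5})));
        final CoordinateTransform mlst = Align.createMLST(matches, 1.0);
        final double[] location = new double[]{50, 0};
        mlst.applyInPlace(location);   // maps the point with the moving-least-squares transform
        System.out.println(location[0] + ", " + location[1]);
    }
}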
use of mpicbg.models.AbstractAffineModel2D in project TrakEM2 by trakem2.
the class Align method fetchPointMatches.
/**
* Fetch a {@link Collection} of corresponding {@link PointMatch PointMatches}
* for two tiles. Both {@link Feature SIFT-features} and
* {@linkplain PointMatch corresponding points} are cached to disk.
*
* @param p the matching and model parameters
* @param t1 the first tile
* @param t2 the second tile
* @return
* <dl>
* <dt>null</dt><dd>if matching failed for some reasons</dd>
* <dt>empty {@link Collection}</dt><dd>if there was no consistent set
* of {@link PointMatch matches}</dd>
* <dt>{@link Collection} of {@link PointMatch PointMatches}</dt>
* <dd>if there was a consistent set of {@link PointMatch
* PointMatches}</dd>
* </dl>
*/
protected static final Collection<PointMatch> fetchPointMatches(final Param p, final AbstractAffineTile2D<?> t1, final AbstractAffineTile2D<?> t2) {
final Collection<PointMatch> pointMatches = deserializePointMatches(p, t1, t2);
if (pointMatches == null) {
final List<PointMatch> candidates = new ArrayList<PointMatch>();
final List<PointMatch> inliers = new ArrayList<PointMatch>();
final long s = System.currentTimeMillis();
FeatureTransform.matchFeatures(fetchFeatures(p, t1), fetchFeatures(p, t2), candidates, p.rod);
final AbstractAffineModel2D<?> model;
switch(p.expectedModelIndex) {
case 0:
model = new TranslationModel2D();
break;
case 1:
model = new RigidModel2D();
break;
case 2:
model = new SimilarityModel2D();
break;
case 3:
model = new AffineModel2D();
break;
default:
return null;
}
final boolean modelFound = findModel(model, candidates, inliers, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, p.rejectIdentity, p.identityTolerance);
if (modelFound)
Utils.log("Model found for tiles \"" + t1.getPatch() + "\" and \"" + t2.getPatch() + "\":\n correspondences " + inliers.size() + " of " + candidates.size() + "\n average residual error " + model.getCost() + " px\n took " + (System.currentTimeMillis() - s) + " ms");
else
Utils.log("No model found for tiles \"" + t1.getPatch() + "\" and \"" + t2.getPatch() + "\":\n correspondence candidates " + candidates.size() + "\n took " + (System.currentTimeMillis() - s) + " ms");
if (!serializePointMatches(p, t1, t2, inliers))
Utils.log("Saving point matches failed for tile \"" + t1.getPatch() + "\" and tile \"" + t2.getPatch() + "\"");
return inliers;
}
return pointMatches;
}
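A minimal sketch of how a caller might act on the documented return states; describePointMatches is a hypothetical helper assumed to live inside Align, since fetchPointMatches is protected:

static String describePointMatches(final Param p, final AbstractAffineTile2D<?> t1, final AbstractAffineTile2D<?> t2) {
    final Collection<PointMatch> matches = fetchPointMatches(p, t1, t2);
    if (matches == null)
        return "matching failed";
    if (matches.isEmpty())
        return "no consistent set of matches";
    return matches.size() + " consistent matches";
}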
use of mpicbg.models.AbstractAffineModel2D in project TrakEM2 by trakem2.
the class AlignLayersTask method alignLayersNonLinearlyJob.
public static final void alignLayersNonLinearlyJob(final LayerSet layerSet, final int first, final int last, final boolean propagateTransform, final Rectangle fov, final Filter<Patch> filter) {
// will reverse order if necessary
final List<Layer> layerRange = layerSet.getLayers(first, last);
final Align.Param p = Align.param.clone();
// Remove all empty layers
for (final Iterator<Layer> it = layerRange.iterator(); it.hasNext(); ) {
if (!it.next().contains(Patch.class, true)) {
it.remove();
}
}
if (0 == layerRange.size()) {
Utils.log("No layers in range show any images!");
return;
}
/* nothing to align if only one layer remains in the range */
if (layerRange.size() < 2)
return;
final List<Patch> all = new ArrayList<Patch>();
for (final Layer la : layerRange) {
for (final Patch patch : la.getAll(Patch.class)) {
if (null != filter && !filter.accept(patch))
continue;
all.add(patch);
}
}
AlignTask.transformPatchesAndVectorData(all, new Runnable() {
@Override
public void run() {
// ///
final Loader loader = layerSet.getProject().getLoader();
// Not concurrent safe! So two copies, one per layer and Thread:
final SIFT ijSIFT1 = new SIFT(new FloatArray2DSIFT(p.sift));
final SIFT ijSIFT2 = new SIFT(new FloatArray2DSIFT(p.sift));
final Collection<Feature> features1 = new ArrayList<Feature>();
final Collection<Feature> features2 = new ArrayList<Feature>();
final List<PointMatch> candidates = new ArrayList<PointMatch>();
final List<PointMatch> inliers = new ArrayList<PointMatch>();
final int n_proc = Runtime.getRuntime().availableProcessors() > 1 ? 2 : 1;
final ExecutorService exec = Utils.newFixedThreadPool(n_proc, "alignLayersNonLinearly");
List<Patch> previousPatches = null;
int s = 0;
for (int i = 1; i < layerRange.size(); ++i) {
if (Thread.currentThread().isInterrupted())
break;
final Layer layer1 = layerRange.get(i - 1);
final Layer layer2 = layerRange.get(i);
final long t0 = System.currentTimeMillis();
features1.clear();
features2.clear();
final Rectangle box1 = null == fov ? layer1.getMinimalBoundingBox(Patch.class, true) : fov;
final Rectangle box2 = null == fov ? layer2.getMinimalBoundingBox(Patch.class, true) : fov;
/* calculate the common scale factor for both flat images */
final double scale = Math.min(1.0f, (double) p.sift.maxOctaveSize / (double) Math.max(box1.width, Math.max(box1.height, Math.max(box2.width, box2.height))));
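// e.g. (hypothetical values) p.sift.maxOctaveSize = 1024 and a largest box dimension of 4096 px give scale = 0.25, so both flat images are rendered at quarter resolution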
final List<Patch> patches1;
if (null == previousPatches) {
patches1 = layer1.getAll(Patch.class);
if (null != filter) {
for (final Iterator<Patch> it = patches1.iterator(); it.hasNext(); ) {
if (!filter.accept(it.next()))
it.remove();
}
}
} else {
patches1 = previousPatches;
}
final List<Patch> patches2 = layer2.getAll(Patch.class);
if (null != filter) {
for (final Iterator<Patch> it = patches2.iterator(); it.hasNext(); ) {
if (!filter.accept(it.next()))
it.remove();
}
}
final Future<ImageProcessor> fu1 = exec.submit(new Callable<ImageProcessor>() {
@Override
public ImageProcessor call() {
final ImageProcessor ip1 = loader.getFlatImage(layer1, box1, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches1, true).getProcessor();
ijSIFT1.extractFeatures(ip1, features1);
Utils.log(features1.size() + " features extracted in layer \"" + layer1.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
return ip1;
}
});
final Future<ImageProcessor> fu2 = exec.submit(new Callable<ImageProcessor>() {
@Override
public ImageProcessor call() {
final ImageProcessor ip2 = loader.getFlatImage(layer2, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches2, true).getProcessor();
ijSIFT2.extractFeatures(ip2, features2);
Utils.log(features2.size() + " features extracted in layer \"" + layer2.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
return ip2;
}
});
final ImageProcessor ip1, ip2;
try {
ip1 = fu1.get();
ip2 = fu2.get();
} catch (final Exception e) {
IJError.print(e);
return;
}
if (features1.size() > 0 && features2.size() > 0) {
final long t1 = System.currentTimeMillis();
candidates.clear();
FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
final AbstractAffineModel2D<?> model;
switch(p.expectedModelIndex) {
case 0:
model = new TranslationModel2D();
break;
case 1:
model = new RigidModel2D();
break;
case 2:
model = new SimilarityModel2D();
break;
case 3:
model = new AffineModel2D();
break;
default:
return;
}
boolean modelFound;
boolean again = false;
try {
do {
again = false;
modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
if (modelFound && p.rejectIdentity) {
final ArrayList<Point> points = new ArrayList<Point>();
PointMatch.sourcePoints(inliers, points);
if (Transforms.isIdentity(model, points, p.identityTolerance)) {
IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
candidates.removeAll(inliers);
inliers.clear();
again = true;
}
}
} while (again);
} catch (final NotEnoughDataPointsException e) {
modelFound = false;
}
if (modelFound) {
IJ.log("Model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n correspondences " + inliers.size() + " of " + candidates.size() + "\n average residual error " + (model.getCost() / scale) + " px\n took " + (System.currentTimeMillis() - t1) + " ms");
final ImagePlus imp1 = new ImagePlus("target", ip1);
final ImagePlus imp2 = new ImagePlus("source", ip2);
final List<Point> sourcePoints = new ArrayList<Point>();
final List<Point> targetPoints = new ArrayList<Point>();
PointMatch.sourcePoints(inliers, sourcePoints);
PointMatch.targetPoints(inliers, targetPoints);
imp2.setRoi(Util.pointsToPointRoi(sourcePoints));
imp1.setRoi(Util.pointsToPointRoi(targetPoints));
final ImageProcessor mask1 = ip1.duplicate();
mask1.threshold(1);
final ImageProcessor mask2 = ip2.duplicate();
mask2.threshold(1);
final Transformation warp = bUnwarpJ_.computeTransformationBatch(imp2, imp1, mask2, mask1, elasticParam);
final CubicBSplineTransform transf = new CubicBSplineTransform();
transf.set(warp.getIntervals(), warp.getDirectDeformationCoefficientsX(), warp.getDirectDeformationCoefficientsY(), imp2.getWidth(), imp2.getHeight());
final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
// Transform desired patches only
for (final Patch patch : patches2) {
try {
final Rectangle pbox = patch.getCoordinateTransformBoundingBox();
final AffineTransform at = patch.getAffineTransform();
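// pat maps the patch's coordinate-transform space into the scaled, box2-relative space where the bUnwarpJ warp was computed: offset by the CT bounding box, apply the patch affine into world space, shift by -box2 and scale down (the calls below apply to a point in reverse order)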
final AffineTransform pat = new AffineTransform();
pat.scale(scale, scale);
pat.translate(-box2.x, -box2.y);
pat.concatenate(at);
pat.translate(-pbox.x, -pbox.y);
final mpicbg.trakem2.transform.AffineModel2D toWorld = new mpicbg.trakem2.transform.AffineModel2D();
toWorld.set(pat);
final CoordinateTransformList<CoordinateTransform> ctl = new CoordinateTransformList<CoordinateTransform>();
// move the patch into the global space where bUnwarpJ calculated the transformation
ctl.add(toWorld);
// Apply non-linear transformation
ctl.add(transf);
// move it back
ctl.add(toWorld.createInverse());
patch.appendCoordinateTransform(ctl);
fus.add(patch.updateMipMaps());
// Compensate for offset between boxes
final AffineTransform offset = new AffineTransform();
offset.translate(box1.x - box2.x, box1.y - box2.y);
offset.concatenate(at);
patch.setAffineTransform(offset);
} catch (final Exception e) {
e.printStackTrace();
}
}
// await regeneration of all mipmaps
Utils.wait(fus);
Display.repaint(layer2);
} else
IJ.log("No model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n correspondence candidates " + candidates.size() + "\n took " + (System.currentTimeMillis() - s) + " ms");
}
IJ.showProgress(++s, layerRange.size());
// for next iteration
previousPatches = patches2;
}
exec.shutdown();
if (propagateTransform)
Utils.log("Propagation not implemented yet for non-linear layer alignment.");
/* // CANNOT be done (at least not trivially:
* //an appropriate "scale" cannot be computed, and the box2 is part of the spline computation.
if ( propagateTransform && null != lastTransform )
{
for (final Layer la : l.getParent().getLayers(last > first ? last +1 : first -1, last > first ? l.getParent().size() -1 : 0)) {
// Transform visible patches only
final Rectangle box2 = la.getMinimalBoundingBox( Patch.class, true );
for ( final Displayable disp : la.getDisplayables( Patch.class, true ) )
{
// ...
}
}
}
*/
}
});
// end of transformPatchesAndVectorData
}