use of mpicbg.models.PointMatch in project TrakEM2 by trakem2.
In the class Display, the method insertStack:
/**
* @param stack_patch one Patch out of the series of Patches that together form a stack.
*/
private boolean insertStack(final ProjectThing target_landmarks, final Project source, final ProjectThing source_landmarks, final Patch stack_patch) {
final List<Ball> l1 = new ArrayList<Ball>();
final List<Ball> l2 = new ArrayList<Ball>();
// source is the one that has the stack_patch
final Collection<ProjectThing> b1s = source_landmarks.findChildrenOfType("ball");
// target is this
final Collection<ProjectThing> b2s = target_landmarks.findChildrenOfType("ball");
final HashSet<String> seen = new HashSet<String>();
for (final ProjectThing b1 : b1s) {
final Ball ball1 = (Ball) b1.getObject();
if (null == ball1) {
Utils.log("ERROR: there's an empty 'ball' node in source project " + source.toString());
return false;
}
final String title1 = ball1.getTitle();
for (final ProjectThing b2 : b2s) {
final Ball ball2 = (Ball) b2.getObject();
if (null == ball2) {
Utils.log("ERROR: there's an empty 'ball' node in target project " + project.toString());
return false;
}
if (title1.equals(ball2.getTitle())) {
if (seen.contains(title1))
continue;
seen.add(title1);
l1.add(ball1);
l2.add(ball2);
}
}
}
if (l1.size() < 4) {
Utils.log("ERROR: found only " + l1.size() + " common landmarks: needs at least 4!");
return false;
}
// Extract coordinates of source project landmarks, in patch stack coordinate space
final List<double[]> c1 = new ArrayList<double[]>();
for (final Ball ball1 : l1) {
final Map<Layer, double[]> m = ball1.getRawBalls();
if (1 != m.size()) {
Utils.log("ERROR: ball object " + ball1 + " from source project " + source + " has " + m.size() + " balls instead of just 1.");
return false;
}
final Map.Entry<Layer, double[]> e = m.entrySet().iterator().next();
final Layer layer = e.getKey();
final double[] xyr = e.getValue();
final double[] fin = new double[] { xyr[0], xyr[1] };
final AffineTransform affine = ball1.getAffineTransformCopy();
try {
affine.preConcatenate(stack_patch.getAffineTransform().createInverse());
} catch (final Exception nite) {
IJError.print(nite);
return false;
}
final double[] fout = new double[2];
affine.transform(fin, 0, fout, 0, 1);
c1.add(new double[] { fout[0], fout[1], layer.getParent().indexOf(layer) });
}
// Extract coordinates of target (this) project landmarks, in calibrated world space
final List<double[]> c2 = new ArrayList<double[]>();
for (final Ball ball2 : l2) {
final double[][] b = ball2.getBalls();
if (1 != b.length) {
Utils.log("ERROR: ball object " + ball2 + " from target project " + project + " has " + b.length + " balls instead of just 1.");
return false;
}
final double[] fin = new double[] { b[0][0], b[0][1] };
final AffineTransform affine = ball2.getAffineTransformCopy();
final double[] fout = new double[2];
affine.transform(fin, 0, fout, 0, 1);
c2.add(new double[] { fout[0], fout[1], b[0][2] });
}
// Print landmarks:
Utils.log("Landmarks:");
for (Iterator<double[]> it1 = c1.iterator(), it2 = c2.iterator(); it1.hasNext(); ) {
Utils.log(Utils.toString(it1.next()) + " <--> " + Utils.toString(it2.next()));
}
// Create point matches
final List<PointMatch> pm = new ArrayList<PointMatch>();
for (Iterator<double[]> it1 = c1.iterator(), it2 = c2.iterator(); it1.hasNext(); ) {
pm.add(new mpicbg.models.PointMatch(new mpicbg.models.Point(it1.next()), new mpicbg.models.Point(it2.next())));
}
// Estimate AffineModel3D
final AffineModel3D aff3d = new AffineModel3D();
try {
aff3d.fit(pm);
} catch (final Exception e) {
IJError.print(e);
return false;
}
// Create and add the Stack
final String path = stack_patch.getImageFilePath();
final Stack st = new Stack(project, new File(path).getName(), 0, 0, getLayerSet().getLayers().get(0), path);
st.setInvertibleCoordinateTransform(aff3d);
getLayerSet().add(st);
return true;
}
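The essential mpicbg pattern above is: pair up 3D landmark coordinates as PointMatch objects and fit an AffineModel3D to them. The following is a minimal, self-contained sketch of that pattern; the class name and all coordinate values are invented for illustration and are not part of TrakEM2.
import java.util.ArrayList;
import java.util.List;
import mpicbg.models.AffineModel3D;
import mpicbg.models.Point;
import mpicbg.models.PointMatch;

public class FitAffine3DExample {
    public static void main(final String[] args) throws Exception {
        // Hypothetical paired landmarks: stack (local) coordinates vs. world coordinates.
        final double[][] local = { { 0, 0, 0 }, { 100, 0, 0 }, { 0, 100, 0 }, { 0, 0, 10 }, { 50, 50, 5 } };
        final double[][] world = { { 10, 20, 3 }, { 112, 18, 2 }, { 8, 121, 4 }, { 11, 19, 33 }, { 62, 71, 18 } };
        // AffineModel3D.fit needs at least 4 correspondences that are not coplanar.
        final List<PointMatch> matches = new ArrayList<PointMatch>();
        for (int i = 0; i < local.length; ++i) {
            matches.add(new PointMatch(new Point(local[i]), new Point(world[i])));
        }
        final AffineModel3D model = new AffineModel3D();
        model.fit(matches); // may throw NotEnoughDataPointsException / IllDefinedDataPointsException
        // Map a stack coordinate into world space with the fitted model.
        final double[] p = model.apply(new double[] { 25, 25, 2 });
        System.out.println(p[0] + ", " + p[1] + ", " + p[2]);
    }
}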
use of mpicbg.models.PointMatch in project TrakEM2 by trakem2.
In the class NonLinearTransformMode, the method createCT:
private CoordinateTransform createCT() throws Exception {
final Collection<PointMatch> pm = new ArrayList<PointMatch>();
for (final Point p : points) {
pm.add(new PointMatch(new Point(p.getL()), new Point(p.getW())));
}
/*
* TODO replace this with the desired parameters of the transformation
*/
final MovingLeastSquaresTransform2 mlst = new MovingLeastSquaresTransform2();
mlst.setAlpha(1.0f);
Class<? extends AbstractAffineModel2D<?>> c = AffineModel2D.class;
switch(points.size()) {
case 1:
c = TranslationModel2D.class;
break;
case 2:
c = SimilarityModel2D.class;
break;
default:
break;
}
mlst.setModel(c);
mlst.setMatches(pm);
return mlst;
}
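The same MovingLeastSquaresTransform2 setup can be exercised on its own. The sketch below mirrors createCT(): it builds PointMatches from paired 2D control points, falls back to simpler local models when only one or two landmarks exist, and warps a point with the result. The helper name and all coordinates are hypothetical.
import java.util.ArrayList;
import java.util.Collection;
import mpicbg.models.AffineModel2D;
import mpicbg.models.MovingLeastSquaresTransform2;
import mpicbg.models.Point;
import mpicbg.models.PointMatch;
import mpicbg.models.SimilarityModel2D;
import mpicbg.models.TranslationModel2D;

public class MlsExample {
    // Hypothetical helper, not TrakEM2 API: build an MLS transform from paired 2D control points.
    static MovingLeastSquaresTransform2 mlsFromPairs(final double[][] from, final double[][] to) throws Exception {
        final Collection<PointMatch> matches = new ArrayList<PointMatch>();
        for (int i = 0; i < from.length; ++i) {
            matches.add(new PointMatch(new Point(from[i]), new Point(to[i])));
        }
        final MovingLeastSquaresTransform2 mlst = new MovingLeastSquaresTransform2();
        mlst.setAlpha(1.0f); // weighting exponent for the local models
        // Fall back to simpler local models for few landmarks, as in the switch above.
        if (from.length == 1) mlst.setModel(TranslationModel2D.class);
        else if (from.length == 2) mlst.setModel(SimilarityModel2D.class);
        else mlst.setModel(AffineModel2D.class);
        mlst.setMatches(matches);
        return mlst;
    }

    public static void main(final String[] args) throws Exception {
        final double[][] from = { { 0, 0 }, { 100, 0 }, { 0, 100 } };
        final double[][] to = { { 5, 5 }, { 108, 2 }, { -3, 104 } };
        final MovingLeastSquaresTransform2 t = mlsFromPairs(from, to);
        final double[] p = { 50, 50 };
        t.applyInPlace(p); // warp a point with the fitted transform
        System.out.println(p[0] + ", " + p[1]);
    }
}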
use of mpicbg.models.PointMatch in project TrakEM2 by trakem2.
In the class DistortionCorrectionTask, the method run:
public static final void run(final CorrectDistortionFromSelectionParam p, final List<Patch> patches, final Displayable active, final Layer layer, final Worker worker) {
/* no multiple inheritance, so p cannot be an Align.ParamOptimize, working around legacy by copying data into one ... */
final Align.ParamOptimize ap = new Align.ParamOptimize();
ap.sift.set(p.sift);
ap.desiredModelIndex = p.desiredModelIndex;
ap.expectedModelIndex = p.expectedModelIndex;
ap.maxEpsilon = p.maxEpsilon;
ap.minInlierRatio = p.minInlierRatio;
ap.rod = p.rod;
ap.identityTolerance = p.identityTolerance;
ap.lambda = p.lambdaRegularize;
ap.maxIterations = p.maxIterationsOptimize;
ap.maxPlateauwidth = p.maxPlateauwidthOptimize;
ap.minNumInliers = p.minNumInliers;
ap.regularize = p.regularize;
ap.regularizerModelIndex = p.regularizerIndex;
ap.rejectIdentity = p.rejectIdentity;
/**
* Get all patches that will be affected.
*/
final List<Patch> allPatches = new ArrayList<Patch>();
for (final Layer l : layer.getParent().getLayers().subList(p.firstLayerIndex, p.lastLayerIndex + 1)) for (final Displayable d : l.getDisplayables(Patch.class)) allPatches.add((Patch) d);
/**
* Unset the coordinate transforms of all patches if desired.
*/
if (p.clearTransform) {
if (worker != null)
worker.setTaskName("Clearing present transforms");
setCoordinateTransform(allPatches, null, Runtime.getRuntime().availableProcessors());
Display.repaint();
}
if (worker != null)
worker.setTaskName("Establishing SIFT correspondences");
final List<AbstractAffineTile2D<?>> tiles = new ArrayList<AbstractAffineTile2D<?>>();
final List<AbstractAffineTile2D<?>> fixedTiles = new ArrayList<AbstractAffineTile2D<?>>();
final List<Patch> fixedPatches = new ArrayList<Patch>();
if (active != null && active instanceof Patch)
fixedPatches.add((Patch) active);
Align.tilesFromPatches(ap, patches, fixedPatches, tiles, fixedTiles);
final List<AbstractAffineTile2D<?>[]> tilePairs = new ArrayList<AbstractAffineTile2D<?>[]>();
if (p.tilesAreInPlace)
AbstractAffineTile2D.pairOverlappingTiles(tiles, tilePairs);
else
AbstractAffineTile2D.pairTiles(tiles, tilePairs);
AbstractAffineTile2D<?> fixedTile = null;
if (fixedTiles.size() > 0)
fixedTile = fixedTiles.get(0);
else
fixedTile = tiles.get(0);
Align.connectTilePairs(ap, tiles, tilePairs, p.maxNumThreadsSift, p.multipleHypotheses);
/**
* Shift all local coordinates into the original image frame
*/
for (final AbstractAffineTile2D<?> tile : tiles) {
final Rectangle box = tile.getPatch().getCoordinateTransformBoundingBox();
for (final PointMatch m : tile.getMatches()) {
final double[] l = m.getP1().getL();
final double[] w = m.getP1().getW();
l[0] += box.x;
l[1] += box.y;
w[0] = l[0];
w[1] = l[1];
}
}
if (Thread.currentThread().isInterrupted())
return;
final List<Set<Tile<?>>> graphs = AbstractAffineTile2D.identifyConnectedGraphs(tiles);
if (graphs.size() > 1)
Utils.log("Could not interconnect all images with correspondences. ");
final List<AbstractAffineTile2D<?>> interestingTiles;
/**
* Find largest graph.
*/
Set<Tile<?>> largestGraph = null;
for (final Set<Tile<?>> graph : graphs) if (largestGraph == null || largestGraph.size() < graph.size())
largestGraph = graph;
interestingTiles = new ArrayList<AbstractAffineTile2D<?>>();
for (final Tile<?> t : largestGraph) interestingTiles.add((AbstractAffineTile2D<?>) t);
if (Thread.currentThread().isInterrupted())
return;
Utils.log("Estimating lens model:");
/* initialize with pure affine */
Align.optimizeTileConfiguration(ap, interestingTiles, fixedTiles);
/* measure the current error */
double e = 0;
int n = 0;
for (final AbstractAffineTile2D<?> t : interestingTiles) for (final PointMatch pm : t.getMatches()) {
e += pm.getDistance();
++n;
}
e /= n;
double dEpsilon_i = 0;
double epsilon_i = e;
double dEpsilon_0 = 0;
NonLinearTransform lensModel = null;
Utils.log("0: epsilon = " + e);
/* Store original point locations */
final HashMap<Point, Point> originalPoints = new HashMap<Point, Point>();
for (final AbstractAffineTile2D<?> t : interestingTiles) for (final PointMatch pm : t.getMatches()) originalPoints.put(pm.getP1(), pm.getP1().clone());
/* ad hoc conditions to terminate iteration:
* small improvement ( 1/1000) relative to first iteration
* less than 20 iterations
* at least 2 iterations */
for (int i = 1; i < 20 && (i < 2 || dEpsilon_i <= dEpsilon_0 / 1000); ++i) {
if (Thread.currentThread().isInterrupted())
return;
/* Some data shuffling for the lens correction interface */
final List<PointMatchCollectionAndAffine> matches = new ArrayList<PointMatchCollectionAndAffine>();
for (final AbstractAffineTile2D<?>[] tilePair : tilePairs) {
final AffineTransform a = tilePair[0].createAffine();
a.preConcatenate(tilePair[1].getModel().createInverseAffine());
final Collection<PointMatch> commonMatches = new ArrayList<PointMatch>();
tilePair[0].commonPointMatches(tilePair[1], commonMatches);
final Collection<PointMatch> originalCommonMatches = new ArrayList<PointMatch>();
for (final PointMatch pm : commonMatches) originalCommonMatches.add(new PointMatch(originalPoints.get(pm.getP1()), originalPoints.get(pm.getP2())));
matches.add(new PointMatchCollectionAndAffine(a, originalCommonMatches));
}
if (worker != null)
worker.setTaskName("Estimating lens distortion correction");
lensModel = Distortion_Correction.createInverseDistortionModel(matches, p.dimension, p.lambda, (int) fixedTile.getWidth(), (int) fixedTile.getHeight());
/* update local points */
for (final AbstractAffineTile2D<?> t : interestingTiles) for (final PointMatch pm : t.getMatches()) {
final Point currentPoint = pm.getP1();
final Point originalPoint = originalPoints.get(currentPoint);
final double[] l = currentPoint.getL();
final double[] lo = originalPoint.getL();
l[0] = lo[0];
l[1] = lo[1];
lensModel.applyInPlace(l);
}
/* re-optimize */
Align.optimizeTileConfiguration(ap, interestingTiles, fixedTiles);
/* measure the current error */
e = 0;
n = 0;
for (final AbstractAffineTile2D<?> t : interestingTiles) for (final PointMatch pm : t.getMatches()) {
e += pm.getDistance();
++n;
}
e /= n;
dEpsilon_i = e - epsilon_i;
epsilon_i = e;
if (i == 1)
dEpsilon_0 = dEpsilon_i;
Utils.log(i + ": epsilon = " + e);
Utils.log(i + ": delta epsilon = " + dEpsilon_i);
}
if (lensModel != null) {
if (p.visualize) {
if (Thread.currentThread().isInterrupted())
return;
if (worker != null)
worker.setTaskName("Visualizing lens distortion correction");
lensModel.visualizeSmall(p.lambda);
}
if (worker != null)
worker.setTaskName("Applying lens distortion correction");
appendCoordinateTransform(allPatches, lensModel, Runtime.getRuntime().availableProcessors());
Utils.log("Done.");
} else
Utils.log("No lens model found.");
}
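The convergence bookkeeping in run() reduces to: after each re-optimization, average PointMatch.getDistance() over all matches, track the change per iteration, and stop after at most 20 rounds or once the improvement drops below 1/1000 of the first iteration's improvement. Below is a small standalone sketch of that loop; the refinement step is a stand-in that merely nudges world coordinates (the real code estimates a lens model and re-optimizes the tiles), and all names and values are illustrative.
import java.util.ArrayList;
import java.util.Collection;
import mpicbg.models.Point;
import mpicbg.models.PointMatch;

public class EpsilonLoopExample {
    // Mean world-space residual over a set of matches (the epsilon measured above).
    static double meanDistance(final Collection<PointMatch> matches) {
        double e = 0;
        for (final PointMatch pm : matches) e += pm.getDistance();
        return e / matches.size();
    }

    public static void main(final String[] args) {
        // Hypothetical matches whose world coordinates start off their targets.
        final Collection<PointMatch> matches = new ArrayList<PointMatch>();
        matches.add(new PointMatch(new Point(new double[] { 0, 0 }), new Point(new double[] { 1, 0 })));
        matches.add(new PointMatch(new Point(new double[] { 10, 0 }), new Point(new double[] { 10, 2 })));
        double epsilon = meanDistance(matches);
        double dEpsilon = 0, dEpsilon0 = 0;
        System.out.println("0: epsilon = " + epsilon);
        // Same ad hoc stopping rule as in run(): at most 20 rounds, at least 2,
        // stop once the improvement falls below 1/1000 of the first improvement.
        for (int i = 1; i < 20 && (i < 2 || dEpsilon <= dEpsilon0 / 1000); ++i) {
            // Stand-in refinement: nudge each P1 world coordinate halfway toward P2.
            for (final PointMatch pm : matches) {
                final double[] w1 = pm.getP1().getW();
                final double[] w2 = pm.getP2().getW();
                for (int d = 0; d < w1.length; ++d) w1[d] += 0.5 * (w2[d] - w1[d]);
            }
            final double e = meanDistance(matches);
            dEpsilon = e - epsilon;
            epsilon = e;
            if (i == 1) dEpsilon0 = dEpsilon;
            System.out.println(i + ": epsilon = " + epsilon + ", delta epsilon = " + dEpsilon);
        }
    }
}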
use of mpicbg.models.PointMatch in project TrakEM2 by trakem2.
In the class Compare, the method transferVectorStrings:
/**
* Transform all points of all VectorString3D in vs using a Moving Least Squares Transform defined by the pairing of points in source to those in target.
* In short, bring source into target.
*/
public static List<VectorString3D> transferVectorStrings(final List<VectorString3D> vs, final List<Tuple3d> source, final List<Tuple3d> target, final Class<AffineModel3D> model_class) throws Exception {
if (source.size() != target.size()) {
Utils.log2("Could not generate a MovingLeastSquaresTransform: different number of source and target points.");
return null;
}
if (source.size() < 1 || target.size() < 1) {
Utils.log2("Cannot transform with less than one point correspondence!");
return null;
}
// 1 - Create the MovingLeastSquaresTransform from the point matches
final ArrayList<PointMatch> pm = new ArrayList<PointMatch>();
for (final Iterator<Tuple3d> si = source.iterator(), ti = target.iterator(); si.hasNext(); ) {
final Tuple3d sp = si.next();
final Tuple3d tp = ti.next();
pm.add(new PointMatch(new mpicbg.models.Point(new double[] { sp.x, sp.y, sp.z }), new mpicbg.models.Point(new double[] { tp.x, tp.y, tp.z }), 1));
}
final MovingLeastSquaresTransform mls = new MovingLeastSquaresTransform();
mls.setModel(model_class);
mls.setMatches(pm);
final double[] point = new double[3];
// 1.1 - Test: transfer source points
/*
for (final Iterator<Tuple3d> si = source.iterator(), ti = target.iterator(); si.hasNext(); ) {
Tuple3d sp = si.next();
point[0] = (double) sp.x;
point[1] = (double) sp.y;
point[2] = (double) sp.z;
mls.applyInPlace(point);
Tuple3d tp = ti.next();
Utils.log2("== target: " + (double)tp.x + ", " + (double)tp.y + ", " + (double)tp.z +
"\n o source: " + (double)sp.x + ", " + (double)sp.y + ", " + (double)sp.z +
"\n source: " + point[0] + ", " + point[1] + ", " + point[2]);
}
*/
// 2 - Transfer each VectorString3D in vs with mls
final List<VectorString3D> vt = new ArrayList<VectorString3D>();
for (final VectorString3D vi : vs) {
// The points of the VectorString3D:
final double[] x = vi.getPoints(0);
final double[] y = vi.getPoints(1);
final double[] z = vi.getPoints(2);
// Empty arrays to fill with the points to transfer:
final double[] tx = new double[x.length];
final double[] ty = new double[x.length];
final double[] tz = new double[x.length];
// Transfer point by point:
for (int i = 0; i < x.length; i++) {
point[0] = x[i];
point[1] = y[i];
point[2] = z[i];
mls.applyInPlace(point);
tx[i] = point[0];
ty[i] = point[1];
tz[i] = point[2];
}
try {
vt.add(new VectorString3D(tx, ty, tz, vi.isClosed()));
} catch (final Exception e) {
// Skip any VectorString3D that cannot be rebuilt from the transferred points.
}
}
return vt;
}
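Stripped of the VectorString3D plumbing, the transfer amounts to: build PointMatches from 3D source/target tuples, configure a MovingLeastSquaresTransform with AffineModel3D as its local model, and warp points with applyInPlace. A minimal sketch with invented coordinates:
import java.util.ArrayList;
import java.util.List;
import mpicbg.models.AffineModel3D;
import mpicbg.models.MovingLeastSquaresTransform;
import mpicbg.models.Point;
import mpicbg.models.PointMatch;

public class Mls3DExample {
    public static void main(final String[] args) throws Exception {
        // Hypothetical source/target landmark pairs in 3D.
        final double[][] src = { { 0, 0, 0 }, { 100, 0, 0 }, { 0, 100, 0 }, { 0, 0, 100 }, { 50, 50, 50 } };
        final double[][] tgt = { { 3, 1, 0 }, { 104, 2, 1 }, { 2, 98, -1 }, { 1, 0, 103 }, { 55, 52, 49 } };
        final List<PointMatch> matches = new ArrayList<PointMatch>();
        for (int i = 0; i < src.length; ++i) {
            matches.add(new PointMatch(new Point(src[i]), new Point(tgt[i]), 1));
        }
        final MovingLeastSquaresTransform mls = new MovingLeastSquaresTransform();
        mls.setModel(AffineModel3D.class); // local model per control point, as in transferVectorStrings
        mls.setMatches(matches);
        // Warp one point from source space into target space.
        final double[] p = { 25, 25, 25 };
        mls.applyInPlace(p);
        System.out.println(p[0] + ", " + p[1] + ", " + p[2]);
    }
}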
use of mpicbg.models.PointMatch in project TrakEM2 by trakem2.
In the class ManualAlignMode, the method align:
private AffineTransform align(final SortedMap<Layer, Landmarks> sm, final AbstractAffineModel2D<?> model) {
Layer layer1 = sm.firstKey();
Landmarks lm1 = sm.get(sm.firstKey());
final AffineTransform accum = new AffineTransform();
for (final Map.Entry<Layer, Landmarks> e : sm.entrySet()) {
final Layer layer2 = e.getKey();
if (layer1 == layer2)
continue;
final Landmarks lm2 = e.getValue();
// Create pointmatches
final ArrayList<PointMatch> matches = new ArrayList<PointMatch>();
for (int i = 0; i < lm1.points.size(); i++) {
matches.add(new PointMatch(lm2.points.get(i), lm1.points.get(i)));
}
final AbstractAffineModel2D<?> mod = model.copy();
try {
mod.fit(matches);
} catch (final Throwable t) {
IJError.print(t);
// continue happily
}
accum.preConcatenate(mod.createAffine());
layer2.apply(Patch.class, accum);
layer1 = layer2;
lm1 = lm2;
}
return accum;
}
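Each per-layer step above reduces to: pair corresponding mpicbg Points, fit a 2D affine-family model, and read the result back as a java.awt.geom.AffineTransform via createAffine(). A minimal sketch of that step, using a RigidModel2D and invented landmark coordinates (align() accepts whatever AbstractAffineModel2D it is given):
import java.awt.geom.AffineTransform;
import java.util.ArrayList;
import java.util.List;
import mpicbg.models.Point;
import mpicbg.models.PointMatch;
import mpicbg.models.RigidModel2D;

public class Fit2DExample {
    public static void main(final String[] args) throws Exception {
        // Hypothetical landmarks: layer2 coordinates paired with the matching layer1 coordinates.
        final double[][] layer2 = { { 0, 0 }, { 100, 0 }, { 0, 100 } };
        final double[][] layer1 = { { 10, 5 }, { 109, 7 }, { 8, 104 } };
        final List<PointMatch> matches = new ArrayList<PointMatch>();
        for (int i = 0; i < layer2.length; ++i) {
            matches.add(new PointMatch(new Point(layer2[i]), new Point(layer1[i])));
        }
        // Fit a model that maps the layer2 landmarks onto the layer1 landmarks.
        final RigidModel2D model = new RigidModel2D();
        model.fit(matches);
        // The fitted model as a standard AWT affine, ready to be concatenated and applied.
        final AffineTransform affine = model.createAffine();
        System.out.println(affine);
    }
}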