Use of mpicbg.models.SimilarityModel2D in project TrakEM2 by trakem2.
Class MovingLeastSquaresTransform2, method init2.
public void init2(final String s) throws Exception {
// WARNING: assumes all whitespace is single
final int len = s.length();
int i = 0;
// Advance to the first white space
while (' ' != s.charAt(++i)) {
}
// Interpret model by the last letter of the name
final char modelLastChar = s.charAt(i - 1);
// Determine dimension 2 or 3
final int n = s.charAt(i + 1) - 48; // '0' == 48
switch(n) {
case 3:
model = new AffineModel3D();
break;
case 2:
switch(modelLastChar) {
case 'n': // translation
model = new TranslationModel2D();
break;
case 'd': // rigid
model = new RigidModel2D();
break;
case 'y': // similarity
model = new SimilarityModel2D();
break;
case 'e': // affine
model = new AffineModel2D();
break;
default:
throw new Exception("Unknown model " + s.substring(0, i));
}
break;
default:
throw new NumberFormatException("Unsupported model dimensions: " + n + " for " + this.getClass().getCanonicalName());
}
// 'i' is at whitespace before n
// Move i to whitespace before alpha
i += 2;
// Mark last char before whitespace
int cut = i - 1;
// 'i' ends up at the whitespace after alpha
while (' ' != s.charAt(++i)) {
}
// Parse alpha
float[] f = new float[1];
parse(s, cut, i - 1, f, 0);
this.alpha = f[0];
// Count numbers by counting one whitespace before each number
int nVals = 0;
for (int k = i; k < len; ++k) {
if (' ' == s.charAt(k))
++nVals;
}
// The size of a unit of numbers
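// (n coordinates of p, n coordinates of q, and one weight w per control point)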
final int cell = n + n + 1;
// Detect inconsistency:
if (0 != nVals % cell) {
throw new NumberFormatException("Inappropriate parameters for " + this.getClass().getCanonicalName());
}
// Create arrays
this.p = new float[n][nVals / cell];
this.q = new float[n][this.p[0].length];
this.w = new float[this.p[0].length];
// Mark the whitespace char before the first number
cut = i - 1;
// Start parsing from the end
i = len - 1;
int count = 0;
if (2 == n) {
while (i > cut) {
// Determine which array from {p,q,w} and which position in the array, using n and count:
switch(count % cell) { // n for dimensions, +1 for the weight
case 0:
f = this.w;
break;
case 1:
f = this.q[1];
break;
case 2:
f = this.q[0];
break;
case 3:
f = this.p[1];
break;
case 4:
f = this.p[0];
break;
}
i = parse(s, cut, i, f, this.w.length - (count / cell) - 1);
++count;
}
} else {
while (i > cut) {
// Determine which array from {p,q,w} and which position in the array, using n and count:
switch(count % (n + n + 1)) { // n for dimensions, +1 for the weight
case 0:
f = this.w;
break;
case 1:
f = this.q[2];
break;
case 2:
f = this.q[1];
break;
case 3:
f = this.q[0];
break;
case 4:
f = this.p[2];
break;
case 5:
f = this.p[1];
break;
case 6:
f = this.p[0];
break;
}
i = parse(s, cut, i, f, this.w.length - (count / cell) - 1);
++count;
}
}
}
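Read together, the parser above implies a data string of the form "<model name> <dimension> <alpha>" followed by one "p.x p.y q.x q.y w" group per control point (in the 2D case), separated by single spaces and parsed back to front. Below is a minimal sketch of driving init2 with such a string; the lowercase model names and the package and no-argument constructor of MovingLeastSquaresTransform2 are assumptions inferred from the last-letter dispatch above and the usual mpicbg.trakem2 layout, not quoted from a serialized project.
import mpicbg.trakem2.transform.MovingLeastSquaresTransform2;

public class Init2Sketch {
    public static void main(final String[] args) throws Exception {
        // Hedged sketch: the layout below is inferred from the parser above,
        // not copied from a serialized TrakEM2 project.
        // Format: <model> <dim> <alpha>, then p.x p.y q.x q.y w per control point.
        final String data =
                "similarity 2 1.0 " +
                "0.0 0.0 5.0 5.0 1.0 " +     // point 1: p=(0,0) -> q=(5,5), weight 1
                "100.0 0.0 105.0 5.0 1.0";   // point 2: p=(100,0) -> q=(105,5), weight 1
        final MovingLeastSquaresTransform2 mls = new MovingLeastSquaresTransform2();
        // The trailing 'y' of "similarity" selects SimilarityModel2D, the '2' the 2D branch.
        mls.init2(data);
    }
}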
Use of mpicbg.models.SimilarityModel2D in project TrakEM2 by trakem2.
Class Distortion_Correction, method extractSIFTPointsThreaded.
protected static void extractSIFTPointsThreaded(final int index, final List<Feature>[] siftFeatures, final List<PointMatch>[] inliers, final AbstractAffineModel2D<?>[] models) {
// save all matching candidates
final List<PointMatch>[] candidates = new List[siftFeatures.length - 1];
final Thread[] threads = MultiThreading.newThreads();
// start at second
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ++ithread) {
threads[ithread] = new Thread() {
@Override
public void run() {
setPriority(Thread.NORM_PRIORITY);
for (int j = ai.getAndIncrement(); j < candidates.length; j = ai.getAndIncrement()) {
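// j runs over the other images only; map it to the actual feature index, skipping 'index' itself.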
final int i = (j < index ? j : j + 1);
candidates[j] = FloatArray2DSIFT.createMatches(siftFeatures[index], siftFeatures[i], 1.5f, null, Float.MAX_VALUE, 0.5f);
}
}
};
}
MultiThreading.startAndJoin(threads);
// get rid of the outliers and save the rigid transformations to match the inliers
final AtomicInteger ai2 = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ++ithread) {
threads[ithread] = new Thread() {
@Override
public void run() {
setPriority(Thread.NORM_PRIORITY);
for (int i = ai2.getAndIncrement(); i < candidates.length; i = ai2.getAndIncrement()) {
final List<PointMatch> tmpInliers = new ArrayList<PointMatch>();
// RigidModel2D m =
// RigidModel2D.estimateBestModel(candidates.get(i),
// tmpInliers, sp.min_epsilon, sp.max_epsilon,
// sp.min_inlier_ratio);
final AbstractAffineModel2D<?> m;
switch(sp.expectedModelIndex) {
case 0:
m = new TranslationModel2D();
break;
case 1:
m = new RigidModel2D();
break;
case 2:
m = new SimilarityModel2D();
break;
case 3:
m = new AffineModel2D();
break;
default:
return;
}
boolean modelFound = false;
try {
modelFound = m.filterRansac(candidates[i], tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10);
} catch (final NotEnoughDataPointsException e) {
modelFound = false;
}
if (modelFound)
IJ.log("Model found:\n " + candidates[i].size() + " candidates\n " + tmpInliers.size() + " inliers\n " + String.format("%.2f", m.getCost()) + "px average displacement");
else
IJ.log("No Model found.");
inliers[index * (sp.numberOfImages - 1) + i] = tmpInliers;
models[index * (sp.numberOfImages - 1) + i] = m;
// System.out.println("**** MODEL ADDED: " +
// (index*(sp.numberOfImages-1)+i));
}
}
};
}
MultiThreading.startAndJoin(threads);
}
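The results are written at position index * (sp.numberOfImages - 1) + i, so the inliers and models arrays must be sized for all ordered image pairs. A hypothetical caller sketch under that assumption, written as if inside Distortion_Correction (the method is protected static); nImages and the pre-extracted siftFeatures are placeholders, and sp.numberOfImages is assumed to equal siftFeatures.length.
import java.util.List;
import mpicbg.imagefeatures.Feature;
import mpicbg.models.AbstractAffineModel2D;
import mpicbg.models.PointMatch;

// Hypothetical sketch, not part of Distortion_Correction.
static void matchAllAgainstAll(final List<Feature>[] siftFeatures) {
    final int nImages = siftFeatures.length; // assumed equal to sp.numberOfImages
    // One slot per ordered pair (reference image, other image), matching the indexing above.
    @SuppressWarnings("unchecked")
    final List<PointMatch>[] inliers = new List[nImages * (nImages - 1)];
    final AbstractAffineModel2D<?>[] models = new AbstractAffineModel2D<?>[nImages * (nImages - 1)];
    for (int index = 0; index < nImages; ++index)
        extractSIFTPointsThreaded(index, siftFeatures, inliers, models);
}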
Use of mpicbg.models.SimilarityModel2D in project TrakEM2 by trakem2.
Class Distortion_Correction, method extractSIFTPoints.
protected void extractSIFTPoints(final int index, final List<Feature>[] siftFeatures, final List<List<PointMatch>> inliers, final List<AbstractAffineModel2D<?>> models) {
// save all matching candidates
final List<List<PointMatch>> candidates = new ArrayList<List<PointMatch>>();
for (int j = 0; j < siftFeatures.length; j++) {
if (index == j)
continue;
candidates.add(FloatArray2DSIFT.createMatches(siftFeatures[index], siftFeatures[j], 1.5f, null, Float.MAX_VALUE, 0.5f));
}
// get rid of the outliers and save the transformations to match the inliers
for (int i = 0; i < candidates.size(); ++i) {
final List<PointMatch> tmpInliers = new ArrayList<PointMatch>();
final AbstractAffineModel2D<?> m;
switch(sp.expectedModelIndex) {
case 0:
m = new TranslationModel2D();
break;
case 1:
m = new RigidModel2D();
break;
case 2:
m = new SimilarityModel2D();
break;
case 3:
m = new AffineModel2D();
break;
default:
return;
}
try {
m.filterRansac(candidates.get(i), tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10);
} catch (final NotEnoughDataPointsException e) {
e.printStackTrace();
}
inliers.add(tmpInliers);
models.add(m);
}
}
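The expectedModelIndex switch above is repeated verbatim in extractSIFTPointsThreaded and again in the Align methods below. A hypothetical helper that captures the shared mapping (0 = translation, 1 = rigid, 2 = similarity, 3 = affine); it is not part of TrakEM2, only a sketch of the convention.
import mpicbg.models.AbstractAffineModel2D;
import mpicbg.models.AffineModel2D;
import mpicbg.models.RigidModel2D;
import mpicbg.models.SimilarityModel2D;
import mpicbg.models.TranslationModel2D;

// Hypothetical factory for the expectedModelIndex convention used above.
// Returns null for an unknown index, mirroring the "default: return;" branches.
final class ExpectedModels {
    private ExpectedModels() {}
    static AbstractAffineModel2D<?> create(final int expectedModelIndex) {
        switch (expectedModelIndex) {
            case 0: return new TranslationModel2D();
            case 1: return new RigidModel2D();
            case 2: return new SimilarityModel2D();
            case 3: return new AffineModel2D();
            default: return null;
        }
    }
}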
Use of mpicbg.models.SimilarityModel2D in project TrakEM2 by trakem2.
Class Align, method alignLayersLinearly.
/**
* Align a range of layers by accumulating pairwise alignments of contiguous layers.
*
* @param layers The range of layers to align pairwise.
* @param numThreads The number of threads to use.
* @param filter The {@link Filter} to decide which {@link Patch} instances to use in each {@link Layer}. Can be null.
*/
public static final void alignLayersLinearly(final List<Layer> layers, final int numThreads, final Filter<Patch> filter) {
param.sift.maxOctaveSize = 1600;
if (!param.setup("Align layers linearly"))
return;
final Rectangle box = layers.get(0).getParent().getMinimalBoundingBox(Patch.class);
final double scale = Math.min(1.0, Math.min((double) param.sift.maxOctaveSize / box.width, (double) param.sift.maxOctaveSize / box.height));
final Param p = param.clone();
p.maxEpsilon *= scale;
final FloatArray2DSIFT sift = new FloatArray2DSIFT(p.sift);
final SIFT ijSIFT = new SIFT(sift);
Rectangle box1 = null;
Rectangle box2 = null;
final Collection<Feature> features1 = new ArrayList<Feature>();
final Collection<Feature> features2 = new ArrayList<Feature>();
final List<PointMatch> candidates = new ArrayList<PointMatch>();
final List<PointMatch> inliers = new ArrayList<PointMatch>();
final AffineTransform a = new AffineTransform();
int i = 0;
for (final Layer l : layers) {
long s = System.currentTimeMillis();
features1.clear();
features1.addAll(features2);
features2.clear();
final Rectangle box3 = l.getMinimalBoundingBox(Patch.class);
if (box3 == null)
continue;
box1 = box2;
box2 = box3;
final List<Patch> patches = l.getAll(Patch.class);
if (null != filter) {
for (final Iterator<Patch> it = patches.iterator(); it.hasNext(); ) {
if (!filter.accept(it.next()))
it.remove();
}
}
ijSIFT.extractFeatures(l.getProject().getLoader().getFlatImage(l, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches, true).getProcessor(), features2);
Utils.log(features2.size() + " features extracted in layer \"" + l.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
if (features1.size() > 0) {
s = System.currentTimeMillis();
candidates.clear();
FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
final AbstractAffineModel2D<?> model;
switch(p.expectedModelIndex) {
case 0:
model = new TranslationModel2D();
break;
case 1:
model = new RigidModel2D();
break;
case 2:
model = new SimilarityModel2D();
break;
case 3:
model = new AffineModel2D();
break;
default:
return;
}
boolean modelFound;
boolean again = false;
try {
do {
again = false;
modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
if (modelFound && p.rejectIdentity) {
final ArrayList<Point> points = new ArrayList<Point>();
PointMatch.sourcePoints(inliers, points);
if (Transforms.isIdentity(model, points, p.identityTolerance)) {
Utils.log("Identity transform for " + inliers.size() + " matches rejected.");
candidates.removeAll(inliers);
inliers.clear();
again = true;
}
}
} while (again);
} catch (final NotEnoughDataPointsException e) {
modelFound = false;
}
if (modelFound) {
Utils.log("Model found for layer \"" + l.getTitle() + "\" and its predecessor:\n correspondences " + inliers.size() + " of " + candidates.size() + "\n average residual error " + (model.getCost() / scale) + " px\n took " + (System.currentTimeMillis() - s) + " ms");
final AffineTransform b = new AffineTransform();
b.translate(box1.x, box1.y);
b.scale(1.0f / scale, 1.0f / scale);
b.concatenate(model.createAffine());
b.scale(scale, scale);
b.translate(-box2.x, -box2.y);
a.concatenate(b);
l.apply(Displayable.class, a);
Display.repaint(l);
} else {
Utils.log("No model found for layer \"" + l.getTitle() + "\" and its predecessor:\n correspondence candidates " + candidates.size() + "\n took " + (System.currentTimeMillis() - s) + " ms");
a.setToIdentity();
}
}
IJ.showProgress(++i, layers.size());
}
}
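A hypothetical call site for the method above; obtaining the layer range (for example from the current LayerSet) is outside this sketch, and the null filter simply accepts every Patch, as the javadoc notes.
import java.util.List;
import ini.trakem2.display.Layer;
import mpicbg.trakem2.align.Align;

// Hypothetical sketch: 'layers' is assumed to be the contiguous range of layers to align.
static void alignAllLayers(final List<Layer> layers) {
    final int numThreads = Runtime.getRuntime().availableProcessors();
    Align.alignLayersLinearly(layers, numThreads, null); // null filter: use every Patch
}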
Use of mpicbg.models.SimilarityModel2D in project TrakEM2 by trakem2.
Class Align, method alignTileCollections.
/**
* Align two collections of tiles.
*
* @param p the alignment parameters
* @param a the first collection of tiles
* @param b the second collection of tiles
*/
public static final void alignTileCollections(final Param p, final Collection<AbstractAffineTile2D<?>> a, final Collection<AbstractAffineTile2D<?>> b) {
final ArrayList<Patch> pa = new ArrayList<Patch>();
final ArrayList<Patch> pb = new ArrayList<Patch>();
for (final AbstractAffineTile2D<?> t : a) pa.add(t.getPatch());
for (final AbstractAffineTile2D<?> t : b) pb.add(t.getPatch());
final Layer la = pa.iterator().next().getLayer();
final Layer lb = pb.iterator().next().getLayer();
final Rectangle boxA = Displayable.getBoundingBox(pa, null);
final Rectangle boxB = Displayable.getBoundingBox(pb, null);
final double scale = Math.min(1.0, Math.min(Math.min((double) p.sift.maxOctaveSize / boxA.width, (double) p.sift.maxOctaveSize / boxA.height), Math.min((double) p.sift.maxOctaveSize / boxB.width, (double) p.sift.maxOctaveSize / boxB.height)));
final Param pp = p.clone();
pp.maxEpsilon *= scale;
final FloatArray2DSIFT sift = new FloatArray2DSIFT(pp.sift);
final SIFT ijSIFT = new SIFT(sift);
final Collection<Feature> featuresA = new ArrayList<Feature>();
final Collection<Feature> featuresB = new ArrayList<Feature>();
final List<PointMatch> candidates = new ArrayList<PointMatch>();
final List<PointMatch> inliers = new ArrayList<PointMatch>();
long s = System.currentTimeMillis();
ijSIFT.extractFeatures(la.getProject().getLoader().getFlatImage(la, boxA, scale, 0xffffffff, ImagePlus.GRAY8, null, pa, true, Color.GRAY).getProcessor(), featuresA);
Utils.log(featuresA.size() + " features extracted in graph A in layer \"" + la.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
s = System.currentTimeMillis();
ijSIFT.extractFeatures(lb.getProject().getLoader().getFlatImage(lb, boxB, scale, 0xffffffff, ImagePlus.GRAY8, null, pb, true, Color.GRAY).getProcessor(), featuresB);
Utils.log(featuresB.size() + " features extracted in graph B in layer \"" + lb.getTitle() + "\" (took " + (System.currentTimeMillis() - s) + " ms).");
if (featuresA.size() > 0 && featuresB.size() > 0) {
s = System.currentTimeMillis();
FeatureTransform.matchFeatures(featuresA, featuresB, candidates, pp.rod);
final AbstractAffineModel2D<?> model;
switch(p.expectedModelIndex) {
case 0:
model = new TranslationModel2D();
break;
case 1:
model = new RigidModel2D();
break;
case 2:
model = new SimilarityModel2D();
break;
case 3:
model = new AffineModel2D();
break;
default:
return;
}
boolean modelFound;
boolean again = false;
try {
do {
again = false;
modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
if (modelFound && p.rejectIdentity) {
final ArrayList<Point> points = new ArrayList<Point>();
PointMatch.sourcePoints(inliers, points);
if (Transforms.isIdentity(model, points, p.identityTolerance)) {
Utils.log("Identity transform for " + inliers.size() + " matches rejected.");
candidates.removeAll(inliers);
inliers.clear();
again = true;
}
}
} while (again);
} catch (final NotEnoughDataPointsException e) {
modelFound = false;
}
if (modelFound) {
Utils.log("Model found for graph A and B in layers \"" + la.getTitle() + "\" and \"" + lb.getTitle() + "\":\n correspondences " + inliers.size() + " of " + candidates.size() + "\n average residual error " + (model.getCost() / scale) + " px\n took " + (System.currentTimeMillis() - s) + " ms");
final AffineTransform at = new AffineTransform();
at.translate(boxA.x, boxA.y);
at.scale(1.0f / scale, 1.0f / scale);
at.concatenate(model.createAffine());
at.scale(scale, scale);
at.translate(-boxB.x, -boxB.y);
for (final Patch t : pa) t.preTransform(at, false);
Display.repaint(la);
} else
Utils.log("No model found for graph A and B in layers \"" + la.getTitle() + "\" and \"" + lb.getTitle() + "\":\n correspondence candidates " + candidates.size() + "\n took " + (System.currentTimeMillis() - s) + " ms");
}
}
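And a hypothetical call site for alignTileCollections, assuming two tile graphs have already been assembled and that Align.param is the class's shared, already-configured parameter instance (both assumptions, not shown in the excerpt above).
import java.util.Collection;
import mpicbg.trakem2.align.AbstractAffineTile2D;
import mpicbg.trakem2.align.Align;

// Hypothetical sketch: graphA and graphB are pre-built tile collections, e.g. two
// connected components of a montage that should be registered to each other.
static void alignGraphs(final Collection<AbstractAffineTile2D<?>> graphA,
                        final Collection<AbstractAffineTile2D<?>> graphB) {
    // Align.param is assumed to be the static, user-configured Param instance.
    Align.alignTileCollections(Align.param, graphA, graphB);
}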