Use of mpicbg.models.RigidModel2D in project TrakEM2 by trakem2.
The class MovingLeastSquaresTransform2, method init2.
public void init2(final String s) throws Exception {
// WARNING: assumes all whitespace is single
final int len = s.length();
int i = 0;
// Advance to the first white space
while (' ' != s.charAt(++i)) {
}
// Interpret model by the last letter of the name
final char modelLastChar = s.charAt(i - 1);
// Determine dimension 2 or 3
final int n = s.charAt(i + 1) - 48; // 48 == '0'
switch(n) {
case 3:
model = new AffineModel3D();
break;
case 2:
switch(modelLastChar) {
case 'n': // translation
model = new TranslationModel2D();
break;
case 'd': // rigid
model = new RigidModel2D();
break;
case 'y': // similarity
model = new SimilarityModel2D();
break;
case 'e': // affine
model = new AffineModel2D();
break;
default:
throw new Exception("Unknown model " + s.substring(0, i));
}
break;
default:
throw new NumberFormatException("Unsupported model dimensions: " + n + " for " + this.getClass().getCanonicalName());
}
// 'i' is at whitespace before n
// Move i to whitespace before alpha
i += 2;
// Mark last char before whitespace
int cut = i - 1;
// 'i' ends up at the whitespace after alpha
while (' ' != s.charAt(++i)) {
}
// Parse alpha
float[] f = new float[1];
parse(s, cut, i - 1, f, 0);
this.alpha = f[0];
// Count numbers by counting one whitespace before each number
int nVals = 0;
for (int k = i; k < len; ++k) {
if (' ' == s.charAt(k))
++nVals;
}
// The size of a unit of numbers
final int cell = n + n + 1;
// Detect inconsistency:
if (0 != nVals % cell) {
throw new NumberFormatException("Inappropriate parameters for " + this.getClass().getCanonicalName());
}
// Create arrays
this.p = new float[n][nVals / cell];
this.q = new float[n][this.p[0].length];
this.w = new float[this.p[0].length];
// Mark the whitespace char before the first number
cut = i - 1;
// Start parsing from the end
i = len - 1;
int count = 0;
if (2 == n) {
while (i > cut) {
// Determine which array from {p,q,w} and which position in the array, using n and count:
// cell = n + n + 1: n coordinates for p, n for q, plus one weight
switch(count % cell) {
case 0:
f = this.w;
break;
case 1:
f = this.q[1];
break;
case 2:
f = this.q[0];
break;
case 3:
f = this.p[1];
break;
case 4:
f = this.p[0];
break;
}
i = parse(s, cut, i, f, this.w.length - (count / cell) - 1);
++count;
}
} else {
while (i > cut) {
// Determine which array from {p,q,w} and which position in the array, using n and count:
// cell = n + n + 1: n coordinates for p, n for q, plus one weight
switch(count % (n + n + 1)) {
case 0:
f = this.w;
break;
case 1:
f = this.q[2];
break;
case 2:
f = this.q[1];
break;
case 3:
f = this.q[0];
break;
case 4:
f = this.p[2];
break;
case 5:
f = this.p[1];
break;
case 6:
f = this.p[0];
break;
}
i = parse(s, cut, i, f, this.w.length - (count / cell) - 1);
++count;
}
}
}
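The parser above implies the serialized layout: a model name dispatched on its last character ("translation", "rigid", "similarity", "affine"), the dimension, alpha, and then one group of n + n + 1 numbers per control point in the order p, q, weight, separated by single spaces. A minimal usage sketch follows; it assumes the class is the TrakEM2 variant in mpicbg.trakem2.transform, that applyInPlace takes float[] (matching the float arrays above), and the demo class name and coordinate values are illustrative only.
import mpicbg.trakem2.transform.MovingLeastSquaresTransform2;

public class MLSTInit2Example {
    public static void main(final String[] args) throws Exception {
        // "<model> <dim> <alpha>" followed by "p.x p.y q.x q.y w" per control point (2D);
        // single spaces only, as the parser's warning notes.
        final String data = "rigid 2 1.0"
                + " 0.0 0.0 10.0 12.0 1.0"     // p=(0,0)   -> q=(10,12),  weight 1
                + " 100.0 0.0 108.0 11.0 1.0"  // p=(100,0) -> q=(108,11), weight 1
                + " 0.0 100.0 9.0 113.0 1.0";  // p=(0,100) -> q=(9,113),  weight 1
        final MovingLeastSquaresTransform2 t = new MovingLeastSquaresTransform2();
        t.init2(data); // dimension 2 and a model name ending in 'd' select RigidModel2D
        final float[] xy = new float[] { 50f, 50f };
        t.applyInPlace(xy); // assumption: float[]-based CoordinateTransform API
        System.out.println(xy[0] + ", " + xy[1]);
    }
}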
Use of mpicbg.models.RigidModel2D in project TrakEM2 by trakem2.
The class Distortion_Correction, method extractSIFTPointsThreaded.
protected static void extractSIFTPointsThreaded(final int index, final List<Feature>[] siftFeatures, final List<PointMatch>[] inliers, final AbstractAffineModel2D<?>[] models) {
// save all matching candidates
final List<PointMatch>[] candidates = new List[siftFeatures.length - 1];
final Thread[] threads = MultiThreading.newThreads();
// start at second
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ++ithread) {
threads[ithread] = new Thread() {
@Override
public void run() {
setPriority(Thread.NORM_PRIORITY);
for (int j = ai.getAndIncrement(); j < candidates.length; j = ai.getAndIncrement()) {
final int i = (j < index ? j : j + 1);
candidates[j] = FloatArray2DSIFT.createMatches(siftFeatures[index], siftFeatures[i], 1.5f, null, Float.MAX_VALUE, 0.5f);
}
}
};
}
MultiThreading.startAndJoin(threads);
// get rid of the outliers and save the rigid transformations to match
// the inliers
final AtomicInteger ai2 = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ++ithread) {
threads[ithread] = new Thread() {
@Override
public void run() {
setPriority(Thread.NORM_PRIORITY);
for (int i = ai2.getAndIncrement(); i < candidates.length; i = ai2.getAndIncrement()) {
final List<PointMatch> tmpInliers = new ArrayList<PointMatch>();
// RigidModel2D m =
// RigidModel2D.estimateBestModel(candidates.get(i),
// tmpInliers, sp.min_epsilon, sp.max_epsilon,
// sp.min_inlier_ratio);
final AbstractAffineModel2D<?> m;
switch(sp.expectedModelIndex) {
case 0:
m = new TranslationModel2D();
break;
case 1:
m = new RigidModel2D();
break;
case 2:
m = new SimilarityModel2D();
break;
case 3:
m = new AffineModel2D();
break;
default:
return;
}
boolean modelFound = false;
try {
modelFound = m.filterRansac(candidates[i], tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10);
} catch (final NotEnoughDataPointsException e) {
modelFound = false;
}
if (modelFound)
IJ.log("Model found:\n " + candidates[i].size() + " candidates\n " + tmpInliers.size() + " inliers\n " + String.format("%.2f", m.getCost()) + "px average displacement");
else
IJ.log("No Model found.");
inliers[index * (sp.numberOfImages - 1) + i] = tmpInliers;
models[index * (sp.numberOfImages - 1) + i] = m;
// System.out.println("**** MODEL ADDED: " +
// (index*(sp.numberOfImages-1)+i));
}
}
};
}
MultiThreading.startAndJoin(threads);
}
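The flat indexing inliers[index * (sp.numberOfImages - 1) + i] implies that the caller allocates one slot per ordered image pair. A minimal caller sketch, assuming it lives inside Distortion_Correction (the method is protected static) and that sp.numberOfImages equals siftFeatures.length; the helper name matchAllAgainstAll is hypothetical.
// Hypothetical caller inside Distortion_Correction (sketch only).
static void matchAllAgainstAll(final List<Feature>[] siftFeatures) {
    final int numImages = siftFeatures.length;
    // one slot per ordered pair (index, i), i != index
    @SuppressWarnings("unchecked")
    final List<PointMatch>[] inliers = new List[numImages * (numImages - 1)];
    final AbstractAffineModel2D<?>[] models = new AbstractAffineModel2D<?>[numImages * (numImages - 1)];
    for (int index = 0; index < numImages; ++index)
        extractSIFTPointsThreaded(index, siftFeatures, inliers, models);
}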
Use of mpicbg.models.RigidModel2D in project TrakEM2 by trakem2.
The class Distortion_Correction, method extractSIFTPoints.
protected void extractSIFTPoints(final int index, final List<Feature>[] siftFeatures, final List<List<PointMatch>> inliers, final List<AbstractAffineModel2D<?>> models) {
// save all matching candidates
final List<List<PointMatch>> candidates = new ArrayList<List<PointMatch>>();
for (int j = 0; j < siftFeatures.length; j++) {
if (index == j)
continue;
candidates.add(FloatArray2DSIFT.createMatches(siftFeatures[index], siftFeatures[j], 1.5f, null, Float.MAX_VALUE, 0.5f));
}
// get rid of the outliers and save the transformations to match the inliers
for (int i = 0; i < candidates.size(); ++i) {
final List<PointMatch> tmpInliers = new ArrayList<PointMatch>();
final AbstractAffineModel2D<?> m;
switch(sp.expectedModelIndex) {
case 0:
m = new TranslationModel2D();
break;
case 1:
m = new RigidModel2D();
break;
case 2:
m = new SimilarityModel2D();
break;
case 3:
m = new AffineModel2D();
break;
default:
return;
}
try {
m.filterRansac(candidates.get(i), tmpInliers, 1000, sp.maxEpsilon, sp.minInlierRatio, 10);
} catch (final NotEnoughDataPointsException e) {
e.printStackTrace();
}
inliers.add(tmpInliers);
models.add(m);
}
}
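The index-to-model switch on sp.expectedModelIndex (0 = translation, 1 = rigid, 2 = similarity, 3 = affine) is duplicated in both extractSIFTPoints variants, and the ElasticLayerAlignment code below additionally maps 4 to HomographyModel2D. A small factory sketch that could centralize the 2D affine cases; the class name ExpectedModels is hypothetical and not part of TrakEM2.
import mpicbg.models.AbstractAffineModel2D;
import mpicbg.models.AffineModel2D;
import mpicbg.models.RigidModel2D;
import mpicbg.models.SimilarityModel2D;
import mpicbg.models.TranslationModel2D;

final class ExpectedModels {
    // Same convention as the switches above: 0 = translation, 1 = rigid, 2 = similarity, 3 = affine.
    static AbstractAffineModel2D<?> create(final int expectedModelIndex) {
        switch (expectedModelIndex) {
        case 0:
            return new TranslationModel2D();
        case 1:
            return new RigidModel2D();
        case 2:
            return new SimilarityModel2D();
        case 3:
            return new AffineModel2D();
        default:
            throw new IllegalArgumentException("Unknown expected model index: " + expectedModelIndex);
        }
    }
}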
Use of mpicbg.models.RigidModel2D in project TrakEM2 by trakem2.
The class ElasticLayerAlignment, method preAlignStack.
private void preAlignStack(final Param param, final Project project, final List<Layer> layerRange, final Rectangle box, final Filter<Patch> filter, final ArrayList<Triple<Integer, Integer, AbstractModel<?>>> pairs) {
final double scale = Math.min(1.0, Math.min((double) param.ppm.sift.maxOctaveSize / (double) box.width, (double) param.ppm.sift.maxOctaveSize / (double) box.height));
/* extract and save features, overwrite cached files if requested */
try {
AlignmentUtils.extractAndSaveLayerFeatures(layerRange, box, scale, filter, param.ppm.sift, param.ppm.clearCache, param.ppm.maxNumThreadsSift);
} catch (final Exception e) {
return;
}
/* match and filter feature correspondences */
int numFailures = 0;
final double pointMatchScale = param.layerScale / scale;
for (int i = 0; i < layerRange.size(); ++i) {
final ArrayList<Thread> threads = new ArrayList<Thread>(param.maxNumThreads);
final int sliceA = i;
final Layer layerA = layerRange.get(i);
final int range = Math.min(layerRange.size(), i + param.maxNumNeighbors + 1);
final String layerNameA = layerName(layerA);
J: for (int j = i + 1; j < range; ) {
final int numThreads = Math.min(param.maxNumThreads, range - j);
final ArrayList<Triple<Integer, Integer, AbstractModel<?>>> models = new ArrayList<Triple<Integer, Integer, AbstractModel<?>>>(numThreads);
for (int k = 0; k < numThreads; ++k) models.add(null);
for (int t = 0; t < numThreads && j < range; ++t, ++j) {
final int ti = t;
final int sliceB = j;
final Layer layerB = layerRange.get(j);
final String layerNameB = layerName(layerB);
final Thread thread = new Thread() {
@Override
public void run() {
IJ.showProgress(sliceA, layerRange.size() - 1);
Utils.log("matching " + layerNameB + " -> " + layerNameA + "...");
ArrayList<PointMatch> candidates = null;
if (!param.ppm.clearCache)
candidates = mpicbg.trakem2.align.Util.deserializePointMatches(project, param.ppm, "layer", layerB.getId(), layerA.getId());
if (null == candidates) {
final ArrayList<Feature> fs1 = mpicbg.trakem2.align.Util.deserializeFeatures(project, param.ppm.sift, "layer", layerA.getId());
final ArrayList<Feature> fs2 = mpicbg.trakem2.align.Util.deserializeFeatures(project, param.ppm.sift, "layer", layerB.getId());
candidates = new ArrayList<PointMatch>(FloatArray2DSIFT.createMatches(fs2, fs1, param.ppm.rod));
/* scale the candidates */
for (final PointMatch pm : candidates) {
final Point p1 = pm.getP1();
final Point p2 = pm.getP2();
final double[] l1 = p1.getL();
final double[] w1 = p1.getW();
final double[] l2 = p2.getL();
final double[] w2 = p2.getW();
l1[0] *= pointMatchScale;
l1[1] *= pointMatchScale;
w1[0] *= pointMatchScale;
w1[1] *= pointMatchScale;
l2[0] *= pointMatchScale;
l2[1] *= pointMatchScale;
w2[0] *= pointMatchScale;
w2[1] *= pointMatchScale;
}
if (!mpicbg.trakem2.align.Util.serializePointMatches(project, param.ppm, "layer", layerB.getId(), layerA.getId(), candidates))
Utils.log("Could not store point match candidates for layers " + layerNameB + " and " + layerNameA + ".");
}
AbstractModel<?> model;
switch(param.expectedModelIndex) {
case 0:
model = new TranslationModel2D();
break;
case 1:
model = new RigidModel2D();
break;
case 2:
model = new SimilarityModel2D();
break;
case 3:
model = new AffineModel2D();
break;
case 4:
model = new HomographyModel2D();
break;
default:
return;
}
final ArrayList<PointMatch> inliers = new ArrayList<PointMatch>();
boolean modelFound;
boolean again = false;
try {
do {
again = false;
modelFound = model.filterRansac(candidates, inliers, 1000, param.maxEpsilon * param.layerScale, param.minInlierRatio, param.minNumInliers, 3);
if (modelFound && param.rejectIdentity) {
final ArrayList<Point> points = new ArrayList<Point>();
PointMatch.sourcePoints(inliers, points);
if (Transforms.isIdentity(model, points, param.identityTolerance * param.layerScale)) {
IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
candidates.removeAll(inliers);
inliers.clear();
again = true;
}
}
} while (again);
} catch (final NotEnoughDataPointsException e) {
modelFound = false;
}
if (modelFound) {
Utils.log(layerNameB + " -> " + layerNameA + ": " + inliers.size() + " corresponding features with an average displacement of " + (PointMatch.meanDistance(inliers) / param.layerScale) + "px identified.");
Utils.log("Estimated transformation model: " + model);
models.set(ti, new Triple<Integer, Integer, AbstractModel<?>>(sliceA, sliceB, model));
} else {
Utils.log(layerNameB + " -> " + layerNameA + ": no correspondences found.");
return;
}
}
};
threads.add(thread);
thread.start();
}
try {
for (final Thread thread : threads) thread.join();
} catch (final InterruptedException e) {
Utils.log("Establishing feature correspondences interrupted.");
for (final Thread thread : threads) thread.interrupt();
try {
for (final Thread thread : threads) thread.join();
} catch (final InterruptedException f) {
}
return;
}
threads.clear();
/* collect successfully matched pairs and break the search on gaps */
for (int t = 0; t < models.size(); ++t) {
final Triple<Integer, Integer, AbstractModel<?>> pair = models.get(t);
if (pair == null) {
if (++numFailures > param.maxNumFailures) {
break J;
}
} else {
numFailures = 0;
pairs.add(pair);
}
}
}
}
}
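After preAlignStack returns, pairs holds one Triple per successfully matched layer pair: the two slice indices and the model estimated between them at param.layerScale (matching direction layerB -> layerA, as logged above). A consumption sketch, assuming mpicbg.trakem2.util.Triple exposes public fields a, b and c; the loop body is illustrative only.
for (final Triple<Integer, Integer, AbstractModel<?>> pair : pairs) {
    final int sliceA = pair.a; // index of the reference layer in layerRange
    final int sliceB = pair.b; // index of the layer matched against it
    final AbstractModel<?> model = pair.c; // estimated at param.layerScale
    Utils.log("pre-aligned " + sliceB + " -> " + sliceA + " using " + model);
}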