Use of mpicbg.trakem2.transform.CoordinateTransformList in project TrakEM2 by trakem2.
Example from class AlignLayersTask, method alignLayersNonLinearlyJob:
/**
 * Non-linearly registers each layer in the range [first, last] of the given LayerSet
 * to its predecessor: SIFT features are extracted from flat images of consecutive
 * layer pairs, matched, filtered with RANSAC, and the surviving correspondences seed
 * an elastic bUnwarpJ spline that is appended to every affected Patch as a
 * CoordinateTransform, followed by a mipmap regeneration.
 *
 * @param layerSet the LayerSet whose layers are to be aligned.
 * @param first index of the first layer of the range.
 * @param last index of the last layer of the range; may be smaller than {@code first},
 *             in which case the order is reversed internally.
 * @param propagateTransform currently unsupported for non-linear alignment; a warning
 *             is logged when {@code true} (see the commented-out block below for why).
 * @param fov field of view to restrict feature extraction to, or null to use each
 *             layer's minimal bounding box.
 * @param filter optional filter deciding which Patch instances participate; null means all.
 */
public static final void alignLayersNonLinearlyJob(final LayerSet layerSet, final int first, final int last, final boolean propagateTransform, final Rectangle fov, final Filter<Patch> filter) {
    // will reverse order if necessary
    final List<Layer> layerRange = layerSet.getLayers(first, last);
    final Align.Param p = Align.param.clone();
    // Remove all empty layers
    for (final Iterator<Layer> it = layerRange.iterator(); it.hasNext(); ) {
        if (!it.next().contains(Patch.class, true)) {
            it.remove();
        }
    }
    if (0 == layerRange.size()) {
        Utils.log("No layers in range show any images!");
        return;
    }
    /* do not work if there is only one layer selected */
    if (layerRange.size() < 2)
        return;
    // Collect every patch that will be transformed, honoring the filter.
    final List<Patch> all = new ArrayList<Patch>();
    for (final Layer la : layerRange) {
        for (final Patch patch : la.getAll(Patch.class)) {
            if (null != filter && !filter.accept(patch))
                continue;
            all.add(patch);
        }
    }
    AlignTask.transformPatchesAndVectorData(all, new Runnable() {
        @Override
        public void run() {
            final Loader loader = layerSet.getProject().getLoader();
            // Not concurrent safe! So two copies, one per layer and Thread:
            final SIFT ijSIFT1 = new SIFT(new FloatArray2DSIFT(p.sift));
            final SIFT ijSIFT2 = new SIFT(new FloatArray2DSIFT(p.sift));
            final Collection<Feature> features1 = new ArrayList<Feature>();
            final Collection<Feature> features2 = new ArrayList<Feature>();
            final List<PointMatch> candidates = new ArrayList<PointMatch>();
            final List<PointMatch> inliers = new ArrayList<PointMatch>();
            final int n_proc = Runtime.getRuntime().availableProcessors() > 1 ? 2 : 1;
            final ExecutorService exec = Utils.newFixedThreadPool(n_proc, "alignLayersNonLinearly");
            try {
                List<Patch> previousPatches = null;
                int s = 0; // progress counter
                for (int i = 1; i < layerRange.size(); ++i) {
                    if (Thread.currentThread().isInterrupted())
                        break;
                    final Layer layer1 = layerRange.get(i - 1);
                    final Layer layer2 = layerRange.get(i);
                    final long t0 = System.currentTimeMillis();
                    features1.clear();
                    features2.clear();
                    final Rectangle box1 = null == fov ? layer1.getMinimalBoundingBox(Patch.class, true) : fov;
                    final Rectangle box2 = null == fov ? layer2.getMinimalBoundingBox(Patch.class, true) : fov;
                    /* calculate the common scale factor for both flat images */
                    final double scale = Math.min(1.0f, (double) p.sift.maxOctaveSize / (double) Math.max(box1.width, Math.max(box1.height, Math.max(box2.width, box2.height))));
                    // Reuse the previous iteration's filtered list for layer1 when available.
                    final List<Patch> patches1;
                    if (null == previousPatches) {
                        patches1 = layer1.getAll(Patch.class);
                        if (null != filter) {
                            for (final Iterator<Patch> it = patches1.iterator(); it.hasNext(); ) {
                                if (!filter.accept(it.next()))
                                    it.remove();
                            }
                        }
                    } else {
                        patches1 = previousPatches;
                    }
                    final List<Patch> patches2 = layer2.getAll(Patch.class);
                    if (null != filter) {
                        for (final Iterator<Patch> it = patches2.iterator(); it.hasNext(); ) {
                            if (!filter.accept(it.next()))
                                it.remove();
                        }
                    }
                    // Render the two flat images and extract their features concurrently.
                    final Future<ImageProcessor> fu1 = exec.submit(new Callable<ImageProcessor>() {
                        @Override
                        public ImageProcessor call() {
                            final ImageProcessor ip1 = loader.getFlatImage(layer1, box1, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches1, true).getProcessor();
                            ijSIFT1.extractFeatures(ip1, features1);
                            Utils.log(features1.size() + " features extracted in layer \"" + layer1.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
                            return ip1;
                        }
                    });
                    final Future<ImageProcessor> fu2 = exec.submit(new Callable<ImageProcessor>() {
                        @Override
                        public ImageProcessor call() {
                            final ImageProcessor ip2 = loader.getFlatImage(layer2, box2, scale, 0xffffffff, ImagePlus.GRAY8, Patch.class, patches2, true).getProcessor();
                            ijSIFT2.extractFeatures(ip2, features2);
                            Utils.log(features2.size() + " features extracted in layer \"" + layer2.getTitle() + "\" (took " + (System.currentTimeMillis() - t0) + " ms).");
                            return ip2;
                        }
                    });
                    final ImageProcessor ip1, ip2;
                    try {
                        ip1 = fu1.get();
                        ip2 = fu2.get();
                    } catch (final Exception e) {
                        IJError.print(e);
                        return; // FIX: executor is now shut down in the finally clause below
                    }
                    if (features1.size() > 0 && features2.size() > 0) {
                        final long t1 = System.currentTimeMillis();
                        candidates.clear();
                        FeatureTransform.matchFeatures(features2, features1, candidates, p.rod);
                        final AbstractAffineModel2D<?> model;
                        switch(p.expectedModelIndex) {
                        case 0:
                            model = new TranslationModel2D();
                            break;
                        case 1:
                            model = new RigidModel2D();
                            break;
                        case 2:
                            model = new SimilarityModel2D();
                            break;
                        case 3:
                            model = new AffineModel2D();
                            break;
                        default:
                            return;
                        }
                        boolean modelFound;
                        boolean again = false;
                        try {
                            // RANSAC, optionally rejecting near-identity solutions and retrying.
                            do {
                                again = false;
                                modelFound = model.filterRansac(candidates, inliers, 1000, p.maxEpsilon, p.minInlierRatio, p.minNumInliers, 3);
                                if (modelFound && p.rejectIdentity) {
                                    final ArrayList<Point> points = new ArrayList<Point>();
                                    PointMatch.sourcePoints(inliers, points);
                                    if (Transforms.isIdentity(model, points, p.identityTolerance)) {
                                        IJ.log("Identity transform for " + inliers.size() + " matches rejected.");
                                        candidates.removeAll(inliers);
                                        inliers.clear();
                                        again = true;
                                    }
                                }
                            } while (again);
                        } catch (final NotEnoughDataPointsException e) {
                            modelFound = false;
                        }
                        if (modelFound) {
                            IJ.log("Model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n correspondences " + inliers.size() + " of " + candidates.size() + "\n average residual error " + (model.getCost() / scale) + " px\n took " + (System.currentTimeMillis() - t1) + " ms");
                            final ImagePlus imp1 = new ImagePlus("target", ip1);
                            final ImagePlus imp2 = new ImagePlus("source", ip2);
                            final List<Point> sourcePoints = new ArrayList<Point>();
                            final List<Point> targetPoints = new ArrayList<Point>();
                            PointMatch.sourcePoints(inliers, sourcePoints);
                            PointMatch.targetPoints(inliers, targetPoints);
                            imp2.setRoi(Util.pointsToPointRoi(sourcePoints));
                            imp1.setRoi(Util.pointsToPointRoi(targetPoints));
                            // Masks: any non-zero pixel participates in the elastic registration.
                            final ImageProcessor mask1 = ip1.duplicate();
                            mask1.threshold(1);
                            final ImageProcessor mask2 = ip2.duplicate();
                            mask2.threshold(1);
                            final Transformation warp = bUnwarpJ_.computeTransformationBatch(imp2, imp1, mask2, mask1, elasticParam);
                            final CubicBSplineTransform transf = new CubicBSplineTransform();
                            transf.set(warp.getIntervals(), warp.getDirectDeformationCoefficientsX(), warp.getDirectDeformationCoefficientsY(), imp2.getWidth(), imp2.getHeight());
                            final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
                            // Transform desired patches only
                            for (final Patch patch : patches2) {
                                try {
                                    final Rectangle pbox = patch.getCoordinateTransformBoundingBox();
                                    final AffineTransform at = patch.getAffineTransform();
                                    final AffineTransform pat = new AffineTransform();
                                    pat.scale(scale, scale);
                                    pat.translate(-box2.x, -box2.y);
                                    pat.concatenate(at);
                                    pat.translate(-pbox.x, -pbox.y);
                                    final mpicbg.trakem2.transform.AffineModel2D toWorld = new mpicbg.trakem2.transform.AffineModel2D();
                                    toWorld.set(pat);
                                    final CoordinateTransformList<CoordinateTransform> ctl = new CoordinateTransformList<CoordinateTransform>();
                                    // move the patch into the global space where bUnwarpJ calculated the transformation
                                    ctl.add(toWorld);
                                    // Apply non-linear transformation
                                    ctl.add(transf);
                                    // move it back
                                    ctl.add(toWorld.createInverse());
                                    patch.appendCoordinateTransform(ctl);
                                    fus.add(patch.updateMipMaps());
                                    // Compensate for offset between boxes
                                    final AffineTransform offset = new AffineTransform();
                                    offset.translate(box1.x - box2.x, box1.y - box2.y);
                                    offset.concatenate(at);
                                    patch.setAffineTransform(offset);
                                } catch (final Exception e) {
                                    e.printStackTrace();
                                }
                            }
                            // await regeneration of all mipmaps
                            Utils.wait(fus);
                            Display.repaint(layer2);
                        } else
                            // FIX: elapsed time was computed against the progress counter `s`
                            // (always ~0), printing epoch millis; `t1` is the pair's start time.
                            IJ.log("No model found for layer \"" + layer2.getTitle() + "\" and its predecessor:\n correspondence candidates " + candidates.size() + "\n took " + (System.currentTimeMillis() - t1) + " ms");
                    }
                    IJ.showProgress(++s, layerRange.size());
                    // for next iteration
                    previousPatches = patches2;
                }
            } finally {
                // FIX: always release the worker threads, also on early return or interrupt;
                // previously the shutdown was skipped when fu1/fu2.get() threw.
                exec.shutdown();
            }
            if (propagateTransform)
                Utils.log("Propagation not implemented yet for non-linear layer alignment.");
            /* // CANNOT be done (at least not trivially:
             * //an appropriate "scale" cannot be computed, and the box2 is part of the spline computation.
            if ( propagateTransform && null != lastTransform )
            {
                for (final Layer la : l.getParent().getLayers(last > first ? last +1 : first -1, last > first ? l.getParent().size() -1 : 0)) {
                    // Transform visible patches only
                    final Rectangle box2 = la.getMinimalBoundingBox( Patch.class, true );
                    for ( final Displayable disp : la.getDisplayables( Patch.class, true ) )
                    {
                        // ...
                    }
                }
            }
            */
        }
    });
    // end of transformPatchesAndVectorData
}
Use of mpicbg.trakem2.transform.CoordinateTransformList in project TrakEM2 by trakem2.
Example from class Util, method applyLayerTransformToPatch:
/**
 * Appends the given world-space {@code ct} to the patch's coordinate transform chain.
 * The transform is sandwiched between the patch's local-to-world affine and its
 * inverse, so that a transform computed in world coordinates is applied correctly
 * in the patch's local (post-CT) coordinate space.
 *
 * @param patch the Patch to modify.
 * @param ct a CoordinateTransform expressed in world coordinates.
 * @throws Exception if the local-to-world affine cannot be inverted.
 */
public static final void applyLayerTransformToPatch(final Patch patch, final CoordinateTransform ct) throws Exception {
	// Affine that maps the patch's local (post-CT) coordinates into world space:
	// undo the CT bounding-box offset, then apply the patch's own affine.
	final Rectangle ctBox = patch.getCoordinateTransformBoundingBox();
	final AffineTransform localToWorld = new AffineTransform();
	localToWorld.translate(-ctBox.x, -ctBox.y);
	localToWorld.preConcatenate(patch.getAffineTransform());
	final AffineModel2D intoWorld = new AffineModel2D();
	intoWorld.set(localToWorld);
	// local -> world, apply ct, world -> local.
	final CoordinateTransformList<CoordinateTransform> chain = new CoordinateTransformList<CoordinateTransform>();
	chain.add(intoWorld);
	chain.add(ct);
	chain.add(intoWorld.createInverse());
	patch.appendCoordinateTransform(chain);
}
Use of mpicbg.trakem2.transform.CoordinateTransformList in project TrakEM2 by trakem2.
Example from class NonLinearTransformMode, method applyToPatch:
/**
 * Appends this mode's transform (from {@code createCT()}) to the given patch,
 * wrapped between the patch's local-to-world affine and its inverse so the
 * world-space transform acts correctly in the patch's local coordinate space,
 * then kicks off mipmap regeneration.
 *
 * @param patch the Patch to modify.
 * @return a Future tracking the mipmap update.
 * @throws Exception if the local-to-world affine cannot be inverted or createCT() fails.
 */
private final Future<Boolean> applyToPatch(final Patch patch) throws Exception {
	// Affine mapping the patch's local (post-CT) coordinates into world space.
	final Rectangle ctBox = patch.getCoordinateTransformBoundingBox();
	final AffineTransform localToWorld = new AffineTransform();
	localToWorld.translate(-ctBox.x, -ctBox.y);
	localToWorld.preConcatenate(patch.getAffineTransform());
	final AffineModel2D intoWorld = new AffineModel2D();
	intoWorld.set(localToWorld);
	// local -> world, apply the mode's transform, world -> local.
	final CoordinateTransform modeTransform = createCT();
	final CoordinateTransformList<CoordinateTransform> chain = new CoordinateTransformList<CoordinateTransform>();
	chain.add(intoWorld);
	chain.add(modeTransform);
	chain.add(intoWorld.createInverse());
	patch.appendCoordinateTransform(chain);
	return patch.updateMipMaps();
}
Use of mpicbg.trakem2.transform.CoordinateTransformList in project TrakEM2 by trakem2.
Example from class Patch, method appendCoordinateTransform:
/**
 * Appends a {@link CoordinateTransform} to this patch's transform chain.
 * With no existing transform, {@code ct} simply becomes the transform; with an
 * existing single transform, both are wrapped in a new list (existing first);
 * with an existing list, a copy of that list is extended with {@code ct}.
 */
@SuppressWarnings("unchecked")
public final void appendCoordinateTransform(final CoordinateTransform ct) {
	if (!hasCoordinateTransform()) {
		// Nothing set yet: ct becomes the one and only transform.
		setCoordinateTransform(ct);
		return;
	}
	final CoordinateTransform existing = getCoordinateTransform();
	final CoordinateTransformList<CoordinateTransform> chain;
	if (existing instanceof CoordinateTransformList<?>) {
		// Work on a copy so the currently-set list is never mutated in place.
		chain = (CoordinateTransformList<CoordinateTransform>) existing.copy();
	} else {
		chain = new CoordinateTransformList<CoordinateTransform>();
		chain.add(existing);
	}
	chain.add(ct);
	setCoordinateTransform(chain);
}
Use of mpicbg.trakem2.transform.CoordinateTransformList in project TrakEM2 by trakem2.
Example from class Patch, method makeFlatImage:
/**
 * Creates a flat, composited ImageProcessor of the specified type by painting the
 * given patches (first at the bottom) into the world-coordinate box {@code srcRect},
 * optionally downscaled.
 *
 * @param type Any of ImagePlus.GRAY8, GRAY16, GRAY32 or COLOR_RGB.
 * @param layer the Layer the patches belong to (not read directly here; kept for API symmetry).
 * @param srcRect the box in world coordinates to make an image out of.
 * @param scale may be up to 1.0; values below 1.0 shrink the output accordingly.
 * @param patches The list of patches to paint. The first gets painted first (at the bottom).
 * @param background The color with which to paint the outsides where no image paints into.
 * @param setMinAndMax defines whether the min and max of each Patch is set before pasting the Patch.
 * @return the composited image, or null if {@code type} is unsupported.
 *
 * For exporting while blending the display ranges (min,max) and respecting alpha masks, see {@link ExportUnsignedShort}.
 */
public static ImageProcessor makeFlatImage(final int type, final Layer layer, final Rectangle srcRect, final double scale, final Collection<Patch> patches, final Color background, final boolean setMinAndMax) {
final ImageProcessor ip;
final int W, H;
// Output dimensions: shrink only when scale < 1 (truncating to int).
if (scale < 1) {
W = (int) (srcRect.width * scale);
H = (int) (srcRect.height * scale);
} else {
W = srcRect.width;
H = srcRect.height;
}
switch(type) {
case ImagePlus.GRAY8:
ip = new ByteProcessor(W, H);
break;
case ImagePlus.GRAY16:
ip = new ShortProcessor(W, H);
break;
case ImagePlus.GRAY32:
ip = new FloatProcessor(W, H);
break;
case ImagePlus.COLOR_RGB:
ip = new ColorProcessor(W, H);
break;
default:
Utils.logAll("Cannot create an image of type " + type + ".\nSupported types: 8-bit, 16-bit, 32-bit and RGB.");
return null;
}
// Fill with background
// (black is the implicit default of a fresh processor, so skip the fill then)
if (null != background && Color.black != background) {
ip.setColor(background);
ip.fill();
}
// Uniform scaling model, prepended to each patch's affine below when scale < 1.
AffineModel2D sc = null;
if (scale < 1.0) {
sc = new AffineModel2D();
sc.set(scale, 0, 0, scale, 0, 0);
}
for (final Patch p : patches) {
// TODO patches seem to come in in inverse order---find out why
// A list to represent all the transformations that the Patch image has to go through to reach the scaled srcRect image
final CoordinateTransformList<CoordinateTransform> list = new CoordinateTransformList<CoordinateTransform>();
final AffineTransform at = new AffineTransform();
at.translate(-srcRect.x, -srcRect.y);
at.concatenate(p.getAffineTransform());
// 1. The coordinate transform of the Patch, if any
if (p.hasCoordinateTransform()) {
final CoordinateTransform ct = p.getCoordinateTransform();
list.add(ct);
// Remove the translation in the patch_affine that the ct added to it
final Rectangle box = Patch.getCoordinateTransformBoundingBox(p, ct);
at.translate(-box.x, -box.y);
}
// 2. The affine transform of the Patch
final AffineModel2D patch_affine = new AffineModel2D();
patch_affine.set(at);
list.add(patch_affine);
// 3. The desired scaling
if (null != sc)
patch_affine.preConcatenate(sc);
// Rasterize the full transform chain via a mesh over the patch's original size.
final CoordinateTransformMesh mesh = new CoordinateTransformMesh(list, p.meshResolution, p.getOWidth(), p.getOHeight());
final mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh> mapping = new mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh>(mesh);
// 4. Convert the patch to the required type
ImageProcessor pi = p.getImageProcessor();
if (setMinAndMax) {
// Duplicate so the cached processor's display range is not clobbered.
pi = pi.duplicate();
pi.setMinAndMax(p.min, p.max);
}
switch(type) {
case ImagePlus.GRAY8:
pi = pi.convertToByte(true);
break;
case ImagePlus.GRAY16:
pi = pi.convertToShort(true);
break;
case ImagePlus.GRAY32:
pi = pi.convertToFloat();
break;
default:
// ImagePlus.COLOR_RGB and COLOR_256
pi = pi.convertToRGB();
break;
}
/* TODO for taking into account independent min/max setting for each patch,
 * we will need a mapping with an `intensity transfer function' to be implemented.
 * --> EXISTS already as mpicbg/trakem2/transform/ExportUnsignedShort.java
 */
// Paint the transformed patch into the output, with interpolation.
mapping.mapInterpolated(pi, ip);
}
return ip;
}
Aggregations