Use of mpicbg.trakem2.transform.CoordinateTransformList in project TrakEM2 by trakem2: class AlignTask, method transformVectorData.
public static final void transformVectorData(final ReferenceData rd, /* The transformations of patches before alignment. */
        final Collection<Displayable> vdata, /* The VectorData instances to transform along with images. */
        final LayerSet target_layerset) /* The LayerSet in which the vdata and the transformed images exist. */
{
    final ExecutorService exec = Utils.newFixedThreadPool("AlignTask-transformVectorData");
    try {
        final Collection<Future<?>> fus = new ArrayList<Future<?>>();
        final HashMap<Long, Layer> lidm = new HashMap<Long, Layer>();
        for (final Long lid : rd.src_layer_lids_used) {
            final Layer la = target_layerset.getLayer(lid.longValue());
            if (null == la) {
                Utils.log("ERROR layer with id " + lid + " NOT FOUND in target layerset!");
                continue;
            }
            lidm.put(lid, la);
        }
        for (final Map.Entry<Displayable, Map<Long, TreeMap<Integer, Long>>> ed : rd.underlying.entrySet()) {
            // The VectorData instance to transform
            final Displayable d = ed.getKey();
            // Process Displayables concurrently:
            fus.add(exec.submit(new Runnable() {
                @SuppressWarnings({ "rawtypes", "unchecked" })
                @Override
                public void run() {
                    for (final Map.Entry<Long, TreeMap<Integer, Long>> el : ed.getValue().entrySet()) {
                        // The entry has the id of the layer and the stack-index-ordered list of Patch that intersect VectorData d in that Layer
                        final Layer layer = lidm.get(el.getKey());
                        if (null == layer) {
                            Utils.log("ERROR layer with id " + el.getKey() + " NOT FOUND in target layerset!");
                            continue;
                        }
                        // Utils.log("Editing Displayable " + d + " at layer " + layer);
                        // list of Patch ids affecting VectorData/Displayable d
                        final ArrayList<Long> pids = new ArrayList<Long>(el.getValue().values());
                        // so now Patch ids are sorted from top to bottom
                        Collections.reverse(pids);
                        // The area already processed in the layer
                        final Area used_area = new Area();
                        // The map of areas vs transforms for each area to apply to the VectorData, to its data within the layer only
                        final VectorDataTransform vdt = new VectorDataTransform(layer);
                        // The list of transforms to apply to each VectorData
                        for (final long pid : pids) {
                            // Find the Patch with id 'pid' in Layer 'la' of the target LayerSet:
                            final DBObject ob = layer.findById(pid);
                            if (null == ob || !(ob instanceof Patch)) {
                                Utils.log("ERROR layer with id " + layer.getId() + " DOES NOT CONTAIN a Patch with id " + pid);
                                continue;
                            }
                            final Patch patch = (Patch) ob;
                            // no need to synch, read only from now on
                            final Patch.TransformProperties props = rd.tp.get(pid);
                            if (null == props) {
                                Utils.log("ERROR: could not find any Patch.TransformProperties for patch " + patch);
                                continue;
                            }
                            final Area a = new Area(props.area);
                            a.subtract(used_area);
                            if (M.isEmpty(a)) {
                                // skipping fully occluded Patch
                                continue;
                            }
                            // Accumulate:
                            used_area.add(props.area);
                            // For the remaining area within this Layer, define a transform
                            // Generate a CoordinateTransformList that includes:
                            // 1 - an inverted transform from Patch coords to world coords
                            // 2 - the CoordinateTransform of the Patch, if any
                            // 3 - the AffineTransform of the Patch
                            //
                            // The idea is to first send the data from world to pixel space of the Patch, using the old transforms,
                            // and then from pixel space of the Patch to world, using the new transforms.
                            final CoordinateTransformList tlist = new CoordinateTransformList();
                            // 1. Inverse of the old affine: from world into the old patch mipmap
                            final mpicbg.models.AffineModel2D aff_inv = new mpicbg.models.AffineModel2D();
                            try {
                                aff_inv.set(props.at.createInverse());
                            } catch (final NoninvertibleTransformException nite) {
                                Utils.log("ERROR: could not invert the affine transform for Patch " + patch);
                                IJError.print(nite);
                                continue;
                            }
                            tlist.add(aff_inv);
                            // 2. Inverse of the old coordinate transform of the Patch: from old mipmap to pixels in original image
                            if (null != props.ct) {
                                // The props.ct is a CoordinateTransform, not necessarily an InvertibleCoordinateTransform
                                // So the mesh is necessary to ensure the invertibility
                                final mpicbg.trakem2.transform.TransformMesh mesh = new mpicbg.trakem2.transform.TransformMesh(props.ct, props.meshResolution, props.o_width, props.o_height);
                                /* // Apparently not needed; the inverse affine in step 1 took care of it.
                                 * // (the affine of step 1 includes the mesh translation)
                                Rectangle box = mesh.getBoundingBox();
                                AffineModel2D aff = new AffineModel2D();
                                aff.set(new AffineTransform(1, 0, 0, 1, box.x, box.y));
                                tlist.add(aff);
                                */
                                tlist.add(new InverseICT(mesh));
                            }
                            // 3. New coordinate transform of the Patch: from original image to new mipmap
                            final mpicbg.trakem2.transform.CoordinateTransform ct = patch.getCoordinateTransform();
                            if (null != ct) {
                                tlist.add(ct);
                                final mpicbg.trakem2.transform.TransformMesh mesh = new mpicbg.trakem2.transform.TransformMesh(ct, patch.getMeshResolution(), patch.getOWidth(), patch.getOHeight());
                                // correct for mesh bounds -- Necessary because it comes from the other side, and the removal of the translation here is re-added by the affine in step 4!
                                final Rectangle box = mesh.getBoundingBox();
                                final AffineModel2D aff = new AffineModel2D();
                                aff.set(new AffineTransform(1, 0, 0, 1, -box.x, -box.y));
                                tlist.add(aff);
                            }
                            // 4. New affine transform of the Patch: from mipmap to world
                            final mpicbg.models.AffineModel2D new_aff = new mpicbg.models.AffineModel2D();
                            new_aff.set(patch.getAffineTransform());
                            tlist.add(new_aff);
                            /*
                            // TODO Consider caching the tlist for each Patch, or for a few thousand of them maximum.
                            // But it could blow up memory astronomically.
                            // The old part:
                            final mpicbg.models.InvertibleCoordinateTransformList old = new mpicbg.models.InvertibleCoordinateTransformList();
                            if (null != props.ct) {
                                mpicbg.trakem2.transform.TransformMesh mesh = new mpicbg.trakem2.transform.TransformMesh(props.ct, props.meshResolution, props.o_width, props.o_height);
                                old.add(mesh);
                            }
                            final mpicbg.models.AffineModel2D old_aff = new mpicbg.models.AffineModel2D();
                            old_aff.set(props.at);
                            old.add(old_aff);
                            tlist.add(new InverseICT(old));
                            // The new part:
                            final mpicbg.models.AffineModel2D new_aff = new mpicbg.models.AffineModel2D();
                            new_aff.set(patch.getAffineTransform());
                            tlist.add(new_aff);
                            final mpicbg.trakem2.transform.CoordinateTransform ct = patch.getCoordinateTransform();
                            if (null != ct) tlist.add(ct);
                            */
                            vdt.add(a, tlist);
                        }
                        // Apply the map of area vs tlist for the data section of d within the layer:
                        try {
                            ((VectorData) d).apply(vdt);
                        } catch (final Exception t) {
                            Utils.log("ERROR transformation failed for " + d + " at layer " + layer);
                            IJError.print(t);
                        }
                    }
                }
            }));
        }
        Utils.wait(fus);
        Display.repaint();
    } finally {
        exec.shutdown();
    }
}
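The numbered comments (1 through 4) above describe the core trick: tlist first sends world coordinates back through the Patch's old transforms into its original pixel space, then forward through the new transforms into the new world coordinates. The following is a minimal, affine-only sketch of that composition using the plain mpicbg.models classes; it is not part of TrakEM2, the class and method names are made up for illustration, and it assumes the double[]-based CoordinateTransform API of current mpicbg.

import java.awt.geom.AffineTransform;
import java.awt.geom.NoninvertibleTransformException;

import mpicbg.models.AffineModel2D;
import mpicbg.models.CoordinateTransform;
import mpicbg.models.CoordinateTransformList;

public class TransformChainSketch {
    /** Map a world coordinate from the old alignment to the new one (affine-only sketch). */
    public static double[] remap(final AffineTransform oldAffine,
                                 final AffineTransform newAffine,
                                 final double[] worldPoint) throws NoninvertibleTransformException {
        final CoordinateTransformList<CoordinateTransform> tlist =
                new CoordinateTransformList<CoordinateTransform>();

        // world -> old patch pixel space: inverse of the old affine (step 1 in the source)
        final AffineModel2D oldInverse = new AffineModel2D();
        oldInverse.set(oldAffine.createInverse());
        tlist.add(oldInverse);

        // old patch pixel space -> new world: the new affine (step 4 in the source)
        final AffineModel2D newForward = new AffineModel2D();
        newForward.set(newAffine);
        tlist.add(newForward);

        // Apply the whole chain in order; the point is modified in place.
        final double[] p = worldPoint.clone();
        tlist.applyInPlace(p);
        return p;
    }
}

In the real method the chain also contains the mesh-based inverse of the old CoordinateTransform (step 2) and the new CoordinateTransform plus its bounding-box correction (step 3), which cannot be folded into affines.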
Use of mpicbg.trakem2.transform.CoordinateTransformList in project TrakEM2 by trakem2: class Render, method render.
/**
 * Renders a patch, mapping its intensities [min, max] → [0, 1].
 *
 * @param patch the patch to be rendered
 * @param coefficientsWidth number of coefficient columns covering the patch
 * @param coefficientsHeight number of coefficient rows covering the patch
 * @param targetImage target pixels, specifies the target box
 * @param targetWeight target weight pixels, depending on alpha
 * @param targetCoefficients target per-pixel 1-based coefficient indices, same box as targetImage
 * @param x target box offset in world coordinates
 * @param y target box offset in world coordinates
 * @param scale target scale
 */
public static final void render(final Patch patch, final int coefficientsWidth, final int coefficientsHeight,
        final FloatProcessor targetImage, final FloatProcessor targetWeight, final ColorProcessor targetCoefficients,
        final double x, final double y, final double scale) {
    /* assemble coordinate transformations and add bounding box offset */
    final CoordinateTransformList<CoordinateTransform> ctl = new CoordinateTransformList<CoordinateTransform>();
    ctl.add(patch.getFullCoordinateTransform());
    final AffineModel2D affineScale = new AffineModel2D();
    affineScale.set(scale, 0, 0, scale, -x * scale, -y * scale);
    ctl.add(affineScale);
    /* estimate average scale and generate downsampled source */
    final int width = patch.getOWidth(), height = patch.getOHeight();
    final double s = sampleAverageScale(ctl, width, height, width / patch.getMeshResolution());
    final int mipmapLevel = bestMipmapLevel(s);
    final ImageProcessor ipMipmap = Downsampler.downsampleImageProcessor(patch.getImageProcessor(), mipmapLevel);
    /* create a target */
    final ImageProcessor tp = ipMipmap.createProcessor(targetImage.getWidth(), targetImage.getHeight());
    /* prepare and downsample alpha mask if there is one */
    final ByteProcessor bpMaskMipmap;
    final ByteProcessor bpMaskTarget;
    final ByteProcessor bpMask = patch.getAlphaMask();
    if (bpMask == null) {
        bpMaskMipmap = null;
        bpMaskTarget = null;
    } else {
        bpMaskMipmap = bpMask == null ? null : Downsampler.downsampleByteProcessor(bpMask, mipmapLevel);
        bpMaskTarget = new ByteProcessor(tp.getWidth(), tp.getHeight());
    }
    /* create coefficients map */
    final ColorProcessor cp = new ColorProcessor(ipMipmap.getWidth(), ipMipmap.getHeight());
    final int w = cp.getWidth();
    final int h = cp.getHeight();
    for (int yi = 0; yi < h; ++yi) {
        final int yc = yi * coefficientsHeight / h;
        final int ic = yc * coefficientsWidth;
        final int iyi = yi * w;
        for (int xi = 0; xi < w; ++xi) cp.set(iyi + xi, ic + (xi * coefficientsWidth / w) + 1);
    }
    /* attach mipmap transformation */
    final CoordinateTransformList<CoordinateTransform> ctlMipmap = new CoordinateTransformList<CoordinateTransform>();
    ctlMipmap.add(createScaleLevelTransform(mipmapLevel));
    ctlMipmap.add(ctl);
    /* create mesh */
    final CoordinateTransformMesh mesh = new CoordinateTransformMesh(ctlMipmap, patch.getMeshResolution(), ipMipmap.getWidth(), ipMipmap.getHeight());
    /* render */
    final ImageProcessorWithMasks source = new ImageProcessorWithMasks(ipMipmap, bpMaskMipmap, null);
    final ImageProcessorWithMasks target = new ImageProcessorWithMasks(tp, bpMaskTarget, null);
    final TransformMeshMappingWithMasks<TransformMesh> mapping = new TransformMeshMappingWithMasks<TransformMesh>(mesh);
    mapping.mapInterpolated(source, target, 1);
    final TransformMeshMapping<TransformMesh> coefficientsMapMapping = new TransformMeshMapping<TransformMesh>(mesh);
    coefficientsMapMapping.map(cp, targetCoefficients, 1);
    /* set alpha channel */
    final byte[] alphaPixels;
    if (bpMaskTarget != null)
        alphaPixels = (byte[]) bpMaskTarget.getPixels();
    else
        alphaPixels = (byte[]) target.outside.getPixels();
    /* convert */
    final double min = patch.getMin();
    final double max = patch.getMax();
    final double a = 1.0 / (max - min);
    final double b = 1.0 / 255.0;
    for (int i = 0; i < alphaPixels.length; ++i) targetImage.setf(i, (float) ((tp.getf(i) - min) * a));
    for (int i = 0; i < alphaPixels.length; ++i) targetWeight.setf(i, (float) ((alphaPixels[i] & 0xff) * b));
}
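The last two loops implement the [min, max] → [0, 1] intensity mapping promised in the Javadoc and convert the 8-bit alpha mask into a [0, 1] weight. For example, with min = 100 and max = 3000, a raw value of 1550 maps to (1550 - 100) / (3000 - 100) = 0.5. A small stand-alone illustration of those two per-pixel formulas (hypothetical helper names, not part of Render):

public class IntensitySketch {
    /** Map an intensity from [min, max] to [0, 1], as done for targetImage above. */
    static float normalizeIntensity(final float value, final double min, final double max) {
        return (float) ((value - min) / (max - min));
    }

    /** Map an 8-bit alpha value to a [0, 1] weight, as done for targetWeight above. */
    static float alphaToWeight(final byte alpha) {
        return (float) ((alpha & 0xff) / 255.0);
    }
}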