use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class ControlClickBehavior method doProcess.
@Override
public void doProcess(final MouseEvent e) {
    if (!e.isControlDown() || e.getID() != MouseEvent.MOUSE_PRESSED) {
        super.doProcess(e);
        return;
    }
    final Picker picker = universe.getPicker();
    final Content content = picker.getPickedContent(e.getX(), e.getY());
    if (content == null)
        return;
    final Point3d p = picker.getPickPointGeometry(content, e);
    if (p == null) {
        Utils.log("No point was found on content " + content);
        return;
    }
    // Check the LayerSet before it is dereferenced below
    if (ls == null) {
        Utils.log("No LayerSet was found for this behavior");
        return;
    }
    final Display display = Display.getFront(ls.getProject());
    if (display == null) {
        // If there's no Display, just return...
        return;
    }
    if (display.getLayerSet() != ls) {
        Utils.log("The LayerSet instances do not match");
        return;
    }
    final Calibration cal = ls.getCalibration();
    if (cal == null) {
        Utils.log("No calibration information was found for the LayerSet");
        return;
    }
    final double scaledZ = p.z / cal.pixelWidth;
    final Layer l = ls.getNearestLayer(scaledZ);
    if (l == null) {
        Utils.log("No layer was found nearest to " + scaledZ);
        return;
    }
    final Coordinate<?> coordinate = new Coordinate<Object>(p.x / cal.pixelWidth, p.y / cal.pixelHeight, l, null);
    display.center(coordinate);
}
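For reference, a minimal sketch of how a Coordinate built this way can be reused elsewhere: given a Display, a Layer and a world position in pixels, construct the Coordinate and center the view on it. The helper class and its name are hypothetical, added only for illustration; the Coordinate constructor and the Display.center call mirror the snippet above.
import ini.trakem2.display.Coordinate;
import ini.trakem2.display.Display;
import ini.trakem2.display.Layer;

public class CenterOnPoint {

    // Center the given Display on a world position (in pixels) within a Layer.
    // The null fourth argument (no specific Displayable to select) mirrors the usage above.
    public static void centerOn(final Display display, final double x, final double y, final Layer layer) {
        final Coordinate<Object> c = new Coordinate<Object>(x, y, layer, null);
        display.center(c);
    }
}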
use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class ExportARGB method makeFlatImageARGBFromMipMaps.
/**
* Returns nonsense or throws an Exception if mipmaps are not available.
* Limited to 2GB arrays for the final image.
*
* @param patches
* @param roi
* @param backgroundValue
* @param scale
* @return
*/
public static final Pair<ColorProcessor, ByteProcessor> makeFlatImageARGBFromMipMaps(final List<Patch> patches, final Rectangle roi, final double backgroundValue, final double scale) {
    final int width = (int) (roi.width * scale);
    final int height = (int) (roi.height * scale);
    // Process the three channels separately in order to use proper alpha composition
    final ColorProcessor target = new ColorProcessor(width, height);
    target.setInterpolationMethod(ImageProcessor.BILINEAR);
    final ByteProcessor targetMask = new ByteProcessor(width, height);
    targetMask.setInterpolationMethod(ImageProcessor.BILINEAR);
    final Loader loader = patches.get(0).getProject().getLoader();
    for (final Patch patch : patches) {
        // MipMap image, already including any coordinate transforms and the alpha mask (if any), by definition.
        final MipMapImage mipMap = loader.fetchImage(patch, scale);
        // DEBUG: is there an alpha channel at all?
        // new ij.ImagePlus("alpha of " + patch.getTitle(), new ByteProcessor( mipMap.image.getWidth(null), mipMap.image.getHeight(null), new ColorProcessor( mipMap.image ).getChannel( 4 ))).show();
        // Yes, there is, even though the mipmap images have the alpha pre-multiplied
        // Work-around strange bug that makes mipmap-loaded images paint with 7-bit depth instead of 8-bit depth
        final BufferedImage bi = new BufferedImage(mipMap.image.getWidth(null), mipMap.image.getHeight(null), BufferedImage.TYPE_INT_ARGB);
        final Graphics2D g2d = bi.createGraphics();
        g2d.drawImage(mipMap.image, 0, 0, null);
        g2d.dispose();
        final int[] pix = extractARGBIntArray(bi);
        bi.flush();
        // DEBUG: does the BufferedImage have the alpha channel?
        // {
        //     final byte[] aa = new byte[pix.length];
        //     for (int i=0; i<aa.length; ++i) aa[i] = (byte)((pix[i] & 0xff000000) >> 24);
        //     new ij.ImagePlus("alpha of BI of " + patch.getTitle(), new ByteProcessor(bi.getWidth(), bi.getHeight(), aa)).show();
        // }
        // YES: the alpha, containing the outside too. All fine.
        final ByteProcessor alpha;
        final ColorProcessor rgb = new ColorProcessor(bi.getWidth(), bi.getHeight(), pix);
        if (patch.hasAlphaChannel()) {
            // The mipMap has the alpha channel in it, even if the alpha is pre-multiplied as well onto the images.
            final byte[] a = new byte[pix.length];
            for (int i = 0; i < a.length; ++i) {
                a[i] = (byte) ((pix[i] & 0xff000000) >> 24);
            }
            alpha = new ByteProcessor(bi.getWidth(), bi.getHeight(), a);
        } else {
            alpha = new ByteProcessor(bi.getWidth(), bi.getHeight());
            Arrays.fill((byte[]) alpha.getPixels(), (byte) 255);
        }
        // The affine to apply to the MipMap.image
        final AffineTransform atc = new AffineTransform();
        atc.scale(scale, scale);
        atc.translate(-roi.x, -roi.y);
        final AffineTransform at = new AffineTransform();
        at.preConcatenate(atc);
        at.concatenate(patch.getAffineTransform());
        at.scale(mipMap.scaleX, mipMap.scaleY);
        final AffineModel2D aff = new AffineModel2D();
        aff.set(at);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(aff, patch.getMeshResolution(), bi.getWidth(), bi.getHeight());
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(mesh);
        // no interpolation
        alpha.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
        rgb.setInterpolationMethod(ImageProcessor.BILINEAR);
        mapping.map(rgb, alpha, target, targetMask);
    }
    return new Pair<ColorProcessor, ByteProcessor>(target, targetMask);
}
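A brief usage sketch of the exporter above, assuming the caller already holds the list of Patch instances and the region of interest. The package locations of ExportARGB and Pair are not shown in the snippet and are omitted here, and the Pair field names 'a' (color image) and 'b' (mask) are assumptions for illustration.
import java.awt.Rectangle;
import java.util.List;

import ij.ImagePlus;
import ij.process.ByteProcessor;
import ij.process.ColorProcessor;
import ini.trakem2.display.Patch;
// plus the imports for ExportARGB and Pair from the TrakEM2 code base

public class FlattenExample {

    // Render all given patches intersecting 'roi' at 25% scale and show the
    // flattened RGB image together with the accumulated alpha mask.
    public static void flatten(final List<Patch> patches, final Rectangle roi) {
        final Pair<ColorProcessor, ByteProcessor> result =
                ExportARGB.makeFlatImageARGBFromMipMaps(patches, roi, 0, 0.25);
        new ImagePlus("flat RGB", result.a).show();
        new ImagePlus("alpha mask", result.b).show();
    }
}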
use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class AlignTask method transformVectorData.
public static final void transformVectorData(final ReferenceData rd, /* The transformations of patches before alignment. */
        final Collection<Displayable> vdata, /* The VectorData instances to transform along with images. */
        final LayerSet target_layerset) /* The LayerSet in which the vdata and the transformed images exist. */
{
    final ExecutorService exec = Utils.newFixedThreadPool("AlignTask-transformVectorData");
    try {
        final Collection<Future<?>> fus = new ArrayList<Future<?>>();
        final HashMap<Long, Layer> lidm = new HashMap<Long, Layer>();
        for (final Long lid : rd.src_layer_lids_used) {
            final Layer la = target_layerset.getLayer(lid.longValue());
            if (null == la) {
                Utils.log("ERROR layer with id " + lid + " NOT FOUND in target layerset!");
                continue;
            }
            lidm.put(lid, la);
        }
        for (final Map.Entry<Displayable, Map<Long, TreeMap<Integer, Long>>> ed : rd.underlying.entrySet()) {
            // The VectorData instance to transform
            final Displayable d = ed.getKey();
            // Process Displayables concurrently:
            fus.add(exec.submit(new Runnable() {

                @SuppressWarnings({ "rawtypes", "unchecked" })
                @Override
                public void run() {
                    for (final Map.Entry<Long, TreeMap<Integer, Long>> el : ed.getValue().entrySet()) {
                        // The entry has the id of the layer and the stack-index-ordered list of Patch that intersect VectorData d in that Layer
                        final Layer layer = lidm.get(el.getKey());
                        if (null == layer) {
                            Utils.log("ERROR layer with id " + el.getKey() + " NOT FOUND in target layerset!");
                            continue;
                        }
                        // Utils.log("Editing Displayable " + d + " at layer " + layer);
                        // list of Patch ids affecting VectorData/Displayable d
                        final ArrayList<Long> pids = new ArrayList<Long>(el.getValue().values());
                        // so now Patch ids are sorted from top to bottom
                        Collections.reverse(pids);
                        // The area already processed in the layer
                        final Area used_area = new Area();
                        // The map of areas vs transforms for each area to apply to the VectorData, to its data within the layer only
                        final VectorDataTransform vdt = new VectorDataTransform(layer);
                        // The list of transforms to apply to each VectorData
                        for (final long pid : pids) {
                            // Find the Patch with id 'pid' in Layer 'layer' of the target LayerSet:
                            final DBObject ob = layer.findById(pid);
                            if (null == ob || !(ob instanceof Patch)) {
                                Utils.log("ERROR layer with id " + layer.getId() + " DOES NOT CONTAIN a Patch with id " + pid);
                                continue;
                            }
                            final Patch patch = (Patch) ob;
                            // no need to synch, read only from now on
                            final Patch.TransformProperties props = rd.tp.get(pid);
                            if (null == props) {
                                Utils.log("ERROR: could not find any Patch.TransformProperties for patch " + patch);
                                continue;
                            }
                            final Area a = new Area(props.area);
                            a.subtract(used_area);
                            if (M.isEmpty(a)) {
                                // skipping fully occluded Patch
                                continue;
                            }
                            // Accumulate:
                            used_area.add(props.area);
                            // For the remaining area within this Layer, define a transform
                            // Generate a CoordinateTransformList that includes:
                            // 1 - an inverted transform from Patch coords to world coords
                            // 2 - the CoordinateTransform of the Patch, if any
                            // 3 - the AffineTransform of the Patch
                            //
                            // The idea is to first send the data from world to pixel space of the Patch, using the old transforms,
                            // and then from pixel space of the Patch to world, using the new transforms.
                            final CoordinateTransformList tlist = new CoordinateTransformList();
                            // 1. Inverse of the old affine: from world into the old patch mipmap
                            final mpicbg.models.AffineModel2D aff_inv = new mpicbg.models.AffineModel2D();
                            try {
                                aff_inv.set(props.at.createInverse());
                            } catch (final NoninvertibleTransformException nite) {
                                Utils.log("ERROR: could not invert the affine transform for Patch " + patch);
                                IJError.print(nite);
                                continue;
                            }
                            tlist.add(aff_inv);
                            // 2. Inverse of the old coordinate transform of the Patch: from old mipmap to pixels in original image
                            if (null != props.ct) {
                                // The props.ct is a CoordinateTransform, not necessarily an InvertibleCoordinateTransform
                                // So the mesh is necessary to ensure the invertibility
                                final mpicbg.trakem2.transform.TransformMesh mesh = new mpicbg.trakem2.transform.TransformMesh(props.ct, props.meshResolution, props.o_width, props.o_height);
                                /* // Apparently not needed; the inverse affine in step 1 took care of it.
                                 * // (the affine of step 1 includes the mesh translation)
                                Rectangle box = mesh.getBoundingBox();
                                AffineModel2D aff = new AffineModel2D();
                                aff.set(new AffineTransform(1, 0, 0, 1, box.x, box.y));
                                tlist.add(aff);
                                */
                                tlist.add(new InverseICT(mesh));
                            }
                            // 3. New coordinate transform of the Patch: from original image to new mipmap
                            final mpicbg.trakem2.transform.CoordinateTransform ct = patch.getCoordinateTransform();
                            if (null != ct) {
                                tlist.add(ct);
                                final mpicbg.trakem2.transform.TransformMesh mesh = new mpicbg.trakem2.transform.TransformMesh(ct, patch.getMeshResolution(), patch.getOWidth(), patch.getOHeight());
                                // correct for mesh bounds -- Necessary because it comes from the other side, and the removal of the translation here is re-added by the affine in step 4!
                                final Rectangle box = mesh.getBoundingBox();
                                final AffineModel2D aff = new AffineModel2D();
                                aff.set(new AffineTransform(1, 0, 0, 1, -box.x, -box.y));
                                tlist.add(aff);
                            }
                            // 4. New affine transform of the Patch: from mipmap to world
                            final mpicbg.models.AffineModel2D new_aff = new mpicbg.models.AffineModel2D();
                            new_aff.set(patch.getAffineTransform());
                            tlist.add(new_aff);
                            /*
                            // TODO Consider caching the tlist for each Patch, or for a few thousand of them maximum.
                            // But it could blow up memory astronomically.
                            // The old part:
                            final mpicbg.models.InvertibleCoordinateTransformList old = new mpicbg.models.InvertibleCoordinateTransformList();
                            if (null != props.ct) {
                                mpicbg.trakem2.transform.TransformMesh mesh = new mpicbg.trakem2.transform.TransformMesh(props.ct, props.meshResolution, props.o_width, props.o_height);
                                old.add(mesh);
                            }
                            final mpicbg.models.AffineModel2D old_aff = new mpicbg.models.AffineModel2D();
                            old_aff.set(props.at);
                            old.add(old_aff);
                            tlist.add(new InverseICT(old));
                            // The new part:
                            final mpicbg.models.AffineModel2D new_aff = new mpicbg.models.AffineModel2D();
                            new_aff.set(patch.getAffineTransform());
                            tlist.add(new_aff);
                            final mpicbg.trakem2.transform.CoordinateTransform ct = patch.getCoordinateTransform();
                            if (null != ct) tlist.add(ct);
                            */
                            vdt.add(a, tlist);
                        }
                        // Apply the map of area vs tlist for the data section of d within the layer:
                        try {
                            ((VectorData) d).apply(vdt);
                        } catch (final Exception t) {
                            Utils.log("ERROR transformation failed for " + d + " at layer " + layer);
                            IJError.print(t);
                        }
                    }
                }
            }));
        }
        Utils.wait(fus);
        Display.repaint();
    } finally {
        exec.shutdown();
    }
}
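A minimal sketch of where this method sits in a typical workflow: the ReferenceData must be captured from the patches before they are transformed, and only afterwards is transformVectorData invoked so that the VectorData instances follow the images. The wrapper class below is hypothetical, and the exact imports for AlignTask and ReferenceData, as well as how the ReferenceData is obtained, are not shown in the snippet and are assumed here.
import java.util.Collection;

import ini.trakem2.display.Displayable;
import ini.trakem2.display.LayerSet;
// plus the imports for AlignTask and ReferenceData from the TrakEM2 code base

public class TransformVectorDataExample {

    // Phase two of an alignment: 'rd' records the patch transforms as they were
    // before alignment, 'vdata' holds the Displayables implementing VectorData,
    // and 'target' is the LayerSet that now contains the transformed patches.
    public static void followImages(final ReferenceData rd,
            final Collection<Displayable> vdata,
            final LayerSet target) {
        AlignTask.transformVectorData(rd, vdata, target);
    }
}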