Use of mpicbg.models.CoordinateTransformMesh in project TrakEM2 by trakem2.
The class ExportARGB, method makeFlatImageARGBFromOriginals.
/**
 * Limited to 2GB arrays for the requested image.
 *
 * @param patches the patches to flatten, painted in list order (the first one at the bottom).
 * @param roi the region of interest, in world coordinates.
 * @param backgroundValue the background value (not referenced in this method body).
 * @param scale the scale of the output image, up to 1.0.
 * @return a Pair of the flattened ColorProcessor and the ByteProcessor holding the accumulated alpha mask.
 */
public static final Pair<ColorProcessor, ByteProcessor> makeFlatImageARGBFromOriginals(final List<Patch> patches, final Rectangle roi, final double backgroundValue, final double scale) {
    final ColorProcessor target = new ColorProcessor((int) (roi.width * scale), (int) (roi.height * scale));
    target.setInterpolationMethod(ImageProcessor.BILINEAR);
    final ByteProcessor targetMask = new ByteProcessor(target.getWidth(), target.getHeight());
    targetMask.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
    for (final Patch patch : patches) {
        final Patch.PatchImage pai = patch.createTransformedImage();
        final ColorProcessor fp = (ColorProcessor) pai.target.convertToRGB();
        final ByteProcessor alpha;
        System.out.println("IMAGE:" + patch.getTitle());
        System.out.println("mask: " + pai.mask);
        System.out.println("outside: " + pai.outside);
        if (null == pai.mask) {
            if (null == pai.outside) {
                alpha = new ByteProcessor(fp.getWidth(), fp.getHeight());
                // fully opaque
                Arrays.fill((byte[]) alpha.getPixels(), (byte) 255);
            } else {
                alpha = pai.outside;
            }
        } else {
            alpha = pai.mask;
        }
        // The affine to apply
        final AffineTransform atc = new AffineTransform();
        atc.scale(scale, scale);
        atc.translate(-roi.x, -roi.y);
        final AffineTransform at = new AffineTransform();
        at.preConcatenate(atc);
        at.concatenate(patch.getAffineTransform());
        final AffineModel2D aff = new AffineModel2D();
        aff.set(at);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(aff, patch.getMeshResolution(), fp.getWidth(), fp.getHeight());
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(mesh);
        fp.setInterpolationMethod(ImageProcessor.BILINEAR);
        // no interpolation
        alpha.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
        mapping.map(fp, alpha, target, targetMask);
    }
    return new Pair<ColorProcessor, ByteProcessor>(target, targetMask);
}
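For context, a minimal caller sketch, not TrakEM2 code: it assumes the usual ij and TrakEM2 classes are imported, that the field names a and b expose the two elements of Pair, and that a patch list and a world-coordinate box are already at hand.

public static void showFlatSnapshot(final List<Patch> patches, final Rectangle box) {
    // backgroundValue is present in the signature but unused above; 0 is a placeholder.
    final Pair<ColorProcessor, ByteProcessor> flat =
            ExportARGB.makeFlatImageARGBFromOriginals(patches, box, 0, 0.5);
    // Assumed: Pair exposes its two elements as public fields a and b.
    new ImagePlus("flat RGB", flat.a).show();
    new ImagePlus("flat mask", flat.b).show();
}

The mask starts at 0 and accumulates the patches' alpha wherever they paint, which makes it handy for cropping or for compositing the flat image over other content.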
Use of mpicbg.models.CoordinateTransformMesh in project TrakEM2 by trakem2.
The class NonLinearTransformMode, method mousePressed.
@Override
public void mousePressed(final MouseEvent me, final int x_p, final int y_p, final double magnification) {
    /* find if clicked on a point */
    p_clicked = null;
    double min = Double.MAX_VALUE;
    final Point mouse = new Point(new double[] { x_p, y_p });
    final double a = 64.0 / magnification / magnification;
    for (final Point p : points) {
        final double sd = Point.squareDistance(p, mouse);
        if (sd < min && sd < a) {
            p_clicked = p;
            min = sd;
        }
    }
    if (me.isShiftDown()) {
        if (null == p_clicked) {
            /* add one */
            try {
                if (points.size() > 0) {
                    /*
                     * Create a pseudo-invertible (TransformMesh) for the screen.
                     */
                    final CoordinateTransform mlst = createCT();
                    final SimilarityModel2D toWorld = new SimilarityModel2D();
                    toWorld.set(1.0 / magnification, 0, srcRect.x, srcRect.y);
                    final SimilarityModel2D toScreen = toWorld.createInverse();
                    final mpicbg.models.CoordinateTransformList<mpicbg.models.CoordinateTransform> ctl = new mpicbg.models.CoordinateTransformList<mpicbg.models.CoordinateTransform>();
                    ctl.add(toWorld);
                    ctl.add(mlst);
                    ctl.add(toScreen);
                    final CoordinateTransformMesh ctm = new CoordinateTransformMesh(ctl, 32, (int) Math.ceil(srcRect.width * magnification), (int) Math.ceil(srcRect.height * magnification));
                    final double[] l = mouse.getL();
                    toScreen.applyInPlace(l);
                    ctm.applyInverseInPlace(l);
                    toWorld.applyInPlace(l);
                }
                points.add(mouse);
                p_clicked = mouse;
            } catch (final Exception e) {
                Utils.log("Could not add point");
                e.printStackTrace();
            }
        } else if (Utils.isControlDown(me)) {
            // remove it
            points.remove(p_clicked);
            p_clicked = null;
        }
    }
}
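The key trick above is the pseudo-inverse: the nonlinear transform returned by createCT is not directly invertible, so a CoordinateTransformMesh built over the screen area stands in for it and supplies applyInverseInPlace. A stripped-down sketch of that pattern (illustrative names, not TrakEM2 code):

// Wrap any forward-only CoordinateTransform in a CoordinateTransformMesh so that
// a screen point can be pushed back through it approximately.
static double[] pullBackThroughMesh(final mpicbg.models.CoordinateTransform forward,
        final double screenX, final double screenY,
        final int screenWidth, final int screenHeight)
        throws mpicbg.models.NoninvertibleModelException {
    // 32 mesh columns, as in the TrakEM2 code; more columns give a finer approximation.
    final mpicbg.models.CoordinateTransformMesh mesh =
            new mpicbg.models.CoordinateTransformMesh(forward, 32, screenWidth, screenHeight);
    final double[] p = new double[] { screenX, screenY };
    mesh.applyInverseInPlace(p);  // approximate inverse, valid inside the mesh bounds
    return p;
}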
Use of mpicbg.models.CoordinateTransformMesh in project TrakEM2 by trakem2.
The class Patch, method makeFlatImage.
/**
 * Creates an ImageProcessor of the specified type.
 * @param type Any of ImagePlus.GRAY8, GRAY16, GRAY32 or COLOR_RGB.
 * @param srcRect the box in world coordinates to make an image out of.
 * @param scale may be up to 1.0.
 * @param patches The list of patches to paint. The first gets painted first (at the bottom).
 * @param background The color with which to paint the outside areas where no image paints.
 * @param setMinAndMax defines whether the min and max of each Patch is set before pasting the Patch.
 * @return the flattened ImageProcessor, or null if the type is not supported.
 *
 * For exporting while blending the display ranges (min,max) and respecting alpha masks, see {@link ExportUnsignedShort}.
 */
public static ImageProcessor makeFlatImage(final int type, final Layer layer, final Rectangle srcRect, final double scale, final Collection<Patch> patches, final Color background, final boolean setMinAndMax) {
    final ImageProcessor ip;
    final int W, H;
    if (scale < 1) {
        W = (int) (srcRect.width * scale);
        H = (int) (srcRect.height * scale);
    } else {
        W = srcRect.width;
        H = srcRect.height;
    }
    switch (type) {
        case ImagePlus.GRAY8:
            ip = new ByteProcessor(W, H);
            break;
        case ImagePlus.GRAY16:
            ip = new ShortProcessor(W, H);
            break;
        case ImagePlus.GRAY32:
            ip = new FloatProcessor(W, H);
            break;
        case ImagePlus.COLOR_RGB:
            ip = new ColorProcessor(W, H);
            break;
        default:
            Utils.logAll("Cannot create an image of type " + type + ".\nSupported types: 8-bit, 16-bit, 32-bit and RGB.");
            return null;
    }
    // Fill with background
    if (null != background && Color.black != background) {
        ip.setColor(background);
        ip.fill();
    }
    AffineModel2D sc = null;
    if (scale < 1.0) {
        sc = new AffineModel2D();
        sc.set(scale, 0, 0, scale, 0, 0);
    }
    for (final Patch p : patches) {
        // TODO patches seem to come in inverse order---find out why
        // A list to represent all the transformations that the Patch image has to go through to reach the scaled srcRect image
        final CoordinateTransformList<CoordinateTransform> list = new CoordinateTransformList<CoordinateTransform>();
        final AffineTransform at = new AffineTransform();
        at.translate(-srcRect.x, -srcRect.y);
        at.concatenate(p.getAffineTransform());
        // 1. The coordinate transform of the Patch, if any
        if (p.hasCoordinateTransform()) {
            final CoordinateTransform ct = p.getCoordinateTransform();
            list.add(ct);
            // Remove the translation in the patch_affine that the ct added to it
            final Rectangle box = Patch.getCoordinateTransformBoundingBox(p, ct);
            at.translate(-box.x, -box.y);
        }
        // 2. The affine transform of the Patch
        final AffineModel2D patch_affine = new AffineModel2D();
        patch_affine.set(at);
        list.add(patch_affine);
        // 3. The desired scaling
        if (null != sc)
            patch_affine.preConcatenate(sc);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(list, p.meshResolution, p.getOWidth(), p.getOHeight());
        final mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh> mapping = new mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh>(mesh);
        // 4. Convert the patch to the required type
        ImageProcessor pi = p.getImageProcessor();
        if (setMinAndMax) {
            pi = pi.duplicate();
            pi.setMinAndMax(p.min, p.max);
        }
        switch (type) {
            case ImagePlus.GRAY8:
                pi = pi.convertToByte(true);
                break;
            case ImagePlus.GRAY16:
                pi = pi.convertToShort(true);
                break;
            case ImagePlus.GRAY32:
                pi = pi.convertToFloat();
                break;
            default:
                // ImagePlus.COLOR_RGB and COLOR_256
                pi = pi.convertToRGB();
                break;
        }
        /* TODO for taking into account independent min/max setting for each patch,
         * we will need a mapping with an `intensity transfer function' to be implemented.
         * --> EXISTS already as mpicbg/trakem2/transform/ExportUnsignedShort.java
         */
        mapping.mapInterpolated(pi, ip);
    }
    return ip;
}
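A hypothetical caller sketch (the method and variable names are illustrative, not part of TrakEM2): exporting a region of a layer as an 8-bit flat image at quarter scale, with each patch's display range applied.

public static ImagePlus exportGray8(final Layer layer, final Collection<Patch> patches, final Rectangle box) {
    // Passing Color.black skips the background fill, since the method only fills for non-black colors.
    final ImageProcessor flat = Patch.makeFlatImage(ImagePlus.GRAY8, layer, box, 0.25, patches, Color.black, true);
    return null == flat ? null : new ImagePlus("flat", flat);
}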
Use of mpicbg.models.CoordinateTransformMesh in project TrakEM2 by trakem2.
The class NonLinearTransformMode, method doPainterUpdate.
@Override
protected void doPainterUpdate(final Rectangle r, final double m) {
    try {
        final CoordinateTransform mlst = createCT();
        final SimilarityModel2D toWorld = new SimilarityModel2D();
        toWorld.set(1.0 / m, 0, r.x - ScreenPatchRange.pad / m, r.y - ScreenPatchRange.pad / m);
        final mpicbg.models.CoordinateTransformList<mpicbg.models.CoordinateTransform> ctl = new mpicbg.models.CoordinateTransformList<mpicbg.models.CoordinateTransform>();
        ctl.add(toWorld);
        ctl.add(mlst);
        ctl.add(toWorld.createInverse());
        final CoordinateTransformMesh ctm = new CoordinateTransformMesh(ctl, 32, r.width * m + 2 * ScreenPatchRange.pad, r.height * m + 2 * ScreenPatchRange.pad);
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(ctm);
        // keep a pointer to the current list
        final HashMap<Paintable, GroupingMode.ScreenPatchRange<?>> screenPatchRanges = this.screenPatchRanges;
        for (final GroupingMode.ScreenPatchRange spr : screenPatchRanges.values()) {
            if (screenPatchRanges != this.screenPatchRanges) {
                // TODO should it call itself: doPainterUpdate( r, m );
                break;
            }
            spr.update(mapping);
        }
    } catch (final NotEnoughDataPointsException e) {
    } catch (final NoninvertibleModelException e) {
    } catch (final IllDefinedDataPointsException e) {
    } catch (final Exception e) {
        e.printStackTrace();
    }
}
Use of mpicbg.models.CoordinateTransformMesh in project TrakEM2 by trakem2.
The class ExportARGB, method makeFlatImageARGBFromMipMaps.
/**
 * Returns nonsense or throws an Exception if mipmaps are not available.
 * Limited to 2GB arrays for the final image.
 *
 * @param patches the patches to flatten, painted in list order (the first one at the bottom).
 * @param roi the region of interest, in world coordinates.
 * @param backgroundValue the background value (not referenced in this method body).
 * @param scale the scale of the output image, up to 1.0.
 * @return a Pair of the flattened ColorProcessor and the ByteProcessor holding the accumulated alpha mask.
 */
public static final Pair<ColorProcessor, ByteProcessor> makeFlatImageARGBFromMipMaps(final List<Patch> patches, final Rectangle roi, final double backgroundValue, final double scale) {
    final int width = (int) (roi.width * scale);
    final int height = (int) (roi.height * scale);
    // Process the three channels separately in order to use proper alpha composition
    final ColorProcessor target = new ColorProcessor(width, height);
    target.setInterpolationMethod(ImageProcessor.BILINEAR);
    final ByteProcessor targetMask = new ByteProcessor(width, height);
    targetMask.setInterpolationMethod(ImageProcessor.BILINEAR);
    final Loader loader = patches.get(0).getProject().getLoader();
    for (final Patch patch : patches) {
        // MipMap image, already including any coordinate transforms and the alpha mask (if any), by definition.
        final MipMapImage mipMap = loader.fetchImage(patch, scale);
        /// DEBUG: is there an alpha channel at all?
        // new ij.ImagePlus("alpha of " + patch.getTitle(), new ByteProcessor( mipMap.image.getWidth(null), mipMap.image.getHeight(null), new ColorProcessor( mipMap.image ).getChannel( 4 ))).show();
        // Yes, there is, even though the mipmap images have the alpha pre-multiplied
        // Work-around strange bug that makes mipmap-loaded images paint with 7-bit depth instead of 8-bit depth
        final BufferedImage bi = new BufferedImage(mipMap.image.getWidth(null), mipMap.image.getHeight(null), BufferedImage.TYPE_INT_ARGB);
        final Graphics2D g2d = bi.createGraphics();
        g2d.drawImage(mipMap.image, 0, 0, null);
        g2d.dispose();
        final int[] pix = extractARGBIntArray(bi);
        bi.flush();
        // DEBUG: does the BufferedImage have the alpha channel?
        // {
        //     final byte[] aa = new byte[pix.length];
        //     for (int i=0; i<aa.length; ++i) aa[i] = (byte)((pix[i] & 0xff000000) >> 24);
        //     new ij.ImagePlus("alpha of BI of " + patch.getTitle(), new ByteProcessor(bi.getWidth(), bi.getHeight(), aa)).show();
        // }
        // YES: the alpha, containing the outside too. All fine.
        final ByteProcessor alpha;
        final ColorProcessor rgb = new ColorProcessor(bi.getWidth(), bi.getHeight(), pix);
        if (patch.hasAlphaChannel()) {
            // The mipMap has the alpha channel in it, even if the alpha is pre-multiplied as well onto the images.
            final byte[] a = new byte[pix.length];
            for (int i = 0; i < a.length; ++i) {
                a[i] = (byte) ((pix[i] & 0xff000000) >> 24);
            }
            alpha = new ByteProcessor(bi.getWidth(), bi.getHeight(), a);
        } else {
            alpha = new ByteProcessor(bi.getWidth(), bi.getHeight());
            Arrays.fill((byte[]) alpha.getPixels(), (byte) 255);
        }
        // The affine to apply to the MipMap.image
        final AffineTransform atc = new AffineTransform();
        atc.scale(scale, scale);
        atc.translate(-roi.x, -roi.y);
        final AffineTransform at = new AffineTransform();
        at.preConcatenate(atc);
        at.concatenate(patch.getAffineTransform());
        at.scale(mipMap.scaleX, mipMap.scaleY);
        final AffineModel2D aff = new AffineModel2D();
        aff.set(at);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(aff, patch.getMeshResolution(), bi.getWidth(), bi.getHeight());
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(mesh);
        // no interpolation
        alpha.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
        rgb.setInterpolationMethod(ImageProcessor.BILINEAR);
        mapping.map(rgb, alpha, target, targetMask);
    }
    return new Pair<ColorProcessor, ByteProcessor>(target, targetMask);
}
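Both export methods return the color image and the accumulated mask separately. If a single ARGB buffer is wanted, the mask can be folded back into the alpha channel; a small helper sketch (assumed, not part of ExportARGB):

// Illustrative helper: combine the flattened RGB pixels with the exported mask
// into one ARGB array, using the mask as the alpha channel.
static int[] toARGB(final ColorProcessor rgb, final ByteProcessor mask) {
    final int[] pix = (int[]) rgb.getPixels();
    final byte[] a = (byte[]) mask.getPixels();
    final int[] argb = new int[pix.length];
    for (int i = 0; i < argb.length; ++i) {
        // keep the RGB bits, replace whatever alpha the ColorProcessor carried with the mask value
        argb[i] = (pix[i] & 0x00ffffff) | ((a[i] & 0xff) << 24);
    }
    return argb;
}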