Use of mpicbg.trakem2.transform.TransformMeshMappingWithMasks in project TrakEM2 by trakem2.
In class ExportARGB, method makeFlatImageARGBFromOriginals:
/**
 * Limited to 2GB arrays for the requested image.
 *
 * @param patches the patches to render, in their stacking order
 * @param roi the region of interest, in world coordinates
 * @param backgroundValue the background value (not used in this implementation)
 * @param scale the scale at which to render the flat image
 * @return the flat ARGB image and its alpha mask
 */
public static final Pair<ColorProcessor, ByteProcessor> makeFlatImageARGBFromOriginals(final List<Patch> patches, final Rectangle roi, final double backgroundValue, final double scale) {
    final ColorProcessor target = new ColorProcessor((int) (roi.width * scale), (int) (roi.height * scale));
    target.setInterpolationMethod(ImageProcessor.BILINEAR);
    final ByteProcessor targetMask = new ByteProcessor(target.getWidth(), target.getHeight());
    targetMask.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
    for (final Patch patch : patches) {
        final Patch.PatchImage pai = patch.createTransformedImage();
        final ColorProcessor fp = (ColorProcessor) pai.target.convertToRGB();
        final ByteProcessor alpha;
        System.out.println("IMAGE:" + patch.getTitle());
        System.out.println("mask: " + pai.mask);
        System.out.println("outside: " + pai.outside);
        if (null == pai.mask) {
            if (null == pai.outside) {
                alpha = new ByteProcessor(fp.getWidth(), fp.getHeight());
                // fully opaque
                Arrays.fill((byte[]) alpha.getPixels(), (byte) 255);
            } else {
                alpha = pai.outside;
            }
        } else {
            alpha = pai.mask;
        }
        // The affine to apply
        final AffineTransform atc = new AffineTransform();
        atc.scale(scale, scale);
        atc.translate(-roi.x, -roi.y);
        final AffineTransform at = new AffineTransform();
        at.preConcatenate(atc);
        at.concatenate(patch.getAffineTransform());
        final AffineModel2D aff = new AffineModel2D();
        aff.set(at);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(aff, patch.getMeshResolution(), fp.getWidth(), fp.getHeight());
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(mesh);
        fp.setInterpolationMethod(ImageProcessor.BILINEAR);
        // no interpolation
        alpha.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
        mapping.map(fp, alpha, target, targetMask);
    }
    return new Pair<ColorProcessor, ByteProcessor>(target, targetMask);
}
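For orientation, a minimal calling sketch (not part of TrakEM2 itself): it assumes a List<Patch> named patches and a world-coordinate Rectangle named roi are already available from the surrounding context, and that the returned Pair exposes its two elements as the fields a and b.

// Hypothetical caller: flatten the given patches at half scale and show the result.
final double scale = 0.5;
final Pair<ColorProcessor, ByteProcessor> flat =
        ExportARGB.makeFlatImageARGBFromOriginals(patches, roi, 0, scale);
new ij.ImagePlus("flat ARGB", flat.a).show();   // the flattened image
new ij.ImagePlus("flat mask", flat.b).show();   // the accumulated alpha mask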
Use of mpicbg.trakem2.transform.TransformMeshMappingWithMasks in project TrakEM2 by trakem2.
In class NonLinearTransformMode, method doPainterUpdate:
@Override
protected void doPainterUpdate(final Rectangle r, final double m) {
    try {
        final CoordinateTransform mlst = createCT();
        final SimilarityModel2D toWorld = new SimilarityModel2D();
        toWorld.set(1.0 / m, 0, r.x - ScreenPatchRange.pad / m, r.y - ScreenPatchRange.pad / m);
        final mpicbg.models.CoordinateTransformList<mpicbg.models.CoordinateTransform> ctl = new mpicbg.models.CoordinateTransformList<mpicbg.models.CoordinateTransform>();
        ctl.add(toWorld);
        ctl.add(mlst);
        ctl.add(toWorld.createInverse());
        final CoordinateTransformMesh ctm = new CoordinateTransformMesh(ctl, 32, r.width * m + 2 * ScreenPatchRange.pad, r.height * m + 2 * ScreenPatchRange.pad);
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(ctm);
        // keep a pointer to the current list
        final HashMap<Paintable, GroupingMode.ScreenPatchRange<?>> screenPatchRanges = this.screenPatchRanges;
        for (final GroupingMode.ScreenPatchRange spr : screenPatchRanges.values()) {
            if (screenPatchRanges != this.screenPatchRanges) {
                // TODO should it call itself: doPainterUpdate( r, m );
                break;
            }
            spr.update(mapping);
        }
    } catch (final NotEnoughDataPointsException e) {
        // too few points to define the transform yet: nothing to update
    } catch (final NoninvertibleModelException e) {
        // transform not invertible in this state: skip this update
    } catch (final IllDefinedDataPointsException e) {
        // degenerate point configuration: skip this update
    } catch (final Exception e) {
        e.printStackTrace();
    }
}
Use of mpicbg.trakem2.transform.TransformMeshMappingWithMasks in project TrakEM2 by trakem2.
In class ExportARGB, method makeFlatImageARGBFromMipMaps:
/**
 * Returns nonsense or throws an Exception if mipmaps are not available.
 * Limited to 2GB arrays for the final image.
 *
 * @param patches the patches to render, in their stacking order
 * @param roi the region of interest, in world coordinates
 * @param backgroundValue the background value (not used in this implementation)
 * @param scale the scale at which to render the flat image
 * @return the flat ARGB image and its alpha mask
 */
public static final Pair<ColorProcessor, ByteProcessor> makeFlatImageARGBFromMipMaps(final List<Patch> patches, final Rectangle roi, final double backgroundValue, final double scale) {
    final int width = (int) (roi.width * scale);
    final int height = (int) (roi.height * scale);
    // Process the three channels separately in order to use proper alpha composition
    final ColorProcessor target = new ColorProcessor(width, height);
    target.setInterpolationMethod(ImageProcessor.BILINEAR);
    final ByteProcessor targetMask = new ByteProcessor(width, height);
    targetMask.setInterpolationMethod(ImageProcessor.BILINEAR);
    final Loader loader = patches.get(0).getProject().getLoader();
    for (final Patch patch : patches) {
        // MipMap image, already including any coordinate transforms and the alpha mask (if any), by definition.
        final MipMapImage mipMap = loader.fetchImage(patch, scale);
        // DEBUG: is there an alpha channel at all?
        // new ij.ImagePlus("alpha of " + patch.getTitle(), new ByteProcessor(mipMap.image.getWidth(null), mipMap.image.getHeight(null), new ColorProcessor(mipMap.image).getChannel(4))).show();
        // Yes, there is, even though the mipmap images have the alpha pre-multiplied
        // Work-around strange bug that makes mipmap-loaded images paint with 7-bit depth instead of 8-bit depth
        final BufferedImage bi = new BufferedImage(mipMap.image.getWidth(null), mipMap.image.getHeight(null), BufferedImage.TYPE_INT_ARGB);
        final Graphics2D g2d = bi.createGraphics();
        g2d.drawImage(mipMap.image, 0, 0, null);
        g2d.dispose();
        final int[] pix = extractARGBIntArray(bi);
        bi.flush();
        // DEBUG: does the BufferedImage have the alpha channel?
        // {
        //     final byte[] aa = new byte[pix.length];
        //     for (int i = 0; i < aa.length; ++i) aa[i] = (byte) ((pix[i] & 0xff000000) >> 24);
        //     new ij.ImagePlus("alpha of BI of " + patch.getTitle(), new ByteProcessor(bi.getWidth(), bi.getHeight(), aa)).show();
        // }
        // YES: the alpha, containing the outside too. All fine.
        final ByteProcessor alpha;
        final ColorProcessor rgb = new ColorProcessor(bi.getWidth(), bi.getHeight(), pix);
        if (patch.hasAlphaChannel()) {
            // The mipMap has the alpha channel in it, even if the alpha is pre-multiplied as well onto the images.
            final byte[] a = new byte[pix.length];
            for (int i = 0; i < a.length; ++i) {
                a[i] = (byte) ((pix[i] & 0xff000000) >> 24);
            }
            alpha = new ByteProcessor(bi.getWidth(), bi.getHeight(), a);
        } else {
            alpha = new ByteProcessor(bi.getWidth(), bi.getHeight());
            Arrays.fill((byte[]) alpha.getPixels(), (byte) 255);
        }
        // The affine to apply to the MipMap.image
        final AffineTransform atc = new AffineTransform();
        atc.scale(scale, scale);
        atc.translate(-roi.x, -roi.y);
        final AffineTransform at = new AffineTransform();
        at.preConcatenate(atc);
        at.concatenate(patch.getAffineTransform());
        at.scale(mipMap.scaleX, mipMap.scaleY);
        final AffineModel2D aff = new AffineModel2D();
        aff.set(at);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(aff, patch.getMeshResolution(), bi.getWidth(), bi.getHeight());
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(mesh);
        // no interpolation
        alpha.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
        rgb.setInterpolationMethod(ImageProcessor.BILINEAR);
        mapping.map(rgb, alpha, target, targetMask);
    }
    return new Pair<ColorProcessor, ByteProcessor>(target, targetMask);
}
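A possible follow-up step, sketched here under the assumption that flat is the Pair returned above: fold the accumulated mask back into the ARGB pixels so that unmapped areas become transparent, for example before writing a PNG. None of this is part of ExportARGB.

// Combine color and mask into one ARGB buffer; the mask supplies the alpha byte.
final ColorProcessor color = flat.a;
final ByteProcessor mask = flat.b;
final int[] rgbPixels = (int[]) color.getPixels();
final byte[] maskPixels = (byte[]) mask.getPixels();
final int[] argbPixels = new int[rgbPixels.length];
for (int i = 0; i < rgbPixels.length; ++i) {
    argbPixels[i] = ((maskPixels[i] & 0xff) << 24) | (rgbPixels[i] & 0x00ffffff);
}
final BufferedImage out = new BufferedImage(color.getWidth(), color.getHeight(), BufferedImage.TYPE_INT_ARGB);
out.setRGB(0, 0, color.getWidth(), color.getHeight(), argbPixels, 0, color.getWidth());
javax.imageio.ImageIO.write(out, "png", new java.io.File("flat.png"));   // throws IOException; handle in real code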
Use of mpicbg.trakem2.transform.TransformMeshMappingWithMasks in project TrakEM2 by trakem2.
In class ExportUnsignedByte, method makeFlatImage:
public static final Pair<ByteProcessor, ByteProcessor> makeFlatImage(final List<Patch> patches, final Rectangle roi, final double backgroundValue, final double scale, final ImageSource fetcher) {
    final ByteProcessor target = new ByteProcessor((int) (roi.width * scale), (int) (roi.height * scale));
    target.setInterpolationMethod(ImageProcessor.BILINEAR);
    final ByteProcessor targetMask = new ByteProcessor(target.getWidth(), target.getHeight());
    targetMask.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
    for (final Patch patch : patches) {
        final ImageData imgd = fetcher.fetch(patch, scale);
        // The affine to apply to the MipMap.image
        final AffineTransform atc = new AffineTransform();
        atc.scale(scale, scale);
        atc.translate(-roi.x, -roi.y);
        final AffineTransform at = new AffineTransform();
        at.preConcatenate(atc);
        at.concatenate(patch.getAffineTransform());
        at.scale(imgd.scaleX, imgd.scaleY);
        final AffineModel2D aff = new AffineModel2D();
        aff.set(at);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(aff, patch.getMeshResolution(), imgd.bp.getWidth(), imgd.bp.getHeight());
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(mesh);
        imgd.bp.setInterpolationMethod(ImageProcessor.BILINEAR);
        // no interpolation
        imgd.alpha.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
        mapping.map(imgd.bp, imgd.alpha, target, targetMask);
    }
    return new Pair<ByteProcessor, ByteProcessor>(target, targetMask);
}
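A minimal calling sketch for building an 8-bit stack from several layers. It assumes layers (a List<Layer>), roi, and a suitable ImageSource implementation named fetcher exist in the surrounding context, that the patches of a layer can be gathered via Layer.getDisplayables(Patch.class), and that the Pair fields are again a and b.

final double scale = 0.25;
final ij.ImageStack stack = new ij.ImageStack((int) (roi.width * scale), (int) (roi.height * scale));
for (final Layer layer : layers) {
    // collect the patches of this layer
    final List<Patch> patches = new ArrayList<Patch>();
    for (final Displayable d : layer.getDisplayables(Patch.class)) {
        patches.add((Patch) d);
    }
    final Pair<ByteProcessor, ByteProcessor> flat =
            ExportUnsignedByte.makeFlatImage(patches, roi, 0, scale, fetcher);
    stack.addSlice("z=" + layer.getZ(), flat.a);
}
new ij.ImagePlus("flat 8-bit stack", stack).show();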
Use of mpicbg.trakem2.transform.TransformMeshMappingWithMasks in project TrakEM2 by trakem2.
In class Render, method render:
/**
 * Renders a patch, mapping its intensities [min, max] → [0, 1]
 *
 * @param patch the patch to be rendered
 * @param coefficientsWidth number of coefficient tiles in x
 * @param coefficientsHeight number of coefficient tiles in y
 * @param targetImage target pixels, specifies the target box
 * @param targetWeight target weight pixels, depending on alpha
 * @param targetCoefficients target map of 1-based coefficient-tile indices
 * @param x target box offset in world coordinates
 * @param y target box offset in world coordinates
 * @param scale target scale
 */
public static final void render(final Patch patch, final int coefficientsWidth, final int coefficientsHeight, final FloatProcessor targetImage, final FloatProcessor targetWeight, final ColorProcessor targetCoefficients, final double x, final double y, final double scale) {
    /* assemble coordinate transformations and add bounding box offset */
    final CoordinateTransformList<CoordinateTransform> ctl = new CoordinateTransformList<CoordinateTransform>();
    ctl.add(patch.getFullCoordinateTransform());
    final AffineModel2D affineScale = new AffineModel2D();
    affineScale.set(scale, 0, 0, scale, -x * scale, -y * scale);
    ctl.add(affineScale);
    /* estimate average scale and generate downsampled source */
    final int width = patch.getOWidth(), height = patch.getOHeight();
    final double s = sampleAverageScale(ctl, width, height, width / patch.getMeshResolution());
    final int mipmapLevel = bestMipmapLevel(s);
    final ImageProcessor ipMipmap = Downsampler.downsampleImageProcessor(patch.getImageProcessor(), mipmapLevel);
    /* create a target */
    final ImageProcessor tp = ipMipmap.createProcessor(targetImage.getWidth(), targetImage.getHeight());
    /* prepare and downsample alpha mask if there is one */
    final ByteProcessor bpMaskMipmap;
    final ByteProcessor bpMaskTarget;
    final ByteProcessor bpMask = patch.getAlphaMask();
    if (bpMask == null) {
        bpMaskMipmap = null;
        bpMaskTarget = null;
    } else {
        bpMaskMipmap = Downsampler.downsampleByteProcessor(bpMask, mipmapLevel);
        bpMaskTarget = new ByteProcessor(tp.getWidth(), tp.getHeight());
    }
    /* create coefficients map */
    final ColorProcessor cp = new ColorProcessor(ipMipmap.getWidth(), ipMipmap.getHeight());
    final int w = cp.getWidth();
    final int h = cp.getHeight();
    for (int yi = 0; yi < h; ++yi) {
        final int yc = yi * coefficientsHeight / h;
        final int ic = yc * coefficientsWidth;
        final int iyi = yi * w;
        for (int xi = 0; xi < w; ++xi)
            cp.set(iyi + xi, ic + (xi * coefficientsWidth / w) + 1);
    }
    /* attach mipmap transformation */
    final CoordinateTransformList<CoordinateTransform> ctlMipmap = new CoordinateTransformList<CoordinateTransform>();
    ctlMipmap.add(createScaleLevelTransform(mipmapLevel));
    ctlMipmap.add(ctl);
    /* create mesh */
    final CoordinateTransformMesh mesh = new CoordinateTransformMesh(ctlMipmap, patch.getMeshResolution(), ipMipmap.getWidth(), ipMipmap.getHeight());
    /* render */
    final ImageProcessorWithMasks source = new ImageProcessorWithMasks(ipMipmap, bpMaskMipmap, null);
    final ImageProcessorWithMasks target = new ImageProcessorWithMasks(tp, bpMaskTarget, null);
    final TransformMeshMappingWithMasks<TransformMesh> mapping = new TransformMeshMappingWithMasks<TransformMesh>(mesh);
    mapping.mapInterpolated(source, target, 1);
    final TransformMeshMapping<TransformMesh> coefficientsMapMapping = new TransformMeshMapping<TransformMesh>(mesh);
    coefficientsMapMapping.map(cp, targetCoefficients, 1);
    /* set alpha channel */
    final byte[] alphaPixels;
    if (bpMaskTarget != null)
        alphaPixels = (byte[]) bpMaskTarget.getPixels();
    else
        alphaPixels = (byte[]) target.outside.getPixels();
    /* convert */
    final double min = patch.getMin();
    final double max = patch.getMax();
    final double a = 1.0 / (max - min);
    final double b = 1.0 / 255.0;
    for (int i = 0; i < alphaPixels.length; ++i)
        targetImage.setf(i, (float) ((tp.getf(i) - min) * a));
    for (int i = 0; i < alphaPixels.length; ++i)
        targetWeight.setf(i, (float) ((alphaPixels[i] & 0xff) * b));
}
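To illustrate how the targets relate to the signature, a minimal calling sketch; the coefficient grid size, the scale, and the use of the patch's bounding box as the target region are illustrative assumptions, not taken from the Render class itself.

final double scale = 0.1;
final Rectangle box = patch.getBoundingBox();                 // world-space target region
final int w = (int) (box.width * scale);
final int h = (int) (box.height * scale);
final FloatProcessor pixels = new FloatProcessor(w, h);       // receives intensities mapped to [0, 1]
final FloatProcessor weights = new FloatProcessor(w, h);      // receives alpha-derived weights in [0, 1]
final ColorProcessor coefficients = new ColorProcessor(w, h); // receives 1-based coefficient-tile indices
Render.render(patch, 4, 4, pixels, weights, coefficients, box.x, box.y, scale);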