
Example 11 with ByteProcessor

Use of ij.process.ByteProcessor in project bioformats by openmicroscopy.

The class ImageProcessorReader, method openProcessors.

/**
 * Returns an array of ImageProcessors that represent the given slice.
 * There is one ImageProcessor per RGB channel;
 * i.e., the length of the returned array == getRGBChannelCount().
 *
 * @param no Index of the image plane to open.
 * @param x X coordinate of the upper-left corner of the sub-image.
 * @param y Y coordinate of the upper-left corner of the sub-image.
 * @param w Width of the sub-image.
 * @param h Height of the sub-image.
 */
public ImageProcessor[] openProcessors(int no, int x, int y, int w, int h) throws FormatException, IOException {
    // read byte array
    byte[] b = openBytes(no, x, y, w, h);
    int c = getRGBChannelCount();
    int type = getPixelType();
    int bpp = FormatTools.getBytesPerPixel(type);
    boolean interleave = isInterleaved();
    if (b.length != w * h * c * bpp && b.length != w * h * bpp) {
        throw new FormatException("Invalid byte array length: " + b.length + " (expected w=" + w + ", h=" + h + ", c=" + c + ", bpp=" + bpp + ")");
    }
    // create a color model for this plane (null means default)
    final LUT cm = createColorModel();
    // convert byte array to appropriate primitive array type
    boolean isFloat = FormatTools.isFloatingPoint(type);
    boolean isLittle = isLittleEndian();
    boolean isSigned = FormatTools.isSigned(type);
    // construct image processors
    ImageProcessor[] ip = new ImageProcessor[c];
    for (int i = 0; i < c; i++) {
        byte[] channel = ImageTools.splitChannels(b, i, c, bpp, false, interleave);
        Object pixels = DataTools.makeDataArray(channel, bpp, isFloat, isLittle);
        if (pixels instanceof byte[]) {
            byte[] q = (byte[]) pixels;
            if (q.length != w * h) {
                byte[] tmp = q;
                q = new byte[w * h];
                System.arraycopy(tmp, 0, q, 0, Math.min(q.length, tmp.length));
            }
            if (isSigned)
                q = DataTools.makeSigned(q);
            ip[i] = new ByteProcessor(w, h, q, null);
            if (cm != null)
                ip[i].setColorModel(cm);
        } else if (pixels instanceof short[]) {
            short[] q = (short[]) pixels;
            if (q.length != w * h) {
                short[] tmp = q;
                q = new short[w * h];
                System.arraycopy(tmp, 0, q, 0, Math.min(q.length, tmp.length));
            }
            if (isSigned)
                q = DataTools.makeSigned(q);
            ip[i] = new ShortProcessor(w, h, q, cm);
        } else if (pixels instanceof int[]) {
            int[] q = (int[]) pixels;
            if (q.length != w * h) {
                int[] tmp = q;
                q = new int[w * h];
                System.arraycopy(tmp, 0, q, 0, Math.min(q.length, tmp.length));
            }
            ip[i] = new FloatProcessor(w, h, q);
        } else if (pixels instanceof float[]) {
            float[] q = (float[]) pixels;
            if (q.length != w * h) {
                float[] tmp = q;
                q = new float[w * h];
                System.arraycopy(tmp, 0, q, 0, Math.min(q.length, tmp.length));
            }
            ip[i] = new FloatProcessor(w, h, q, null);
        } else if (pixels instanceof double[]) {
            double[] q = (double[]) pixels;
            if (q.length != w * h) {
                double[] tmp = q;
                q = new double[w * h];
                System.arraycopy(tmp, 0, q, 0, Math.min(q.length, tmp.length));
            }
            ip[i] = new FloatProcessor(w, h, q);
        }
    }
    return ip;
}
Also used : ByteProcessor(ij.process.ByteProcessor) FloatProcessor(ij.process.FloatProcessor) LUT(ij.process.LUT) FormatException(loci.formats.FormatException) ShortProcessor(ij.process.ShortProcessor) ImageProcessor(ij.process.ImageProcessor)
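
For orientation, here is a minimal caller sketch for the method above. It follows the common Bio-Formats ImageJ pattern of wrapping an ImageReader in a ChannelSeparator; the method name openAllPlanes and the file path are placeholders, and this is an illustrative sketch rather than code from the bioformats project:

import ij.ImagePlus;
import ij.ImageStack;
import ij.process.ImageProcessor;
import loci.formats.ChannelSeparator;
import loci.plugins.util.ImageProcessorReader;
import loci.plugins.util.LociPrefs;

// Open every plane of a file as ImageProcessors, one per RGB channel, and stack them.
public static ImagePlus openAllPlanes(String path) throws loci.formats.FormatException, java.io.IOException {
    ImageProcessorReader r = new ImageProcessorReader(new ChannelSeparator(LociPrefs.makeImageReader()));
    r.setId(path);
    ImageStack stack = new ImageStack(r.getSizeX(), r.getSizeY());
    for (int no = 0; no < r.getImageCount(); no++) {
        // full-frame tile: x = 0, y = 0, w = getSizeX(), h = getSizeY()
        for (ImageProcessor ip : r.openProcessors(no, 0, 0, r.getSizeX(), r.getSizeY())) {
            stack.addSlice(ip);
        }
    }
    r.close();
    return new ImagePlus(path, stack);
}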

Example 12 with ByteProcessor

Use of ij.process.ByteProcessor in project TrakEM2 by trakem2.

The class DownsamplerMipMaps, method create.

public static final ImageBytes[] create(final Patch patch, final int type, final int n_levels, final ImageProcessor ip, final ByteProcessor alpha, final ByteProcessor outside) {
    // Create pyramid
    final ImageBytes[] p = new ImageBytes[n_levels];
    if (null == alpha && null == outside) {
        int i = 1;
        switch(type) {
            case ImagePlus.GRAY8:
                ByteProcessor bp = (ByteProcessor) ip;
                p[0] = asBytes(bp);
                while (i < p.length) {
                    bp = Downsampler.downsampleByteProcessor(bp);
                    p[i++] = asBytes(bp);
                }
                break;
            case ImagePlus.GRAY16:
                ShortProcessor sp = (ShortProcessor) ip;
                p[0] = asBytes(sp);
                Pair<ShortProcessor, byte[]> rs;
                while (i < p.length) {
                    rs = Downsampler.downsampleShort(sp);
                    sp = rs.a;
                    p[i++] = new ImageBytes(new byte[][] { rs.b }, sp.getWidth(), sp.getHeight());
                }
                break;
            case ImagePlus.GRAY32:
                FloatProcessor fp = (FloatProcessor) ip;
                p[0] = asBytes(fp);
                Pair<FloatProcessor, byte[]> rf;
                while (i < p.length) {
                    rf = Downsampler.downsampleFloat(fp);
                    fp = rf.a;
                    p[i++] = new ImageBytes(new byte[][] { rf.b }, fp.getWidth(), fp.getHeight());
                }
                break;
            case ImagePlus.COLOR_RGB:
                ColorProcessor cp = (ColorProcessor) ip;
                // TODO the int[] could be reused
                p[0] = asBytes(cp);
                Pair<ColorProcessor, byte[][]> rc;
                while (i < p.length) {
                    rc = Downsampler.downsampleColor(cp);
                    cp = rc.a;
                    p[i++] = new ImageBytes(rc.b, cp.getWidth(), cp.getHeight());
                }
                break;
        }
    } else {
        // Alpha channel
        final ByteProcessor[] masks = new ByteProcessor[p.length];
        if (null != alpha && null != outside) {
            // Use both alpha and outside:
            final byte[] b1 = (byte[]) alpha.getPixels(), b2 = (byte[]) outside.getPixels();
            for (int i = 0; i < b1.length; ++i) {
                // 'outside' is a binary mask; as a signed byte, -1 corresponds to 255 (inside).
                // Zero the alpha wherever the pixel is not inside the 'outside' mask.
                b1[i] = b2[i] != -1 ? 0 : b1[i];
            }
            masks[0] = alpha;
            // 
            int i = 1;
            Pair<ByteProcessor, ByteProcessor> pair;
            ByteProcessor a = alpha, o = outside;
            while (i < p.length) {
                pair = Downsampler.downsampleAlphaAndOutside(a, o);
                a = pair.a;
                o = pair.b;
                // o is already combined into it
                masks[i] = a;
                ++i;
            }
        } else {
            // Only one of the two is not null:
            if (null == alpha) {
                masks[0] = outside;
                int i = 1;
                while (i < p.length) {
                    masks[i] = Downsampler.downsampleOutside(masks[i - 1]);
                    ++i;
                }
            } else {
                masks[0] = alpha;
                int i = 1;
                while (i < p.length) {
                    masks[i] = Downsampler.downsampleByteProcessor(masks[i - 1]);
                    ++i;
                }
            }
        }
        // Image channels
        int i = 1;
        switch(type) {
            case ImagePlus.GRAY8:
                ByteProcessor bp = (ByteProcessor) ip;
                p[0] = asBytes(bp, masks[0]);
                while (i < p.length) {
                    bp = Downsampler.downsampleByteProcessor(bp);
                    p[i] = asBytes(bp, masks[i]);
                    ++i;
                }
                break;
            case ImagePlus.GRAY16:
                ShortProcessor sp = (ShortProcessor) ip;
                p[0] = asBytes(sp, masks[0]);
                while (i < p.length) {
                    final Pair<ShortProcessor, byte[]> rs = Downsampler.downsampleShort(sp);
                    sp = rs.a;
                    p[i] = new ImageBytes(new byte[][] { rs.b, (byte[]) masks[i].getPixels() }, sp.getWidth(), sp.getHeight());
                    ++i;
                }
                break;
            case ImagePlus.GRAY32:
                FloatProcessor fp = (FloatProcessor) ip;
                p[0] = asBytes(fp, masks[0]);
                while (i < p.length) {
                    final Pair<FloatProcessor, byte[]> rs = Downsampler.downsampleFloat(fp);
                    fp = rs.a;
                    p[i] = new ImageBytes(new byte[][] { rs.b, (byte[]) masks[i].getPixels() }, fp.getWidth(), fp.getHeight());
                    ++i;
                }
                break;
            case ImagePlus.COLOR_RGB:
                ColorProcessor cp = (ColorProcessor) ip;
                // TODO the int[] could be reused
                p[0] = asBytes(cp, masks[0]);
                while (i < p.length) {
                    final Pair<ColorProcessor, byte[][]> rs = Downsampler.downsampleColor(cp);
                    cp = rs.a;
                    final byte[][] rgb = rs.b;
                    p[i] = new ImageBytes(new byte[][] { rgb[0], rgb[1], rgb[2], (byte[]) masks[i].getPixels() }, cp.getWidth(), cp.getHeight());
                    ++i;
                }
                break;
        }
    }
    return p;
}
Also used : ByteProcessor(ij.process.ByteProcessor) FloatProcessor(ij.process.FloatProcessor) ShortProcessor(ij.process.ShortProcessor) ColorProcessor(ij.process.ColorProcessor)
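
The Downsampler calls above come from the TrakEM2/mpicbg libraries and are not shown here. As a rough, self-contained illustration of what one pyramid step does in the GRAY8 case, the sketch below halves a ByteProcessor by plain 2x2 box averaging; it is an assumption for illustration only, not TrakEM2's actual Downsampler:

import ij.process.ByteProcessor;

// Halve a ByteProcessor in each dimension by averaging 2x2 pixel blocks.
public static ByteProcessor halve(ByteProcessor src) {
    final int w = src.getWidth() / 2;
    final int h = src.getHeight() / 2;
    final ByteProcessor dst = new ByteProcessor(w, h);
    for (int y = 0; y < h; ++y) {
        for (int x = 0; x < w; ++x) {
            final int sum = src.get(2 * x, 2 * y) + src.get(2 * x + 1, 2 * y)
                          + src.get(2 * x, 2 * y + 1) + src.get(2 * x + 1, 2 * y + 1);
            dst.set(x, y, (sum + 2) / 4); // rounded mean of the block
        }
    }
    return dst;
}

// An n-level pyramid then mirrors the GRAY8 branch above:
// level[0] = the original; level[i] = halve(level[i - 1]) for i = 1 .. n - 1.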

Example 13 with ByteProcessor

Use of ij.process.ByteProcessor in project TrakEM2 by trakem2.

The class ElasticMontage, method exec.

@SuppressWarnings("deprecation")
public final void exec(final Param param, final List<Patch> patches, final Set<Patch> fixedPatches) throws Exception {
    /* free memory */
    patches.get(0).getProject().getLoader().releaseAll();
    /* create tiles and models for all patches */
    final ArrayList<AbstractAffineTile2D<?>> tiles = new ArrayList<AbstractAffineTile2D<?>>();
    final ArrayList<AbstractAffineTile2D<?>> fixedTiles = new ArrayList<AbstractAffineTile2D<?>>();
    Align.tilesFromPatches(param.po, patches, fixedPatches, tiles, fixedTiles);
    if (!param.isAligned) {
        Align.alignTiles(param.po, tiles, fixedTiles, param.tilesAreInPlace, param.maxNumThreads);
        /* Apply the estimated affine transform to patches */
        for (final AbstractAffineTile2D<?> t : tiles) t.getPatch().setAffineTransform(t.createAffine());
        Display.update();
    }
    /* generate tile pairs for all by now overlapping tiles */
    final ArrayList<AbstractAffineTile2D<?>[]> tilePairs = new ArrayList<AbstractAffineTile2D<?>[]>();
    AbstractAffineTile2D.pairOverlappingTiles(tiles, tilePairs);
    /* check if there was any pair */
    if (tilePairs.size() == 0) {
        Utils.log("Elastic montage could not find any overlapping patches after pre-montaging.");
        return;
    }
    Utils.log(tilePairs.size() + " pairs of patches will be block-matched...");
    /* make pairwise global models local */
    final ArrayList<Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>> pairs = new ArrayList<Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>>();
    /*
     * The following casting madness is necessary to get this code compiled
     * with Sun/Oracle Java 6, which otherwise generates an inconvertible
     * types exception.
     *
     * TODO Remove as soon as this bug is fixed in Sun/Oracle javac.
     */
    for (final AbstractAffineTile2D<?>[] pair : tilePairs) {
        final AbstractAffineModel2D<?> m;
        switch(param.po.desiredModelIndex) {
            case 0:
                final TranslationModel2D t = (TranslationModel2D) (Object) pair[1].getModel().createInverse();
                t.concatenate((TranslationModel2D) (Object) pair[0].getModel());
                m = t;
                break;
            case 1:
                final RigidModel2D r = (RigidModel2D) (Object) pair[1].getModel().createInverse();
                r.concatenate((RigidModel2D) (Object) pair[0].getModel());
                m = r;
                break;
            case 2:
                final SimilarityModel2D s = (SimilarityModel2D) (Object) pair[1].getModel().createInverse();
                s.concatenate((SimilarityModel2D) (Object) pair[0].getModel());
                m = s;
                break;
            case 3:
                final AffineModel2D a = (AffineModel2D) (Object) pair[1].getModel().createInverse();
                a.concatenate((AffineModel2D) (Object) pair[0].getModel());
                m = a;
                break;
            default:
                m = null;
        }
        pairs.add(new Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform>(pair[0], pair[1], m));
    }
    /* Elastic alignment */
    /* Initialization */
    final double springTriangleHeightTwice = 2 * Math.sqrt(0.75 * param.springLengthSpringMesh * param.springLengthSpringMesh);
    final ArrayList<SpringMesh> meshes = new ArrayList<SpringMesh>(tiles.size());
    final HashMap<AbstractAffineTile2D<?>, SpringMesh> tileMeshMap = new HashMap<AbstractAffineTile2D<?>, SpringMesh>();
    for (final AbstractAffineTile2D<?> tile : tiles) {
        final double w = tile.getWidth();
        final double h = tile.getHeight();
        final int numX = Math.max(2, (int) Math.ceil(w / param.springLengthSpringMesh) + 1);
        final int numY = Math.max(2, (int) Math.ceil(h / springTriangleHeightTwice) + 1);
        final double wMesh = (numX - 1) * param.springLengthSpringMesh;
        final double hMesh = (numY - 1) * springTriangleHeightTwice;
        final SpringMesh mesh = new SpringMesh(numX, numY, wMesh, hMesh, param.stiffnessSpringMesh, param.maxStretchSpringMesh * param.bmScale, param.dampSpringMesh);
        meshes.add(mesh);
        tileMeshMap.put(tile, mesh);
    }
    // final int blockRadius = Math.max( 32, Util.roundPos( param.springLengthSpringMesh / 2 ) );
    final int blockRadius = Math.max(Util.roundPos(16 / param.bmScale), param.bmBlockRadius);
    /**
     * TODO Set this to something larger than the largest error of the approximate model.
     */
    final int searchRadius = param.bmSearchRadius;
    final AbstractModel<?> localSmoothnessFilterModel = mpicbg.trakem2.align.Util.createModel(param.bmLocalModelIndex);
    for (final Triple<AbstractAffineTile2D<?>, AbstractAffineTile2D<?>, InvertibleCoordinateTransform> pair : pairs) {
        final AbstractAffineTile2D<?> t1 = pair.a;
        final AbstractAffineTile2D<?> t2 = pair.b;
        final SpringMesh m1 = tileMeshMap.get(t1);
        final SpringMesh m2 = tileMeshMap.get(t2);
        final ArrayList<PointMatch> pm12 = new ArrayList<PointMatch>();
        final ArrayList<PointMatch> pm21 = new ArrayList<PointMatch>();
        final ArrayList<Vertex> v1 = m1.getVertices();
        final ArrayList<Vertex> v2 = m2.getVertices();
        final String patchName1 = patchName(t1.getPatch());
        final String patchName2 = patchName(t2.getPatch());
        final PatchImage pi1 = t1.getPatch().createTransformedImage();
        if (pi1 == null) {
            Utils.log("Patch `" + patchName1 + "' failed generating a transformed image.  Skipping...");
            continue;
        }
        final PatchImage pi2 = t2.getPatch().createTransformedImage();
        if (pi2 == null) {
            Utils.log("Patch `" + patchName2 + "' failed generating a transformed image.  Skipping...");
            continue;
        }
        final FloatProcessor fp1 = (FloatProcessor) pi1.target.convertToFloat();
        final ByteProcessor mask1 = pi1.getMask();
        final FloatProcessor fpMask1 = mask1 == null ? null : scaleByte(mask1);
        final FloatProcessor fp2 = (FloatProcessor) pi2.target.convertToFloat();
        final ByteProcessor mask2 = pi2.getMask();
        final FloatProcessor fpMask2 = mask2 == null ? null : scaleByte(mask2);
        if (!fixedTiles.contains(t1)) {
            BlockMatching.matchByMaximalPMCC(fp1, fp2, fpMask1, fpMask2, param.bmScale, pair.c, blockRadius, blockRadius, searchRadius, searchRadius, param.bmMinR, param.bmRodR, param.bmMaxCurvatureR, v1, pm12, new ErrorStatistic(1));
            if (param.bmUseLocalSmoothnessFilter) {
                Utils.log("`" + patchName1 + "' > `" + patchName2 + "': found " + pm12.size() + " correspondence candidates.");
                localSmoothnessFilterModel.localSmoothnessFilter(pm12, pm12, param.bmLocalRegionSigma, param.bmMaxLocalEpsilon, param.bmMaxLocalTrust);
                Utils.log("`" + patchName1 + "' > `" + patchName2 + "': " + pm12.size() + " candidates passed local smoothness filter.");
            } else {
                Utils.log("`" + patchName1 + "' > `" + patchName2 + "': found " + pm12.size() + " correspondences.");
            }
        } else {
            Utils.log("Skipping fixed patch `" + patchName1 + "'.");
        }
        if (!fixedTiles.contains(t2)) {
            BlockMatching.matchByMaximalPMCC(fp2, fp1, fpMask2, fpMask1, param.bmScale, pair.c.createInverse(), blockRadius, blockRadius, searchRadius, searchRadius, param.bmMinR, param.bmRodR, param.bmMaxCurvatureR, v2, pm21, new ErrorStatistic(1));
            if (param.bmUseLocalSmoothnessFilter) {
                Utils.log("`" + patchName1 + "' < `" + patchName2 + "': found " + pm21.size() + " correspondence candidates.");
                localSmoothnessFilterModel.localSmoothnessFilter(pm21, pm21, param.bmLocalRegionSigma, param.bmMaxLocalEpsilon, param.bmMaxLocalTrust);
                Utils.log("`" + patchName1 + "' < `" + patchName2 + "': " + pm21.size() + " candidates passed local smoothness filter.");
            } else {
                Utils.log("`" + patchName1 + "' < `" + patchName2 + "': found " + pm21.size() + " correspondences.");
            }
        } else {
            Utils.log("Skipping fixed patch `" + patchName2 + "'.");
        }
        for (final PointMatch pm : pm12) {
            final Vertex p1 = (Vertex) pm.getP1();
            final Vertex p2 = new Vertex(pm.getP2());
            p1.addSpring(p2, new Spring(0, 1.0f));
            m2.addPassiveVertex(p2);
        }
        for (final PointMatch pm : pm21) {
            final Vertex p1 = (Vertex) pm.getP1();
            final Vertex p2 = new Vertex(pm.getP2());
            p1.addSpring(p2, new Spring(0, 1.0f));
            m1.addPassiveVertex(p2);
        }
    }
    /* initialize */
    for (final Map.Entry<AbstractAffineTile2D<?>, SpringMesh> entry : tileMeshMap.entrySet()) entry.getValue().init(entry.getKey().getModel());
    /* optimize the meshes */
    try {
        final long t0 = System.currentTimeMillis();
        IJ.log("Optimizing spring meshes...");
        if (param.useLegacyOptimizer) {
            Utils.log("  ...using legacy optimizer...");
            SpringMesh.optimizeMeshes2(meshes, param.po.maxEpsilon, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
        } else {
            SpringMesh.optimizeMeshes(meshes, param.po.maxEpsilon, param.maxIterationsSpringMesh, param.maxPlateauwidthSpringMesh, param.visualize);
        }
        IJ.log("Done optimizing spring meshes. Took " + (System.currentTimeMillis() - t0) + " ms");
    } catch (final NotEnoughDataPointsException e) {
        Utils.log("There were not enough data points to get the spring mesh optimizing.");
        e.printStackTrace();
        return;
    }
    /* apply */
    for (final Map.Entry<AbstractAffineTile2D<?>, SpringMesh> entry : tileMeshMap.entrySet()) {
        final AbstractAffineTile2D<?> tile = entry.getKey();
        if (!fixedTiles.contains(tile)) {
            final Patch patch = tile.getPatch();
            final SpringMesh mesh = entry.getValue();
            final Set<PointMatch> matches = mesh.getVA().keySet();
            Rectangle box = patch.getCoordinateTransformBoundingBox();
            /* compensate for existing coordinate transform bounding box */
            for (final PointMatch pm : matches) {
                final Point p1 = pm.getP1();
                final double[] l = p1.getL();
                l[0] += box.x;
                l[1] += box.y;
            }
            final ThinPlateSplineTransform mlt = ElasticLayerAlignment.makeTPS(matches);
            patch.appendCoordinateTransform(mlt);
            box = patch.getCoordinateTransformBoundingBox();
            patch.getAffineTransform().setToTranslation(box.x, box.y);
            patch.updateInDatabase("transform");
            patch.updateBucket();
            patch.updateMipMaps();
        }
    }
    Utils.log("Done.");
}
Also used : ByteProcessor(ij.process.ByteProcessor) NotEnoughDataPointsException(mpicbg.models.NotEnoughDataPointsException) Vertex(mpicbg.models.Vertex) SpringMesh(mpicbg.models.SpringMesh) ThinPlateSplineTransform(mpicbg.trakem2.transform.ThinPlateSplineTransform) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Rectangle(java.awt.Rectangle) RigidModel2D(mpicbg.models.RigidModel2D) AbstractAffineModel2D(mpicbg.models.AbstractAffineModel2D) AffineModel2D(mpicbg.models.AffineModel2D) SimilarityModel2D(mpicbg.models.SimilarityModel2D) FloatProcessor(ij.process.FloatProcessor) Point(mpicbg.models.Point) Spring(mpicbg.models.Spring) Point(mpicbg.models.Point) Triple(mpicbg.trakem2.util.Triple) PointMatch(mpicbg.models.PointMatch) PatchImage(ini.trakem2.display.Patch.PatchImage) InvertibleCoordinateTransform(mpicbg.models.InvertibleCoordinateTransform) ErrorStatistic(mpicbg.models.ErrorStatistic) TranslationModel2D(mpicbg.models.TranslationModel2D) HashMap(java.util.HashMap) Map(java.util.Map) Patch(ini.trakem2.display.Patch)
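
The scaleByte(...) helper used for the block-matching masks is a private method of ElasticMontage and is not included in this excerpt. A plausible minimal equivalent, assuming block matching expects mask weights in the range [0, 1], would look like the following (an illustrative reconstruction, not the project's verbatim code):

import ij.process.ByteProcessor;
import ij.process.FloatProcessor;

// Convert an 8-bit alpha mask (0..255) into float weights (0.0..1.0).
static FloatProcessor scaleByte(final ByteProcessor mask) {
    final FloatProcessor fp = (FloatProcessor) mask.convertToFloat();
    fp.multiply(1.0 / 255.0); // 255 -> 1.0 (opaque), 0 -> 0.0 (masked out)
    return fp;
}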

Example 14 with ByteProcessor

Use of ij.process.ByteProcessor in project TrakEM2 by trakem2.

The class ExportARGB, method makeFlatImageARGBFromOriginals.

/**
 * Limited to 2 GB arrays for the requested image.
 *
 * @param patches the patches to render
 * @param roi the bounding box to render, in world coordinates
 * @param backgroundValue the background value (not used by this implementation)
 * @param scale the scale factor applied to the roi
 * @return a Pair of the flattened RGB image and its 8-bit alpha mask
 */
public static final Pair<ColorProcessor, ByteProcessor> makeFlatImageARGBFromOriginals(final List<Patch> patches, final Rectangle roi, final double backgroundValue, final double scale) {
    final ColorProcessor target = new ColorProcessor((int) (roi.width * scale), (int) (roi.height * scale));
    target.setInterpolationMethod(ImageProcessor.BILINEAR);
    final ByteProcessor targetMask = new ByteProcessor(target.getWidth(), target.getHeight());
    targetMask.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
    for (final Patch patch : patches) {
        final Patch.PatchImage pai = patch.createTransformedImage();
        final ColorProcessor fp = (ColorProcessor) pai.target.convertToRGB();
        final ByteProcessor alpha;
        System.out.println("IMAGE:" + patch.getTitle());
        System.out.println("mask: " + pai.mask);
        System.out.println("outside: " + pai.outside);
        if (null == pai.mask) {
            if (null == pai.outside) {
                alpha = new ByteProcessor(fp.getWidth(), fp.getHeight());
                // fully opaque
                Arrays.fill((byte[]) alpha.getPixels(), (byte) 255);
            } else {
                alpha = pai.outside;
            }
        } else {
            alpha = pai.mask;
        }
        // The affine to apply
        final AffineTransform atc = new AffineTransform();
        atc.scale(scale, scale);
        atc.translate(-roi.x, -roi.y);
        final AffineTransform at = new AffineTransform();
        at.preConcatenate(atc);
        at.concatenate(patch.getAffineTransform());
        final AffineModel2D aff = new AffineModel2D();
        aff.set(at);
        final CoordinateTransformMesh mesh = new CoordinateTransformMesh(aff, patch.getMeshResolution(), fp.getWidth(), fp.getHeight());
        final TransformMeshMappingWithMasks<CoordinateTransformMesh> mapping = new TransformMeshMappingWithMasks<CoordinateTransformMesh>(mesh);
        fp.setInterpolationMethod(ImageProcessor.BILINEAR);
        // no interpolation
        alpha.setInterpolationMethod(ImageProcessor.NEAREST_NEIGHBOR);
        mapping.map(fp, alpha, target, targetMask);
    }
    return new Pair<ColorProcessor, ByteProcessor>(target, targetMask);
}
Also used : ByteProcessor(ij.process.ByteProcessor) ColorProcessor(ij.process.ColorProcessor) CoordinateTransformMesh(mpicbg.models.CoordinateTransformMesh) AffineTransform(java.awt.geom.AffineTransform) Patch(ini.trakem2.display.Patch) Pair(mpicbg.trakem2.util.Pair)
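
A short caller sketch for the method above (illustrative only: the showFlat name is a placeholder, Pair exposes its members as the public fields a and b as used elsewhere in this section, and the import for ExportARGB is omitted because its package is not shown here):

import ij.ImagePlus;
import ij.ImageStack;
import ij.process.ByteProcessor;
import ij.process.ColorProcessor;
import ini.trakem2.display.Patch;
import java.awt.Rectangle;
import java.util.List;
import mpicbg.trakem2.util.Pair;

// Flatten a list of patches at half scale and display the RGB image next to its mask.
static void showFlat(final List<Patch> patches, final Rectangle roi) {
    final Pair<ColorProcessor, ByteProcessor> flat =
        ExportARGB.makeFlatImageARGBFromOriginals(patches, roi, 0, 0.5);
    final ImageStack stack = new ImageStack(flat.a.getWidth(), flat.a.getHeight());
    stack.addSlice("RGB", flat.a);
    stack.addSlice("mask", flat.b);
    new ImagePlus("flat export", stack).show();
}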

Example 15 with ByteProcessor

Use of ij.process.ByteProcessor in project TrakEM2 by trakem2.

The class ExportBestFlatImage, method makeFlatColorImage.

public Pair<ColorProcessor, ByteProcessor> makeFlatColorImage() {
    printInfo();
    if (canUseAWTImage()) {
        // less than 0.5 GB array size
        final ColorProcessor cp = new ColorProcessor(createAWTImage(ImagePlus.COLOR_RGB));
        final ByteProcessor alpha = new ByteProcessor(cp.getWidth(), cp.getHeight(), cp.getChannel(4));
        return new Pair<ColorProcessor, ByteProcessor>(cp, alpha);
    }
    if (!isSmallerThan2GB()) {
        Utils.log("Cannot create an image larger than 2 GB.");
        return null;
    }
    if (loader.isMipMapsRegenerationEnabled()) {
        return ExportARGB.makeFlatImageARGBFromMipMaps(patches, finalBox, 0, scale);
    }
    // No mipmaps: create an image as large as possible, then downsample it
    final Pair<ColorProcessor, ByteProcessor> pair = ExportARGB.makeFlatImageARGBFromOriginals(patches, finalBox, 0, scaleUP);
    final double sigma = computeSigma(pair.a.getWidth(), pair.a.getHeight());
    new GaussianBlur().blurGaussian(pair.a, sigma, sigma, 0.0002);
    new GaussianBlur().blurGaussian(pair.b, sigma, sigma, 0.0002);
    return pair;
}
Also used : ByteProcessor(ij.process.ByteProcessor) ColorProcessor(ij.process.ColorProcessor) GaussianBlur(ij.plugin.filter.GaussianBlur) Pair(mpicbg.trakem2.util.Pair)
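
In the no-mipmap branch the patches are rendered at the larger scaleUP and Gaussian-blurred so the result can be resampled safely; computeSigma is a helper of ExportBestFlatImage that is not shown here. As a general illustration of the blur-then-resize idea using the same ImageJ classes (an assumption about the follow-up step, not code from this class):

import ij.plugin.filter.GaussianBlur;
import ij.process.ImageProcessor;

// Downsample an ImageProcessor by a factor < 1, pre-blurring to reduce aliasing.
static ImageProcessor blurAndResize(final ImageProcessor ip, final double factor) {
    final double sigma = 0.5 / factor; // common heuristic: sigma grows with the reduction ratio
    new GaussianBlur().blurGaussian(ip, sigma, sigma, 0.0002);
    ip.setInterpolationMethod(ImageProcessor.BILINEAR);
    return ip.resize((int) (ip.getWidth() * factor), (int) (ip.getHeight() * factor));
}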

Aggregations

ByteProcessor (ij.process.ByteProcessor): 86
ImagePlus (ij.ImagePlus): 30
ImageProcessor (ij.process.ImageProcessor): 23
FloatProcessor (ij.process.FloatProcessor): 21
ShortProcessor (ij.process.ShortProcessor): 19
ColorProcessor (ij.process.ColorProcessor): 14
ArrayList (java.util.ArrayList): 13
Point (java.awt.Point): 12
Rectangle (java.awt.Rectangle): 11
Roi (ij.gui.Roi): 10
AffineTransform (java.awt.geom.AffineTransform): 10
ImageStack (ij.ImageStack): 9
Patch (ini.trakem2.display.Patch): 9
Calibration (ij.measure.Calibration): 8
Pair (mpicbg.trakem2.util.Pair): 7
Color (java.awt.Color): 6
LUT (ij.process.LUT): 5
BufferedImage (java.awt.image.BufferedImage): 5
IOException (java.io.IOException): 5
CoordinateTransformMesh (mpicbg.models.CoordinateTransformMesh): 5