Search in sources :

Example 11 with ComponentSampleModel

use of java.awt.image.ComponentSampleModel in project jdk8u_jdk by JetBrains.

the class GIFImageWriteParam method writeRasterData.

private void writeRasterData(RenderedImage image, Rectangle sourceBounds, Dimension destSize, ImageWriteParam param, boolean interlaceFlag) throws IOException {
    int sourceXOffset = sourceBounds.x;
    int sourceYOffset = sourceBounds.y;
    int sourceWidth = sourceBounds.width;
    int sourceHeight = sourceBounds.height;
    int destWidth = destSize.width;
    int destHeight = destSize.height;
    int periodX;
    int periodY;
    if (param == null) {
        periodX = 1;
        periodY = 1;
    } else {
        periodX = param.getSourceXSubsampling();
        periodY = param.getSourceYSubsampling();
    }
    SampleModel sampleModel = image.getSampleModel();
    int bitsPerPixel = sampleModel.getSampleSize()[0];
    int initCodeSize = bitsPerPixel;
    if (initCodeSize == 1) {
        initCodeSize++;
    }
    stream.write(initCodeSize);
    LZWCompressor compressor = new LZWCompressor(stream, initCodeSize, false);
    /* At this point we know that the input image is an indexed image.
     * We can directly copy the data iff:
     *   - no subsampling is required (periodX == 1, periodY == 1)
     *   - we can access the data directly (the image is non-tiled,
     *     i.e. the image data is in a single block)
     *   - we can calculate the offset into the data buffer (next 3 lines)
     */
    boolean isOptimizedCase = periodX == 1 && periodY == 1 && image.getNumXTiles() == 1 && image.getNumYTiles() == 1 && sampleModel instanceof ComponentSampleModel && image.getTile(0, 0) instanceof ByteComponentRaster && image.getTile(0, 0).getDataBuffer() instanceof DataBufferByte;
    int numRowsWritten = 0;
    int progressReportRowPeriod = Math.max(destHeight / 20, 1);
    processImageStarted(imageIndex);
    if (interlaceFlag) {
        if (DEBUG)
            System.out.println("Writing interlaced");
        if (isOptimizedCase) {
            ByteComponentRaster tile = (ByteComponentRaster) image.getTile(0, 0);
            byte[] data = ((DataBufferByte) tile.getDataBuffer()).getData();
            ComponentSampleModel csm = (ComponentSampleModel) tile.getSampleModel();
            int offset = csm.getOffset(sourceXOffset, sourceYOffset, 0);
            // take into account the raster data offset
            offset += tile.getDataOffset(0);
            int lineStride = csm.getScanlineStride();
            writeRowsOpt(data, offset, lineStride, compressor, 0, 8, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
            if (abortRequested()) {
                return;
            }
            numRowsWritten += destHeight / 8;
            writeRowsOpt(data, offset, lineStride, compressor, 4, 8, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
            if (abortRequested()) {
                return;
            }
            numRowsWritten += (destHeight - 4) / 8;
            writeRowsOpt(data, offset, lineStride, compressor, 2, 4, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
            if (abortRequested()) {
                return;
            }
            numRowsWritten += (destHeight - 2) / 4;
            writeRowsOpt(data, offset, lineStride, compressor, 1, 2, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
        } else {
            writeRows(image, compressor, sourceXOffset, periodX, sourceYOffset, 8 * periodY, sourceWidth, 0, 8, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
            if (abortRequested()) {
                return;
            }
            numRowsWritten += destHeight / 8;
            writeRows(image, compressor, sourceXOffset, periodX, sourceYOffset + 4 * periodY, 8 * periodY, sourceWidth, 4, 8, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
            if (abortRequested()) {
                return;
            }
            numRowsWritten += (destHeight - 4) / 8;
            writeRows(image, compressor, sourceXOffset, periodX, sourceYOffset + 2 * periodY, 4 * periodY, sourceWidth, 2, 4, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
            if (abortRequested()) {
                return;
            }
            numRowsWritten += (destHeight - 2) / 4;
            writeRows(image, compressor, sourceXOffset, periodX, sourceYOffset + periodY, 2 * periodY, sourceWidth, 1, 2, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
        }
    } else {
        if (DEBUG)
            System.out.println("Writing non-interlaced");
        if (isOptimizedCase) {
            Raster tile = image.getTile(0, 0);
            byte[] data = ((DataBufferByte) tile.getDataBuffer()).getData();
            ComponentSampleModel csm = (ComponentSampleModel) tile.getSampleModel();
            int offset = csm.getOffset(sourceXOffset, sourceYOffset, 0);
            int lineStride = csm.getScanlineStride();
            writeRowsOpt(data, offset, lineStride, compressor, 0, 1, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
        } else {
            writeRows(image, compressor, sourceXOffset, periodX, sourceYOffset, periodY, sourceWidth, 0, 1, destWidth, destHeight, numRowsWritten, progressReportRowPeriod);
        }
    }
    if (abortRequested()) {
        return;
    }
    processImageProgress(100.0F);
    compressor.flush();
    stream.write(0x00);
    processImageComplete();
}
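
The interlaced branch above writes the destination rows in four passes (rows 0, 8, 16, ... then 4, 12, ... then 2, 6, ... then 1, 3, 5, ...). The following is a minimal standalone sketch of that row ordering, not part of the JDK source; the class and method names are illustrative.

// Illustrative sketch of the GIF interlace row order produced by the four
// writeRowsOpt/writeRows passes above (not JDK code).
public class InterlaceOrderDemo {
    static int[] interlacedRowOrder(int height) {
        int[] order = new int[height];
        int n = 0;
        int[][] passes = { {0, 8}, {4, 8}, {2, 4}, {1, 2} }; // {start row, step}
        for (int[] pass : passes) {
            for (int row = pass[0]; row < height; row += pass[1]) {
                order[n++] = row;
            }
        }
        return order;
    }

    public static void main(String[] args) {
        // For a 10-row image the order is: 0 8 | 4 | 2 6 | 1 3 5 7 9
        System.out.println(java.util.Arrays.toString(interlacedRowOrder(10)));
    }
}
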
Also used : LZWCompressor (com.sun.imageio.plugins.common.LZWCompressor), ComponentSampleModel (java.awt.image.ComponentSampleModel), SampleModel (java.awt.image.SampleModel), ByteComponentRaster (sun.awt.image.ByteComponentRaster), Raster (java.awt.image.Raster), WritableRaster (java.awt.image.WritableRaster), DataBufferByte (java.awt.image.DataBufferByte)
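
The optimized path above addresses pixels directly in the backing byte array via ComponentSampleModel.getOffset() and getScanlineStride(). Below is a minimal sketch of the same addressing, not JDK code; it assumes a freshly created, untiled BufferedImage (so the raster translation and DataBuffer offset are both zero), and the class name is illustrative.

import java.awt.image.BufferedImage;
import java.awt.image.ComponentSampleModel;
import java.awt.image.DataBufferByte;

// Minimal sketch: reading a sample straight out of the backing byte array,
// mirroring the offset arithmetic used by the optimized GIF path above.
public class SampleModelOffsetDemo {
    public static void main(String[] args) {
        BufferedImage img = new BufferedImage(4, 4, BufferedImage.TYPE_BYTE_GRAY);
        img.getRaster().setSample(2, 1, 0, 200);   // pixel (x=2, y=1), band 0

        ComponentSampleModel csm =
                (ComponentSampleModel) img.getSampleModel();
        byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();

        // getOffset(x, y, 0) = y * scanlineStride + x * pixelStride + bandOffsets[0]
        int offset = csm.getOffset(2, 1, 0);
        int lineStride = csm.getScanlineStride();

        System.out.println("scanline stride = " + lineStride);            // 4
        System.out.println("sample at (2,1) = " + (data[offset] & 0xFF)); // 200
    }
}
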

Example 12 with ComponentSampleModel

use of java.awt.image.ComponentSampleModel in project jdk8u_jdk by JetBrains.

the class WPathGraphics method drawImageToPlatform.

/**
     * The various <code>drawImage()</code> methods for
     * <code>WPathGraphics</code> are all decomposed
     * into an invocation of <code>drawImageToPlatform</code>.
     * The portion of the passed in image defined by
     * <code>srcX, srcY, srcWidth, and srcHeight</code>
     * is transformed by the supplied AffineTransform and
     * drawn using GDI to the printer context.
     *
     * @param   image   The image to be drawn.
     * @param   xform   Used to transform the image before drawing.
     *                  This can be null.
     * @param   bgcolor This color is drawn where the image has transparent
     *                  pixels. If this parameter is null then the
     *                  pixels already in the destination should show
     *                  through.
     * @param   srcX    With srcY this defines the upper-left corner
     *                  of the portion of the image to be drawn.
     *
     * @param   srcY    With srcX this defines the upper-left corner
     *                  of the portion of the image to be drawn.
     * @param   srcWidth    The width of the portion of the image to
     *                      be drawn.
     * @param   srcHeight   The height of the portion of the image to
     *                      be drawn.
     * @param   handlingTransparency if being recursively called to
     *                    print opaque region of transparent image
     */
@Override
protected boolean drawImageToPlatform(Image image, AffineTransform xform, Color bgcolor, int srcX, int srcY, int srcWidth, int srcHeight, boolean handlingTransparency) {
    BufferedImage img = getBufferedImage(image);
    if (img == null) {
        return true;
    }
    WPrinterJob wPrinterJob = (WPrinterJob) getPrinterJob();
    /* The full transform to be applied to the image is the
     * caller's transform concatenated onto the transform
     * from user space to device space. If the caller didn't
     * supply a transform then we just act as if they passed
     * in the identity transform.
     */
    AffineTransform fullTransform = getTransform();
    if (xform == null) {
        xform = new AffineTransform();
    }
    fullTransform.concatenate(xform);
    /* Split the full transform into a pair of
     * transforms. The first transform holds effects
     * that GDI (under Win95) cannot perform such
     * as rotation and shearing. The second transform
     * is set up to hold only the scaling effects.
     * These transforms are created such that a point,
     * p, in user space, when transformed by 'fullTransform'
     * lands in the same place as when it is transformed
     * by 'rotTransform' and then 'scaleTransform'.
     *
     * The entire image transformation is not done in Java in order
     * to minimize the amount of memory needed in the VM. By
     * dividing the transform in two, we rotate and shear
     * the source image in its own space and only go to
     * the (usually larger) device space when we ask
     * GDI to perform the final scaling.
     * Clamp this scaling to the device scale for better quality printing.
     */
    double[] fullMatrix = new double[6];
    fullTransform.getMatrix(fullMatrix);
    /* Calculate the amount of scaling in the x
         * and y directions. This scaling is computed by
         * transforming a unit vector along each axis
         * and computing the resulting magnitude.
         * The computed values 'scaleX' and 'scaleY'
         * represent the amount of scaling GDI will be asked
         * to perform.
         */
    Point2D.Float unitVectorX = new Point2D.Float(1, 0);
    Point2D.Float unitVectorY = new Point2D.Float(0, 1);
    fullTransform.deltaTransform(unitVectorX, unitVectorX);
    fullTransform.deltaTransform(unitVectorY, unitVectorY);
    Point2D.Float origin = new Point2D.Float(0, 0);
    double scaleX = unitVectorX.distance(origin);
    double scaleY = unitVectorY.distance(origin);
    double devResX = wPrinterJob.getXRes();
    double devResY = wPrinterJob.getYRes();
    double devScaleX = devResX / DEFAULT_USER_RES;
    double devScaleY = devResY / DEFAULT_USER_RES;
    /* check if rotated or sheared */
    int transformType = fullTransform.getType();
    boolean clampScale = ((transformType & (AffineTransform.TYPE_GENERAL_ROTATION | AffineTransform.TYPE_GENERAL_TRANSFORM)) != 0);
    if (clampScale) {
        if (scaleX > devScaleX)
            scaleX = devScaleX;
        if (scaleY > devScaleY)
            scaleY = devScaleY;
    }
    /* We do not need to draw anything if either scaling
         * factor is zero.
         */
    if (scaleX != 0 && scaleY != 0) {
        /* Here's the transformation we will do with Java2D.
         */
        AffineTransform rotTransform = new AffineTransform(
                fullMatrix[0] / scaleX,  //m00
                fullMatrix[1] / scaleY,  //m10
                fullMatrix[2] / scaleX,  //m01
                fullMatrix[3] / scaleY,  //m11
                fullMatrix[4] / scaleX,  //m02
                fullMatrix[5] / scaleY); //m12
        /* The scale transform is not used directly: we instead
             * directly multiply by scaleX and scaleY.
             *
             * Conceptually here is what the scaleTransform is:
             *
             * AffineTransform scaleTransform = new AffineTransform(
             *                      scaleX,                     //m00
             *                      0,                          //m10
             *                      0,                          //m01
             *                      scaleY,                     //m11
             *                      0,                          //m02
             *                      0);                         //m12
             */
        /* Convert the image source's rectangle into the rotated
             * and sheared space. Once there, we calculate a rectangle
             * that encloses the resulting shape. It is this rectangle
             * which defines the size of the BufferedImage we need to
             * create to hold the transformed image.
             */
        Rectangle2D.Float srcRect = new Rectangle2D.Float(srcX, srcY, srcWidth, srcHeight);
        Shape rotShape = rotTransform.createTransformedShape(srcRect);
        Rectangle2D rotBounds = rotShape.getBounds2D();
        /* add a fudge factor as some fp precision problems have
             * been observed which caused pixels to be rounded down and
             * out of the image.
             */
        rotBounds.setRect(rotBounds.getX(), rotBounds.getY(), rotBounds.getWidth() + 0.001, rotBounds.getHeight() + 0.001);
        int boundsWidth = (int) rotBounds.getWidth();
        int boundsHeight = (int) rotBounds.getHeight();
        if (boundsWidth > 0 && boundsHeight > 0) {
            /* If the image has transparent or semi-transparent
                 * pixels then we'll have the application re-render
                 * the portion of the page covered by the image.
                 * The BufferedImage will be at the image's resolution
                 * to avoid wasting memory. By re-rendering this portion
                 * of a page all compositing is done by Java2D into
                 * the BufferedImage and then that image is copied to
                 * GDI.
                 * However several special cases can be handled otherwise:
                 * - bitmask transparency with a solid background colour
                 * - images which have transparency color models but no
                 * transparent pixels
                 * - images with bitmask transparency and an IndexColorModel
                 * (the common transparent GIF case) can be handled by
                 * rendering just the opaque pixels.
                 */
            boolean drawOpaque = true;
            if (!handlingTransparency && hasTransparentPixels(img)) {
                drawOpaque = false;
                if (isBitmaskTransparency(img)) {
                    if (bgcolor == null) {
                        if (drawBitmaskImage(img, xform, bgcolor, srcX, srcY, srcWidth, srcHeight)) {
                            // image drawn, just return.
                            return true;
                        }
                    } else if (bgcolor.getTransparency() == Transparency.OPAQUE) {
                        drawOpaque = true;
                    }
                }
                if (!canDoRedraws()) {
                    drawOpaque = true;
                }
            } else {
                // if there's no transparent pixels there's no need
                // for a background colour. This can avoid edge artifacts
                // in rotation cases.
                bgcolor = null;
            }
            // If the source rectangle extends beyond the image bounds, the
            // opaque path may blit b/g colour (including white) where it
            // shouldn't, so fall back to re-rendering when redraws are possible.
            if ((srcX + srcWidth > img.getWidth(null) || srcY + srcHeight > img.getHeight(null)) && canDoRedraws()) {
                drawOpaque = false;
            }
            if (drawOpaque == false) {
                fullTransform.getMatrix(fullMatrix);
                AffineTransform tx = new AffineTransform(
                        fullMatrix[0] / devScaleX,  //m00
                        fullMatrix[1] / devScaleY,  //m10
                        fullMatrix[2] / devScaleX,  //m01
                        fullMatrix[3] / devScaleY,  //m11
                        fullMatrix[4] / devScaleX,  //m02
                        fullMatrix[5] / devScaleY); //m12
                Rectangle2D.Float rect = new Rectangle2D.Float(srcX, srcY, srcWidth, srcHeight);
                Shape shape = fullTransform.createTransformedShape(rect);
                // Region isn't in user space because it's potentially
                // been rotated for landscape.
                Rectangle2D region = shape.getBounds2D();
                region.setRect(region.getX(), region.getY(), region.getWidth() + 0.001, region.getHeight() + 0.001);
                // Try to limit the amount of memory used to 8Mb, so
                // if at device resolution this exceeds a certain
                // image size then scale down the region to fit in
                // that memory, but never to less than 72 dpi.
                int w = (int) region.getWidth();
                int h = (int) region.getHeight();
                int nbytes = w * h * 3;
                int maxBytes = 8 * 1024 * 1024;
                double origDpi = (devResX < devResY) ? devResX : devResY;
                int dpi = (int) origDpi;
                double scaleFactor = 1;
                double maxSFX = w / (double) boundsWidth;
                double maxSFY = h / (double) boundsHeight;
                double maxSF = (maxSFX > maxSFY) ? maxSFY : maxSFX;
                int minDpi = (int) (dpi / maxSF);
                if (minDpi < DEFAULT_USER_RES)
                    minDpi = DEFAULT_USER_RES;
                while (nbytes > maxBytes && dpi > minDpi) {
                    scaleFactor *= 2;
                    dpi /= 2;
                    nbytes /= 4;
                }
                if (dpi < minDpi) {
                    scaleFactor = (origDpi / minDpi);
                }
                region.setRect(region.getX() / scaleFactor, region.getY() / scaleFactor, region.getWidth() / scaleFactor, region.getHeight() / scaleFactor);
                /*
                     * We need to have the clip as part of the saved state,
                     * either directly, or all the components that are
                     * needed to reconstitute it (image source area,
                     * image transform and current graphics transform).
                     * The clip is described in user space, so we need to
                     * save the current graphics transform anyway so just
                     * save these two.
                     */
                wPrinterJob.saveState(getTransform(), getClip(), region, scaleFactor, scaleFactor);
                return true;
            /* The image can be rendered directly by GDI so we
                 * copy it into a BufferedImage (this takes care of
                 * ColorSpace and BufferedImageOp issues) and then
                 * send that to GDI.
                 */
            } else {
                /* Create a buffered image big enough to hold the portion
                     * of the source image being printed.
                     * The image format will be 3BYTE_BGR for most cases
                     * except where we can represent the image as a 1, 4 or 8
                     * bits-per-pixel DIB.
                     */
                int dibType = BufferedImage.TYPE_3BYTE_BGR;
                IndexColorModel icm = null;
                ColorModel cm = img.getColorModel();
                int imgType = img.getType();
                if (cm instanceof IndexColorModel && cm.getPixelSize() <= 8 && (imgType == BufferedImage.TYPE_BYTE_BINARY || imgType == BufferedImage.TYPE_BYTE_INDEXED)) {
                    icm = (IndexColorModel) cm;
                    dibType = imgType;
                    /* BYTE_BINARY may be 2 bpp which DIB can't handle.
                         * Convert this to 4bpp.
                         */
                    if (imgType == BufferedImage.TYPE_BYTE_BINARY && cm.getPixelSize() == 2) {
                        int[] rgbs = new int[16];
                        icm.getRGBs(rgbs);
                        boolean transparent = icm.getTransparency() != Transparency.OPAQUE;
                        int transpixel = icm.getTransparentPixel();
                        icm = new IndexColorModel(4, 16, rgbs, 0, transparent, transpixel, DataBuffer.TYPE_BYTE);
                    }
                }
                int iw = (int) rotBounds.getWidth();
                int ih = (int) rotBounds.getHeight();
                BufferedImage deepImage = null;
                /* If there is no special transform needed (this is a
                     * simple BLIT) and dibType == img.getType() and we
                     * didn't create a new IndexColorModel AND the whole of
                     * the source image is being drawn (GDI can't handle a
                     * portion of the original source image) then we
                     * don't need to create this intermediate image - GDI
                     * can access the data from the original image.
                     * Since a subimage can be created by calling
                     * BufferedImage.getSubimage(), that condition needs to
                     * be accounted for too. This implies inspecting the
                     * data buffer. In the end too many cases are not able
                     * to take advantage of this option until we can teach
                     * the native code to properly navigate the data buffer.
                     * There was a concern that, since in native code we
                     * need to DWORD align and flip to a bottom-up DIB,
                     * the "original" image may get perturbed by this.
                     * But in fact we always malloc new memory for the aligned
                     * copy, so this isn't a problem.
                     * This points out that we allocate two temporary copies
                     * of the image: one in Java and one in native. If
                     * we can be smarter about not allocating this one when
                     * not needed, that would seem like a good thing to do,
                     * even though in many cases the ColorModels don't match and
                     * it's needed.
                     * Until all of this is resolved, newImage is always true.
                     */
                boolean newImage = true;
                if (newImage) {
                    if (icm == null) {
                        deepImage = new BufferedImage(iw, ih, dibType);
                    } else {
                        deepImage = new BufferedImage(iw, ih, dibType, icm);
                    }
                    /* Setup a Graphics2D on to the BufferedImage so that
                         * the source image when copied, lands within the
                         * image buffer.
                         */
                    Graphics2D imageGraphics = deepImage.createGraphics();
                    imageGraphics.clipRect(0, 0, deepImage.getWidth(), deepImage.getHeight());
                    imageGraphics.translate(-rotBounds.getX(), -rotBounds.getY());
                    imageGraphics.transform(rotTransform);
                    /* Fill the BufferedImage either with the caller-
                     * supplied color, 'bgcolor', or, if null, with white.
                     */
                    if (bgcolor == null) {
                        bgcolor = Color.white;
                    }
                    imageGraphics.drawImage(img, srcX, srcY, srcX + srcWidth, srcY + srcHeight, srcX, srcY, srcX + srcWidth, srcY + srcHeight, bgcolor, null);
                    imageGraphics.dispose();
                } else {
                    deepImage = img;
                }
                /* Scale the bounding rectangle by the scale transform.
                     * Because the scaling transform has only x and y
                     * scaling components it is equivalent to multiply
                     * the x components of the bounding rectangle by
                     * the x scaling factor and to multiply the y components
                     * by the y scaling factor.
                     */
                Rectangle2D.Float scaledBounds = new Rectangle2D.Float((float) (rotBounds.getX() * scaleX), (float) (rotBounds.getY() * scaleY), (float) (rotBounds.getWidth() * scaleX), (float) (rotBounds.getHeight() * scaleY));
                /* Pull the raster data from the buffered image
                     * and pass it along to GDI.
                     */
                WritableRaster raster = deepImage.getRaster();
                byte[] data;
                if (raster instanceof ByteComponentRaster) {
                    data = ((ByteComponentRaster) raster).getDataStorage();
                } else if (raster instanceof BytePackedRaster) {
                    data = ((BytePackedRaster) raster).getDataStorage();
                } else {
                    return false;
                }
                int bitsPerPixel = 24;
                SampleModel sm = deepImage.getSampleModel();
                if (sm instanceof ComponentSampleModel) {
                    ComponentSampleModel csm = (ComponentSampleModel) sm;
                    bitsPerPixel = csm.getPixelStride() * 8;
                } else if (sm instanceof MultiPixelPackedSampleModel) {
                    MultiPixelPackedSampleModel mppsm = (MultiPixelPackedSampleModel) sm;
                    bitsPerPixel = mppsm.getPixelBitStride();
                } else {
                    if (icm != null) {
                        int diw = deepImage.getWidth();
                        int dih = deepImage.getHeight();
                        if (diw > 0 && dih > 0) {
                            bitsPerPixel = data.length * 8 / diw / dih;
                        }
                    }
                }
                /* Because the caller's image has been rotated
                     * and sheared into our BufferedImage and because
                     * we will be handing that BufferedImage directly to
                     * GDI, we need to set an additional clip. This clip
                     * makes sure that only parts of the BufferedImage
                     * that are also part of the caller's image are drawn.
                     */
                Shape holdClip = getClip();
                clip(xform.createTransformedShape(srcRect));
                deviceClip(getClip().getPathIterator(getTransform()));
                wPrinterJob.drawDIBImage(data, scaledBounds.x, scaledBounds.y, (float) Math.rint(scaledBounds.width + 0.5), (float) Math.rint(scaledBounds.height + 0.5), 0f, 0f, deepImage.getWidth(), deepImage.getHeight(), bitsPerPixel, icm);
                setClip(holdClip);
            }
        }
    }
    return true;
}
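
The comments in drawImageToPlatform describe splitting the full user-to-device transform into a rotation/shear part (applied by Java2D) and a scale part (delegated to GDI), with the scale magnitudes obtained by delta-transforming unit vectors. The sketch below mirrors that arithmetic outside the printing pipeline; it is illustrative only (class and variable names are assumptions) and relies on the documented getMatrix() element order {m00, m10, m01, m11, m02, m12}.

import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;

// Illustrative sketch of the scale/rotation split described above (not the
// JDK implementation itself).
public class TransformSplitDemo {
    public static void main(String[] args) {
        AffineTransform full = new AffineTransform();
        full.rotate(Math.toRadians(30));
        full.scale(3.0, 2.0);

        // The magnitude of each transformed unit vector is the scale that
        // would be handed to GDI.
        Point2D.Float ux = new Point2D.Float(1, 0);
        Point2D.Float uy = new Point2D.Float(0, 1);
        full.deltaTransform(ux, ux);
        full.deltaTransform(uy, uy);
        double scaleX = ux.distance(0, 0);
        double scaleY = uy.distance(0, 0);

        // Dividing the scale back out, exactly as rotTransform is built above,
        // leaves the part Java2D would apply when rendering into the buffer.
        double[] m = new double[6];
        full.getMatrix(m);
        AffineTransform rot = new AffineTransform(
                m[0] / scaleX, m[1] / scaleY,   // m00, m10
                m[2] / scaleX, m[3] / scaleY,   // m01, m11
                m[4] / scaleX, m[5] / scaleY);  // m02, m12

        System.out.println("scaleX=" + scaleX + " scaleY=" + scaleY);
        System.out.println("rotation/shear part: " + rot);
    }
}
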
Also used : Shape (java.awt.Shape), Rectangle2D (java.awt.geom.Rectangle2D), MultiPixelPackedSampleModel (java.awt.image.MultiPixelPackedSampleModel), ComponentSampleModel (java.awt.image.ComponentSampleModel), BufferedImage (java.awt.image.BufferedImage), Graphics2D (java.awt.Graphics2D), ProxyGraphics2D (sun.print.ProxyGraphics2D), BytePackedRaster (sun.awt.image.BytePackedRaster), SampleModel (java.awt.image.SampleModel), Point2D (java.awt.geom.Point2D), IndexColorModel (java.awt.image.IndexColorModel), ColorModel (java.awt.image.ColorModel), WritableRaster (java.awt.image.WritableRaster), ByteComponentRaster (sun.awt.image.ByteComponentRaster), AffineTransform (java.awt.geom.AffineTransform)
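
The DIB path in this example derives bits per pixel from the sample model: pixelStride * 8 for a ComponentSampleModel, or the pixel bit stride for a MultiPixelPackedSampleModel. Here is a small sketch of that derivation on two standard BufferedImage types; it is not JDK code and the names are illustrative.

import java.awt.image.BufferedImage;
import java.awt.image.ComponentSampleModel;
import java.awt.image.MultiPixelPackedSampleModel;
import java.awt.image.SampleModel;

// Sketch of the bits-per-pixel derivation used before handing a DIB to GDI.
public class DibDepthDemo {
    static int bitsPerPixel(SampleModel sm) {
        if (sm instanceof ComponentSampleModel) {
            // e.g. TYPE_3BYTE_BGR: pixel stride 3 -> 24 bpp
            return ((ComponentSampleModel) sm).getPixelStride() * 8;
        } else if (sm instanceof MultiPixelPackedSampleModel) {
            // e.g. TYPE_BYTE_BINARY: 1 bit per pixel
            return ((MultiPixelPackedSampleModel) sm).getPixelBitStride();
        }
        return 24; // fallback, as in the printing code above
    }

    public static void main(String[] args) {
        BufferedImage bgr = new BufferedImage(8, 8, BufferedImage.TYPE_3BYTE_BGR);
        BufferedImage binary = new BufferedImage(8, 8, BufferedImage.TYPE_BYTE_BINARY);
        System.out.println(bitsPerPixel(bgr.getSampleModel()));    // 24
        System.out.println(bitsPerPixel(binary.getSampleModel())); // 1
    }
}
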

Aggregations

ComponentSampleModel (java.awt.image.ComponentSampleModel): 12
Point (java.awt.Point): 7
MultiPixelPackedSampleModel (java.awt.image.MultiPixelPackedSampleModel): 6
SampleModel (java.awt.image.SampleModel): 5
SinglePixelPackedSampleModel (java.awt.image.SinglePixelPackedSampleModel): 5
ColorModel (java.awt.image.ColorModel): 4
IndexColorModel (java.awt.image.IndexColorModel): 4
DirectColorModel (java.awt.image.DirectColorModel): 3
WritableRaster (java.awt.image.WritableRaster): 3
ByteComponentRaster (sun.awt.image.ByteComponentRaster): 3
BufferedImage (java.awt.image.BufferedImage): 2
ComponentColorModel (java.awt.image.ComponentColorModel): 2
DataBufferByte (java.awt.image.DataBufferByte): 2
Raster (java.awt.image.Raster): 2
LZWCompressor (com.sun.imageio.plugins.common.LZWCompressor): 1
Graphics2D (java.awt.Graphics2D): 1
Rectangle (java.awt.Rectangle): 1
Shape (java.awt.Shape): 1
ColorSpace (java.awt.color.ColorSpace): 1
AffineTransform (java.awt.geom.AffineTransform): 1