Example 11 with RenderedImage

use of java.awt.image.RenderedImage in project jersey by jersey.

the class RenderedImageTypeTest method testPostGif.

@Test
public void testPostGif() throws Exception {
    final InputStream stream = getClass().getResourceAsStream("duke_rocket.gif");
    Response response = target().request().post(Entity.entity(stream, "image/gif"));
    assertThat(Long.valueOf(response.getHeaderString("Content-Length")), greaterThan(0L));
    final RenderedImage image = response.readEntity(RenderedImage.class);
    assertThat(image, notNullValue());
    response = target().request().post(Entity.entity(image, "image/png"));
    assertThat(response.readEntity(RenderedImage.class), notNullValue());
    assertThat(Long.valueOf(response.getHeaderString("Content-Length")), greaterThan(0L));
}
Also used : Response(javax.ws.rs.core.Response) InputStream(java.io.InputStream) RenderedImage(java.awt.image.RenderedImage) Test(org.junit.Test) JerseyTest(org.glassfish.jersey.test.JerseyTest)
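
The test above relies on Jersey's media support for java.awt.image.RenderedImage, which can read and write image/* entities. As a rough client-side sketch (not the project's own code: it assumes a hypothetical echo endpoint at http://localhost:8080/images and that the Jersey client plus its image entity provider are on the classpath), the same round trip could look like this:

import java.awt.image.RenderedImage;
import java.io.File;
import java.io.InputStream;
import javax.imageio.ImageIO;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class ImageRoundTrip {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint that echoes the posted image back.
        Client client = ClientBuilder.newClient();
        InputStream gif = ImageRoundTrip.class.getResourceAsStream("duke_rocket.gif");
        Response response = client.target("http://localhost:8080/images")
                .request()
                .post(Entity.entity(gif, "image/gif"));
        // The RenderedImage entity provider converts the response body for us.
        RenderedImage image = response.readEntity(RenderedImage.class);
        // Persist it as PNG using the standard ImageIO API.
        ImageIO.write(image, "png", new File("echo.png"));
        client.close();
    }
}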

Example 12 with RenderedImage

use of java.awt.image.RenderedImage in project jdk8u_jdk by JetBrains.

the class RenderableImageOp method createRendering.

/**
     * Creates a RenderedImage which represents this
     * RenderableImageOp (including its Renderable sources) rendered
     * according to the given RenderContext.
     *
     * <p> This method supports chaining of either Renderable or
     * RenderedImage operations.  If sources in
     * the ParameterBlock used to construct the RenderableImageOp are
     * RenderableImages, then a three step process is followed:
     *
     * <ol>
     * <li> mapRenderContext() is called on the associated CRIF for
     * each RenderableImage source;
     * <li> createRendering() is called on each of the RenderableImage sources
     * using the backwards-mapped RenderContexts obtained in step 1,
     * resulting in a rendering of each source;
     * <li> ContextualRenderedImageFactory.create() is called
     * with a new ParameterBlock containing the parameters of
     * the RenderableImageOp and the RenderedImages that were created by the
     * createRendering() calls.
     * </ol>
     *
     * <p> If the elements of the source Vector of
     * the ParameterBlock used to construct the RenderableImageOp are
     * instances of RenderedImage, then the CRIF.create() method is
     * called immediately using the original ParameterBlock.
     * This provides a basis case for the recursion.
     *
     * <p> The created RenderedImage may have a property identified
     * by the String HINTS_OBSERVED to indicate which RenderingHints
     * (from the RenderContext) were used to create the image.
     * In addition any RenderedImages
     * that are obtained via the getSources() method on the created
     * RenderedImage may have such a property.
     *
     * @param renderContext The RenderContext to use to perform the rendering.
     * @return a RenderedImage containing the desired output image.
     */
public RenderedImage createRendering(RenderContext renderContext) {
    RenderedImage image = null;
    RenderContext rcOut = null;
    // Clone the original ParameterBlock; if the ParameterBlock
    // contains RenderableImage sources, they will be replaced by
    // RenderedImages.
    ParameterBlock renderedParamBlock = (ParameterBlock) paramBlock.clone();
    Vector sources = getRenderableSources();
    try {
        if (sources != null) {
            Vector renderedSources = new Vector();
            for (int i = 0; i < sources.size(); i++) {
                rcOut = myCRIF.mapRenderContext(i, renderContext, paramBlock, this);
                RenderedImage rdrdImage = ((RenderableImage) sources.elementAt(i)).createRendering(rcOut);
                if (rdrdImage == null) {
                    return null;
                }
                // Add this rendered image to the ParameterBlock's
                // list of RenderedImages.
                renderedSources.addElement(rdrdImage);
            }
            if (renderedSources.size() > 0) {
                renderedParamBlock.setSources(renderedSources);
            }
        }
        return myCRIF.create(renderContext, renderedParamBlock);
    } catch (ArrayIndexOutOfBoundsException e) {
        // This should never happen
        return null;
    }
}
Also used : RenderedImage(java.awt.image.RenderedImage) Vector(java.util.Vector)
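
Calling code typically drives the recursion described in the Javadoc by building a RenderContext and handing it to the head of a RenderableImageOp chain. A minimal caller-side sketch, assuming a RenderableImage has already been assembled elsewhere from a CRIF and a ParameterBlock:

import java.awt.geom.AffineTransform;
import java.awt.image.RenderedImage;
import java.awt.image.renderable.RenderContext;
import java.awt.image.renderable.RenderableImage;

public class RenderableRendering {
    // Renders the renderable chain into a raster roughly 512 pixels wide,
    // preserving the aspect ratio of the renderable coordinate system.
    static RenderedImage render(RenderableImage renderable) {
        double scale = 512.0 / renderable.getWidth();
        AffineTransform usr2dev = AffineTransform.getScaleInstance(scale, scale);
        RenderContext context = new RenderContext(usr2dev);
        // Triggers the three-step process documented above: mapRenderContext()
        // on the CRIF, createRendering() on each RenderableImage source, then
        // ContextualRenderedImageFactory.create() with the rendered sources.
        return renderable.createRendering(context);
    }
}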

Example 13 with RenderedImage

use of java.awt.image.RenderedImage in project jdk8u_jdk by JetBrains.

the class GIFImageWriter method write.

/**
     * Writes any extension blocks, the Image Descriptor, the image data,
     * and optionally the header (Signature and Logical Screen Descriptor)
     * and trailer (Block Terminator).
     *
     * @param writeHeader Whether to write the header.
     * @param writeTrailer Whether to write the trailer.
     * @param sm The stream metadata or <code>null</code> if
     * <code>writeHeader</code> is <code>false</code>.
     * @param iioimage The image and image metadata.
     * @param p The write parameters.
     *
     * @throws IllegalArgumentException if the number of bands is not 1.
     * @throws IllegalArgumentException if the number of bits per sample is
     * greater than 8.
     * @throws IllegalArgumentException if the color component size is
     * greater than 8.
     * @throws IllegalArgumentException if <code>writeHeader</code> is
     * <code>true</code> and <code>sm</code> is <code>null</code>.
     * @throws IllegalArgumentException if <code>writeHeader</code> is
     * <code>false</code> and a sequence is not being written.
     */
private void write(boolean writeHeader, boolean writeTrailer, IIOMetadata sm, IIOImage iioimage, ImageWriteParam p) throws IOException {
    clearAbortRequest();
    RenderedImage image = iioimage.getRenderedImage();
    // Check for ability to encode image.
    if (needToCreateIndex(image)) {
        image = PaletteBuilder.createIndexedImage(image);
        iioimage.setRenderedImage(image);
    }
    ColorModel colorModel = image.getColorModel();
    SampleModel sampleModel = image.getSampleModel();
    // Determine source region and destination dimensions.
    Rectangle sourceBounds = new Rectangle(image.getMinX(), image.getMinY(), image.getWidth(), image.getHeight());
    Dimension destSize = new Dimension();
    computeRegions(sourceBounds, destSize, p);
    // Convert any provided image metadata.
    GIFWritableImageMetadata imageMetadata = null;
    if (iioimage.getMetadata() != null) {
        imageMetadata = new GIFWritableImageMetadata();
        convertMetadata(IMAGE_METADATA_NAME, iioimage.getMetadata(), imageMetadata);
        // Initialize the local color table from the current color and
        // sample models if none was supplied in the metadata.
        if (imageMetadata.localColorTable == null) {
            imageMetadata.localColorTable = createColorTable(colorModel, sampleModel);
            // For indexed images, preserve the transparent pixel, if any.
            if (colorModel instanceof IndexColorModel) {
                IndexColorModel icm = (IndexColorModel) colorModel;
                int index = icm.getTransparentPixel();
                imageMetadata.transparentColorFlag = (index != -1);
                if (imageMetadata.transparentColorFlag) {
                    imageMetadata.transparentColorIndex = index;
                }
            /* NB: transparentColorFlag might not have been reset for
                       greyscale images, but explicitly resetting it here
                       is potentially not the right thing to do until we have
                       a way to find out whether the current value was
                       explicitly set by the user.
                    */
            }
        }
    }
    // Global color table values.
    byte[] globalColorTable = null;
    // Write the header (Signature, Logical Screen Descriptor, and
    // Global Color Table) if requested.
    if (writeHeader) {
        if (sm == null) {
            throw new IllegalArgumentException("Cannot write null header!");
        }
        GIFWritableStreamMetadata streamMetadata = (GIFWritableStreamMetadata) sm;
        // Set the version if not set.
        if (streamMetadata.version == null) {
            streamMetadata.version = "89a";
        }
        // Set the Logical Screen Descriptor if not set.
        if (streamMetadata.logicalScreenWidth == GIFMetadata.UNDEFINED_INTEGER_VALUE) {
            streamMetadata.logicalScreenWidth = destSize.width;
        }
        if (streamMetadata.logicalScreenHeight == GIFMetadata.UNDEFINED_INTEGER_VALUE) {
            streamMetadata.logicalScreenHeight = destSize.height;
        }
        if (streamMetadata.colorResolution == GIFMetadata.UNDEFINED_INTEGER_VALUE) {
            streamMetadata.colorResolution = colorModel != null ? colorModel.getComponentSize()[0] : sampleModel.getSampleSize()[0];
        }
        // Set the Global Color Table if it was not provided
        // in the stream metadata.
        if (streamMetadata.globalColorTable == null) {
            if (isWritingSequence && imageMetadata != null && imageMetadata.localColorTable != null) {
                // Writing a sequence and a local color table was
                // provided in the metadata of the first image: use it.
                streamMetadata.globalColorTable = imageMetadata.localColorTable;
            } else if (imageMetadata == null || imageMetadata.localColorTable == null) {
                // Create a color table.
                streamMetadata.globalColorTable = createColorTable(colorModel, sampleModel);
            }
        }
        // Set the Global Color Table. At this point it should be
        // A) the global color table provided in stream metadata, if any;
        // B) the local color table of the image metadata, if any, if
        //    writing a sequence;
        // C) a table created on the basis of the first image ColorModel
        //    and SampleModel if no local color table is available; or
        // D) null if none of the foregoing conditions obtain (which
        //    should only be if a sequence is not being written and
        //    a local color table is provided in image metadata).
        globalColorTable = streamMetadata.globalColorTable;
        // Write the header.
        int bitsPerPixel;
        if (globalColorTable != null) {
            bitsPerPixel = getNumBits(globalColorTable.length / 3);
        } else if (imageMetadata != null && imageMetadata.localColorTable != null) {
            bitsPerPixel = getNumBits(imageMetadata.localColorTable.length / 3);
        } else {
            bitsPerPixel = sampleModel.getSampleSize(0);
        }
        writeHeader(streamMetadata, bitsPerPixel);
    } else if (isWritingSequence) {
        globalColorTable = theStreamMetadata.globalColorTable;
    } else {
        throw new IllegalArgumentException("Must write header for single image!");
    }
    // Write extension blocks, Image Descriptor, and image data.
    writeImage(iioimage.getRenderedImage(), imageMetadata, p, globalColorTable, sourceBounds, destSize);
    // Write the trailer.
    if (writeTrailer) {
        writeTrailer();
    }
}
Also used : ComponentSampleModel(java.awt.image.ComponentSampleModel) SampleModel(java.awt.image.SampleModel) IndexColorModel(java.awt.image.IndexColorModel) ColorModel(java.awt.image.ColorModel) Rectangle(java.awt.Rectangle) Dimension(java.awt.Dimension) RenderedImage(java.awt.image.RenderedImage) IndexColorModel(java.awt.image.IndexColorModel)
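
The private write(...) above is normally reached through the public ImageWriter API rather than called directly. A minimal sketch of driving the GIF plug-in through javax.imageio (file names are illustrative and an input.png is assumed to exist; passing null metadata and a null ImageWriteParam lets the writer derive the Logical Screen Descriptor and color tables itself, as in the method above):

import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Iterator;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriter;
import javax.imageio.stream.ImageOutputStream;

public class GifWriteDemo {
    public static void main(String[] args) throws Exception {
        BufferedImage frame = ImageIO.read(new File("input.png")); // assumed input file
        Iterator<ImageWriter> writers = ImageIO.getImageWritersByFormatName("gif");
        ImageWriter writer = writers.next();
        try (ImageOutputStream out = ImageIO.createImageOutputStream(new File("output.gif"))) {
            writer.setOutput(out);
            // Null stream/image metadata and param: the plug-in derives the
            // header fields and color tables from the image itself.
            writer.write(null, new IIOImage(frame, null, null), null);
        } finally {
            writer.dispose();
        }
    }
}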

Example 14 with RenderedImage

use of java.awt.image.RenderedImage in project jdk8u_jdk by JetBrains.

the class WBMPImageWriter method write.

public void write(IIOMetadata streamMetadata, IIOImage image, ImageWriteParam param) throws IOException {
    if (stream == null) {
        throw new IllegalStateException(I18N.getString("WBMPImageWriter3"));
    }
    if (image == null) {
        throw new IllegalArgumentException(I18N.getString("WBMPImageWriter4"));
    }
    clearAbortRequest();
    processImageStarted(0);
    if (param == null)
        param = getDefaultWriteParam();
    RenderedImage input = null;
    Raster inputRaster = null;
    boolean writeRaster = image.hasRaster();
    Rectangle sourceRegion = param.getSourceRegion();
    SampleModel sampleModel = null;
    if (writeRaster) {
        inputRaster = image.getRaster();
        sampleModel = inputRaster.getSampleModel();
    } else {
        input = image.getRenderedImage();
        sampleModel = input.getSampleModel();
        inputRaster = input.getData();
    }
    checkSampleModel(sampleModel);
    if (sourceRegion == null)
        sourceRegion = inputRaster.getBounds();
    else
        sourceRegion = sourceRegion.intersection(inputRaster.getBounds());
    if (sourceRegion.isEmpty())
        throw new RuntimeException(I18N.getString("WBMPImageWriter1"));
    int scaleX = param.getSourceXSubsampling();
    int scaleY = param.getSourceYSubsampling();
    int xOffset = param.getSubsamplingXOffset();
    int yOffset = param.getSubsamplingYOffset();
    sourceRegion.translate(xOffset, yOffset);
    sourceRegion.width -= xOffset;
    sourceRegion.height -= yOffset;
    int minX = sourceRegion.x / scaleX;
    int minY = sourceRegion.y / scaleY;
    int w = (sourceRegion.width + scaleX - 1) / scaleX;
    int h = (sourceRegion.height + scaleY - 1) / scaleY;
    Rectangle destinationRegion = new Rectangle(minX, minY, w, h);
    sampleModel = sampleModel.createCompatibleSampleModel(w, h);
    SampleModel destSM = sampleModel;
    // If the data are not formatted nominally then reformat.
    if (sampleModel.getDataType() != DataBuffer.TYPE_BYTE || !(sampleModel instanceof MultiPixelPackedSampleModel) || ((MultiPixelPackedSampleModel) sampleModel).getDataBitOffset() != 0) {
        destSM = new MultiPixelPackedSampleModel(DataBuffer.TYPE_BYTE, w, h, 1, w + 7 >> 3, 0);
    }
    if (!destinationRegion.equals(sourceRegion)) {
        if (scaleX == 1 && scaleY == 1)
            inputRaster = inputRaster.createChild(inputRaster.getMinX(), inputRaster.getMinY(), w, h, minX, minY, null);
        else {
            WritableRaster ras = Raster.createWritableRaster(destSM, new Point(minX, minY));
            byte[] data = ((DataBufferByte) ras.getDataBuffer()).getData();
            for (int j = minY, y = sourceRegion.y, k = 0; j < minY + h; j++, y += scaleY) {
                for (int i = 0, x = sourceRegion.x; i < w; i++, x += scaleX) {
                    int v = inputRaster.getSample(x, y, 0);
                    data[k + (i >> 3)] |= v << (7 - (i & 7));
                }
                k += w + 7 >> 3;
            }
            inputRaster = ras;
        }
    }
    // If the data are not formatted nominally then reformat.
    if (!destSM.equals(inputRaster.getSampleModel())) {
        WritableRaster raster = Raster.createWritableRaster(destSM, new Point(inputRaster.getMinX(), inputRaster.getMinY()));
        raster.setRect(inputRaster);
        inputRaster = raster;
    }
    // Check whether the image is white-is-zero.
    boolean isWhiteZero = false;
    if (!writeRaster && input.getColorModel() instanceof IndexColorModel) {
        IndexColorModel icm = (IndexColorModel) input.getColorModel();
        isWhiteZero = icm.getRed(0) > icm.getRed(1);
    }
    // Get the line stride, bytes per row, and data array.
    int lineStride = ((MultiPixelPackedSampleModel) destSM).getScanlineStride();
    int bytesPerRow = (w + 7) / 8;
    byte[] bdata = ((DataBufferByte) inputRaster.getDataBuffer()).getData();
    // Write WBMP header.
    // TypeField
    stream.write(0);
    // FixHeaderField
    stream.write(0);
    // width
    stream.write(intToMultiByte(w));
    // height
    stream.write(intToMultiByte(h));
    // Write the data.
    if (!isWhiteZero && lineStride == bytesPerRow) {
        // Write the entire image.
        stream.write(bdata, 0, h * bytesPerRow);
        processImageProgress(100.0F);
    } else {
        // Write the image row-by-row.
        int offset = 0;
        if (!isWhiteZero) {
            // Black-is-zero
            for (int row = 0; row < h; row++) {
                if (abortRequested())
                    break;
                stream.write(bdata, offset, bytesPerRow);
                offset += lineStride;
                processImageProgress(100.0F * row / h);
            }
        } else {
            // White-is-zero: need to invert data.
            byte[] inverted = new byte[bytesPerRow];
            for (int row = 0; row < h; row++) {
                if (abortRequested())
                    break;
                for (int col = 0; col < bytesPerRow; col++) {
                    inverted[col] = (byte) (~(bdata[col + offset]));
                }
                stream.write(inverted, 0, bytesPerRow);
                offset += lineStride;
                processImageProgress(100.0F * row / h);
            }
        }
    }
    if (abortRequested())
        processWriteAborted();
    else {
        processImageComplete();
        stream.flushBefore(stream.getStreamPosition());
    }
}
Also used : Raster(java.awt.image.Raster) WritableRaster(java.awt.image.WritableRaster) Rectangle(java.awt.Rectangle) MultiPixelPackedSampleModel(java.awt.image.MultiPixelPackedSampleModel) Point(java.awt.Point) DataBufferByte(java.awt.image.DataBufferByte) Point(java.awt.Point) SampleModel(java.awt.image.SampleModel) MultiPixelPackedSampleModel(java.awt.image.MultiPixelPackedSampleModel) WritableRaster(java.awt.image.WritableRaster) RenderedImage(java.awt.image.RenderedImage) IndexColorModel(java.awt.image.IndexColorModel)
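
WBMP stores one bit per pixel, and checkSampleModel(...) above rejects anything that is not a single-band binary layout, so callers usually hand the writer a TYPE_BYTE_BINARY image. A small sketch using the standard ImageIO entry point (file names are illustrative):

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;

public class WbmpDemo {
    public static void main(String[] args) throws Exception {
        // WBMP holds 1-bit pixels, so start from a binary image.
        BufferedImage bw = new BufferedImage(64, 32, BufferedImage.TYPE_BYTE_BINARY);
        Graphics2D g = bw.createGraphics();
        g.setColor(Color.WHITE);
        g.fillRect(0, 0, 64, 32);
        g.setColor(Color.BLACK);
        g.drawString("WBMP", 8, 20);
        g.dispose();
        // ImageIO dispatches to the WBMP plug-in shown above.
        boolean written = ImageIO.write(bw, "wbmp", new File("label.wbmp"));
        System.out.println("WBMP written: " + written);
    }
}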

Example 15 with RenderedImage

use of java.awt.image.RenderedImage in project jdk8u_jdk by JetBrains.

the class JPEGImageWriter method writeOnThread.

private void writeOnThread(IIOMetadata streamMetadata, IIOImage image, ImageWriteParam param) throws IOException {
    if (ios == null) {
        throw new IllegalStateException("Output has not been set!");
    }
    if (image == null) {
        throw new IllegalArgumentException("image is null!");
    }
    // if streamMetadata is not null, issue a warning
    if (streamMetadata != null) {
        warningOccurred(WARNING_STREAM_METADATA_IGNORED);
    }
    // Obtain the raster and image, if there is one
    boolean rasterOnly = image.hasRaster();
    RenderedImage rimage = null;
    if (rasterOnly) {
        srcRas = image.getRaster();
    } else {
        rimage = image.getRenderedImage();
        if (rimage instanceof BufferedImage) {
            // Use the Raster directly.
            srcRas = ((BufferedImage) rimage).getRaster();
        } else if (rimage.getNumXTiles() == 1 && rimage.getNumYTiles() == 1) {
            // Get the unique tile.
            srcRas = rimage.getTile(rimage.getMinTileX(), rimage.getMinTileY());
            // Ensure the raster has the dimensions of the image,
            // as the tile dimensions might differ.
            if (srcRas.getWidth() != rimage.getWidth() || srcRas.getHeight() != rimage.getHeight()) {
                srcRas = srcRas.createChild(srcRas.getMinX(), srcRas.getMinY(), rimage.getWidth(), rimage.getHeight(), srcRas.getMinX(), srcRas.getMinY(), null);
            }
        } else {
            // Image is tiled so get a contiguous raster by copying.
            srcRas = rimage.getData();
        }
    }
    // Now determine if we are using a band subset
    // By default, we are using all source bands
    int numSrcBands = srcRas.getNumBands();
    indexed = false;
    indexCM = null;
    ColorModel cm = null;
    ColorSpace cs = null;
    isAlphaPremultiplied = false;
    srcCM = null;
    if (!rasterOnly) {
        cm = rimage.getColorModel();
        if (cm != null) {
            cs = cm.getColorSpace();
            if (cm instanceof IndexColorModel) {
                indexed = true;
                indexCM = (IndexColorModel) cm;
                numSrcBands = cm.getNumComponents();
            }
            if (cm.isAlphaPremultiplied()) {
                isAlphaPremultiplied = true;
                srcCM = cm;
            }
        }
    }
    srcBands = JPEG.bandOffsets[numSrcBands - 1];
    int numBandsUsed = numSrcBands;
    if (param != null) {
        int[] sBands = param.getSourceBands();
        if (sBands != null) {
            if (indexed) {
                warningOccurred(WARNING_NO_BANDS_ON_INDEXED);
            } else {
                srcBands = sBands;
                numBandsUsed = srcBands.length;
                if (numBandsUsed > numSrcBands) {
                    throw new IIOException("ImageWriteParam specifies too many source bands");
                }
            }
        }
    }
    boolean usingBandSubset = (numBandsUsed != numSrcBands);
    boolean fullImage = ((!rasterOnly) && (!usingBandSubset));
    int[] bandSizes = null;
    if (!indexed) {
        bandSizes = srcRas.getSampleModel().getSampleSize();
        // If this is a subset, we must adjust bandSizes
        if (usingBandSubset) {
            int[] temp = new int[numBandsUsed];
            for (int i = 0; i < numBandsUsed; i++) {
                temp[i] = bandSizes[srcBands[i]];
            }
            bandSizes = temp;
        }
    } else {
        int[] tempSize = srcRas.getSampleModel().getSampleSize();
        bandSizes = new int[numSrcBands];
        for (int i = 0; i < numSrcBands; i++) {
            // All the same
            bandSizes[i] = tempSize[0];
        }
    }
    for (int i = 0; i < bandSizes.length; i++) {
        // Reject images with more than 8 bits
        // per sample.
        if (bandSizes[i] <= 0 || bandSizes[i] > 8) {
            throw new IIOException("Illegal band size: should be 0 < size <= 8");
        }
        // Indexed images are expanded to 8-bit components, so report the size
        // to 8-bit.
        if (indexed) {
            bandSizes[i] = 8;
        }
    }
    if (debug) {
        System.out.println("numSrcBands is " + numSrcBands);
        System.out.println("numBandsUsed is " + numBandsUsed);
        System.out.println("usingBandSubset is " + usingBandSubset);
        System.out.println("fullImage is " + fullImage);
        System.out.print("Band sizes:");
        for (int i = 0; i < bandSizes.length; i++) {
            System.out.print(" " + bandSizes[i]);
        }
        System.out.println();
    }
    // Destination type, if there is one
    ImageTypeSpecifier destType = null;
    if (param != null) {
        destType = param.getDestinationType();
        // Ignore dest type if we are writing a complete image
        if ((fullImage) && (destType != null)) {
            warningOccurred(WARNING_DEST_IGNORED);
            destType = null;
        }
    }
    // Examine the param
    sourceXOffset = srcRas.getMinX();
    sourceYOffset = srcRas.getMinY();
    int imageWidth = srcRas.getWidth();
    int imageHeight = srcRas.getHeight();
    sourceWidth = imageWidth;
    sourceHeight = imageHeight;
    int periodX = 1;
    int periodY = 1;
    int gridX = 0;
    int gridY = 0;
    JPEGQTable[] qTables = null;
    JPEGHuffmanTable[] DCHuffmanTables = null;
    JPEGHuffmanTable[] ACHuffmanTables = null;
    boolean optimizeHuffman = false;
    JPEGImageWriteParam jparam = null;
    int progressiveMode = ImageWriteParam.MODE_DISABLED;
    if (param != null) {
        Rectangle sourceRegion = param.getSourceRegion();
        if (sourceRegion != null) {
            Rectangle imageBounds = new Rectangle(sourceXOffset, sourceYOffset, sourceWidth, sourceHeight);
            sourceRegion = sourceRegion.intersection(imageBounds);
            sourceXOffset = sourceRegion.x;
            sourceYOffset = sourceRegion.y;
            sourceWidth = sourceRegion.width;
            sourceHeight = sourceRegion.height;
        }
        if (sourceWidth + sourceXOffset > imageWidth) {
            sourceWidth = imageWidth - sourceXOffset;
        }
        if (sourceHeight + sourceYOffset > imageHeight) {
            sourceHeight = imageHeight - sourceYOffset;
        }
        periodX = param.getSourceXSubsampling();
        periodY = param.getSourceYSubsampling();
        gridX = param.getSubsamplingXOffset();
        gridY = param.getSubsamplingYOffset();
        switch(param.getCompressionMode()) {
            case ImageWriteParam.MODE_DISABLED:
                throw new IIOException("JPEG compression cannot be disabled");
            case ImageWriteParam.MODE_EXPLICIT:
                float quality = param.getCompressionQuality();
                quality = JPEG.convertToLinearQuality(quality);
                qTables = new JPEGQTable[2];
                qTables[0] = JPEGQTable.K1Luminance.getScaledInstance(quality, true);
                qTables[1] = JPEGQTable.K2Chrominance.getScaledInstance(quality, true);
                break;
            case ImageWriteParam.MODE_DEFAULT:
                qTables = new JPEGQTable[2];
                qTables[0] = JPEGQTable.K1Div2Luminance;
                qTables[1] = JPEGQTable.K2Div2Chrominance;
                break;
        }
        progressiveMode = param.getProgressiveMode();
        if (param instanceof JPEGImageWriteParam) {
            jparam = (JPEGImageWriteParam) param;
            optimizeHuffman = jparam.getOptimizeHuffmanTables();
        }
    }
    // Now examine the metadata
    IIOMetadata mdata = image.getMetadata();
    if (mdata != null) {
        if (mdata instanceof JPEGMetadata) {
            metadata = (JPEGMetadata) mdata;
            if (debug) {
                System.out.println("We have metadata, and it's JPEG metadata");
            }
        } else {
            if (!rasterOnly) {
                ImageTypeSpecifier type = destType;
                if (type == null) {
                    type = new ImageTypeSpecifier(rimage);
                }
                metadata = (JPEGMetadata) convertImageMetadata(mdata, type, param);
            } else {
                warningOccurred(WARNING_METADATA_NOT_JPEG_FOR_RASTER);
            }
        }
    }
    // First set a default state
    // If it's there, use it
    ignoreJFIF = false;
    // If it's there, use it
    ignoreAdobe = false;
    // Change if needed
    newAdobeTransform = JPEG.ADOBE_IMPOSSIBLE;
    writeDefaultJFIF = false;
    writeAdobe = false;
    // By default we'll do no conversion:
    int inCsType = JPEG.JCS_UNKNOWN;
    int outCsType = JPEG.JCS_UNKNOWN;
    JFIFMarkerSegment jfif = null;
    AdobeMarkerSegment adobe = null;
    SOFMarkerSegment sof = null;
    if (metadata != null) {
        jfif = (JFIFMarkerSegment) metadata.findMarkerSegment(JFIFMarkerSegment.class, true);
        adobe = (AdobeMarkerSegment) metadata.findMarkerSegment(AdobeMarkerSegment.class, true);
        sof = (SOFMarkerSegment) metadata.findMarkerSegment(SOFMarkerSegment.class, true);
    }
    // By default don't write one
    iccProfile = null;
    // PhotoYCC does this
    convertTosRGB = false;
    converted = null;
    if (destType != null) {
        if (numBandsUsed != destType.getNumBands()) {
            throw new IIOException("Number of source bands != number of destination bands");
        }
        cs = destType.getColorModel().getColorSpace();
        // Check the metadata against the destination type
        if (metadata != null) {
            checkSOFBands(sof, numBandsUsed);
            checkJFIF(jfif, destType, false);
            // Do we want to write an ICC profile?
            if ((jfif != null) && (ignoreJFIF == false)) {
                if (JPEG.isNonStandardICC(cs)) {
                    iccProfile = ((ICC_ColorSpace) cs).getProfile();
                }
            }
            checkAdobe(adobe, destType, false);
        } else {
            // If we can add a JFIF or an Adobe marker segment, do so
            if (JPEG.isJFIFcompliant(destType, false)) {
                writeDefaultJFIF = true;
                // Do we want to write an ICC profile?
                if (JPEG.isNonStandardICC(cs)) {
                    iccProfile = ((ICC_ColorSpace) cs).getProfile();
                }
            } else {
                int transform = JPEG.transformForType(destType, false);
                if (transform != JPEG.ADOBE_IMPOSSIBLE) {
                    writeAdobe = true;
                    newAdobeTransform = transform;
                }
            }
            // re-create the metadata
            metadata = new JPEGMetadata(destType, null, this);
        }
        inCsType = getSrcCSType(destType);
        outCsType = getDefaultDestCSType(destType);
    } else {
        // no destination type
        if (metadata == null) {
            if (fullImage) {
                // no dest, no metadata, full image
                // Use default metadata matching the image and param
                metadata = new JPEGMetadata(new ImageTypeSpecifier(rimage), param, this);
                if (metadata.findMarkerSegment(JFIFMarkerSegment.class, true) != null) {
                    cs = rimage.getColorModel().getColorSpace();
                    if (JPEG.isNonStandardICC(cs)) {
                        iccProfile = ((ICC_ColorSpace) cs).getProfile();
                    }
                }
                inCsType = getSrcCSType(rimage);
                outCsType = getDefaultDestCSType(rimage);
            }
        // else no dest, no metadata, not an image,
        // so no special headers, no color conversion
        } else {
            // no dest type, but there is metadata
            checkSOFBands(sof, numBandsUsed);
            if (fullImage) {
                // no dest, metadata, image
                // Check that the metadata and the image match
                ImageTypeSpecifier inputType = new ImageTypeSpecifier(rimage);
                inCsType = getSrcCSType(rimage);
                if (cm != null) {
                    boolean alpha = cm.hasAlpha();
                    switch(cs.getType()) {
                        case ColorSpace.TYPE_GRAY:
                            if (!alpha) {
                                outCsType = JPEG.JCS_GRAYSCALE;
                            } else {
                                if (jfif != null) {
                                    ignoreJFIF = true;
                                    warningOccurred(WARNING_IMAGE_METADATA_JFIF_MISMATCH);
                                }
                            // out colorspace remains unknown
                            }
                            if ((adobe != null) && (adobe.transform != JPEG.ADOBE_UNKNOWN)) {
                                newAdobeTransform = JPEG.ADOBE_UNKNOWN;
                                warningOccurred(WARNING_IMAGE_METADATA_ADOBE_MISMATCH);
                            }
                            break;
                        case ColorSpace.TYPE_RGB:
                            if (!alpha) {
                                if (jfif != null) {
                                    outCsType = JPEG.JCS_YCbCr;
                                    if (JPEG.isNonStandardICC(cs) || ((cs instanceof ICC_ColorSpace) && (jfif.iccSegment != null))) {
                                        iccProfile = ((ICC_ColorSpace) cs).getProfile();
                                    }
                                } else if (adobe != null) {
                                    switch(adobe.transform) {
                                        case JPEG.ADOBE_UNKNOWN:
                                            outCsType = JPEG.JCS_RGB;
                                            break;
                                        case JPEG.ADOBE_YCC:
                                            outCsType = JPEG.JCS_YCbCr;
                                            break;
                                        default:
                                            warningOccurred(WARNING_IMAGE_METADATA_ADOBE_MISMATCH);
                                            newAdobeTransform = JPEG.ADOBE_UNKNOWN;
                                            outCsType = JPEG.JCS_RGB;
                                            break;
                                    }
                                } else {
                                    // consult the ids
                                    int outCS = sof.getIDencodedCSType();
                                    // consult the sampling factors
                                    if (outCS != JPEG.JCS_UNKNOWN) {
                                        outCsType = outCS;
                                    } else {
                                        boolean subsampled = isSubsampled(sof.componentSpecs);
                                        if (subsampled) {
                                            outCsType = JPEG.JCS_YCbCr;
                                        } else {
                                            outCsType = JPEG.JCS_RGB;
                                        }
                                    }
                                }
                            } else {
                                // RGBA
                                if (jfif != null) {
                                    ignoreJFIF = true;
                                    warningOccurred(WARNING_IMAGE_METADATA_JFIF_MISMATCH);
                                }
                                if (adobe != null) {
                                    if (adobe.transform != JPEG.ADOBE_UNKNOWN) {
                                        newAdobeTransform = JPEG.ADOBE_UNKNOWN;
                                        warningOccurred(WARNING_IMAGE_METADATA_ADOBE_MISMATCH);
                                    }
                                    outCsType = JPEG.JCS_RGBA;
                                } else {
                                    // consult the ids
                                    int outCS = sof.getIDencodedCSType();
                                    // consult the sampling factors
                                    if (outCS != JPEG.JCS_UNKNOWN) {
                                        outCsType = outCS;
                                    } else {
                                        boolean subsampled = isSubsampled(sof.componentSpecs);
                                        outCsType = subsampled ? JPEG.JCS_YCbCrA : JPEG.JCS_RGBA;
                                    }
                                }
                            }
                            break;
                        case ColorSpace.TYPE_3CLR:
                            if (cs == JPEG.JCS.getYCC()) {
                                if (!alpha) {
                                    if (jfif != null) {
                                        convertTosRGB = true;
                                        convertOp = new ColorConvertOp(cs, JPEG.JCS.sRGB, null);
                                        outCsType = JPEG.JCS_YCbCr;
                                    } else if (adobe != null) {
                                        if (adobe.transform != JPEG.ADOBE_YCC) {
                                            newAdobeTransform = JPEG.ADOBE_YCC;
                                            warningOccurred(WARNING_IMAGE_METADATA_ADOBE_MISMATCH);
                                        }
                                        outCsType = JPEG.JCS_YCC;
                                    } else {
                                        outCsType = JPEG.JCS_YCC;
                                    }
                                } else {
                                    // PhotoYCCA
                                    if (jfif != null) {
                                        ignoreJFIF = true;
                                        warningOccurred(WARNING_IMAGE_METADATA_JFIF_MISMATCH);
                                    } else if (adobe != null) {
                                        if (adobe.transform != JPEG.ADOBE_UNKNOWN) {
                                            newAdobeTransform = JPEG.ADOBE_UNKNOWN;
                                            warningOccurred(WARNING_IMAGE_METADATA_ADOBE_MISMATCH);
                                        }
                                    }
                                    outCsType = JPEG.JCS_YCCA;
                                }
                            }
                    }
                }
            }
        // else no dest, metadata, not an image.  Defaults ok
        }
    }
    boolean metadataProgressive = false;
    int[] scans = null;
    if (metadata != null) {
        if (sof == null) {
            sof = (SOFMarkerSegment) metadata.findMarkerSegment(SOFMarkerSegment.class, true);
        }
        if ((sof != null) && (sof.tag == JPEG.SOF2)) {
            metadataProgressive = true;
            if (progressiveMode == ImageWriteParam.MODE_COPY_FROM_METADATA) {
                // Might still be null
                scans = collectScans(metadata, sof);
            } else {
                numScans = 0;
            }
        }
        if (jfif == null) {
            jfif = (JFIFMarkerSegment) metadata.findMarkerSegment(JFIFMarkerSegment.class, true);
        }
    }
    thumbnails = image.getThumbnails();
    int numThumbs = image.getNumThumbnails();
    forceJFIF = false;
    // Determine whether thumbnails can be written: if a default JFIF
    // marker segment is going to be added, then thumbnails can be written.
    if (!writeDefaultJFIF) {
        // If there is no metadata, then we can't write thumbnails
        if (metadata == null) {
            thumbnails = null;
            if (numThumbs != 0) {
                warningOccurred(WARNING_IGNORING_THUMBS);
            }
        } else {
            // If writing a raster or a band subset,
            // then the user must specify JFIF in the metadata.
            if (fullImage == false) {
                if (jfif == null) {
                    // Or we can't include thumbnails
                    thumbnails = null;
                    if (numThumbs != 0) {
                        warningOccurred(WARNING_IGNORING_THUMBS);
                    }
                }
            } else {
                // It is a full image, and there is metadata
                if (jfif == null) {
                    // Can it have JFIF?
                    if ((outCsType == JPEG.JCS_GRAYSCALE) || (outCsType == JPEG.JCS_YCbCr)) {
                        if (numThumbs != 0) {
                            forceJFIF = true;
                            warningOccurred(WARNING_FORCING_JFIF);
                        }
                    } else {
                        // Nope, not JFIF-compatible
                        thumbnails = null;
                        if (numThumbs != 0) {
                            warningOccurred(WARNING_IGNORING_THUMBS);
                        }
                    }
                }
            }
        }
    }
    // Set up a boolean to indicate whether we need to call back to
    // write metadata
    boolean haveMetadata = ((metadata != null) || writeDefaultJFIF || writeAdobe);
    // Now that we have dealt with metadata, finalize our tables set up
    // Are we going to write tables?  By default, yes.
    boolean writeDQT = true;
    boolean writeDHT = true;
    // But if the metadata has no tables, no.
    DQTMarkerSegment dqt = null;
    DHTMarkerSegment dht = null;
    int restartInterval = 0;
    if (metadata != null) {
        dqt = (DQTMarkerSegment) metadata.findMarkerSegment(DQTMarkerSegment.class, true);
        dht = (DHTMarkerSegment) metadata.findMarkerSegment(DHTMarkerSegment.class, true);
        DRIMarkerSegment dri = (DRIMarkerSegment) metadata.findMarkerSegment(DRIMarkerSegment.class, true);
        if (dri != null) {
            restartInterval = dri.restartInterval;
        }
        if (dqt == null) {
            writeDQT = false;
        }
        if (dht == null) {
            // Ignored if optimizeHuffman is true
            writeDHT = false;
        }
    }
    // Whether or not tables are written, we need to figure out which ones
    // to use.
    if (qTables == null) {
        // Get them from metadata, or use defaults
        if (dqt != null) {
            qTables = collectQTablesFromMetadata(metadata);
        } else if (streamQTables != null) {
            qTables = streamQTables;
        } else if ((jparam != null) && (jparam.areTablesSet())) {
            qTables = jparam.getQTables();
        } else {
            qTables = JPEG.getDefaultQTables();
        }
    }
    // If we are optimizing, we don't want any tables.
    if (optimizeHuffman == false) {
        // If they were for progressive scans, we can't use them.
        if ((dht != null) && (metadataProgressive == false)) {
            DCHuffmanTables = collectHTablesFromMetadata(metadata, true);
            ACHuffmanTables = collectHTablesFromMetadata(metadata, false);
        } else if (streamDCHuffmanTables != null) {
            DCHuffmanTables = streamDCHuffmanTables;
            ACHuffmanTables = streamACHuffmanTables;
        } else if ((jparam != null) && (jparam.areTablesSet())) {
            DCHuffmanTables = jparam.getDCHuffmanTables();
            ACHuffmanTables = jparam.getACHuffmanTables();
        } else {
            DCHuffmanTables = JPEG.getDefaultHuffmanTables(true);
            ACHuffmanTables = JPEG.getDefaultHuffmanTables(false);
        }
    }
    // By default, ids are 1 - N, no subsampling
    int[] componentIds = new int[numBandsUsed];
    int[] HsamplingFactors = new int[numBandsUsed];
    int[] VsamplingFactors = new int[numBandsUsed];
    int[] QtableSelectors = new int[numBandsUsed];
    for (int i = 0; i < numBandsUsed; i++) {
        // JFIF compatible
        componentIds[i] = i + 1;
        HsamplingFactors[i] = 1;
        VsamplingFactors[i] = 1;
        QtableSelectors[i] = 0;
    }
    // Now override them with the contents of sof, if there is one,
    if (sof != null) {
        for (int i = 0; i < numBandsUsed; i++) {
            if (forceJFIF == false) {
                // else use JFIF-compatible default
                componentIds[i] = sof.componentSpecs[i].componentId;
            }
            HsamplingFactors[i] = sof.componentSpecs[i].HsamplingFactor;
            VsamplingFactors[i] = sof.componentSpecs[i].VsamplingFactor;
            QtableSelectors[i] = sof.componentSpecs[i].QtableSelector;
        }
    }
    sourceXOffset += gridX;
    sourceWidth -= gridX;
    sourceYOffset += gridY;
    sourceHeight -= gridY;
    int destWidth = (sourceWidth + periodX - 1) / periodX;
    int destHeight = (sourceHeight + periodY - 1) / periodY;
    // Create an appropriate 1-line databuffer for writing
    int lineSize = sourceWidth * numBandsUsed;
    DataBufferByte buffer = new DataBufferByte(lineSize);
    // Create a raster from that
    int[] bandOffs = JPEG.bandOffsets[numBandsUsed - 1];
    raster = Raster.createInterleavedRaster(buffer, sourceWidth, 1, lineSize, numBandsUsed, bandOffs, null);
    // Call the writer, who will call back for every scanline
    clearAbortRequest();
    cbLock.lock();
    try {
        processImageStarted(currentImage);
    } finally {
        cbLock.unlock();
    }
    boolean aborted = false;
    if (debug) {
        System.out.println("inCsType: " + inCsType);
        System.out.println("outCsType: " + outCsType);
    }
    // Note that getData disables acceleration on buffer, but it is
    // just a 1-line intermediate data transfer buffer that does not
    // affect the acceleration of the source image.
    aborted = writeImage(structPointer, buffer.getData(), inCsType, outCsType, numBandsUsed, bandSizes, sourceWidth, destWidth, destHeight, periodX, periodY, qTables, writeDQT, DCHuffmanTables, ACHuffmanTables, writeDHT, optimizeHuffman, (progressiveMode != ImageWriteParam.MODE_DISABLED), numScans, scans, componentIds, HsamplingFactors, VsamplingFactors, QtableSelectors, haveMetadata, restartInterval);
    cbLock.lock();
    try {
        if (aborted) {
            processWriteAborted();
        } else {
            processImageComplete();
        }
        ios.flush();
    } finally {
        cbLock.unlock();
    }
    // After a successful write
    currentImage++;
}
Also used : ColorSpace(java.awt.color.ColorSpace) ICC_ColorSpace(java.awt.color.ICC_ColorSpace) Rectangle(java.awt.Rectangle) JPEGQTable(javax.imageio.plugins.jpeg.JPEGQTable) DataBufferByte(java.awt.image.DataBufferByte) BufferedImage(java.awt.image.BufferedImage) ImageTypeSpecifier(javax.imageio.ImageTypeSpecifier) JPEGHuffmanTable(javax.imageio.plugins.jpeg.JPEGHuffmanTable) IndexColorModel(java.awt.image.IndexColorModel) ColorModel(java.awt.image.ColorModel) JPEGImageWriteParam(javax.imageio.plugins.jpeg.JPEGImageWriteParam) IndexColorModel(java.awt.image.IndexColorModel) IIOException(javax.imageio.IIOException) IIOMetadata(javax.imageio.metadata.IIOMetadata) ICC_ColorSpace(java.awt.color.ICC_ColorSpace) ColorConvertOp(java.awt.image.ColorConvertOp) RenderedImage(java.awt.image.RenderedImage)
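
writeOnThread(...) sits behind ImageWriter.write(...), so the usual way to reach the MODE_EXPLICIT quality branch above is through an ImageWriteParam. A minimal sketch (an opaque RGB input.png is assumed to exist; the 0.85f quality value is arbitrary):

import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.stream.ImageOutputStream;

public class JpegQualityDemo {
    public static void main(String[] args) throws Exception {
        BufferedImage image = ImageIO.read(new File("input.png")); // assumed input file
        ImageWriter writer = ImageIO.getImageWritersByFormatName("jpeg").next();
        ImageWriteParam param = writer.getDefaultWriteParam();
        // MODE_EXPLICIT routes through the branch above that scales the
        // K1/K2 quantization tables from the requested quality.
        param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
        param.setCompressionQuality(0.85f);
        try (ImageOutputStream out = ImageIO.createImageOutputStream(new File("output.jpg"))) {
            writer.setOutput(out);
            writer.write(null, new IIOImage(image, null, null), param);
        } finally {
            writer.dispose();
        }
    }
}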

Aggregations

RenderedImage (java.awt.image.RenderedImage): 15 usages
Rectangle (java.awt.Rectangle): 7 usages
SampleModel (java.awt.image.SampleModel): 5 usages
ColorModel (java.awt.image.ColorModel): 4 usages
IndexColorModel (java.awt.image.IndexColorModel): 4 usages
IOException (java.io.IOException): 4 usages
FontRenderContext (java.awt.font.FontRenderContext): 3 usages
AffineTransform (java.awt.geom.AffineTransform): 3 usages
BufferedImage (java.awt.image.BufferedImage): 3 usages
DataBufferByte (java.awt.image.DataBufferByte): 3 usages
Raster (java.awt.image.Raster): 3 usages
ImageTypeSpecifier (javax.imageio.ImageTypeSpecifier): 3 usages
IIOMetadata (javax.imageio.metadata.IIOMetadata): 3 usages
NoninvertibleTransformException (java.awt.geom.NoninvertibleTransformException): 2 usages
ComponentSampleModel (java.awt.image.ComponentSampleModel): 2 usages
DirectColorModel (java.awt.image.DirectColorModel): 2 usages
RenderContext (java.awt.image.renderable.RenderContext): 2 usages
File (java.io.File): 2 usages
InputStream (java.io.InputStream): 2 usages
Response (javax.ws.rs.core.Response): 2 usages