Search in sources :

Example 26 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

The following example shows the blend method of the class TextureBlenderDDS.

/**
 * Blends a DXT-compressed (DDS) texture with the material color (or with a base
 * texture, when one is given) without fully decompressing it: only the two
 * anchor colors of each 4x4 texel are decoded, blended and re-encoded, while
 * the per-pixel index bits are copied through untouched.
 *
 * @param image          the DXT-compressed texture to blend
 * @param baseImage      the base texture to blend with; may be null, in which
 *                       case the material color is used instead
 * @param blenderContext the blender context
 * @return a new image in the same compressed format with blended anchor colors
 */
@Override
public Image blend(Image image, Image baseImage, BlenderContext blenderContext) {
    this.prepareImagesForBlending(image, baseImage);
    Format format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    int depth = image.getDepth();
    if (depth == 0) {
        // 2D images report depth 0; treat them as a single layer
        depth = 1;
    }
    ArrayList<ByteBuffer> dataArray = new ArrayList<ByteBuffer>(depth);
    PixelInputOutput basePixelIO = null;
    float[][] compressedMaterialColor = null;
    TexturePixel[] baseTextureColors = null;
    if (baseImage != null) {
        basePixelIO = PixelIOFactory.getPixelIO(baseImage.getFormat());
        // per-anchor blend sources sampled from the base texture
        compressedMaterialColor = new float[2][4];
        baseTextureColors = new TexturePixel[] { new TexturePixel(), new TexturePixel() };
    }
    float[] resultPixel = new float[4];
    float[] pixelColor = new float[4];
    TexturePixel[] colors = new TexturePixel[] { new TexturePixel(), new TexturePixel() };
    int baseXTexelIndex = 0, baseYTexelIndex = 0;
    float[] alphas = new float[] { 1, 1 };
    for (int dataLayerIndex = 0; dataLayerIndex < depth; ++dataLayerIndex) {
        ByteBuffer data = image.getData(dataLayerIndex);
        data.rewind();
        ByteBuffer newData = BufferUtils.createByteBuffer(data.remaining());
        while (data.hasRemaining()) {
            if (format == Format.DXT3) {
                long alpha = data.getLong();
                // get alpha for first and last pixel that is compressed in the texel
                byte alpha0 = (byte) (alpha << 4 & 0xFF);
                byte alpha1 = (byte) (alpha >> 60 & 0xFF);
                // treat the bytes as unsigned and normalize to [0, 1]
                alphas[0] = (alpha0 & 0xFF) / 255.0f;
                alphas[1] = (alpha1 & 0xFF) / 255.0f;
                newData.putLong(alpha);
            } else if (format == Format.DXT5) {
                byte alpha0 = data.get();
                byte alpha1 = data.get();
                // FIX: the original computed alphas[1] from alpha0, ignoring the
                // second anchor alpha entirely
                alphas[0] = (alpha0 & 0xFF) / 255.0f;
                alphas[1] = (alpha1 & 0xFF) / 255.0f;
                newData.put(alpha0);
                newData.put(alpha1);
                // only read the next 6 bytes (these are alpha indexes)
                newData.putInt(data.getInt());
                newData.putShort(data.getShort());
            }
            int col0 = RGB565.RGB565_to_ARGB8(data.getShort());
            int col1 = RGB565.RGB565_to_ARGB8(data.getShort());
            colors[0].fromARGB8(col0);
            colors[1].fromARGB8(col1);
            // compressing 16 pixels from the base texture as if they belonged to a texel
            if (baseImage != null) {
                // reading pixels (first and last of the 16 colors array)
                // first pixel of the 4x4 texel
                basePixelIO.read(baseImage, dataLayerIndex, baseTextureColors[0], baseXTexelIndex << 2, baseYTexelIndex << 2);
                // last pixel of the 4x4 texel (offset +3 from its origin).
                // FIX: the original wrote 'baseXTexelIndex << 2 + 4', which due to
                // operator precedence evaluated as 'baseXTexelIndex << 6'
                basePixelIO.read(baseImage, dataLayerIndex, baseTextureColors[1], (baseXTexelIndex << 2) + 3, (baseYTexelIndex << 2) + 3);
                baseTextureColors[0].toRGBA(compressedMaterialColor[0]);
                baseTextureColors[1].toRGBA(compressedMaterialColor[1]);
            }
            // blending colors
            for (int i = 0; i < colors.length; ++i) {
                if (negateTexture) {
                    colors[i].negate();
                }
                colors[i].toRGBA(pixelColor);
                pixelColor[3] = alphas[i];
                this.blendPixel(resultPixel, compressedMaterialColor != null ? compressedMaterialColor[i] : materialColor, pixelColor, blenderContext);
                colors[i].fromARGB(1, resultPixel[0], resultPixel[1], resultPixel[2]);
                int argb8 = colors[i].toARGB8();
                short rgb565 = RGB565.ARGB8_to_RGB565(argb8);
                newData.putShort(rgb565);
            }
            // just copy the remaining 4 bytes of the current texel
            newData.putInt(data.getInt());
            ++baseXTexelIndex;
            // FIX: wrap when the row of texels is exhausted; valid indices are
            // 0 .. (width/4 - 1), so the original '>' reset one texel too late
            if (baseXTexelIndex >= image.getWidth() >> 2) {
                baseXTexelIndex = 0;
                ++baseYTexelIndex;
            }
        }
        dataArray.add(newData);
    }
    Image result = dataArray.size() > 1 ? new Image(format, width, height, depth, dataArray, ColorSpace.Linear) : new Image(format, width, height, dataArray.get(0), ColorSpace.Linear);
    if (image.getMipMapSizes() != null) {
        result.setMipMapSizes(image.getMipMapSizes().clone());
    }
    return result;
}
Also used : ArrayList(java.util.ArrayList) Image(com.jme3.texture.Image) ByteBuffer(java.nio.ByteBuffer) Format(com.jme3.texture.Image.Format) PixelInputOutput(com.jme3.scene.plugins.blender.textures.io.PixelInputOutput) TexturePixel(com.jme3.scene.plugins.blender.textures.TexturePixel)

Example 27 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

The following example shows the blend method of the class TextureBlenderLuminance.

/**
 * Blends a luminance texture with the material color (or, when a base texture
 * is given, with the corresponding base-texture pixel) and produces an
 * uncompressed RGBA8 image of the same dimensions.
 *
 * @param image          the luminance texture to blend
 * @param baseImage      the base texture to blend with; may be null, in which
 *                       case the material color is used instead
 * @param blenderContext the blender context
 * @return a new RGBA8 image holding the blended pixels
 */
public Image blend(Image image, Image baseImage, BlenderContext blenderContext) {
    this.prepareImagesForBlending(image, baseImage);
    Format format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    int layerCount = image.getDepth() == 0 ? 1 : image.getDepth();
    // pixel reader and scratch pixel for sampling the base texture (if any)
    PixelInputOutput baseIO = null;
    TexturePixel samplePixel = null;
    float[] blendSource = this.materialColor;
    if (baseImage != null) {
        baseIO = PixelIOFactory.getPixelIO(baseImage.getFormat());
        // the blend source will be refreshed per pixel from the base texture
        blendSource = new float[this.materialColor.length];
        samplePixel = new TexturePixel();
    }
    ArrayList<ByteBuffer> layers = new ArrayList<ByteBuffer>(layerCount);
    float[] blended = new float[4];
    float[] tinAndAlpha = new float[2];
    for (int layer = 0; layer < layerCount; ++layer) {
        ByteBuffer source = image.getData(layer);
        source.rewind();
        ByteBuffer target = BufferUtils.createByteBuffer(source.limit() * 4);
        int outIndex = 0;
        int x = 0;
        int y = 0;
        while (source.hasRemaining()) {
            if (baseIO != null) {
                // when a base texture is applied, its pixel replaces the material color
                baseIO.read(baseImage, layer, samplePixel, x, y);
                samplePixel.toRGBA(blendSource);
                if (++x >= width) {
                    x = 0;
                    ++y;
                }
            }
            this.getTinAndAlpha(source, format, negateTexture, tinAndAlpha);
            this.blendPixel(blended, blendSource, color, tinAndAlpha[0], blendFactor, blendType, blenderContext);
            target.put(outIndex++, (byte) (blended[0] * 255.0f));
            target.put(outIndex++, (byte) (blended[1] * 255.0f));
            target.put(outIndex++, (byte) (blended[2] * 255.0f));
            target.put(outIndex++, (byte) (tinAndAlpha[1] * 255.0f));
        }
        layers.add(target);
    }
    Image result;
    if (layerCount > 1) {
        result = new Image(Format.RGBA8, width, height, layerCount, layers, ColorSpace.Linear);
    } else {
        result = new Image(Format.RGBA8, width, height, layers.get(0), ColorSpace.Linear);
    }
    if (image.getMipMapSizes() != null) {
        result.setMipMapSizes(image.getMipMapSizes().clone());
    }
    return result;
}
Also used : Format(com.jme3.texture.Image.Format) PixelInputOutput(com.jme3.scene.plugins.blender.textures.io.PixelInputOutput) ArrayList(java.util.ArrayList) Image(com.jme3.texture.Image) TexturePixel(com.jme3.scene.plugins.blender.textures.TexturePixel) ByteBuffer(java.nio.ByteBuffer)

Example 28 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

The following example shows the testImages method of the class HelloOpenCL.

/**
 * Exercises the OpenCL image API: queries the supported image formats, creates
 * a 1920x1080 RGBA/float 2D image, fills its left half red and right half blue,
 * then verifies the contents by copying a region to a buffer and by mapping a
 * sub-image copied into a second image.
 *
 * @param clContext the OpenCL context used to create images and buffers
 * @param clQueue   the command queue the operations are enqueued on
 * @return true if all checks pass, false if an assertion or any other exception occurred
 */
private boolean testImages(Context clContext, CommandQueue clQueue) {
    try {
        //query supported formats
        for (MemoryAccess ma : MemoryAccess.values()) {
            for (Image.ImageType type : Image.ImageType.values()) {
                try {
                    System.out.println("Formats for " + ma + " and " + type + ": " + Arrays.toString(clContext.querySupportedFormats(ma, type)));
                } catch (UnsupportedOperationException e) {
                    // not every access/type combination is supported on every device
                    LOG.warning(e.getLocalizedMessage());
                }
            }
        }
        //create an image
        Image.ImageFormat format = new Image.ImageFormat(Image.ImageChannelOrder.RGBA, Image.ImageChannelType.FLOAT);
        Image.ImageDescriptor descr = new Image.ImageDescriptor(Image.ImageType.IMAGE_2D, 1920, 1080, 0, 0);
        Image image = clContext.createImage(MemoryAccess.READ_WRITE, format, descr);
        System.out.println("image created");
        //check queries
        assertEquals(descr.type, image.getImageType(), "Wrong image type");
        assertEquals(format, image.getImageFormat(), "Wrong image format");
        assertEquals(descr.width, image.getWidth(), "Wrong width");
        assertEquals(descr.height, image.getHeight(), "Wrong height");
        //fill with red and blue
        ColorRGBA color1 = ColorRGBA.Red;
        ColorRGBA color2 = ColorRGBA.Blue;
        // left half red, right half blue; both fills run asynchronously
        Event e1 = image.fillAsync(clQueue, new long[] { 0, 0, 0 }, new long[] { descr.width / 2, descr.height, 1 }, color1);
        Event e2 = image.fillAsync(clQueue, new long[] { descr.width / 2, 0, 0 }, new long[] { descr.width / 2, descr.height, 1 }, color2);
        e1.waitForFinished();
        e2.waitForFinished();
        //copy to a buffer
        // 4 floats per pixel * 4 bytes per float * 500x1024 region
        Buffer buffer = clContext.createBuffer(4 * 4 * 500 * 1024);
        // region origin (10,10) lies well inside the red half
        Event e3 = image.copyToBufferAsync(clQueue, buffer, new long[] { 10, 10, 0 }, new long[] { 500, 1024, 1 }, 0);
        // NOTE(review): e3 is released without waitForFinished(); presumably the
        // blocking map() below synchronizes with the copy on the queue -- confirm
        e3.release();
        //this buffer must be completely red
        ByteBuffer map1 = buffer.map(clQueue, MappingAccess.MAP_READ_ONLY);
        FloatBuffer map1F = map1.asFloatBuffer();
        map1F.rewind();
        for (int x = 0; x < 500; ++x) {
            for (int y = 0; y < 1024; ++y) {
                float r = map1F.get();
                float g = map1F.get();
                float b = map1F.get();
                float a = map1F.get();
                assertEquals(1, r, "Wrong red component");
                assertEquals(0, g, "Wrong green component");
                assertEquals(0, b, "Wrong blue component");
                assertEquals(1, a, "Wrong alpha component");
            }
        }
        buffer.unmap(clQueue, map1);
        //create a second image
        format = new Image.ImageFormat(Image.ImageChannelOrder.RGBA, Image.ImageChannelType.FLOAT);
        descr = new Image.ImageDescriptor(Image.ImageType.IMAGE_2D, 512, 512, 0, 0);
        Image image2 = clContext.createImage(MemoryAccess.READ_WRITE, format, descr);
        //copy an area of image1 to image2
        // source origin (1000,20) lies in the blue half of image1
        image.copyTo(clQueue, image2, new long[] { 1000, 20, 0 }, new long[] { 0, 0, 0 }, new long[] { 512, 512, 1 });
        //this area should be completely blue
        Image.ImageMapping map2 = image2.map(clQueue, new long[] { 0, 0, 0 }, new long[] { 512, 512, 1 }, MappingAccess.MAP_READ_WRITE);
        FloatBuffer map2F = map2.buffer.asFloatBuffer();
        for (int y = 0; y < 512; ++y) {
            for (int x = 0; x < 512; ++x) {
                // rowPitch is in bytes; /4 converts it to floats, 4 floats per pixel
                long index = 4 * x + y * (map2.rowPitch / 4);
                map2F.position((int) index);
                float r = map2F.get();
                float g = map2F.get();
                float b = map2F.get();
                float a = map2F.get();
                assertEquals(0, r, "Wrong red component");
                assertEquals(0, g, "Wrong green component");
                assertEquals(1, b, "Wrong blue component");
                assertEquals(1, a, "Wrong alpha component");
            }
        }
        image2.unmap(clQueue, map2);
        //release
        image.release();
        image2.release();
        buffer.release();
    } catch (AssertionError ex) {
        LOG.log(Level.SEVERE, "image test failed with an assertion error");
        return false;
    } catch (Exception ex) {
        LOG.log(Level.SEVERE, "image test failed with:", ex);
        return false;
    }
    return true;
}
Also used : FloatBuffer(java.nio.FloatBuffer) ByteBuffer(java.nio.ByteBuffer) FloatBuffer(java.nio.FloatBuffer) ByteBuffer(java.nio.ByteBuffer) ColorRGBA(com.jme3.math.ColorRGBA)

Example 29 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

The following example shows the setImage method of the class VRMouseManager.

/**
     * Set the image to use as mouse cursor. The given string describes an asset that the underlying application asset manager has to load.
     * @param texture the asset name of the image to use as mouse cursor.
     * @throws IllegalStateException if this manager is not attached to a VR environment,
     *                               or the environment is not attached to an application.
     */
public void setImage(String texture) {
    if (environment == null) {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
    if (environment.getApplication() == null) {
        throw new IllegalStateException("This VR environment is not attached to any application.");
    }
    // The original duplicated identical code in both arms of an isInVR()
    // check; the branches were byte-for-byte the same, so the check is dropped.
    Texture tex = environment.getApplication().getAssetManager().loadTexture(texture);
    mouseImage.setTexture(environment.getApplication().getAssetManager(), (Texture2D) tex, true);
    ySize = tex.getImage().getHeight();
    mouseImage.setHeight(ySize);
    mouseImage.setWidth(tex.getImage().getWidth());
    // render the cursor with alpha blending and without writing depth
    mouseImage.getMaterial().getAdditionalRenderState().setBlendMode(BlendMode.Alpha);
    mouseImage.getMaterial().getAdditionalRenderState().setDepthWrite(false);
}
Also used : Texture(com.jme3.texture.Texture)

Example 30 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

The following example shows the setupFinalFullTexture method of the class VRViewManagerOSVR.

/**
 * Creates an offscreen framebuffer matching the given camera's size, backs it
 * with the dual-eye RGBA8 texture, and routes the application's main viewport
 * output into it.
 *
 * @param cam the camera whose dimensions size the framebuffer and texture
 */
private void setupFinalFullTexture(Camera cam) {
    if (environment == null) {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
    if (environment.getApplication() == null) {
        throw new IllegalStateException("This VR environment is not attached to any application.");
    }
    int width = cam.getWidth();
    int height = cam.getHeight();
    // offscreen framebuffer that will receive the side-by-side eye render
    FrameBuffer out = new FrameBuffer(width, height, 1);
    // texture that backs the framebuffer
    dualEyeTex = new Texture2D(width, height, Image.Format.RGBA8);
    dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
    dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear);
    logger.config("Dual eye texture " + dualEyeTex.getName() + " (" + dualEyeTex.getImage().getId() + ")");
    logger.config("               Type: " + dualEyeTex.getType());
    logger.config("               Size: " + dualEyeTex.getImage().getWidth() + "x" + dualEyeTex.getImage().getHeight());
    logger.config("        Image depth: " + dualEyeTex.getImage().getDepth());
    logger.config("       Image format: " + dualEyeTex.getImage().getFormat());
    logger.config("  Image color space: " + dualEyeTex.getImage().getColorSpace());
    // attach depth and color, then redirect the main viewport into the framebuffer
    out.setDepthBuffer(Image.Format.Depth);
    out.setColorTexture(dualEyeTex);
    ViewPort viewPort = environment.getApplication().getViewPort();
    viewPort.setClearFlags(true, true, true);
    viewPort.setBackgroundColor(ColorRGBA.Black);
    viewPort.setOutputFrameBuffer(out);
}
Also used : Texture2D(com.jme3.texture.Texture2D) ViewPort(com.jme3.renderer.ViewPort) FrameBuffer(com.jme3.texture.FrameBuffer)

Aggregations

Image (com.jme3.texture.Image)68 ByteBuffer (java.nio.ByteBuffer)38 Texture (com.jme3.texture.Texture)27 Texture2D (com.jme3.texture.Texture2D)19 ArrayList (java.util.ArrayList)19 Material (com.jme3.material.Material)18 TextureKey (com.jme3.asset.TextureKey)17 Vector3f (com.jme3.math.Vector3f)17 Format (com.jme3.texture.Image.Format)15 TextureCubeMap (com.jme3.texture.TextureCubeMap)14 ColorRGBA (com.jme3.math.ColorRGBA)13 PixelInputOutput (com.jme3.scene.plugins.blender.textures.io.PixelInputOutput)12 BufferedImage (java.awt.image.BufferedImage)12 Geometry (com.jme3.scene.Geometry)10 InputStream (java.io.InputStream)10 IOException (java.io.IOException)8 TerrainLodControl (com.jme3.terrain.geomipmap.TerrainLodControl)7 TerrainQuad (com.jme3.terrain.geomipmap.TerrainQuad)7 DistanceLodCalculator (com.jme3.terrain.geomipmap.lodcalc.DistanceLodCalculator)7 AbstractHeightMap (com.jme3.terrain.heightmap.AbstractHeightMap)7