
Example 51 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

the class MaterialHelper method getParticlesMaterial.

/**
 * This method converts the given material into a particles-usable material.
 * The texture and the glow color are copied.
 * The method assumes it receives the Lighting type of material.
 * @param material
 *            the source material
 * @param alphaMaskIndex
 *            the index of the alpha mask to apply to the diffuse texture
 * @param blenderContext
 *            the blender context
 * @return material converted into a particles-usable material
 */
public Material getParticlesMaterial(Material material, Integer alphaMaskIndex, BlenderContext blenderContext) {
    Material result = new Material(blenderContext.getAssetManager(), "Common/MatDefs/Misc/Particle.j3md");
    // copying texture
    MatParam diffuseMap = material.getParam("DiffuseMap");
    if (diffuseMap != null) {
        Texture texture = ((Texture) diffuseMap.getValue()).clone();
        // applying alpha mask to the texture
        Image image = texture.getImage();
        ByteBuffer sourceBB = image.getData(0);
        sourceBB.rewind();
        int w = image.getWidth();
        int h = image.getHeight();
        ByteBuffer bb = BufferUtils.createByteBuffer(w * h * 4);
        IAlphaMask iAlphaMask = alphaMasks.get(alphaMaskIndex);
        iAlphaMask.setImageSize(w, h);
        for (int x = 0; x < w; ++x) {
            for (int y = 0; y < h; ++y) {
                bb.put(sourceBB.get());
                bb.put(sourceBB.get());
                bb.put(sourceBB.get());
                bb.put(iAlphaMask.getAlpha(x, y));
            }
        }
        image = new Image(Format.RGBA8, w, h, bb, ColorSpace.Linear);
        texture.setImage(image);
        result.setTextureParam("Texture", VarType.Texture2D, texture);
    }
    // copying glow color
    MatParam glowColor = material.getParam("GlowColor");
    if (glowColor != null) {
        ColorRGBA color = (ColorRGBA) glowColor.getValue();
        result.setParam("GlowColor", VarType.Vector3, color);
    }
    return result;
}
Also used : MatParam(com.jme3.material.MatParam) ColorRGBA(com.jme3.math.ColorRGBA) Material(com.jme3.material.Material) Image(com.jme3.texture.Image) Texture(com.jme3.texture.Texture) ByteBuffer(java.nio.ByteBuffer)
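
A minimal standalone sketch of the same alpha-masking step, shown here only to make the RGB-to-RGBA expansion easier to follow. The applyAlphaMask name is hypothetical, the source image is assumed to be RGB8 (3 bytes per pixel), and the loop iterates in row-major order so the mask coordinates line up with the buffer layout; IAlphaMask, Image, Format, ColorSpace and BufferUtils are the same jME types used above.

// A sketch, not the loader's code: assumes an RGB8 source image (3 bytes per pixel).
public static Image applyAlphaMask(Image source, IAlphaMask mask) {
    ByteBuffer sourceBB = source.getData(0);
    sourceBB.rewind();
    int w = source.getWidth();
    int h = source.getHeight();
    ByteBuffer bb = BufferUtils.createByteBuffer(w * h * 4);
    mask.setImageSize(w, h);
    for (int y = 0; y < h; ++y) {
        for (int x = 0; x < w; ++x) {
            bb.put(sourceBB.get()); // R
            bb.put(sourceBB.get()); // G
            bb.put(sourceBB.get()); // B
            bb.put(mask.getAlpha(x, y)); // alpha supplied by the mask
        }
    }
    return new Image(Format.RGBA8, w, h, bb, ColorSpace.Linear);
}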

Example 52 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

the class AbstractTextureBlender method blendHSV.

/**
 * The method that performs the ramp blending.
 * 
 * @param type
 *            the blend type
 * @param materialRGB
 *            the RGB value of the material; the result is stored here as well
 * @param fac
 *            the factor that controls how strongly the texture color affects the material color
 * @param pixelColor
 *            the texture color
 * @param blenderContext
 *            the blender context
 */
protected void blendHSV(int type, float[] materialRGB, float fac, float[] pixelColor, BlenderContext blenderContext) {
    float oneMinusFactor = 1.0f - fac;
    MaterialHelper materialHelper = blenderContext.getHelper(MaterialHelper.class);
    switch(type) {
        case MTEX_BLEND_HUE:
            {
                // FIXME: not working well for image textures (works fine for generated textures)
                float[] colorTransformResult = new float[3];
                materialHelper.rgbToHsv(pixelColor[0], pixelColor[1], pixelColor[2], colorTransformResult);
                if (colorTransformResult[0] != 0.0f) {
                    float colH = colorTransformResult[0];
                    materialHelper.rgbToHsv(materialRGB[0], materialRGB[1], materialRGB[2], colorTransformResult);
                    materialHelper.hsvToRgb(colH, colorTransformResult[1], colorTransformResult[2], colorTransformResult);
                    materialRGB[0] = oneMinusFactor * materialRGB[0] + fac * colorTransformResult[0];
                    materialRGB[1] = oneMinusFactor * materialRGB[1] + fac * colorTransformResult[1];
                    materialRGB[2] = oneMinusFactor * materialRGB[2] + fac * colorTransformResult[2];
                }
                break;
            }
        case MTEX_BLEND_SAT:
            {
                float[] colorTransformResult = new float[3];
                materialHelper.rgbToHsv(materialRGB[0], materialRGB[1], materialRGB[2], colorTransformResult);
                float h = colorTransformResult[0];
                float s = colorTransformResult[1];
                float v = colorTransformResult[2];
                if (s != 0.0f) {
                    materialHelper.rgbToHsv(pixelColor[0], pixelColor[1], pixelColor[2], colorTransformResult);
                    materialHelper.hsvToRgb(h, oneMinusFactor * s + fac * colorTransformResult[1], v, materialRGB);
                }
                break;
            }
        case MTEX_BLEND_VAL:
            {
                float[] rgbToHsv = new float[3];
                float[] colToHsv = new float[3];
                materialHelper.rgbToHsv(materialRGB[0], materialRGB[1], materialRGB[2], rgbToHsv);
                materialHelper.rgbToHsv(pixelColor[0], pixelColor[1], pixelColor[2], colToHsv);
                materialHelper.hsvToRgb(rgbToHsv[0], rgbToHsv[1], oneMinusFactor * rgbToHsv[2] + fac * colToHsv[2], materialRGB);
                break;
            }
        case MTEX_BLEND_COLOR:
            {
                // FIXME: not working well for image textures (works fine for generated textures)
                float[] rgbToHsv = new float[3];
                float[] colToHsv = new float[3];
                materialHelper.rgbToHsv(pixelColor[0], pixelColor[1], pixelColor[2], colToHsv);
                if (colToHsv[2] != 0) {
                    materialHelper.rgbToHsv(materialRGB[0], materialRGB[1], materialRGB[2], rgbToHsv);
                    materialHelper.hsvToRgb(colToHsv[0], colToHsv[1], rgbToHsv[2], rgbToHsv);
                    materialRGB[0] = oneMinusFactor * materialRGB[0] + fac * rgbToHsv[0];
                    materialRGB[1] = oneMinusFactor * materialRGB[1] + fac * rgbToHsv[1];
                    materialRGB[2] = oneMinusFactor * materialRGB[2] + fac * rgbToHsv[2];
                }
                break;
            }
        default:
            throw new IllegalStateException("Unknown ramp type: " + type);
    }
}
Also used : MaterialHelper(com.jme3.scene.plugins.blender.materials.MaterialHelper)
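
The branches above all revolve around the same idea: convert to HSV, replace or interpolate one channel, and convert back; the hue and color cases additionally blend the result with the original material color using fac. A minimal sketch of the MTEX_BLEND_HUE case, using java.awt.Color's HSB conversions as a stand-in for MaterialHelper.rgbToHsv/hsvToRgb (an assumption made only for illustration):

// Sketch of the hue-blend case; java.awt.Color is assumed to be an acceptable
// stand-in for MaterialHelper's rgbToHsv/hsvToRgb here.
public static void blendHue(float[] materialRGB, float fac, float[] pixelColor) {
    float oneMinusFactor = 1.0f - fac;
    // hue comes from the texture pixel
    float[] pixelHSB = java.awt.Color.RGBtoHSB(
            (int) (pixelColor[0] * 255), (int) (pixelColor[1] * 255), (int) (pixelColor[2] * 255), null);
    // saturation and value come from the material color
    float[] matHSB = java.awt.Color.RGBtoHSB(
            (int) (materialRGB[0] * 255), (int) (materialRGB[1] * 255), (int) (materialRGB[2] * 255), null);
    int rgb = java.awt.Color.HSBtoRGB(pixelHSB[0], matHSB[1], matHSB[2]);
    float r = ((rgb >> 16) & 0xFF) / 255f;
    float g = ((rgb >> 8) & 0xFF) / 255f;
    float b = (rgb & 0xFF) / 255f;
    // blend the recombined color back into the material color
    materialRGB[0] = oneMinusFactor * materialRGB[0] + fac * r;
    materialRGB[1] = oneMinusFactor * materialRGB[1] + fac * g;
    materialRGB[2] = oneMinusFactor * materialRGB[2] + fac * b;
}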

Example 53 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

the class TextureBlenderDDS method blend.

@Override
public Image blend(Image image, Image baseImage, BlenderContext blenderContext) {
    this.prepareImagesForBlending(image, baseImage);
    Format format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    int depth = image.getDepth();
    if (depth == 0) {
        depth = 1;
    }
    ArrayList<ByteBuffer> dataArray = new ArrayList<ByteBuffer>(depth);
    PixelInputOutput basePixelIO = null;
    float[][] compressedMaterialColor = null;
    TexturePixel[] baseTextureColors = null;
    if (baseImage != null) {
        basePixelIO = PixelIOFactory.getPixelIO(baseImage.getFormat());
        compressedMaterialColor = new float[2][4];
        baseTextureColors = new TexturePixel[] { new TexturePixel(), new TexturePixel() };
    }
    float[] resultPixel = new float[4];
    float[] pixelColor = new float[4];
    TexturePixel[] colors = new TexturePixel[] { new TexturePixel(), new TexturePixel() };
    int baseXTexelIndex = 0, baseYTexelIndex = 0;
    float[] alphas = new float[] { 1, 1 };
    for (int dataLayerIndex = 0; dataLayerIndex < depth; ++dataLayerIndex) {
        ByteBuffer data = image.getData(dataLayerIndex);
        data.rewind();
        ByteBuffer newData = BufferUtils.createByteBuffer(data.remaining());
        while (data.hasRemaining()) {
            if (format == Format.DXT3) {
                long alpha = data.getLong();
                // get alpha for first and last pixel that is compressed in the texel
                byte alpha0 = (byte) (alpha << 4 & 0xFF);
                byte alpha1 = (byte) (alpha >> 60 & 0xFF);
                alphas[0] = alpha0 >= 0 ? alpha0 / 255.0f : 1.0f - ~alpha0 / 255.0f;
                alphas[1] = alpha1 >= 0 ? alpha1 / 255.0f : 1.0f - ~alpha1 / 255.0f;
                newData.putLong(alpha);
            } else if (format == Format.DXT5) {
                byte alpha0 = data.get();
                byte alpha1 = data.get();
                alphas[0] = alpha0 >= 0 ? alpha0 / 255.0f : 1.0f - ~alpha0 / 255.0f;
                alphas[1] = alpha1 >= 0 ? alpha1 / 255.0f : 1.0f - ~alpha1 / 255.0f;
                newData.put(alpha0);
                newData.put(alpha1);
                // only read the next 6 bytes (these are alpha indexes)
                newData.putInt(data.getInt());
                newData.putShort(data.getShort());
            }
            int col0 = RGB565.RGB565_to_ARGB8(data.getShort());
            int col1 = RGB565.RGB565_to_ARGB8(data.getShort());
            colors[0].fromARGB8(col0);
            colors[1].fromARGB8(col1);
            // compressing 16 pixels from the base texture as if they belonged to a texel
            if (baseImage != null) {
                // reading pixels (first and last of the 16 colors array)
                // first pixel
                basePixelIO.read(baseImage, dataLayerIndex, baseTextureColors[0], baseXTexelIndex << 2, baseYTexelIndex << 2);
                // last pixel
                basePixelIO.read(baseImage, dataLayerIndex, baseTextureColors[1], baseXTexelIndex << 2 + 4, baseYTexelIndex << 2 + 4);
                baseTextureColors[0].toRGBA(compressedMaterialColor[0]);
                baseTextureColors[1].toRGBA(compressedMaterialColor[1]);
            }
            // blending colors
            for (int i = 0; i < colors.length; ++i) {
                if (negateTexture) {
                    colors[i].negate();
                }
                colors[i].toRGBA(pixelColor);
                pixelColor[3] = alphas[i];
                this.blendPixel(resultPixel, compressedMaterialColor != null ? compressedMaterialColor[i] : materialColor, pixelColor, blenderContext);
                colors[i].fromARGB(1, resultPixel[0], resultPixel[1], resultPixel[2]);
                int argb8 = colors[i].toARGB8();
                short rgb565 = RGB565.ARGB8_to_RGB565(argb8);
                newData.putShort(rgb565);
            }
            // just copy the remaining 4 bytes of the current texel
            newData.putInt(data.getInt());
            ++baseXTexelIndex;
            if (baseXTexelIndex > image.getWidth() >> 2) {
                baseXTexelIndex = 0;
                ++baseYTexelIndex;
            }
        }
        dataArray.add(newData);
    }
    Image result = dataArray.size() > 1 ? new Image(format, width, height, depth, dataArray, ColorSpace.Linear) : new Image(format, width, height, dataArray.get(0), ColorSpace.Linear);
    if (image.getMipMapSizes() != null) {
        result.setMipMapSizes(image.getMipMapSizes().clone());
    }
    return result;
}
Also used : ArrayList(java.util.ArrayList) Image(com.jme3.texture.Image) ByteBuffer(java.nio.ByteBuffer) Format(com.jme3.texture.Image.Format) PixelInputOutput(com.jme3.scene.plugins.blender.textures.io.PixelInputOutput) TexturePixel(com.jme3.scene.plugins.blender.textures.TexturePixel)
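
Each DXT texel stores its two endpoint colors as 16-bit RGB565 values; the loop above expands them with RGB565.RGB565_to_ARGB8, blends them, and packs them back with RGB565.ARGB8_to_RGB565. A hedged sketch of what such an expansion amounts to (the plugin's actual helper may round differently):

// Illustrative RGB565 -> ARGB8 expansion, not the plugin's implementation.
public static int rgb565ToArgb8(short rgb565) {
    int v = rgb565 & 0xFFFF;
    int r5 = (v >> 11) & 0x1F;
    int g6 = (v >> 5) & 0x3F;
    int b5 = v & 0x1F;
    // replicate the high bits into the low bits to spread 5/6-bit channels over 0..255
    int r = (r5 << 3) | (r5 >> 2);
    int g = (g6 << 2) | (g6 >> 4);
    int b = (b5 << 3) | (b5 >> 2);
    return 0xFF000000 | (r << 16) | (g << 8) | b;
}

The signed-byte expressions such as alpha0 >= 0 ? alpha0 / 255.0f : 1.0f - ~alpha0 / 255.0f are simply an unsigned-byte normalization; they produce the same value as (alpha0 & 0xFF) / 255.0f.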

Example 54 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

the class TextureBlenderLuminance method blend.

public Image blend(Image image, Image baseImage, BlenderContext blenderContext) {
    this.prepareImagesForBlending(image, baseImage);
    Format format = image.getFormat();
    PixelInputOutput basePixelIO = null;
    TexturePixel basePixel = null;
    float[] materialColor = this.materialColor;
    if (baseImage != null) {
        basePixelIO = PixelIOFactory.getPixelIO(baseImage.getFormat());
        materialColor = new float[this.materialColor.length];
        basePixel = new TexturePixel();
    }
    int width = image.getWidth();
    int height = image.getHeight();
    int depth = image.getDepth();
    if (depth == 0) {
        depth = 1;
    }
    ArrayList<ByteBuffer> dataArray = new ArrayList<ByteBuffer>(depth);
    float[] resultPixel = new float[4];
    float[] tinAndAlpha = new float[2];
    for (int dataLayerIndex = 0; dataLayerIndex < depth; ++dataLayerIndex) {
        ByteBuffer data = image.getData(dataLayerIndex);
        data.rewind();
        ByteBuffer newData = BufferUtils.createByteBuffer(data.limit() * 4);
        int dataIndex = 0, x = 0, y = 0;
        while (data.hasRemaining()) {
            // getting the proper material color if the base texture is applied
            if (basePixelIO != null) {
                basePixelIO.read(baseImage, dataLayerIndex, basePixel, x, y);
                basePixel.toRGBA(materialColor);
                ++x;
                if (x >= width) {
                    x = 0;
                    ++y;
                }
            }
            this.getTinAndAlpha(data, format, negateTexture, tinAndAlpha);
            this.blendPixel(resultPixel, materialColor, color, tinAndAlpha[0], blendFactor, blendType, blenderContext);
            newData.put(dataIndex++, (byte) (resultPixel[0] * 255.0f));
            newData.put(dataIndex++, (byte) (resultPixel[1] * 255.0f));
            newData.put(dataIndex++, (byte) (resultPixel[2] * 255.0f));
            newData.put(dataIndex++, (byte) (tinAndAlpha[1] * 255.0f));
        }
        dataArray.add(newData);
    }
    Image result = depth > 1 ? new Image(Format.RGBA8, width, height, depth, dataArray, ColorSpace.Linear) : new Image(Format.RGBA8, width, height, dataArray.get(0), ColorSpace.Linear);
    if (image.getMipMapSizes() != null) {
        result.setMipMapSizes(image.getMipMapSizes().clone());
    }
    return result;
}
Also used : Format(com.jme3.texture.Image.Format) PixelInputOutput(com.jme3.scene.plugins.blender.textures.io.PixelInputOutput) ArrayList(java.util.ArrayList) Image(com.jme3.texture.Image) TexturePixel(com.jme3.scene.plugins.blender.textures.TexturePixel) ByteBuffer(java.nio.ByteBuffer)
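
Once the luminance data has been expanded to an RGBA8 Image, the result can be used like any other texture. A minimal usage sketch; the blender, assetManager, image, baseImage and blenderContext variables are assumed to be in scope, and the stock Unshaded material with its "ColorMap" parameter is only an example target:

// Usage sketch: wrap the blended RGBA8 image in a texture and put it on a material.
Image blended = blender.blend(image, baseImage, blenderContext);
Texture2D texture = new Texture2D(blended);
Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
mat.setTexture("ColorMap", texture);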

Example 55 with Image

use of com.jme3.texture.Image in project jmonkeyengine by jMonkeyEngine.

the class HelloOpenCL method testImages.

private boolean testImages(Context clContext, CommandQueue clQueue) {
    try {
        //query supported formats
        for (MemoryAccess ma : MemoryAccess.values()) {
            for (Image.ImageType type : Image.ImageType.values()) {
                try {
                    System.out.println("Formats for " + ma + " and " + type + ": " + Arrays.toString(clContext.querySupportedFormats(ma, type)));
                } catch (UnsupportedOperationException e) {
                    LOG.warning(e.getLocalizedMessage());
                }
            }
        }
        //create an image
        Image.ImageFormat format = new Image.ImageFormat(Image.ImageChannelOrder.RGBA, Image.ImageChannelType.FLOAT);
        Image.ImageDescriptor descr = new Image.ImageDescriptor(Image.ImageType.IMAGE_2D, 1920, 1080, 0, 0);
        Image image = clContext.createImage(MemoryAccess.READ_WRITE, format, descr);
        System.out.println("image created");
        //check queries
        assertEquals(descr.type, image.getImageType(), "Wrong image type");
        assertEquals(format, image.getImageFormat(), "Wrong image format");
        assertEquals(descr.width, image.getWidth(), "Wrong width");
        assertEquals(descr.height, image.getHeight(), "Wrong height");
        //fill with red and blue
        ColorRGBA color1 = ColorRGBA.Red;
        ColorRGBA color2 = ColorRGBA.Blue;
        Event e1 = image.fillAsync(clQueue, new long[] { 0, 0, 0 }, new long[] { descr.width / 2, descr.height, 1 }, color1);
        Event e2 = image.fillAsync(clQueue, new long[] { descr.width / 2, 0, 0 }, new long[] { descr.width / 2, descr.height, 1 }, color2);
        e1.waitForFinished();
        e2.waitForFinished();
        //copy to a buffer
        Buffer buffer = clContext.createBuffer(4 * 4 * 500 * 1024);
        Event e3 = image.copyToBufferAsync(clQueue, buffer, new long[] { 10, 10, 0 }, new long[] { 500, 1024, 1 }, 0);
        e3.release();
        //this buffer must be completely red
        ByteBuffer map1 = buffer.map(clQueue, MappingAccess.MAP_READ_ONLY);
        FloatBuffer map1F = map1.asFloatBuffer();
        map1F.rewind();
        for (int x = 0; x < 500; ++x) {
            for (int y = 0; y < 1024; ++y) {
                float r = map1F.get();
                float g = map1F.get();
                float b = map1F.get();
                float a = map1F.get();
                assertEquals(1, r, "Wrong red component");
                assertEquals(0, g, "Wrong green component");
                assertEquals(0, b, "Wrong blue component");
                assertEquals(1, a, "Wrong alpha component");
            }
        }
        buffer.unmap(clQueue, map1);
        //create a second image
        format = new Image.ImageFormat(Image.ImageChannelOrder.RGBA, Image.ImageChannelType.FLOAT);
        descr = new Image.ImageDescriptor(Image.ImageType.IMAGE_2D, 512, 512, 0, 0);
        Image image2 = clContext.createImage(MemoryAccess.READ_WRITE, format, descr);
        //copy an area of image1 to image2
        image.copyTo(clQueue, image2, new long[] { 1000, 20, 0 }, new long[] { 0, 0, 0 }, new long[] { 512, 512, 1 });
        //this area should be completely blue
        Image.ImageMapping map2 = image2.map(clQueue, new long[] { 0, 0, 0 }, new long[] { 512, 512, 1 }, MappingAccess.MAP_READ_WRITE);
        FloatBuffer map2F = map2.buffer.asFloatBuffer();
        for (int y = 0; y < 512; ++y) {
            for (int x = 0; x < 512; ++x) {
                long index = 4 * x + y * (map2.rowPitch / 4);
                map2F.position((int) index);
                float r = map2F.get();
                float g = map2F.get();
                float b = map2F.get();
                float a = map2F.get();
                assertEquals(0, r, "Wrong red component");
                assertEquals(0, g, "Wrong green component");
                assertEquals(1, b, "Wrong blue component");
                assertEquals(1, a, "Wrong alpha component");
            }
        }
        image2.unmap(clQueue, map2);
        //release
        image.release();
        image2.release();
        buffer.release();
    } catch (AssertionError ex) {
        LOG.log(Level.SEVERE, "image test failed with an assertion error");
        return false;
    } catch (Exception ex) {
        LOG.log(Level.SEVERE, "image test failed with:", ex);
        return false;
    }
    return true;
}
Also used : FloatBuffer(java.nio.FloatBuffer) ByteBuffer(java.nio.ByteBuffer) ColorRGBA(com.jme3.math.ColorRGBA)
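
The assertEquals calls above are not JUnit's; HelloOpenCL uses its own helper, and the surrounding try/catch turns an AssertionError into a test failure. A plausible shape for such a helper, written here purely as an assumption about code not shown in this snippet:

// Hypothetical helpers matching the assertEquals(expected, actual, message) calls above;
// the real HelloOpenCL implementation may differ.
private static void assertEquals(Object expected, Object actual, String message) {
    if (!java.util.Objects.equals(expected, actual)) {
        throw new AssertionError(message + ": expected " + expected + " but was " + actual);
    }
}

private static void assertEquals(float expected, float actual, String message) {
    if (Math.abs(expected - actual) > 1e-5f) {
        throw new AssertionError(message + ": expected " + expected + " but was " + actual);
    }
}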

Aggregations

Image (com.jme3.texture.Image): 68
ByteBuffer (java.nio.ByteBuffer): 38
Texture (com.jme3.texture.Texture): 27
Texture2D (com.jme3.texture.Texture2D): 19
ArrayList (java.util.ArrayList): 19
Material (com.jme3.material.Material): 18
TextureKey (com.jme3.asset.TextureKey): 17
Vector3f (com.jme3.math.Vector3f): 17
Format (com.jme3.texture.Image.Format): 15
TextureCubeMap (com.jme3.texture.TextureCubeMap): 14
ColorRGBA (com.jme3.math.ColorRGBA): 13
PixelInputOutput (com.jme3.scene.plugins.blender.textures.io.PixelInputOutput): 12
BufferedImage (java.awt.image.BufferedImage): 12
Geometry (com.jme3.scene.Geometry): 10
InputStream (java.io.InputStream): 10
IOException (java.io.IOException): 8
TerrainLodControl (com.jme3.terrain.geomipmap.TerrainLodControl): 7
TerrainQuad (com.jme3.terrain.geomipmap.TerrainQuad): 7
DistanceLodCalculator (com.jme3.terrain.geomipmap.lodcalc.DistanceLodCalculator): 7
AbstractHeightMap (com.jme3.terrain.heightmap.AbstractHeightMap): 7