
Example 61 with Quad

Use of com.jme3.scene.shape.Quad in the project jmonkeyengine by jMonkeyEngine.

From the class VRGuiManager, method getGuiQuad:

/**
	 * Create a GUI quad for the given camera.
	 * @param sourceCam the camera
	 * @return a GUI quad for the given camera.
	 */
private Spatial getGuiQuad(Camera sourceCam) {
    if (environment != null) {
        if (environment.getApplication() != null) {
            if (guiQuadNode == null) {
                Vector2f guiCanvasSize = getCanvasSize();
                Camera offCamera = sourceCam.clone();
                offCamera.setParallelProjection(true);
                offCamera.setLocation(Vector3f.ZERO);
                offCamera.lookAt(Vector3f.UNIT_Z, Vector3f.UNIT_Y);
                offView = environment.getApplication().getRenderManager().createPreView("GUI View", offCamera);
                offView.setClearFlags(true, true, true);
                offView.setBackgroundColor(ColorRGBA.BlackNoAlpha);
                // create offscreen framebuffer
                FrameBuffer offBuffer = new FrameBuffer((int) guiCanvasSize.x, (int) guiCanvasSize.y, 1);
                //setup framebuffer's texture
                guiTexture = new Texture2D((int) guiCanvasSize.x, (int) guiCanvasSize.y, Format.RGBA8);
                guiTexture.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
                guiTexture.setMagFilter(Texture.MagFilter.Bilinear);
                //setup framebuffer to use texture
                offBuffer.setDepthBuffer(Format.Depth);
                offBuffer.setColorTexture(guiTexture);
                //set viewport to render to offscreen framebuffer
                offView.setOutputFrameBuffer(offBuffer);
                // setup framebuffer's scene
                Iterator<Spatial> spatialIter = environment.getApplication().getGuiViewPort().getScenes().iterator();
                while (spatialIter.hasNext()) {
                    offView.attachScene(spatialIter.next());
                }
                if (useCurvedSurface) {
                    guiQuad = (Geometry) environment.getApplication().getAssetManager().loadModel("Common/Util/gui_mesh.j3o");
                } else {
                    guiQuad = new Geometry("guiQuad", new CenterQuad(1f, 1f));
                }
                Material mat = new Material(environment.getApplication().getAssetManager(), "Common/MatDefs/VR/GuiOverlay.j3md");
                mat.getAdditionalRenderState().setDepthTest(!overdraw);
                mat.getAdditionalRenderState().setBlendMode(BlendMode.Alpha);
                mat.getAdditionalRenderState().setDepthWrite(false);
                mat.setTexture("ColorMap", guiTexture);
                guiQuad.setQueueBucket(Bucket.Translucent);
                guiQuad.setMaterial(mat);
                guiQuadNode = new Node("gui-quad-node");
                guiQuadNode.setQueueBucket(Bucket.Translucent);
                guiQuadNode.attachChild(guiQuad);
            }
            return guiQuadNode;
        } else {
            throw new IllegalStateException("VR GUI manager underlying environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("VR GUI manager is not attached to any environment.");
    }
}
Also used : Geometry(com.jme3.scene.Geometry) Texture2D(com.jme3.texture.Texture2D) Spatial(com.jme3.scene.Spatial) Vector2f(com.jme3.math.Vector2f) Node(com.jme3.scene.Node) Material(com.jme3.material.Material) Camera(com.jme3.renderer.Camera) FrameBuffer(com.jme3.texture.FrameBuffer) CenterQuad(com.jme3.scene.CenterQuad)
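
In the VR samples, a node like the one returned here is normally kept in front of the headset camera so the offscreen GUI texture stays visible. The sketch below shows that idea in isolation; GuiQuadPlacer, guiDistance and guiScale are illustrative names and values, not part of the VRGuiManager API.

// Hypothetical per-frame positioning of a GUI quad node in front of a VR camera.
// guiDistance and guiScale are assumed values, not VRGuiManager fields.
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.renderer.Camera;
import com.jme3.scene.Node;

public class GuiQuadPlacer {

    private final float guiDistance = 1.5f; // distance in front of the eye (assumed)
    private final float guiScale = 1.0f;    // uniform scale of the quad (assumed)

    /** Moves the GUI quad node so it faces the camera at a fixed distance. */
    public void positionGui(Camera cam, Node guiQuadNode) {
        Vector3f dir = cam.getDirection();
        Vector3f pos = cam.getLocation().add(dir.mult(guiDistance));
        guiQuadNode.setLocalTranslation(pos);
        guiQuadNode.setLocalRotation(new Quaternion().lookAt(dir, cam.getUp()));
        guiQuadNode.setLocalScale(guiScale);
    }
}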

Example 62 with Quad

Use of com.jme3.scene.shape.Quad in the project jmonkeyengine by jMonkeyEngine.

From the class OpenVRFilter, method configureDistortionMesh:

/*
        function converted from:
        https://github.com/ValveSoftware/openvr/blob/master/samples/hellovr_opengl/hellovr_opengl_main.cpp#L1335
    */
private void configureDistortionMesh() {
    float m_iLensGridSegmentCountH = 43, m_iLensGridSegmentCountV = 43;
    // grid cell size: 1/(segmentCount - 1), as in the referenced OpenVR sample
    float w = 1f / (m_iLensGridSegmentCountH - 1f);
    float h = 1f / (m_iLensGridSegmentCountV - 1f);
    float u, v;
    distortionMesh = new Mesh();
    float[] verts = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 3];
    float[] texcoordR = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
    float[] texcoordG = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
    float[] texcoordB = new float[(int) (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2];
    int vertPos = 0, coordPos = 0;
    //left eye distortion verts
    float Xoffset = -1f;
    for (int y = 0; y < m_iLensGridSegmentCountV; y++) {
        for (int x = 0; x < m_iLensGridSegmentCountH; x++) {
            u = x * w;
            v = 1 - y * h;
            // x
            verts[vertPos] = Xoffset + u;
            // y
            verts[vertPos + 1] = -1 + 2 * y * h;
            // z
            verts[vertPos + 2] = 0f;
            vertPos += 3;
            DistortionCoordinates_t dc0 = new DistortionCoordinates_t();
            ((VR_IVRSystem_FnTable) application.getVRHardware().getVRSystem()).ComputeDistortion.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, u, v, dc0);
            texcoordR[coordPos] = dc0.rfRed[0];
            texcoordR[coordPos + 1] = 1 - dc0.rfRed[1];
            texcoordG[coordPos] = dc0.rfGreen[0];
            texcoordG[coordPos + 1] = 1 - dc0.rfGreen[1];
            texcoordB[coordPos] = dc0.rfBlue[0];
            texcoordB[coordPos + 1] = 1 - dc0.rfBlue[1];
            coordPos += 2;
        }
    }
    //right eye distortion verts
    Xoffset = 0;
    for (int y = 0; y < m_iLensGridSegmentCountV; y++) {
        for (int x = 0; x < m_iLensGridSegmentCountH; x++) {
            u = x * w;
            v = 1 - y * h;
            // x
            verts[vertPos] = Xoffset + u;
            // y
            verts[vertPos + 1] = -1 + 2 * y * h;
            // z
            verts[vertPos + 2] = 0f;
            vertPos += 3;
            DistortionCoordinates_t dc0 = new DistortionCoordinates_t();
            ((VR_IVRSystem_FnTable) application.getVRHardware().getVRSystem()).ComputeDistortion.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, u, v, dc0);
            texcoordR[coordPos] = dc0.rfRed[0];
            texcoordR[coordPos + 1] = 1 - dc0.rfRed[1];
            texcoordG[coordPos] = dc0.rfGreen[0];
            texcoordG[coordPos + 1] = 1 - dc0.rfGreen[1];
            texcoordB[coordPos] = dc0.rfBlue[0];
            texcoordB[coordPos + 1] = 1 - dc0.rfBlue[1];
            coordPos += 2;
        }
    }
    // have UV coordinates & positions, now to setup indices
    //std::vector<GLushort> vIndices;
    int[] indices = new int[(int) ((m_iLensGridSegmentCountV - 1) * (m_iLensGridSegmentCountH - 1)) * 6];
    int indexPos = 0;
    int a, b, c, d;
    int offset = 0;
    for (int y = 0; y < m_iLensGridSegmentCountV - 1; y++) {
        for (int x = 0; x < m_iLensGridSegmentCountH - 1; x++) {
            a = (int) (m_iLensGridSegmentCountH * y + x + offset);
            b = (int) (m_iLensGridSegmentCountH * y + x + 1 + offset);
            c = (int) ((y + 1) * m_iLensGridSegmentCountH + x + 1 + offset);
            d = (int) ((y + 1) * m_iLensGridSegmentCountH + x + offset);
            indices[indexPos] = a;
            indices[indexPos + 1] = b;
            indices[indexPos + 2] = c;
            indices[indexPos + 3] = a;
            indices[indexPos + 4] = c;
            indices[indexPos + 5] = d;
            indexPos += 6;
        }
    }
    offset = (int) (m_iLensGridSegmentCountH * m_iLensGridSegmentCountV);
    for (int y = 0; y < m_iLensGridSegmentCountV - 1; y++) {
        for (int x = 0; x < m_iLensGridSegmentCountH - 1; x++) {
            a = (int) (m_iLensGridSegmentCountH * y + x + offset);
            b = (int) (m_iLensGridSegmentCountH * y + x + 1 + offset);
            c = (int) ((y + 1) * m_iLensGridSegmentCountH + x + 1 + offset);
            d = (int) ((y + 1) * m_iLensGridSegmentCountH + x + offset);
            indices[indexPos] = a;
            indices[indexPos + 1] = b;
            indices[indexPos + 2] = c;
            indices[indexPos + 3] = a;
            indices[indexPos + 4] = c;
            indices[indexPos + 5] = d;
            indexPos += 6;
        }
    }
    // OK, create the mesh        
    distortionMesh.setBuffer(VertexBuffer.Type.Position, 3, verts);
    distortionMesh.setBuffer(VertexBuffer.Type.Index, 1, indices);
    distortionMesh.setBuffer(VertexBuffer.Type.TexCoord, 2, texcoordR);
    // TexCoord2 and TexCoord3 are defined in VertexBuffer.Type, so the green and blue UV sets get their own channels
    distortionMesh.setBuffer(VertexBuffer.Type.TexCoord2, 2, texcoordG);
    distortionMesh.setBuffer(VertexBuffer.Type.TexCoord3, 2, texcoordB);
    // TODO: make sure this distortion mesh is used instead of the fullscreen quad
    // when the filter gets rendered; this might require changes to the jME3 core.
}
Also used : DistortionCoordinates_t(com.jme3.system.jopenvr.DistortionCoordinates_t) Mesh(com.jme3.scene.Mesh)
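
Once configured, a mesh like this would normally be wrapped in a Geometry and drawn with a material that samples the three UV sets per color channel. The sketch below shows one plausible way to do that; the "MatDefs/VR/Distortion.j3md" material definition and the "Texture" parameter name are assumptions, since the TODO above notes that wiring the mesh into the filter's render path is still open.

// Hypothetical wrapping of the generated distortion mesh in a renderable Geometry.
// The material definition and parameter names are illustrative only.
import com.jme3.asset.AssetManager;
import com.jme3.material.Material;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.texture.Texture2D;

public class DistortionGeometryFactory {

    /** Builds a renderable geometry from the distortion mesh and the rendered eye texture. */
    public static Geometry wrap(AssetManager assetManager, Mesh distortionMesh, Texture2D eyeTexture) {
        Geometry geom = new Geometry("distortion-mesh", distortionMesh);
        Material mat = new Material(assetManager, "MatDefs/VR/Distortion.j3md"); // assumed matdef
        mat.setTexture("Texture", eyeTexture); // parameter name assumed
        geom.setMaterial(mat);
        return geom;
    }
}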

Aggregations

Geometry (com.jme3.scene.Geometry): 35
Quad (com.jme3.scene.shape.Quad): 34
Material (com.jme3.material.Material): 30
Vector3f (com.jme3.math.Vector3f): 23
Node (com.jme3.scene.Node): 10
Spatial (com.jme3.scene.Spatial): 10
DirectionalLight (com.jme3.light.DirectionalLight): 9
Texture (com.jme3.texture.Texture): 9
Quaternion (com.jme3.math.Quaternion): 8
TerrainQuad (com.jme3.terrain.geomipmap.TerrainQuad): 7
ColorRGBA (com.jme3.math.ColorRGBA): 6
AmbientLight (com.jme3.light.AmbientLight): 5
Vector2f (com.jme3.math.Vector2f): 5
Mesh (com.jme3.scene.Mesh): 5
Texture2D (com.jme3.texture.Texture2D): 5
Picture (com.jme3.ui.Picture): 5
File (java.io.File): 5
ScreenshotAppState (com.jme3.app.state.ScreenshotAppState): 4
BulletAppState (com.jme3.bullet.BulletAppState): 4
CapsuleCollisionShape (com.jme3.bullet.collision.shapes.CapsuleCollisionShape): 4
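
Since every example on this page revolves around com.jme3.scene.shape.Quad, a minimal self-contained usage is sketched below for orientation; the unshaded material and attaching to rootNode inside a SimpleApplication are the usual jME3 conventions, not taken from the examples above.

// Minimal sketch: render a unit Quad with an unshaded colored material.
import com.jme3.app.SimpleApplication;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.scene.Geometry;
import com.jme3.scene.shape.Quad;

public class QuadExample extends SimpleApplication {

    public static void main(String[] args) {
        new QuadExample().start();
    }

    @Override
    public void simpleInitApp() {
        // A 2x1 quad in the XY plane, with its lower-left corner at the local origin.
        Geometry geom = new Geometry("quad", new Quad(2f, 1f));
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        mat.setColor("Color", ColorRGBA.Blue);
        geom.setMaterial(mat);
        rootNode.attachChild(geom);
    }
}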