Example 6 with Environment

Use of com.jme3.audio.Environment in project jmonkeyengine by jMonkeyEngine.

The class VRViewManagerOSVR, method setupFinalFullTexture.

private void setupFinalFullTexture(Camera cam) {
    if (environment != null) {
        if (environment.getApplication() != null) {
            // create offscreen framebuffer
            FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
            //offBuffer.setSrgb(true);
            //setup framebuffer's texture
            dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
            dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
            dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear);
            logger.config("Dual eye texture " + dualEyeTex.getName() + " (" + dualEyeTex.getImage().getId() + ")");
            logger.config("               Type: " + dualEyeTex.getType());
            logger.config("               Size: " + dualEyeTex.getImage().getWidth() + "x" + dualEyeTex.getImage().getHeight());
            logger.config("        Image depth: " + dualEyeTex.getImage().getDepth());
            logger.config("       Image format: " + dualEyeTex.getImage().getFormat());
            logger.config("  Image color space: " + dualEyeTex.getImage().getColorSpace());
            //setup framebuffer to use texture
            out.setDepthBuffer(Image.Format.Depth);
            out.setColorTexture(dualEyeTex);
            ViewPort viewPort = environment.getApplication().getViewPort();
            viewPort.setClearFlags(true, true, true);
            viewPort.setBackgroundColor(ColorRGBA.Black);
            viewPort.setOutputFrameBuffer(out);
        } else {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
Also used : Texture2D(com.jme3.texture.Texture2D) ViewPort(com.jme3.renderer.ViewPort) FrameBuffer(com.jme3.texture.FrameBuffer)
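
The method above only prepares the offscreen render target; the resulting dual-eye texture is consumed elsewhere. As a rough illustration of how such an offscreen texture is typically used in jMonkeyEngine, here is a minimal sketch that maps it onto scene geometry with the stock Unshaded material (not code from this class; assetManager and mirrorGeometry are assumed names):

// Display the offscreen color texture on some scene geometry using the
// Unshaded material definition shipped with jME3.
Material mirrorMat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
mirrorMat.setTexture("ColorMap", dualEyeTex);
mirrorGeometry.setMaterial(mirrorMat);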

Example 7 with Environment

Use of com.jme3.audio.Environment in project jmonkeyengine by jMonkeyEngine.

The class VRViewManagerOSVR, method postRender.

/**
     * Send the textures to the two eyes.
     */
public void postRender() {
    if (environment != null) {
        if (environment.isInVR()) {
            VRAPI api = environment.getVRHardware();
            if (api.getCompositor() != null) {
                // using the compositor...
                int errl = 0, errr = 0;
                // instanced rendering: a single combined texture covers both eyes
                if (environment.isInstanceRendering()) {
                    if (leftTextureType.handle == -1 || leftTextureType.handle != getFullTexId()) {
                        leftTextureType.handle = getFullTexId();
                        if (leftTextureType.handle != -1) {
                            leftTextureType.write();
                            if (api instanceof OSVR) {
                                osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = leftTextureType.handle;
                                osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = dualEyeTex.getImage().getId();
                                osvr_renderBuffer[OSVR.EYE_LEFT].write();
                                registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_LEFT]);
                            }
                        }
                    } else {
                        if (api instanceof OSVR) {
                            ((OSVR) api).handleRenderBufferPresent(osvr_viewDescLeft, osvr_viewDescRight, osvr_renderBuffer[OSVR.EYE_LEFT], osvr_renderBuffer[OSVR.EYE_LEFT]);
                        }
                    }
                } else if (leftTextureType.handle == -1 || rightTextureType.handle == -1 || leftTextureType.handle != getLeftTexId() || rightTextureType.handle != getRightTexId()) {
                    leftTextureType.handle = getLeftTexId();
                    if (leftTextureType.handle != -1) {
                        logger.fine("Writing Left texture to native memory at " + leftTextureType.getPointer());
                        leftTextureType.write();
                        if (api instanceof OSVR) {
                            osvr_renderBuffer[OSVR.EYE_LEFT].colorBufferName = leftTextureType.handle;
                            if (leftEyeDepth != null)
                                osvr_renderBuffer[OSVR.EYE_LEFT].depthStencilBufferName = leftEyeDepth.getImage().getId();
                            osvr_renderBuffer[OSVR.EYE_LEFT].write();
                            registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_LEFT]);
                        }
                    }
                    rightTextureType.handle = getRightTexId();
                    if (rightTextureType.handle != -1) {
                        logger.fine("Writing Right texture to native memory at " + leftTextureType.getPointer());
                        rightTextureType.write();
                        if (api instanceof OSVR) {
                            osvr_renderBuffer[OSVR.EYE_RIGHT].colorBufferName = rightTextureType.handle;
                            if (rightEyeDepth != null)
                                osvr_renderBuffer[OSVR.EYE_RIGHT].depthStencilBufferName = rightEyeDepth.getImage().getId();
                            osvr_renderBuffer[OSVR.EYE_RIGHT].write();
                            registerOSVRBuffer(osvr_renderBuffer[OSVR.EYE_RIGHT]);
                        }
                    }
                } else {
                    if (api instanceof OSVR) {
                        ((OSVR) api).handleRenderBufferPresent(osvr_viewDescFull, osvr_viewDescFull, osvr_renderBuffer[OSVR.EYE_LEFT], osvr_renderBuffer[OSVR.EYE_RIGHT]);
                    }
                }
                if (errl != 0) {
                    logger.severe("Submit to left compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl) + " (" + Integer.toString(errl) + ")");
                    logger.severe("  Texture color space: " + OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace));
                    logger.severe("  Texture type: " + OpenVRUtil.getETextureTypeString(leftTextureType.eType));
                    logger.severe("  Texture handle: " + leftTextureType.handle);
                    logger.severe("  Left eye texture " + leftEyeTexture.getName() + " (" + leftEyeTexture.getImage().getId() + ")");
                    logger.severe("                 Type: " + leftEyeTexture.getType());
                    logger.severe("                 Size: " + leftEyeTexture.getImage().getWidth() + "x" + leftEyeTexture.getImage().getHeight());
                    logger.severe("          Image depth: " + leftEyeTexture.getImage().getDepth());
                    logger.severe("         Image format: " + leftEyeTexture.getImage().getFormat());
                    logger.severe("    Image color space: " + leftEyeTexture.getImage().getColorSpace());
                }
                if (errr != 0) {
                    logger.severe("Submit to right compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl) + " (" + Integer.toString(errl) + ")");
                    logger.severe("  Texture color space: " + OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace));
                    logger.severe("  Texture type: " + OpenVRUtil.getETextureTypeString(rightTextureType.eType));
                    logger.severe("  Texture handle: " + rightTextureType.handle);
                    logger.severe("  Right eye texture " + rightEyeTexture.getName() + " (" + rightEyeTexture.getImage().getId() + ")");
                    logger.severe("                 Type: " + rightEyeTexture.getType());
                    logger.severe("                 Size: " + rightEyeTexture.getImage().getWidth() + "x" + rightEyeTexture.getImage().getHeight());
                    logger.severe("          Image depth: " + rightEyeTexture.getImage().getDepth());
                    logger.severe("         Image format: " + rightEyeTexture.getImage().getFormat());
                    logger.severe("    Image color space: " + rightEyeTexture.getImage().getColorSpace());
                }
            }
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
Also used : OSVR(com.jme3.input.vr.OSVR) VRAPI(com.jme3.input.vr.VRAPI)
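
The texture-id helpers getFullTexId(), getLeftTexId() and getRightTexId() referenced above are not listed in this example. A minimal sketch of what such a helper can look like, assuming it simply exposes the OpenGL id of the texture's underlying image (an assumption, not this class's exact code):

// Hypothetical helper: return the native texture id of a jME texture,
// or -1 while the renderer has not uploaded the image yet.
private int getTexId(Texture2D tex) {
    if (tex == null || tex.getImage() == null) {
        return -1;
    }
    return tex.getImage().getId();
}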

Example 8 with Environment

Use of com.jme3.audio.Environment in project jmonkeyengine by jMonkeyEngine.

The class VRViewManagerOSVR, method prepareCameraSize.

/**
     * Prepare the size of the given {@link Camera camera} to adapt it to the underlying rendering context.
     * @param cam the {@link Camera camera} to prepare.
     * @param xMult the camera width multiplier.
     */
private void prepareCameraSize(Camera cam, float xMult) {
    if (environment != null) {
        if (environment.getApplication() == null) {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
        Vector2f size = new Vector2f();
        VRAPI vrhmd = environment.getVRHardware();
        if (vrhmd == null) {
            size.x = 1280f;
            size.y = 720f;
        } else {
            vrhmd.getRenderSize(size);
        }
        if (size.x < environment.getApplication().getContext().getSettings().getWidth()) {
            size.x = environment.getApplication().getContext().getSettings().getWidth();
        }
        if (size.y < environment.getApplication().getContext().getSettings().getHeight()) {
            size.y = environment.getApplication().getContext().getSettings().getHeight();
        }
        if (environment.isInstanceRendering()) {
            size.x *= 2f;
        }
        // other adjustments
        size.x *= xMult;
        size.x *= resMult;
        size.y *= resMult;
        if (cam.getWidth() != size.x || cam.getHeight() != size.y) {
            cam.resize((int) size.x, (int) size.y, false);
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
Also used : Vector2f(com.jme3.math.Vector2f) VRAPI(com.jme3.input.vr.VRAPI)
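
A hypothetical call site for prepareCameraSize, assuming a per-eye camera cloned from the application's camera (the xMult values are illustrative, not taken from this class):

// Hypothetical usage sketch: with instanced rendering one camera spans both
// eyes, so the full width is kept; otherwise each eye camera is built from
// half of the combined width.
Camera eyeCam = environment.getApplication().getCamera().clone();
float xMult = environment.isInstanceRendering() ? 1f : 0.5f;
prepareCameraSize(eyeCam, xMult);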

Example 9 with Environment

Use of com.jme3.audio.Environment in project jmonkeyengine by jMonkeyEngine.

The class VRViewManagerOSVR, method setupViewBuffers.

private ViewPort setupViewBuffers(Camera cam, String viewName) {
    if (environment != null) {
        if (environment.getApplication() != null) {
            // create offscreen framebuffer
            FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
            //offBufferLeft.setSrgb(true);
            //setup framebuffer's texture
            Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
            offTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
            offTex.setMagFilter(Texture.MagFilter.Bilinear);
            //setup framebuffer to use texture
            offBufferLeft.setDepthBuffer(Image.Format.Depth);
            offBufferLeft.setColorTexture(offTex);
            ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam);
            viewPort.setClearFlags(true, true, true);
            viewPort.setBackgroundColor(ColorRGBA.Black);
            Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
            while (spatialIter.hasNext()) {
                viewPort.attachScene(spatialIter.next());
            }
            //set viewport to render to offscreen framebuffer
            viewPort.setOutputFrameBuffer(offBufferLeft);
            return viewPort;
        } else {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
Also used : Texture2D(com.jme3.texture.Texture2D) Spatial(com.jme3.scene.Spatial) ViewPort(com.jme3.renderer.ViewPort) FrameBuffer(com.jme3.texture.FrameBuffer)
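
Once a per-eye viewport has been created this way, its color texture can be read back from the output framebuffer, for example to hand it on to a distortion scene or the compositor. A minimal sketch (the view name and local variables are assumptions, not code from this class):

// Retrieve the offscreen color texture that setupViewBuffers() attached.
ViewPort leftView = setupViewBuffers(leftCamera, "LeftView");
Texture2D leftEyeTexture = (Texture2D) leftView.getOutputFrameBuffer().getColorBuffer().getTexture();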

Example 10 with Environment

Use of com.jme3.audio.Environment in project jmonkeyengine by jMonkeyEngine.

The class VRViewManagerOSVR, method initialize.

/**
     * Initialize the VR view manager.
     */
public void initialize() {
    logger.config("Initializing VR view manager.");
    if (environment != null) {
        initTextureSubmitStructs();
        setupCamerasAndViews();
        setupVRScene();
        moveScreenProcessingToEyes();
        if (environment.hasTraditionalGUIOverlay()) {
            environment.getVRMouseManager().initialize();
            // update the pose to position the gui correctly on start
            update(0f);
            environment.getVRGUIManager().positionGui();
        }
        if (environment.getApplication() != null) {
            // if we are OSVR, our primary mirror window needs to be the same size as the render manager's output...
            if (environment.getVRHardware() instanceof OSVR) {
                int origWidth = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getWidth();
                int origHeight = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode().getHeight();
                long window = ((LwjglWindow) environment.getApplication().getContext()).getWindowHandle();
                Vector2f windowSize = new Vector2f();
                ((OSVR) environment.getVRHardware()).getRenderSize(windowSize);
                windowSize.x = Math.max(windowSize.x * 2f, leftCamera.getWidth());
                org.lwjgl.glfw.GLFW.glfwSetWindowSize(window, (int) windowSize.x, (int) windowSize.y);
                environment.getApplication().getContext().getSettings().setResolution((int) windowSize.x, (int) windowSize.y);
                if (environment.getApplication().getRenderManager() != null) {
                    environment.getApplication().getRenderManager().notifyReshape((int) windowSize.x, (int) windowSize.y);
                }
                org.lwjgl.glfw.GLFW.glfwSetWindowPos(window, origWidth - (int) windowSize.x, 32);
                org.lwjgl.glfw.GLFW.glfwFocusWindow(window);
                org.lwjgl.glfw.GLFW.glfwSetCursorPos(window, origWidth / 2.0, origHeight / 2.0);
                logger.config("Initialized VR view manager [SUCCESS]");
            } else {
                throw new IllegalStateException("Underlying VR hardware should be " + OSVR.class.getSimpleName());
            }
        } else {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
Also used : OSVR(com.jme3.input.vr.OSVR) Vector2f(com.jme3.math.Vector2f) LwjglWindow(com.jme3.system.lwjgl.LwjglWindow)
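
The cast to LwjglWindow above assumes the LWJGL3 desktop backend. A defensive variant would guard the cast first; a sketch, assuming an additional import of com.jme3.system.JmeContext:

// Only the LWJGL3 desktop context exposes a GLFW window handle.
JmeContext context = environment.getApplication().getContext();
if (context instanceof LwjglWindow) {
    long window = ((LwjglWindow) context).getWindowHandle();
    org.lwjgl.glfw.GLFW.glfwFocusWindow(window);
}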

Aggregations

Camera (com.jme3.renderer.Camera): 9 usages
Vector2f (com.jme3.math.Vector2f): 8 usages
Vector3f (com.jme3.math.Vector3f): 8 usages
Spatial (com.jme3.scene.Spatial): 7 usages
VRAPI (com.jme3.input.vr.VRAPI): 6 usages
ViewPort (com.jme3.renderer.ViewPort): 6 usages
Texture2D (com.jme3.texture.Texture2D): 6 usages
Material (com.jme3.material.Material): 5 usages
Geometry (com.jme3.scene.Geometry): 5 usages
Node (com.jme3.scene.Node): 5 usages
FrameBuffer (com.jme3.texture.FrameBuffer): 5 usages
AudioNode (com.jme3.audio.AudioNode): 4 usages
OSVR (com.jme3.input.vr.OSVR): 4 usages
Quaternion (com.jme3.math.Quaternion): 4 usages
OpenVR (com.jme3.input.vr.OpenVR): 3 usages
ColorRGBA (com.jme3.math.ColorRGBA): 3 usages
FilterPostProcessor (com.jme3.post.FilterPostProcessor): 3 usages
Environment (com.jme3.audio.Environment): 2 usages
AmbientLight (com.jme3.light.AmbientLight): 2 usages
CartoonSSAO (com.jme3.post.CartoonSSAO): 2 usages