Use of com.jme3.texture.FrameBuffer in project jmonkeyengine by jMonkeyEngine.
From the class ScreenshotAppState, the postFrame method:
public void postFrame(FrameBuffer out) {
    if (capture) {
        capture = false;
        Camera curCamera = rm.getCurrentCamera();
        int viewX = (int) (curCamera.getViewPortLeft() * curCamera.getWidth());
        int viewY = (int) (curCamera.getViewPortBottom() * curCamera.getHeight());
        int viewWidth = (int) ((curCamera.getViewPortRight() - curCamera.getViewPortLeft()) * curCamera.getWidth());
        int viewHeight = (int) ((curCamera.getViewPortTop() - curCamera.getViewPortBottom()) * curCamera.getHeight());
        renderer.setViewPort(0, 0, width, height);
        renderer.readFrameBuffer(out, outBuf);
        renderer.setViewPort(viewX, viewY, viewWidth, viewHeight);
        File file;
        String filename;
        if (numbered) {
            shotIndex++;
            filename = shotName + shotIndex;
        } else {
            filename = shotName;
        }
        if (filePath == null) {
            file = new File(JmeSystem.getStorageFolder() + File.separator + filename + ".png").getAbsoluteFile();
        } else {
            file = new File(filePath + filename + ".png").getAbsoluteFile();
        }
        logger.log(Level.FINE, "Saving ScreenShot to: {0}", file.getAbsolutePath());
        try {
            writeImageFile(file);
        } catch (IOException ex) {
            logger.log(Level.SEVERE, "Error while saving screenshot", ex);
        }
    }
}
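For context, postFrame only reads the framebuffer back when the capture flag has been set, which ScreenshotAppState does in response to its screenshot trigger. A minimal sketch of attaching the state and requesting a capture programmatically (the storage path is an assumption, and takeScreenshot() is the state's public request method in recent jME3 versions):

import com.jme3.app.SimpleApplication;
import com.jme3.app.state.ScreenshotAppState;

public class ScreenshotExample extends SimpleApplication {

    @Override
    public void simpleInitApp() {
        // "screenshots/" is an assumed output directory, not taken from the snippet above.
        ScreenshotAppState screenshots = new ScreenshotAppState("screenshots/");
        stateManager.attach(screenshots);
        // Sets the capture flag; the actual read-back and file write happen in postFrame().
        screenshots.takeScreenshot();
    }
}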
Use of com.jme3.texture.FrameBuffer in project jmonkeyengine by jMonkeyEngine.
From the class TestPostFiltersCompositing, the simpleInitApp method:
public void simpleInitApp() {
    this.flyCam.setMoveSpeed(10);
    cam.setLocation(new Vector3f(0.028406568f, 2.015769f, 7.386517f));
    cam.setRotation(new Quaternion(-1.0729783E-5f, 0.9999721f, -0.0073241726f, -0.0014647911f));
    makeScene();
    // Create the main viewport's post processor.
    FilterPostProcessor fpp = new FilterPostProcessor(assetManager);
    fpp.addFilter(new ColorOverlayFilter(ColorRGBA.Blue));
    viewPort.addProcessor(fpp);
    // Create a frame buffer for the main viewport.
    FrameBuffer mainVPFrameBuffer = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
    Texture2D mainVPTexture = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
    mainVPFrameBuffer.addColorTexture(mainVPTexture);
    mainVPFrameBuffer.setDepthBuffer(Image.Format.Depth);
    viewPort.setOutputFrameBuffer(mainVPFrameBuffer);
    // Create the post processor for the GUI viewport.
    final FilterPostProcessor guifpp = new FilterPostProcessor(assetManager);
    guifpp.setFrameBufferFormat(Image.Format.RGBA8);
    guifpp.addFilter(new ColorOverlayFilter(ColorRGBA.Red));
    // This composes the main viewport texture with the GUI viewport back buffer.
    // Note that you can switch the order of the filters so that the GUI viewport filters
    // are, or are not, applied to the main viewport texture.
    guifpp.addFilter(new ComposeFilter(mainVPTexture));
    guiViewPort.addProcessor(guifpp);
    // Compositing mixes the textures based on the alpha channel, so it is important
    // that the GUI viewport clear color has an alpha value of 0.
    guiViewPort.setBackgroundColor(ColorRGBA.BlackNoAlpha);
    guiViewPort.setClearColor(true);
}
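The core pattern in this test is redirecting the main viewport into a FrameBuffer whose color attachment is an ordinary Texture2D, so a filter on the GUI viewport can sample it. A stand-alone sketch of just that pattern, using only the FrameBuffer calls shown above (the helper class and method names are made up for illustration):

import com.jme3.renderer.ViewPort;
import com.jme3.texture.FrameBuffer;
import com.jme3.texture.Image;
import com.jme3.texture.Texture2D;

public final class RenderToTextureUtil {

    // Redirect a viewport's output into a texture that other passes can sample.
    public static Texture2D renderToTexture(ViewPort vp, int width, int height) {
        FrameBuffer fb = new FrameBuffer(width, height, 1);        // 1 sample, no MSAA
        Texture2D tex = new Texture2D(width, height, Image.Format.RGBA8);
        fb.setDepthBuffer(Image.Format.Depth);                     // depth attachment for 3D scenes
        fb.setColorTexture(tex);                                   // color output lands in the texture
        vp.setOutputFrameBuffer(fb);                               // viewport now renders offscreen
        return tex;
    }
}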
Use of com.jme3.texture.FrameBuffer in project jmonkeyengine by jMonkeyEngine.
From the class VRViewManagerOSVR, the setupFinalFullTexture method:
private void setupFinalFullTexture(Camera cam) {
    if (environment != null) {
        if (environment.getApplication() != null) {
            // create offscreen framebuffer
            FrameBuffer out = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
            //offBuffer.setSrgb(true);
            // setup framebuffer's texture
            dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
            dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
            dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear);
            logger.config("Dual eye texture " + dualEyeTex.getName() + " (" + dualEyeTex.getImage().getId() + ")");
            logger.config(" Type: " + dualEyeTex.getType());
            logger.config(" Size: " + dualEyeTex.getImage().getWidth() + "x" + dualEyeTex.getImage().getHeight());
            logger.config(" Image depth: " + dualEyeTex.getImage().getDepth());
            logger.config(" Image format: " + dualEyeTex.getImage().getFormat());
            logger.config(" Image color space: " + dualEyeTex.getImage().getColorSpace());
            // setup framebuffer to use texture
            out.setDepthBuffer(Image.Format.Depth);
            out.setColorTexture(dualEyeTex);
            ViewPort viewPort = environment.getApplication().getViewPort();
            viewPort.setClearFlags(true, true, true);
            viewPort.setBackgroundColor(ColorRGBA.Black);
            viewPort.setOutputFrameBuffer(out);
        } else {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
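Once the main viewport renders into dualEyeTex, the texture can be sampled like any other. As an illustration only (this is not part of VRViewManagerOSVR), a fullscreen Picture could mirror the composed dual-eye image into the desktop window; all names in this sketch are assumptions:

import com.jme3.app.SimpleApplication;
import com.jme3.texture.Texture2D;
import com.jme3.ui.Picture;

// Hypothetical mirror view: display the dual-eye texture in the GUI node so the
// VR output can be monitored on the regular monitor.
static void attachMirrorView(SimpleApplication app, Texture2D dualEyeTex, int width, int height) {
    Picture mirror = new Picture("VR mirror");
    mirror.setTexture(app.getAssetManager(), dualEyeTex, false);
    mirror.setWidth(width);
    mirror.setHeight(height);
    app.getGuiNode().attachChild(mirror);
}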
Use of com.jme3.texture.FrameBuffer in project jmonkeyengine by jMonkeyEngine.
From the class VRViewManagerOSVR, the setupViewBuffers method:
private ViewPort setupViewBuffers(Camera cam, String viewName) {
    if (environment != null) {
        if (environment.getApplication() != null) {
            // create offscreen framebuffer
            FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
            //offBufferLeft.setSrgb(true);
            // setup framebuffer's texture
            Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
            offTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
            offTex.setMagFilter(Texture.MagFilter.Bilinear);
            // setup framebuffer to use texture
            offBufferLeft.setDepthBuffer(Image.Format.Depth);
            offBufferLeft.setColorTexture(offTex);
            ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam);
            viewPort.setClearFlags(true, true, true);
            viewPort.setBackgroundColor(ColorRGBA.Black);
            Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
            while (spatialIter.hasNext()) {
                viewPort.attachScene(spatialIter.next());
            }
            // set viewport to render to offscreen framebuffer
            viewPort.setOutputFrameBuffer(offBufferLeft);
            return viewPort;
        } else {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
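Since setupViewBuffers is private, it is invoked from inside the view manager, typically once per eye. A caller sketch (the camera fields and view names here are assumptions, not taken from the class):

// One pre-view per eye, each rendering into its own offscreen FrameBuffer.
ViewPort leftViewPort = setupViewBuffers(leftCamera, "Left View");
ViewPort rightViewPort = setupViewBuffers(rightCamera, "Right View");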
Use of com.jme3.texture.FrameBuffer in project jmonkeyengine by jMonkeyEngine.
From the class VRViewManagerOpenVR, the setupViewBuffers method:
private ViewPort setupViewBuffers(Camera cam, String viewName) {
    if (environment != null) {
        if (environment.getApplication() != null) {
            // create offscreen framebuffer
            FrameBuffer offBufferLeft = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
            //offBufferLeft.setSrgb(true);
            // setup framebuffer's texture
            Texture2D offTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
            offTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
            offTex.setMagFilter(Texture.MagFilter.Bilinear);
            // setup framebuffer to use texture
            offBufferLeft.setDepthBuffer(Image.Format.Depth);
            offBufferLeft.setColorTexture(offTex);
            ViewPort viewPort = environment.getApplication().getRenderManager().createPreView(viewName, cam);
            viewPort.setClearFlags(true, true, true);
            viewPort.setBackgroundColor(ColorRGBA.Black);
            Iterator<Spatial> spatialIter = environment.getApplication().getViewPort().getScenes().iterator();
            while (spatialIter.hasNext()) {
                viewPort.attachScene(spatialIter.next());
            }
            // set viewport to render to offscreen framebuffer
            viewPort.setOutputFrameBuffer(offBufferLeft);
            return viewPort;
        } else {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
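The texture attached to offBufferLeft is what ultimately gets handed to the compositor; it can also be recovered from the returned viewport instead of being kept in a separate field. A small sketch, assuming the standard FrameBuffer accessors getColorBuffer() and getTexture():

// Recover the color texture that the offscreen eye viewport renders into.
FrameBuffer fb = viewPort.getOutputFrameBuffer();
Texture2D eyeTexture = (Texture2D) fb.getColorBuffer().getTexture();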