use of com.jme3.renderer.ViewPort in project jmonkeyengine by jMonkeyEngine.
the class LegacyApplication method initCamera.
/**
* Creates the camera to use for rendering. Default values are perspective
* projection with 45° field of view, with near and far values 1 and 1000
* units respectively.
*/
private void initCamera() {
    cam = new Camera(settings.getWidth(), settings.getHeight());
    cam.setFrustumPerspective(45f, (float) cam.getWidth() / cam.getHeight(), 1f, 1000f);
    cam.setLocation(new Vector3f(0f, 0f, 10f));
    cam.lookAt(new Vector3f(0f, 0f, 0f), Vector3f.UNIT_Y);
    renderManager = new RenderManager(renderer);
    // Remy - 09/14/2010: set the timer in the renderManager
    renderManager.setTimer(timer);
    if (prof != null) {
        renderManager.setAppProfiler(prof);
    }
    viewPort = renderManager.createMainView("Default", cam);
    viewPort.setClearFlags(true, true, true);
    // Create a new cam for the gui
    Camera guiCam = new Camera(settings.getWidth(), settings.getHeight());
    guiViewPort = renderManager.createPostView("Gui Default", guiCam);
    guiViewPort.setClearFlags(false, false, false);
}
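The same RenderManager calls used here are what application code typically uses to add extra views of its own. A minimal sketch of adding a picture-in-picture main view from inside a SimpleApplication subclass (the names pipCam and pipView are assumed for illustration, they are not part of LegacyApplication):

// Minimal sketch, assuming this runs in a SimpleApplication subclass
// (cam, renderManager and rootNode are inherited fields).
Camera pipCam = cam.clone();
pipCam.setViewPort(0.7f, 1f, 0.7f, 1f); // window fractions: left, right, bottom, top
ViewPort pipView = renderManager.createMainView("PiP", pipCam);
pipView.setClearFlags(true, true, true); // clear color, depth and stencil like the default view
pipView.attachScene(rootNode);           // reuse the scene graph SimpleApplication already updates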
use of com.jme3.renderer.ViewPort in project jmonkeyengine by jMonkeyEngine.
the class TestFBOPassthrough method simpleRender.
@Override
public void simpleRender(RenderManager rm) {
    Renderer r = rm.getRenderer();
    // do FBO rendering
    r.setFrameBuffer(fb);
    // FBO uses current camera
    rm.setCamera(cam, false);
    r.clearBuffers(true, true, true);
    rm.renderScene(fbNode, viewPort);
    rm.flushQueue(viewPort);
    // go back to default rendering and let
    // SimpleApplication render the default scene
    r.setFrameBuffer(null);
}
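simpleRender above relies on two fields that initialization code would have prepared: fb, an offscreen FrameBuffer, and fbNode, the scene rendered into it. A minimal sketch of such setup (field names and sizes are assumed; how the resulting texture is displayed is up to the application):

// Minimal sketch, assuming fields: private FrameBuffer fb; private Node fbNode = new Node("FBO scene");
int w = settings.getWidth();
int h = settings.getHeight();
Texture2D fbTexture = new Texture2D(w, h, Image.Format.RGBA8);
fb = new FrameBuffer(w, h, 1);          // 1 sample, no multisampling
fb.setDepthBuffer(Image.Format.Depth);  // depth attachment so normal 3D rendering works offscreen
fb.addColorTexture(fbTexture);          // color output ends up in fbTexture
// fbNode is then populated with whatever should be rendered offscreen, and
// fbTexture can be shown in the normal scene, e.g. on a quad or a Picture.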
use of com.jme3.renderer.ViewPort in project jmonkeyengine by jMonkeyEngine.
the class TestMultiViewsFilters method simpleInitApp.
public void simpleInitApp() {
    // create the geometry and attach it
    Geometry teaGeom = (Geometry) assetManager.loadModel("Models/Teapot/Teapot.obj");
    teaGeom.scale(3);
    teaGeom.getMaterial().setColor("GlowColor", ColorRGBA.Green);
    DirectionalLight dl = new DirectionalLight();
    dl.setColor(ColorRGBA.White);
    dl.setDirection(Vector3f.UNIT_XYZ.negate());
    rootNode.addLight(dl);
    rootNode.attachChild(teaGeom);

    // Setup first view
    cam.setViewPort(.5f, 1f, 0f, 0.5f);
    cam.setLocation(new Vector3f(3.3212643f, 4.484704f, 4.2812433f));
    cam.setRotation(new Quaternion(-0.07680723f, 0.92299235f, -0.2564353f, -0.27645364f));

    // Setup second view
    Camera cam2 = cam.clone();
    cam2.setViewPort(0f, 0.5f, 0f, 0.5f);
    cam2.setLocation(new Vector3f(-0.10947256f, 1.5760219f, 4.81758f));
    cam2.setRotation(new Quaternion(0.0010108891f, 0.99857414f, -0.04928594f, 0.020481428f));
    final ViewPort view2 = renderManager.createMainView("Bottom Left", cam2);
    view2.setClearFlags(true, true, true);
    view2.attachScene(rootNode);

    // Setup third view
    Camera cam3 = cam.clone();
    cam3.setName("cam3");
    cam3.setViewPort(0f, .5f, .5f, 1f);
    cam3.setLocation(new Vector3f(0.2846221f, 6.4271426f, 0.23380789f));
    cam3.setRotation(new Quaternion(0.004381671f, 0.72363687f, -0.69015175f, 0.0045953835f));
    final ViewPort view3 = renderManager.createMainView("Top Left", cam3);
    view3.setClearFlags(true, true, true);
    view3.attachScene(rootNode);

    // Setup fourth view
    Camera cam4 = cam.clone();
    cam4.setName("cam4");
    cam4.setViewPort(.5f, 1f, .5f, 1f);
    cam4.setLocation(new Vector3f(4.775564f, 1.4548365f, 0.11491505f));
    cam4.setRotation(new Quaternion(0.02356979f, -0.74957186f, 0.026729556f, 0.66096294f));
    final ViewPort view4 = renderManager.createMainView("Top Right", cam4);
    view4.setClearFlags(true, true, true);
    view4.attachScene(rootNode);

    // Camera cam5 = new Camera(200, 200);
    // cam5.setFrustumPerspective(45f, (float) cam.getWidth() / cam.getHeight(), 1f, 1000f);
    // cam5.setName("cam5");
    // cam5.setViewPort(5.23f, 6.33f, 0.56f, 1.66f);
    // this.setViewPortAreas(5.23f, 6.33f, 0.56f, 1.66f);
    // this.setViewPortCamSize(200, 200);
    // 1046,1266,112,332
    Camera cam5 = cam.clone();
    cam5.setName("cam5");
    cam5.setViewPort(1046f / settings.getWidth(), 1266f / settings.getWidth(),
            112f / settings.getHeight(), 332f / settings.getHeight());
    cam5.setLocation(new Vector3f(0.2846221f, 6.4271426f, 0.23380789f));
    cam5.setRotation(new Quaternion(0.004381671f, 0.72363687f, -0.69015175f, 0.0045953835f));
    final ViewPort view5 = renderManager.createMainView("center", cam5);
    view5.setClearFlags(true, true, true);
    view5.attachScene(rootNode);

    rootNode.attachChild(SkyFactory.createSky(assetManager, "Textures/Sky/Bright/BrightSky.dds", false));

    final FilterPostProcessor fpp = new FilterPostProcessor(assetManager);
    final FilterPostProcessor fpp2 = new FilterPostProcessor(assetManager);
    final FilterPostProcessor fpp3 = new FilterPostProcessor(assetManager);
    final FilterPostProcessor fpp4 = new FilterPostProcessor(assetManager);
    final FilterPostProcessor fpp5 = new FilterPostProcessor(assetManager);
    // fpp.addFilter(new WaterFilter(rootNode, Vector3f.UNIT_Y.mult(-1)));
    fpp3.addFilter(new CartoonEdgeFilter());
    fpp2.addFilter(new CrossHatchFilter());
    final FogFilter ff = new FogFilter(ColorRGBA.Yellow, 0.7f, 2);
    fpp.addFilter(ff);
    final RadialBlurFilter rbf = new RadialBlurFilter(1, 10);
    // rbf.setEnabled(false);
    fpp.addFilter(rbf);
    SSAOFilter f = new SSAOFilter(1.8899765f, 20.490374f, 0.4699998f, 0.1f);
    fpp4.addFilter(f);
    SSAOUI ui = new SSAOUI(inputManager, f);
    fpp5.addFilter(new BloomFilter(BloomFilter.GlowMode.Objects));

    viewPort.addProcessor(fpp);
    view2.addProcessor(fpp2);
    view3.addProcessor(fpp3);
    view4.addProcessor(fpp4);
    view5.addProcessor(fpp5);

    inputManager.addListener(new ActionListener() {
        public void onAction(String name, boolean isPressed, float tpf) {
            if (name.equals("press") && isPressed) {
                if (filterEnabled) {
                    viewPort.removeProcessor(fpp);
                    view2.removeProcessor(fpp2);
                    view3.removeProcessor(fpp3);
                    view4.removeProcessor(fpp4);
                    view5.removeProcessor(fpp5);
                } else {
                    viewPort.addProcessor(fpp);
                    view2.addProcessor(fpp2);
                    view3.addProcessor(fpp3);
                    view4.addProcessor(fpp4);
                    view5.addProcessor(fpp5);
                }
                filterEnabled = !filterEnabled;
            }
            if (name.equals("filter") && isPressed) {
                ff.setEnabled(!ff.isEnabled());
                rbf.setEnabled(!rbf.isEnabled());
            }
        }
    }, "press", "filter");
    inputManager.addMapping("press", new KeyTrigger(KeyInput.KEY_SPACE));
    inputManager.addMapping("filter", new KeyTrigger(KeyInput.KEY_F));
}
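Camera.setViewPort takes normalized window fractions (left, right, bottom, top), which is why cam5 divides its pixel rectangle 1046..1266 x 112..332 by the window width and height. A small hypothetical helper (not part of the test) makes that conversion explicit:

// Hypothetical helper: place a camera's viewport over a pixel rectangle of the window.
private void setViewPortInPixels(Camera c, float left, float right, float bottom, float top) {
    c.setViewPort(left / settings.getWidth(), right / settings.getWidth(),
            bottom / settings.getHeight(), top / settings.getHeight());
}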
use of com.jme3.renderer.ViewPort in project jmonkeyengine by jMonkeyEngine.
the class TestPostFiltersCompositing method simpleInitApp.
public void simpleInitApp() {
    this.flyCam.setMoveSpeed(10);
    cam.setLocation(new Vector3f(0.028406568f, 2.015769f, 7.386517f));
    cam.setRotation(new Quaternion(-1.0729783E-5f, 0.9999721f, -0.0073241726f, -0.0014647911f));
    makeScene();

    // Creating the main viewport post processor
    FilterPostProcessor fpp = new FilterPostProcessor(assetManager);
    fpp.addFilter(new ColorOverlayFilter(ColorRGBA.Blue));
    viewPort.addProcessor(fpp);

    // Creating a frame buffer for the main viewport
    FrameBuffer mainVPFrameBuffer = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
    Texture2D mainVPTexture = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
    mainVPFrameBuffer.addColorTexture(mainVPTexture);
    mainVPFrameBuffer.setDepthBuffer(Image.Format.Depth);
    viewPort.setOutputFrameBuffer(mainVPFrameBuffer);

    // Creating the post processor for the gui viewport
    final FilterPostProcessor guifpp = new FilterPostProcessor(assetManager);
    guifpp.setFrameBufferFormat(Image.Format.RGBA8);
    guifpp.addFilter(new ColorOverlayFilter(ColorRGBA.Red));
    // This will compose the main viewport texture with the gui viewport back buffer.
    // Note that you can switch the order of the filters so that gui viewport filters
    // are applied (or not) to the main viewport texture.
    guifpp.addFilter(new ComposeFilter(mainVPTexture));
    guiViewPort.addProcessor(guifpp);

    // Compositing is done by mixing textures depending on the alpha channel,
    // so it's important that the gui viewport's clear color alpha value is set to 0.
    guiViewPort.setBackgroundColor(ColorRGBA.BlackNoAlpha);
    guiViewPort.setClearColor(true);
}
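Two details carry the technique: the offscreen frame buffer still needs a depth attachment (Image.Format.Depth) because the 3D scene relies on depth testing even when its color output goes to a texture, and the gui viewport must actually clear its background each frame (setClearColor(true) with ColorRGBA.BlackNoAlpha) so the alpha-based mixing works. As the comment notes, the filter order can be swapped; a sketch of that alternative ordering (assumed, not taken from the test) would apply the red overlay to the composed result as well:

// Assumed alternative: compose first, then overlay, so the red tint also covers the main viewport texture.
guifpp.addFilter(new ComposeFilter(mainVPTexture));
guifpp.addFilter(new ColorOverlayFilter(ColorRGBA.Red));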
use of com.jme3.renderer.ViewPort in project jmonkeyengine by jMonkeyEngine.
the class AbstractVRViewManager method syncScreenProcessing.
/**
* Sets the two views to use the list of {@link SceneProcessor processors}.
* @param sourceViewport the {@link ViewPort viewport} that contains the processors to use.
*/
public void syncScreenProcessing(ViewPort sourceViewport) {
    if (environment != null) {
        if (getRightViewport() == null) {
            return;
        }
        if (environment.getApplication() != null) {
            // setup post processing filters
            if (getRightPostProcessor() == null) {
                rightPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
                leftPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
            }
            // clear out all filters & processors, to start from scratch
            getRightPostProcessor().removeAllFilters();
            getLeftPostProcessor().removeAllFilters();
            getLeftViewport().clearProcessors();
            getRightViewport().clearProcessors();
            // if we have no processors to sync, don't add the FilterPostProcessor
            if (sourceViewport.getProcessors().isEmpty()) {
                return;
            }
            // add post processors we just made, which are empty
            getLeftViewport().addProcessor(getLeftPostProcessor());
            getRightViewport().addProcessor(getRightPostProcessor());
            // add them to the left viewport processor & clone them to the right
            for (SceneProcessor sceneProcessor : sourceViewport.getProcessors()) {
                if (sceneProcessor instanceof FilterPostProcessor) {
                    for (Filter f : ((FilterPostProcessor) sceneProcessor).getFilterList()) {
                        if (f instanceof TranslucentBucketFilter) {
                            // just remove this filter, we will add it at the end manually
                            ((FilterPostProcessor) sceneProcessor).removeFilter(f);
                        } else {
                            getLeftPostProcessor().addFilter(f);
                            // clone to the right
                            Filter f2;
                            if (f instanceof FogFilter) {
                                f2 = FilterUtil.cloneFogFilter((FogFilter) f);
                            } else if (f instanceof CartoonSSAO) {
                                f2 = new CartoonSSAO((CartoonSSAO) f);
                            } else if (f instanceof SSAOFilter) {
                                f2 = FilterUtil.cloneSSAOFilter((SSAOFilter) f);
                            } else if (f instanceof DirectionalLightShadowFilter) {
                                f2 = FilterUtil.cloneDirectionalLightShadowFilter(
                                        environment.getApplication().getAssetManager(),
                                        (DirectionalLightShadowFilter) f);
                            } else {
                                // dof, bloom, lightscattering etc.
                                f2 = f;
                            }
                            getRightPostProcessor().addFilter(f2);
                        }
                    }
                } else if (sceneProcessor instanceof VRDirectionalLightShadowRenderer) {
                    // shadow processing
                    // TODO: make right shadow processor use same left shadow maps for performance
                    VRDirectionalLightShadowRenderer dlsr = (VRDirectionalLightShadowRenderer) sceneProcessor;
                    VRDirectionalLightShadowRenderer dlsrRight = dlsr.clone();
                    dlsrRight.setLight(dlsr.getLight());
                    getRightViewport().getProcessors().add(0, dlsrRight);
                    getLeftViewport().getProcessors().add(0, sceneProcessor);
                }
            }
            // make sure each has a translucent filter renderer
            getLeftPostProcessor().addFilter(new TranslucentBucketFilter());
            getRightPostProcessor().addFilter(new TranslucentBucketFilter());
        } else {
            throw new IllegalStateException("The VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
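Because syncScreenProcessing rebuilds the per-eye FilterPostProcessors from whatever is currently attached to the source viewport, it has to be re-run whenever the main viewport's processors change. A hypothetical call site (the surrounding method and parameter names are assumed; only syncScreenProcessing itself comes from the class above):

// Hypothetical helper: call after adding or removing filters on the application's main viewport.
void refreshEyeProcessing(AbstractVRViewManager viewManager, ViewPort mainViewPort) {
    viewManager.syncScreenProcessing(mainViewPort);
}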