Use of com.jme3.post.SceneProcessor in project jmonkeyengine by jMonkeyEngine.
The class RenderManager, method renderViewPort.
/**
* Renders the {@link ViewPort}.
* <p>
* If the ViewPort is {@link ViewPort#isEnabled() disabled}, this method
* returns immediately. Otherwise, the ViewPort is rendered by
* the following process:<br>
* <ul>
* <li>All {@link SceneProcessor scene processors} that are attached
* to the ViewPort are {@link SceneProcessor#initialize(com.jme3.renderer.RenderManager, com.jme3.renderer.ViewPort) initialized}.
* </li>
* <li>The SceneProcessors' {@link SceneProcessor#preFrame(float) } method
* is called.</li>
* <li>The ViewPort's {@link ViewPort#getOutputFrameBuffer() output framebuffer}
* is set on the Renderer</li>
* <li>The camera is set on the renderer, including its view port parameters.
* (see {@link #setCamera(com.jme3.renderer.Camera, boolean) })</li>
* <li>Any buffers that the ViewPort requests to be cleared are cleared
* and the {@link ViewPort#getBackgroundColor() background color} is set</li>
* <li>Every scene that is attached to the ViewPort is flattened into
* the ViewPort's render queue
* (see {@link #renderViewPortQueues(com.jme3.renderer.ViewPort, boolean) })
* </li>
* <li>The SceneProcessors' {@link SceneProcessor#postQueue(com.jme3.renderer.queue.RenderQueue) }
* method is called.</li>
* <li>The render queue is sorted and then flushed, sending
* rendering commands to the underlying Renderer implementation.
* (see {@link #flushQueue(com.jme3.renderer.ViewPort) })</li>
* <li>The SceneProcessors' {@link SceneProcessor#postFrame(com.jme3.texture.FrameBuffer) }
* method is called.</li>
* <li>The translucent queue of the ViewPort is sorted and then flushed
* (see {@link #renderTranslucentQueue(com.jme3.renderer.ViewPort) })</li>
* <li>If any objects remain in the render queue, they are removed
* from the queue. These are generally objects added to the
* {@link RenderQueue#renderShadowQueue(com.jme3.renderer.queue.RenderQueue.ShadowMode, com.jme3.renderer.RenderManager, com.jme3.renderer.Camera, boolean)
* shadow queue}
* that were not rendered because of a missing shadow renderer.</li>
* </ul>
*
* @param vp View port to render
* @param tpf Time per frame value
*/
public void renderViewPort(ViewPort vp, float tpf) {
    if (!vp.isEnabled()) {
        return;
    }
    if (prof != null) prof.vpStep(VpStep.BeginRender, vp, null);

    SafeArrayList<SceneProcessor> processors = vp.getProcessors();
    if (processors.isEmpty()) {
        processors = null;
    }

    if (processors != null) {
        if (prof != null) prof.vpStep(VpStep.PreFrame, vp, null);
        for (SceneProcessor proc : processors.getArray()) {
            if (!proc.isInitialized()) {
                proc.initialize(this, vp);
            }
            proc.setProfiler(this.prof);
            if (prof != null) prof.spStep(SpStep.ProcPreFrame, proc.getClass().getSimpleName());
            proc.preFrame(tpf);
        }
    }

    renderer.setFrameBuffer(vp.getOutputFrameBuffer());
    setCamera(vp.getCamera(), false);
    if (vp.isClearDepth() || vp.isClearColor() || vp.isClearStencil()) {
        if (vp.isClearColor()) {
            renderer.setBackgroundColor(vp.getBackgroundColor());
        }
        renderer.clearBuffers(vp.isClearColor(), vp.isClearDepth(), vp.isClearStencil());
    }

    if (prof != null) prof.vpStep(VpStep.RenderScene, vp, null);
    List<Spatial> scenes = vp.getScenes();
    for (int i = scenes.size() - 1; i >= 0; i--) {
        renderScene(scenes.get(i), vp);
    }

    if (processors != null) {
        if (prof != null) prof.vpStep(VpStep.PostQueue, vp, null);
        for (SceneProcessor proc : processors.getArray()) {
            if (prof != null) prof.spStep(SpStep.ProcPostQueue, proc.getClass().getSimpleName());
            proc.postQueue(vp.getQueue());
        }
    }

    if (prof != null) prof.vpStep(VpStep.FlushQueue, vp, null);
    flushQueue(vp);

    if (processors != null) {
        if (prof != null) prof.vpStep(VpStep.PostFrame, vp, null);
        for (SceneProcessor proc : processors.getArray()) {
            if (prof != null) prof.spStep(SpStep.ProcPostFrame, proc.getClass().getSimpleName());
            proc.postFrame(vp.getOutputFrameBuffer());
        }
        if (prof != null) prof.vpStep(VpStep.ProcEndRender, vp, null);
    }

    // renders the translucent objects queue after processors have been rendered
    renderTranslucentQueue(vp);

    // clear any remaining spatials that were not rendered.
    clearQueue(vp);

    if (prof != null) prof.vpStep(VpStep.EndRender, vp, null);
}
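
To make the lifecycle described in the Javadoc above concrete, here is a minimal sketch of a custom SceneProcessor that simply tracks which callbacks renderViewPort invokes. The class name LifecycleSketchProcessor is hypothetical and not part of jMonkeyEngine; only the SceneProcessor interface methods shown are assumed from the engine. It would be attached with viewPort.addProcessor(new LifecycleSketchProcessor()).

import com.jme3.post.SceneProcessor;
import com.jme3.profile.AppProfiler;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.texture.FrameBuffer;

/**
 * Illustrative no-op processor showing where renderViewPort() calls into
 * the SceneProcessor lifecycle. Hypothetical class, not part of the engine.
 */
public class LifecycleSketchProcessor implements SceneProcessor {

    private RenderManager renderManager;
    private ViewPort viewPort;
    private AppProfiler profiler;
    private boolean initialized;

    @Override
    public void initialize(RenderManager rm, ViewPort vp) {
        // Called once by renderViewPort() the first time the processor is encountered.
        this.renderManager = rm;
        this.viewPort = vp;
        this.initialized = true;
    }

    @Override
    public void reshape(ViewPort vp, int w, int h) {
        // Called when the output surface changes size (see AwtPanel.reshapeInThread below).
    }

    @Override
    public boolean isInitialized() {
        return initialized;
    }

    @Override
    public void preFrame(float tpf) {
        // Runs before the framebuffer, camera, and clear state are set up.
    }

    @Override
    public void postQueue(RenderQueue rq) {
        // Runs after the scenes are flattened into the render queue, before it is flushed.
    }

    @Override
    public void postFrame(FrameBuffer out) {
        // Runs after the queues are flushed, before the translucent queue is rendered.
    }

    @Override
    public void setProfiler(AppProfiler profiler) {
        // Receives the same AppProfiler that renderViewPort() passes along.
        this.profiler = profiler;
    }

    @Override
    public void cleanup() {
        initialized = false;
    }
}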
Use of com.jme3.post.SceneProcessor in project jmonkeyengine by jMonkeyEngine.
The class AbstractVRViewManager, method syncScreenProcessing.
/**
* Sets the two views to use the list of {@link SceneProcessor processors}.
* @param sourceViewport the {@link ViewPort viewport} that contains the processors to use.
*/
public void syncScreenProcessing(ViewPort sourceViewport) {
    if (environment != null) {
        if (getRightViewport() == null) {
            return;
        }
        if (environment.getApplication() != null) {
            // setup post processing filters
            if (getRightPostProcessor() == null) {
                rightPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
                leftPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
            }
            // clear out all filters & processors, to start from scratch
            getRightPostProcessor().removeAllFilters();
            getLeftPostProcessor().removeAllFilters();
            getLeftViewport().clearProcessors();
            getRightViewport().clearProcessors();
            // if we have no processors to sync, don't add the FilterPostProcessor
            if (sourceViewport.getProcessors().isEmpty()) {
                return;
            }
            // add post processors we just made, which are empty
            getLeftViewport().addProcessor(getLeftPostProcessor());
            getRightViewport().addProcessor(getRightPostProcessor());
            // add them to the left viewport processor & clone them to the right
            for (SceneProcessor sceneProcessor : sourceViewport.getProcessors()) {
                if (sceneProcessor instanceof FilterPostProcessor) {
                    for (Filter f : ((FilterPostProcessor) sceneProcessor).getFilterList()) {
                        if (f instanceof TranslucentBucketFilter) {
                            // just remove this filter, we will add it at the end manually
                            ((FilterPostProcessor) sceneProcessor).removeFilter(f);
                        } else {
                            getLeftPostProcessor().addFilter(f);
                            // clone to the right
                            Filter f2;
                            if (f instanceof FogFilter) {
                                f2 = FilterUtil.cloneFogFilter((FogFilter) f);
                            } else if (f instanceof CartoonSSAO) {
                                f2 = new CartoonSSAO((CartoonSSAO) f);
                            } else if (f instanceof SSAOFilter) {
                                f2 = FilterUtil.cloneSSAOFilter((SSAOFilter) f);
                            } else if (f instanceof DirectionalLightShadowFilter) {
                                f2 = FilterUtil.cloneDirectionalLightShadowFilter(environment.getApplication().getAssetManager(), (DirectionalLightShadowFilter) f);
                            } else {
                                // dof, bloom, lightscattering etc.
                                f2 = f;
                            }
                            getRightPostProcessor().addFilter(f2);
                        }
                    }
                } else if (sceneProcessor instanceof VRDirectionalLightShadowRenderer) {
                    // shadow processing
                    // TODO: make right shadow processor use same left shadow maps for performance
                    VRDirectionalLightShadowRenderer dlsr = (VRDirectionalLightShadowRenderer) sceneProcessor;
                    VRDirectionalLightShadowRenderer dlsrRight = dlsr.clone();
                    dlsrRight.setLight(dlsr.getLight());
                    getRightViewport().getProcessors().add(0, dlsrRight);
                    getLeftViewport().getProcessors().add(0, sceneProcessor);
                }
            }
            // make sure each has a translucent filter renderer
            getLeftPostProcessor().addFilter(new TranslucentBucketFilter());
            getRightPostProcessor().addFilter(new TranslucentBucketFilter());
        } else {
            throw new IllegalStateException("The VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
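
As a usage sketch (not taken from the engine's sources): assuming app is a VR-enabled SimpleApplication and viewManager is a concrete AbstractVRViewManager, filters added to the main viewport's FilterPostProcessor are mirrored onto both eye viewports with a single call. A BloomFilter falls into the generic "else" branch above, so the same instance is shared by both eyes, while a FogFilter gets a per-eye clone.

// Hypothetical call site; "app" and "viewManager" are assumed, not shown on this page.
FilterPostProcessor fpp = new FilterPostProcessor(app.getAssetManager());
fpp.addFilter(new BloomFilter());   // shared instance: handled by the generic "else" branch
fpp.addFilter(new FogFilter());     // cloned per eye via FilterUtil.cloneFogFilter()
app.getViewPort().addProcessor(fpp);

// Mirror the main viewport's processors onto the left and right eye viewports.
viewManager.syncScreenProcessing(app.getViewPort());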
Use of com.jme3.post.SceneProcessor in project jmonkeyengine by jMonkeyEngine.
The class AwtPanel, method reshapeInThread.
private void reshapeInThread(int width, int height) {
    byteBuf = BufferUtils.ensureLargeEnough(byteBuf, width * height * 4);
    intBuf = byteBuf.asIntBuffer();

    if (fb != null) {
        fb.dispose();
        fb = null;
    }

    fb = new FrameBuffer(width, height, 1);
    fb.setDepthBuffer(Format.Depth);
    fb.setColorBuffer(Format.RGB8);
    fb.setSrgb(srgb);

    if (attachAsMain) {
        rm.getRenderer().setMainFrameBufferOverride(fb);
    }

    synchronized (lock) {
        img = new BufferedImage(width, height, BufferedImage.TYPE_INT_BGR);
    }

    // synchronized (lock) {
    //     img = (BufferedImage) getGraphicsConfiguration().createCompatibleImage(width, height);
    // }

    AffineTransform tx = AffineTransform.getScaleInstance(1, -1);
    tx.translate(0, -img.getHeight());
    transformOp = new AffineTransformOp(tx, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);

    for (ViewPort vp : viewPorts) {
        if (!attachAsMain) {
            vp.setOutputFrameBuffer(fb);
        }
        vp.getCamera().resize(width, height, true);
        // Main framebuffer should use RenderManager.notifyReshape().
        for (SceneProcessor sp : vp.getProcessors()) {
            sp.reshape(vp, width, height);
        }
    }
}
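
The scale-by-(1, -1) transform above exists because OpenGL frame grabs are bottom-up while AWT images are top-down. The following is a hedged sketch of how transformOp and img could be consumed on the AWT side; the paint override is illustrative only, and AwtPanel's actual drawing code is not shown on this page.

// Illustrative only: assumes this component is a java.awt.Canvas subclass and
// that "img", "transformOp", and "lock" are the fields initialized above.
@Override
public void paint(java.awt.Graphics g) {
    java.awt.Graphics2D g2d = (java.awt.Graphics2D) g;
    synchronized (lock) {
        // Applies the vertical flip while blitting the rendered frame to the screen.
        g2d.drawImage(img, transformOp, 0, 0);
    }
}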
Use of com.jme3.post.SceneProcessor in project jmonkeyengine by jMonkeyEngine.
The class VRViewManagerOSVR, method syncScreenProcessing.
/**
* Sets the two views to use the list of {@link SceneProcessor processors}.
* @param sourceViewport the {@link ViewPort viewport} that contains the processors to use.
*/
public void syncScreenProcessing(ViewPort sourceViewport) {
    if (rightViewport == null) {
        return;
    }
    if (environment != null) {
        if (environment.getApplication() != null) {
            // setup post processing filters
            if (rightPostProcessor == null) {
                rightPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
                leftPostProcessor = new FilterPostProcessor(environment.getApplication().getAssetManager());
            }
            // clear out all filters & processors, to start from scratch
            rightPostProcessor.removeAllFilters();
            leftPostProcessor.removeAllFilters();
            leftViewport.clearProcessors();
            rightViewport.clearProcessors();
            // if we have no processors to sync, don't add the FilterPostProcessor
            if (sourceViewport.getProcessors().isEmpty()) {
                return;
            }
            // add post processors we just made, which are empty
            leftViewport.addProcessor(leftPostProcessor);
            rightViewport.addProcessor(rightPostProcessor);
            // add them to the left viewport processor & clone them to the right
            for (SceneProcessor sceneProcessor : sourceViewport.getProcessors()) {
                if (sceneProcessor instanceof FilterPostProcessor) {
                    for (Filter f : ((FilterPostProcessor) sceneProcessor).getFilterList()) {
                        if (f instanceof TranslucentBucketFilter) {
                            // just remove this filter, we will add it at the end manually
                            ((FilterPostProcessor) sceneProcessor).removeFilter(f);
                        } else {
                            leftPostProcessor.addFilter(f);
                            // clone to the right
                            Filter f2;
                            if (f instanceof FogFilter) {
                                f2 = FilterUtil.cloneFogFilter((FogFilter) f);
                            } else if (f instanceof CartoonSSAO) {
                                f2 = new CartoonSSAO((CartoonSSAO) f);
                            } else if (f instanceof SSAOFilter) {
                                f2 = FilterUtil.cloneSSAOFilter((SSAOFilter) f);
                            } else if (f instanceof DirectionalLightShadowFilter) {
                                f2 = FilterUtil.cloneDirectionalLightShadowFilter(environment.getApplication().getAssetManager(), (DirectionalLightShadowFilter) f);
                            } else {
                                // dof, bloom, lightscattering etc.
                                f2 = f;
                            }
                            rightPostProcessor.addFilter(f2);
                        }
                    }
                } else if (sceneProcessor instanceof VRDirectionalLightShadowRenderer) {
                    // shadow processing
                    // TODO: make right shadow processor use same left shadow maps for performance
                    VRDirectionalLightShadowRenderer dlsr = (VRDirectionalLightShadowRenderer) sceneProcessor;
                    VRDirectionalLightShadowRenderer dlsrRight = dlsr.clone();
                    dlsrRight.setLight(dlsr.getLight());
                    rightViewport.getProcessors().add(0, dlsrRight);
                    leftViewport.getProcessors().add(0, sceneProcessor);
                }
            }
            // make sure each has a translucent filter renderer
            leftPostProcessor.addFilter(new TranslucentBucketFilter());
            rightPostProcessor.addFilter(new TranslucentBucketFilter());
        } else {
            throw new IllegalStateException("This VR environment is not attached to any application.");
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
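
FilterUtil is an internal jME3 VR helper whose source is not shown on this page. As an assumption-labeled illustration of why stateful filters such as FogFilter are given a fresh instance per eye (each FilterPostProcessor initializes its filters against its own render state), a manual clone based only on FogFilter's public getters and setters might look like this:

import com.jme3.post.filters.FogFilter;

// NOT the actual FilterUtil.cloneFogFilter() implementation -- an illustrative
// sketch using only FogFilter's public API. A separate instance per eye avoids
// two FilterPostProcessors competing over one filter's internal state.
public final class FogFilterCloneSketch {

    private FogFilterCloneSketch() {
    }

    public static FogFilter cloneFogFilter(FogFilter source) {
        FogFilter copy = new FogFilter();
        copy.setFogColor(source.getFogColor().clone());
        copy.setFogDensity(source.getFogDensity());
        copy.setFogDistance(source.getFogDistance());
        return copy;
    }
}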