Usage of com.jme3.scene.VertexBuffer.Format in the jMonkeyEngine project: class RawHeightMap, method load.
/**
 * <code>load</code> fills the height data array with the appropriate data
 * from the set RAW image. If the RAW image has not been set, a
 * RuntimeException is thrown.
 *
 * @return true if the load is successful, false otherwise.
 */
@Override
public boolean load() {
    // Confirm data has been set. Redundant check...
    if (null == stream || size <= 0) {
        throw new RuntimeException("Must supply valid stream and " + "size (> 0)");
    }
    // Clean up any previously loaded map before reloading.
    if (null != heightData) {
        unloadHeightMap();
    }
    // Initialize the height data attributes (size x size grid of samples).
    heightData = new float[size * size];
    // Attempt to read from the supplied stream. try-with-resources ensures
    // the stream is closed even when a read fails partway through — the
    // original code leaked it on IOException.
    try (BufferedInputStream bis = new BufferedInputStream(stream)) {
        if (format == RawHeightMap.FORMAT_16BITLE) {
            // 16-bit little-endian samples require the custom reader;
            // DataInputStream only reads big-endian.
            LittleEndien dis = new LittleEndien(bis);
            // Read the raw file.
            for (int i = 0; i < size; i++) {
                for (int j = 0; j < size; j++) {
                    // swapxy transposes the row/column ordering of the samples.
                    int index = swapxy ? (i + j * size) : ((i * size) + j);
                    heightData[index] = dis.readUnsignedShort();
                }
            }
        } else {
            // 16-bit big-endian or 8-bit samples.
            DataInputStream dis = new DataInputStream(bis);
            // Read the raw file.
            for (int i = 0; i < size; i++) {
                for (int j = 0; j < size; j++) {
                    int index = swapxy ? (i + j * size) : ((i * size) + j);
                    if (format == RawHeightMap.FORMAT_16BITBE) {
                        heightData[index] = dis.readUnsignedShort();
                    } else {
                        heightData[index] = dis.readUnsignedByte();
                    }
                }
            }
        }
    } catch (IOException e1) {
        // Best-effort: report the failure and leave heightData partially
        // filled, matching the original behavior.
        logger.warning("Error reading height data from stream.");
        return false;
    }
    return true;
}
Usage of com.jme3.scene.VertexBuffer.Format in the jMonkeyEngine project: class VRViewManagerOpenVR, method postRender.
/**
 * Sends the rendered eye textures to the OpenVR compositor after the frame
 * has been rendered, and logs detailed diagnostics if either submit fails.
 *
 * @throws IllegalStateException if this view manager is not attached to a
 *         VR environment.
 */
@Override
public void postRender() {
    if (environment != null) {
        if (environment.isInVR()) {
            VRAPI api = environment.getVRHardware();
            if (api.getCompositor() != null) {
                // using the compositor...
                int errl = 0, errr = 0;
                if (environment.isInstanceRendering()) {
                    // Instanced rendering: both eyes share one texture, split
                    // by the left/right texture bounds at submit time.
                    if (leftTextureType.handle == -1 || leftTextureType.handle != getFullTexId()) {
                        leftTextureType.handle = getFullTexId();
                        if (leftTextureType.handle != -1) {
                            leftTextureType.write();
                        }
                    } else {
                        if (api instanceof OpenVR) {
                            int submitFlag = JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default;
                            errr = ((OpenVR) api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, leftTextureType, rightTextureBounds, submitFlag);
                            errl = ((OpenVR) api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, leftTextureBounds, submitFlag);
                        }
                    }
                } else if (leftTextureType.handle == -1 || rightTextureType.handle == -1 || leftTextureType.handle != getLeftTexId() || rightTextureType.handle != getRightTexId()) {
                    // Texture handles changed (or first frame): refresh the
                    // native-side texture descriptors before submitting.
                    leftTextureType.handle = getLeftTexId();
                    if (leftTextureType.handle != -1) {
                        logger.fine("Writing Left texture to native memory at " + leftTextureType.getPointer());
                        leftTextureType.write();
                    }
                    rightTextureType.handle = getRightTexId();
                    if (rightTextureType.handle != -1) {
                        // Fixed: originally logged leftTextureType's pointer here.
                        logger.fine("Writing Right texture to native memory at " + rightTextureType.getPointer());
                        rightTextureType.write();
                    }
                } else {
                    if (api instanceof OpenVR) {
                        errl = ((OpenVR) api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Left, leftTextureType, null, JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default);
                        errr = ((OpenVR) api).getCompositor().Submit.apply(JOpenVRLibrary.EVREye.EVREye_Eye_Right, rightTextureType, null, JOpenVRLibrary.EVRSubmitFlags.EVRSubmitFlags_Submit_Default);
                    }
                }
                if (errl != 0) {
                    logger.severe("Submit to left compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errl) + " (" + Integer.toString(errl) + ")");
                    logger.severe("  Texture color space: " + OpenVRUtil.getEColorSpaceString(leftTextureType.eColorSpace));
                    logger.severe("  Texture type: " + OpenVRUtil.getETextureTypeString(leftTextureType.eType));
                    logger.severe("  Texture handle: " + leftTextureType.handle);
                    logger.severe("  Left eye texture " + leftEyeTexture.getName() + " (" + leftEyeTexture.getImage().getId() + ")");
                    logger.severe("    Type: " + leftEyeTexture.getType());
                    logger.severe("    Size: " + leftEyeTexture.getImage().getWidth() + "x" + leftEyeTexture.getImage().getHeight());
                    logger.severe("    Image depth: " + leftEyeTexture.getImage().getDepth());
                    logger.severe("    Image format: " + leftEyeTexture.getImage().getFormat());
                    logger.severe("    Image color space: " + leftEyeTexture.getImage().getColorSpace());
                }
                if (errr != 0) {
                    // Fixed: originally reported errl (the LEFT eye's error
                    // code) in the right-eye message.
                    logger.severe("Submit to right compositor error: " + OpenVRUtil.getEVRCompositorErrorString(errr) + " (" + Integer.toString(errr) + ")");
                    logger.severe("  Texture color space: " + OpenVRUtil.getEColorSpaceString(rightTextureType.eColorSpace));
                    logger.severe("  Texture type: " + OpenVRUtil.getETextureTypeString(rightTextureType.eType));
                    logger.severe("  Texture handle: " + rightTextureType.handle);
                    logger.severe("  Right eye texture " + rightEyeTexture.getName() + " (" + rightEyeTexture.getImage().getId() + ")");
                    logger.severe("    Type: " + rightEyeTexture.getType());
                    logger.severe("    Size: " + rightEyeTexture.getImage().getWidth() + "x" + rightEyeTexture.getImage().getHeight());
                    logger.severe("    Image depth: " + rightEyeTexture.getImage().getDepth());
                    logger.severe("    Image format: " + rightEyeTexture.getImage().getFormat());
                    logger.severe("    Image color space: " + rightEyeTexture.getImage().getColorSpace());
                }
            }
        }
    } else {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
}
Usage of com.jme3.scene.VertexBuffer.Format in the jMonkeyEngine project: class VRViewManagerOpenVR, method setupFinalFullTexture.
/**
 * Creates the offscreen framebuffer and dual-eye texture sized to the given
 * camera, and redirects the application's main viewport to render into it.
 *
 * @param cam the camera whose dimensions define the texture/framebuffer size.
 * @throws IllegalStateException if this view manager has no VR environment,
 *         or the environment has no attached application.
 */
private void setupFinalFullTexture(Camera cam) {
    // Guard clauses: fail fast when the manager is not fully wired up.
    if (environment == null) {
        throw new IllegalStateException("This VR view manager is not attached to any VR environment.");
    }
    if (environment.getApplication() == null) {
        throw new IllegalStateException("This VR environment is not attached to any application.");
    }
    // Create the offscreen framebuffer sized to the camera.
    FrameBuffer offscreenBuffer = new FrameBuffer(cam.getWidth(), cam.getHeight(), 1);
    //offBuffer.setSrgb(true);
    // Set up the texture the framebuffer renders into: RGBA8, bilinear
    // filtering without mipmaps.
    dualEyeTex = new Texture2D(cam.getWidth(), cam.getHeight(), Image.Format.RGBA8);
    dualEyeTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
    dualEyeTex.setMagFilter(Texture.MagFilter.Bilinear);
    // Log the resulting texture configuration for diagnostics.
    logger.config("Dual eye texture " + dualEyeTex.getName() + " (" + dualEyeTex.getImage().getId() + ")");
    logger.config("               Type: " + dualEyeTex.getType());
    logger.config("               Size: " + dualEyeTex.getImage().getWidth() + "x" + dualEyeTex.getImage().getHeight());
    logger.config("        Image depth: " + dualEyeTex.getImage().getDepth());
    logger.config("       Image format: " + dualEyeTex.getImage().getFormat());
    logger.config("  Image color space: " + dualEyeTex.getImage().getColorSpace());
    // Attach depth and color targets to the framebuffer.
    offscreenBuffer.setDepthBuffer(Image.Format.Depth);
    offscreenBuffer.setColorTexture(dualEyeTex);
    // Point the application's main viewport at the offscreen buffer.
    ViewPort mainViewPort = environment.getApplication().getViewPort();
    mainViewPort.setClearFlags(true, true, true);
    mainViewPort.setBackgroundColor(ColorRGBA.Black);
    mainViewPort.setOutputFrameBuffer(offscreenBuffer);
}
Aggregations