Use of java.nio.BufferUnderflowException in project jmonkeyengine by jMonkeyEngine.
Class LODGeomap, method writeTexCoordArray.
public FloatBuffer writeTexCoordArray(FloatBuffer store, Vector2f offset, Vector2f scale, float offsetAmount, int totalSize) {
    if (store != null) {
        if (store.remaining() < getWidth() * getHeight() * 2) {
            throw new BufferUnderflowException();
        }
    } else {
        store = BufferUtils.createFloatBuffer(getWidth() * getHeight() * 2);
    }
    if (offset == null) {
        offset = new Vector2f();
    }
    Vector2f tcStore = new Vector2f();
    // work from bottom of heightmap up, so we don't flip the coords
    for (int y = getHeight() - 1; y >= 0; y--) {
        for (int x = 0; x < getWidth(); x++) {
            getUV(x, y, tcStore, offset, offsetAmount, totalSize);
            float tx = tcStore.x * scale.x;
            float ty = tcStore.y * scale.y;
            store.put(tx);
            store.put(ty);
        }
    }
    return store;
}
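The null-check and remaining-capacity guard at the top of this method is plain java.nio usage: a caller-supplied buffer must have room for getWidth() * getHeight() * 2 floats, otherwise a fresh buffer of exactly that size is created. Below is a minimal, self-contained sketch of the same contract; the class name, the prepareStore helper, and the fixed 4x4 grid are hypothetical stand-ins for the geomap's width and height.

import java.nio.BufferUnderflowException;
import java.nio.FloatBuffer;

public class TexCoordBufferSketch {
    // Hypothetical grid dimensions, chosen only for illustration.
    static final int WIDTH = 4;
    static final int HEIGHT = 4;

    static FloatBuffer prepareStore(FloatBuffer store) {
        int required = WIDTH * HEIGHT * 2; // two floats (u, v) per vertex
        if (store != null) {
            // A preallocated buffer must have enough space left for every texture coordinate.
            if (store.remaining() < required) {
                throw new BufferUnderflowException();
            }
            return store;
        }
        // No buffer supplied: allocate one of exactly the required size.
        return FloatBuffer.allocate(required);
    }

    public static void main(String[] args) {
        FloatBuffer ok = prepareStore(FloatBuffer.allocate(WIDTH * HEIGHT * 2));
        System.out.println("remaining = " + ok.remaining()); // 32

        try {
            prepareStore(FloatBuffer.allocate(4)); // deliberately too small
        } catch (BufferUnderflowException e) {
            System.out.println("caller's buffer was too small");
        }
    }
}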
Use of java.nio.BufferUnderflowException in project jmonkeyengine by jMonkeyEngine.
Class LODGeomap, method writeNormalArray.
@Override
public FloatBuffer writeNormalArray(FloatBuffer store, Vector3f scale) {
    if (!isLoaded()) {
        throw new NullPointerException();
    }
    if (store != null) {
        if (store.remaining() < getWidth() * getHeight() * 3) {
            throw new BufferUnderflowException();
        }
    } else {
        store = BufferUtils.createFloatBuffer(getWidth() * getHeight() * 3);
    }
    store.rewind();
    TempVars vars = TempVars.get();
    Vector3f rootPoint = vars.vect1;
    Vector3f rightPoint = vars.vect2;
    Vector3f leftPoint = vars.vect3;
    Vector3f topPoint = vars.vect4;
    Vector3f bottomPoint = vars.vect5;
    Vector3f tmp1 = vars.vect6;
    // calculate normals for each polygon
    for (int r = 0; r < getHeight(); r++) {
        for (int c = 0; c < getWidth(); c++) {
            rootPoint.set(0, getValue(c, r), 0);
            Vector3f normal = vars.vect8;
            if (r == 0) {
                // first row
                if (c == 0) {
                    // first column
                    rightPoint.set(1, getValue(c + 1, r), 0);
                    bottomPoint.set(0, getValue(c, r + 1), 1);
                    getNormal(bottomPoint, rootPoint, rightPoint, scale, normal);
                } else if (c == getWidth() - 1) {
                    // last column
                    leftPoint.set(-1, getValue(c - 1, r), 0);
                    bottomPoint.set(0, getValue(c, r + 1), 1);
                    getNormal(leftPoint, rootPoint, bottomPoint, scale, normal);
                } else {
                    // all middle columns
                    leftPoint.set(-1, getValue(c - 1, r), 0);
                    rightPoint.set(1, getValue(c + 1, r), 0);
                    bottomPoint.set(0, getValue(c, r + 1), 1);
                    normal.set(getNormal(leftPoint, rootPoint, bottomPoint, scale, tmp1));
                    normal.addLocal(getNormal(bottomPoint, rootPoint, rightPoint, scale, tmp1));
                }
            } else if (r == getHeight() - 1) {
                // last row
                if (c == 0) {
                    // first column
                    topPoint.set(0, getValue(c, r - 1), -1);
                    rightPoint.set(1, getValue(c + 1, r), 0);
                    getNormal(rightPoint, rootPoint, topPoint, scale, normal);
                } else if (c == getWidth() - 1) {
                    // last column
                    topPoint.set(0, getValue(c, r - 1), -1);
                    leftPoint.set(-1, getValue(c - 1, r), 0);
                    getNormal(topPoint, rootPoint, leftPoint, scale, normal);
                } else {
                    // all middle columns
                    topPoint.set(0, getValue(c, r - 1), -1);
                    leftPoint.set(-1, getValue(c - 1, r), 0);
                    rightPoint.set(1, getValue(c + 1, r), 0);
                    normal.set(getNormal(topPoint, rootPoint, leftPoint, scale, tmp1));
                    normal.addLocal(getNormal(rightPoint, rootPoint, topPoint, scale, tmp1));
                }
            } else {
                // all middle rows
                if (c == 0) {
                    // first column
                    topPoint.set(0, getValue(c, r - 1), -1);
                    rightPoint.set(1, getValue(c + 1, r), 0);
                    bottomPoint.set(0, getValue(c, r + 1), 1);
                    normal.set(getNormal(rightPoint, rootPoint, topPoint, scale, tmp1));
                    normal.addLocal(getNormal(bottomPoint, rootPoint, rightPoint, scale, tmp1));
                } else if (c == getWidth() - 1) {
                    // last column
                    topPoint.set(0, getValue(c, r - 1), -1);
                    leftPoint.set(-1, getValue(c - 1, r), 0);
                    bottomPoint.set(0, getValue(c, r + 1), 1);
                    normal.set(getNormal(topPoint, rootPoint, leftPoint, scale, tmp1));
                    normal.addLocal(getNormal(leftPoint, rootPoint, bottomPoint, scale, tmp1));
                } else {
                    // all middle columns
                    topPoint.set(0, getValue(c, r - 1), -1);
                    leftPoint.set(-1, getValue(c - 1, r), 0);
                    rightPoint.set(1, getValue(c + 1, r), 0);
                    bottomPoint.set(0, getValue(c, r + 1), 1);
                    normal.set(getNormal(topPoint, rootPoint, leftPoint, scale, tmp1));
                    normal.addLocal(getNormal(leftPoint, rootPoint, bottomPoint, scale, tmp1));
                    normal.addLocal(getNormal(bottomPoint, rootPoint, rightPoint, scale, tmp1));
                    normal.addLocal(getNormal(rightPoint, rootPoint, topPoint, scale, tmp1));
                }
            }
            normal.normalizeLocal();
            // save the normal
            BufferUtils.setInBuffer(normal, store, (r * getWidth() + c));
        }
    }
    vars.release();
    return store;
}
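For interior vertices the loop above sums one face normal per neighbouring quadrant (four getNormal calls) and then normalizes the sum. Below is a rough, self-contained sketch of that averaging step using plain float arrays instead of jME math types, ignoring the scale parameter, and with made-up neighbour heights.

// A minimal sketch of the interior-vertex case: cross each pair of neighbouring
// edge vectors, sum the resulting face normals, and normalize the sum.
public class VertexNormalSketch {

    static float[] cross(float[] a, float[] b) {
        return new float[] {
            a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0]
        };
    }

    static float[] normalize(float[] v) {
        float len = (float) Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
        return new float[] { v[0] / len, v[1] / len, v[2] / len };
    }

    public static void main(String[] args) {
        // Hypothetical heights at the root vertex and its four grid neighbours.
        float hRoot = 0.25f, hLeft = 0.2f, hRight = 0.1f, hTop = 0.4f, hBottom = 0.3f;

        // Edge vectors from the root vertex to its neighbours (x right, y up, z toward "bottom").
        float[] left   = { -1, hLeft - hRoot,   0 };
        float[] right  = {  1, hRight - hRoot,  0 };
        float[] top    = {  0, hTop - hRoot,   -1 };
        float[] bottom = {  0, hBottom - hRoot, 1 };

        // One face normal per adjacent quadrant; the cross-product order is chosen
        // so every normal points up (positive y) before they are summed.
        float[][] faces = { cross(top, left), cross(left, bottom),
                            cross(bottom, right), cross(right, top) };
        float[] sum = { 0, 0, 0 };
        for (float[] f : faces) {
            sum[0] += f[0]; sum[1] += f[1]; sum[2] += f[2];
        }
        float[] n = normalize(sum);
        System.out.printf("normal = (%.3f, %.3f, %.3f)%n", n[0], n[1], n[2]);
    }
}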
Use of java.nio.BufferUnderflowException in project jmonkeyengine by jMonkeyEngine.
Class GeoMap, method writeIndexArray.
public IntBuffer writeIndexArray(IntBuffer store) {
    int faceN = (getWidth() - 1) * (getHeight() - 1) * 2;
    if (store != null) {
        if (store.remaining() < faceN * 3)
            throw new BufferUnderflowException();
    } else {
        store = BufferUtils.createIntBuffer(faceN * 3);
    }
    int i = 0;
    for (int z = 0; z < getHeight() - 1; z++) {
        for (int x = 0; x < getWidth() - 1; x++) {
            store.put(i).put(i + getWidth()).put(i + getWidth() + 1);
            store.put(i + getWidth() + 1).put(i + 1).put(i);
            i++;
            // TODO: There's probably a better way to do this..
            if (x == getWidth() - 2)
                i++;
        }
    }
    store.flip();
    return store;
}
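Each grid cell produces two triangles, and the extra i++ at the end of a row skips over the last vertex of that row so the next row's quads start at the correct index. The small self-contained sketch below runs the same loop for a hypothetical 3x3 vertex grid and prints the resulting triangles, which makes that row-seam skip visible.

import java.nio.IntBuffer;

// Two triangles per quad for a 3x3 vertex grid (width = height = 3).
public class IndexArraySketch {
    public static void main(String[] args) {
        int width = 3, height = 3;
        int faces = (width - 1) * (height - 1) * 2;
        IntBuffer store = IntBuffer.allocate(faces * 3);

        int i = 0;
        for (int z = 0; z < height - 1; z++) {
            for (int x = 0; x < width - 1; x++) {
                store.put(i).put(i + width).put(i + width + 1);
                store.put(i + width + 1).put(i + 1).put(i);
                i++;
                if (x == width - 2) {
                    i++; // last quad in the row: jump over the row's final vertex
                }
            }
        }
        store.flip();

        while (store.remaining() >= 3) {
            System.out.printf("tri: %d %d %d%n", store.get(), store.get(), store.get());
        }
        // Expected output (8 triangles):
        // 0 3 4 / 4 1 0 / 1 4 5 / 5 2 1 / 3 6 7 / 7 4 3 / 4 7 8 / 8 5 4
    }
}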
Use of java.nio.BufferUnderflowException in project jmonkeyengine by jMonkeyEngine.
Class GeoMap, method writeNormalArray.
/**
* Creates a normal array from the normal data in this Geomap
*
* @param store A preallocated FloatBuffer where to store the data (optional), size must be >= getWidth()*getHeight()*3
* @return store, or a new FloatBuffer if store is null
*
* @throws NullPointerException If isLoaded() or hasNormalmap() is false
*/
public FloatBuffer writeNormalArray(FloatBuffer store, Vector3f scale) {
    if (store != null) {
        if (store.remaining() < getWidth() * getHeight() * 3)
            throw new BufferUnderflowException();
    } else {
        store = BufferUtils.createFloatBuffer(getWidth() * getHeight() * 3);
    }
    store.rewind();
    Vector3f oppositePoint = new Vector3f();
    Vector3f adjacentPoint = new Vector3f();
    Vector3f rootPoint = new Vector3f();
    Vector3f tempNorm = new Vector3f();
    int normalIndex = 0;
    for (int y = 0; y < getHeight(); y++) {
        for (int x = 0; x < getWidth(); x++) {
            rootPoint.set(x, getValue(x, y), y);
            if (y == getHeight() - 1) {
                if (x == getWidth() - 1) {
                    // case #4 : last row, last col
                    // left cross up
                    // adj = normalIndex - getWidth();
                    // opp = normalIndex - 1;
                    adjacentPoint.set(x, getValue(x, y - 1), y - 1);
                    oppositePoint.set(x - 1, getValue(x - 1, y), y);
                } else {
                    // case #3 : last row, except for last col
                    // right cross up
                    // adj = normalIndex + 1;
                    // opp = normalIndex - getWidth();
                    adjacentPoint.set(x + 1, getValue(x + 1, y), y);
                    oppositePoint.set(x, getValue(x, y - 1), y - 1);
                }
            } else {
                if (x == getWidth() - 1) {
                    // case #2 : last column except for last row
                    // left cross down
                    adjacentPoint.set(x - 1, getValue(x - 1, y), y);
                    oppositePoint.set(x, getValue(x, y + 1), y + 1);
                    // adj = normalIndex - 1;
                    // opp = normalIndex + getWidth();
                } else {
                    // case #1 : most cases
                    // right cross down
                    adjacentPoint.set(x, getValue(x, y + 1), y + 1);
                    oppositePoint.set(x + 1, getValue(x + 1, y), y);
                    // adj = normalIndex + getWidth();
                    // opp = normalIndex + 1;
                }
            }
            tempNorm.set(adjacentPoint).subtractLocal(rootPoint).crossLocal(oppositePoint.subtractLocal(rootPoint));
            tempNorm.multLocal(scale).normalizeLocal();
            // store.put(tempNorm.x).put(tempNorm.y).put(tempNorm.z);
            BufferUtils.setInBuffer(tempNorm, store, normalIndex);
            normalIndex++;
        }
    }
    return store;
}
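Both writeNormalArray variants store each result with BufferUtils.setInBuffer(vector, store, index), i.e. at an offset derived from the vertex index rather than at the buffer's current position. The sketch below illustrates that indexed-write idea with only a plain FloatBuffer; the setVec3 helper is hypothetical and mirrors the general pattern, not the exact jME implementation.

import java.nio.FloatBuffer;

public class AbsolutePutSketch {
    // Hypothetical helper: write three components at index * 3 using absolute puts,
    // which leaves the buffer's position untouched.
    static void setVec3(FloatBuffer buf, int index, float x, float y, float z) {
        int base = index * 3;
        buf.put(base, x).put(base + 1, y).put(base + 2, z);
    }

    public static void main(String[] args) {
        FloatBuffer normals = FloatBuffer.allocate(4 * 3); // room for 4 normals
        setVec3(normals, 2, 0f, 1f, 0f);                   // write the third normal
        System.out.println(normals.get(6) + " " + normals.get(7) + " " + normals.get(8));
        System.out.println("position is still " + normals.position()); // 0
    }
}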
Use of java.nio.BufferUnderflowException in project j2objc by google.
Class Zip64, method parseZip64ExtendedInfo.
/**
 * Parse the zip64 extended info record from the extras present in {@code ze}.
 *
 * If {@code fromCentralDirectory} is true, we assume we're parsing a central directory
 * record. We assume a local file header otherwise. The difference between the two is that
 * a central directory entry is required to be complete, whereas a local file header isn't.
 * This is due to the presence of an optional data descriptor after the file content.
 *
 * @return {@code true} iff a zip64 extended info record was found.
 */
public static boolean parseZip64ExtendedInfo(ZipEntry ze, boolean fromCentralDirectory) throws ZipException {
    int extendedInfoSize = -1;
    int extendedInfoStart = -1;
    // Entries might optionally contain a zip64 extended information extra entry.
    if (ze.extra != null && ze.extra.length > 0) {
        // Extensible data fields are of the form header1+data1 + header2+data2 and so
        // on, where each header consists of a 2 byte header ID followed by a 2 byte size.
        // We need to iterate through the entire list of headers to find the header ID
        // for the zip64 extended information extra field (0x0001).
        final ByteBuffer buf = ByteBuffer.wrap(ze.extra).order(ByteOrder.LITTLE_ENDIAN);
        extendedInfoSize = getZip64ExtendedInfoSize(buf);
        if (extendedInfoSize != -1) {
            extendedInfoStart = buf.position();
            try {
                // The spec claims that the order of fields is fixed anyway.
                if (fromCentralDirectory || (ze.getMethod() == ZipEntry.STORED)) {
                    if (ze.size == MAX_ZIP_ENTRY_AND_ARCHIVE_SIZE) {
                        ze.size = buf.getLong();
                    }
                    if (ze.compressedSize == MAX_ZIP_ENTRY_AND_ARCHIVE_SIZE) {
                        ze.compressedSize = buf.getLong();
                    }
                }
                // The local header offset is significant only in the central directory; it makes no
                // sense within the local header itself.
                if (fromCentralDirectory) {
                    if (ze.localHeaderRelOffset == MAX_ZIP_ENTRY_AND_ARCHIVE_SIZE) {
                        ze.localHeaderRelOffset = buf.getLong();
                    }
                }
            } catch (BufferUnderflowException bue) {
                ZipException zipException = new ZipException("Error parsing extended info");
                zipException.initCause(bue);
                throw zipException;
            }
        }
    }
    // If no zip64 extended info record was found, check that the known size and offset fields
    // are valid and don't require the presence of the extended header.
    if (extendedInfoSize == -1) {
        if (ze.compressedSize == MAX_ZIP_ENTRY_AND_ARCHIVE_SIZE || ze.size == MAX_ZIP_ENTRY_AND_ARCHIVE_SIZE || ze.localHeaderRelOffset == MAX_ZIP_ENTRY_AND_ARCHIVE_SIZE) {
            throw new ZipException("File contains no zip64 extended information: " + "name=" + ze.name + ", compressedSize=" + ze.compressedSize + ", size=" + ze.size + ", localHeader=" + ze.localHeaderRelOffset);
        }
        return false;
    } else {
        // If we've parsed the zip64 extended info header, we remove it from the extras
        // so that applications that set their own extras will see the data they set.
        // This is an unfortunate workaround needed due to a gap in the spec. The spec demands
        // that extras are present in the "extensible" format, which means that each extra field
        // must be prefixed with a header ID and a length. However, earlier versions of the spec
        // made no mention of this, nor did any existing API enforce it. This means users could
        // set "free form" extras without caring very much whether the implementation wanted to
        // extend or add to them.
        // The start of the extended info header.
        final int extendedInfoHeaderStart = extendedInfoStart - 4;
        // The total size of the extended info, including the header.
        final int extendedInfoTotalSize = extendedInfoSize + 4;
        final int extrasLen = ze.extra.length - extendedInfoTotalSize;
        byte[] extrasWithoutZip64 = new byte[extrasLen];
        System.arraycopy(ze.extra, 0, extrasWithoutZip64, 0, extendedInfoHeaderStart);
        System.arraycopy(ze.extra, extendedInfoHeaderStart + extendedInfoTotalSize, extrasWithoutZip64, extendedInfoHeaderStart, (extrasLen - extendedInfoHeaderStart));
        ze.extra = extrasWithoutZip64;
        return true;
    }
}
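The scan that getZip64ExtendedInfoSize performs over ze.extra follows the generic extensible-data layout: repeated little-endian [2-byte header ID][2-byte size][data] records. The sketch below is not the j2objc/libcore code; it is a self-contained, hypothetical illustration of locating the 0x0001 zip64 record in such a block and treating a truncated record as "not found".

import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class Zip64ExtrasSketch {
    static final int ZIP64_HEADER_ID = 0x0001;

    // Returns the size of the zip64 record and leaves the buffer positioned at its data,
    // or -1 if no such record exists. Mirrors the role of the getZip64ExtendedInfoSize
    // helper referenced above (this version is hypothetical).
    static int findZip64Record(ByteBuffer buf) {
        try {
            while (buf.remaining() >= 4) {
                int headerId = buf.getShort() & 0xffff;
                int size = buf.getShort() & 0xffff;
                if (headerId == ZIP64_HEADER_ID) {
                    return size;
                }
                buf.position(buf.position() + size); // skip this field's data
            }
        } catch (BufferUnderflowException | IllegalArgumentException ignored) {
            // Truncated or malformed extras: fall through and report "not found".
        }
        return -1;
    }

    public static void main(String[] args) {
        // Hypothetical extras: an unrelated 3-byte field (ID 0x7875) followed by a
        // zip64 field carrying a single 8-byte value.
        ByteBuffer extras = ByteBuffer.allocate(2 + 2 + 3 + 2 + 2 + 8)
                .order(ByteOrder.LITTLE_ENDIAN);
        extras.putShort((short) 0x7875).putShort((short) 3).put(new byte[3]);
        extras.putShort((short) ZIP64_HEADER_ID).putShort((short) 8).putLong(123456789L);
        extras.flip();

        int size = findZip64Record(extras);
        System.out.println("zip64 record size = " + size);            // 8
        System.out.println("uncompressed size = " + extras.getLong()); // 123456789
    }
}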