Use of processing.core.PGraphics in project processing by processing.
From the class PShapeOpenGL, the method rawPoints:
protected void rawPoints(PGraphicsOpenGL g) {
  PGraphics raw = g.getRaw();
  raw.colorMode(RGB);
  raw.noFill();
  raw.strokeCap(strokeCap);
  raw.beginShape(POINTS);
  float[] vertices = tessGeo.pointVertices;
  int[] color = tessGeo.pointColors;
  float[] attribs = tessGeo.pointOffsets;
  short[] indices = tessGeo.pointIndices;
  IndexCache cache = tessGeo.pointIndexCache;
  for (int n = 0; n < cache.size; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    int pt = ioffset;
    while (pt < (ioffset + icount) / 3) {
      float size = attribs[2 * pt + 2];
      float weight;
      int perim;
      if (0 < size) {
        // Round point
        weight = +size / 0.5f;
        perim = PApplet.min(PGraphicsOpenGL.MAX_POINT_ACCURACY,
                            PApplet.max(PGraphicsOpenGL.MIN_POINT_ACCURACY,
                                        (int) (TWO_PI * weight /
                                               PGraphicsOpenGL.POINT_ACCURACY_FACTOR))) + 1;
      } else {
        // Square point
        weight = -size / 0.5f;
        perim = 5;
      }
      int i0 = voffset + indices[3 * pt];
      int argb0 = PGL.nativeToJavaARGB(color[i0]);
      float[] pt0 = { 0, 0, 0, 0 };
      float[] src0 = { 0, 0, 0, 0 };
      PApplet.arrayCopy(vertices, 4 * i0, src0, 0, 4);
      g.modelview.mult(src0, pt0);
      if (raw.is3D()) {
        raw.strokeWeight(weight);
        raw.stroke(argb0);
        raw.vertex(pt0[X], pt0[Y], pt0[Z]);
      } else if (raw.is2D()) {
        float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        raw.strokeWeight(weight);
        raw.stroke(argb0);
        raw.vertex(sx0, sy0);
      }
      pt += perim;
    }
  }
  raw.endShape();
}
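The raw* methods only run while a raw recorder is attached to the OpenGL renderer, that is, while g.getRaw() returns a non-null PGraphics between beginRaw() and endRaw(). The following is a minimal sketch of how stroked point geometry might reach this code path; the class name, window size, coordinates and output path are assumptions for illustration, and it presumes the processing.pdf library is on the classpath so the PDF raw recorder is available.

// Hypothetical sketch: names, sizes and the output path are assumptions;
// the processing.pdf library must be present for the PDF raw recorder.
import processing.core.PApplet;
import processing.core.PShape;

public class RawPointsExample extends PApplet {
  public void settings() {
    size(400, 400, P3D);              // OpenGL renderer, so shapes are PShapeOpenGL
  }

  public void draw() {
    PShape dots = createShape();
    dots.beginShape(POINTS);
    dots.strokeWeight(8);
    dots.stroke(255, 0, 0);
    dots.vertex(100, 100);
    dots.vertex(300, 250);
    dots.endShape();

    beginRaw(PDF, "points-raw.pdf");  // getRaw() is non-null from here on
    shape(dots);                      // point geometry is replayed into the recorder
    endRaw();                         // detaches the recorder and closes the file
    noLoop();
  }

  public static void main(String[] args) {
    PApplet.main("RawPointsExample");
  }
}

Since the PDF recorder is a 2D surface, the raw.is2D() branch above is the one taken, and each point is projected to screen coordinates with screenXImpl/screenYImpl before being emitted.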
Use of processing.core.PGraphics in project processing by processing.
From the class PShapeOpenGL, the method rawPolys:
protected void rawPolys(PGraphicsOpenGL g, PImage textureImage) {
  PGraphics raw = g.getRaw();
  raw.colorMode(RGB);
  raw.noStroke();
  raw.beginShape(TRIANGLES);
  float[] vertices = tessGeo.polyVertices;
  int[] color = tessGeo.polyColors;
  float[] uv = tessGeo.polyTexCoords;
  short[] indices = tessGeo.polyIndices;
  IndexCache cache = tessGeo.polyIndexCache;
  for (int n = firstPolyIndexCache; n <= lastPolyIndexCache; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    for (int tr = ioffset / 3; tr < (ioffset + icount) / 3; tr++) {
      int i0 = voffset + indices[3 * tr + 0];
      int i1 = voffset + indices[3 * tr + 1];
      int i2 = voffset + indices[3 * tr + 2];
      float[] src0 = { 0, 0, 0, 0 };
      float[] src1 = { 0, 0, 0, 0 };
      float[] src2 = { 0, 0, 0, 0 };
      float[] pt0 = { 0, 0, 0, 0 };
      float[] pt1 = { 0, 0, 0, 0 };
      float[] pt2 = { 0, 0, 0, 0 };
      int argb0 = PGL.nativeToJavaARGB(color[i0]);
      int argb1 = PGL.nativeToJavaARGB(color[i1]);
      int argb2 = PGL.nativeToJavaARGB(color[i2]);
      PApplet.arrayCopy(vertices, 4 * i0, src0, 0, 4);
      PApplet.arrayCopy(vertices, 4 * i1, src1, 0, 4);
      PApplet.arrayCopy(vertices, 4 * i2, src2, 0, 4);
      // Any transformation applied to the shape is currently stored in the
      // modelview matrix of the renderer.
      g.modelview.mult(src0, pt0);
      g.modelview.mult(src1, pt1);
      g.modelview.mult(src2, pt2);
      if (textureImage != null) {
        raw.texture(textureImage);
        if (raw.is3D()) {
          raw.fill(argb0);
          raw.vertex(pt0[X], pt0[Y], pt0[Z], uv[2 * i0 + 0], uv[2 * i0 + 1]);
          raw.fill(argb1);
          raw.vertex(pt1[X], pt1[Y], pt1[Z], uv[2 * i1 + 0], uv[2 * i1 + 1]);
          raw.fill(argb2);
          raw.vertex(pt2[X], pt2[Y], pt2[Z], uv[2 * i2 + 0], uv[2 * i2 + 1]);
        } else if (raw.is2D()) {
          float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sx1 = g.screenXImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sy1 = g.screenYImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sx2 = g.screenXImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          float sy2 = g.screenYImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          raw.fill(argb0);
          raw.vertex(sx0, sy0, uv[2 * i0 + 0], uv[2 * i0 + 1]);
          raw.fill(argb1);
          raw.vertex(sx1, sy1, uv[2 * i1 + 0], uv[2 * i1 + 1]);
          raw.fill(argb2);
          raw.vertex(sx2, sy2, uv[2 * i2 + 0], uv[2 * i2 + 1]);
        }
      } else {
        if (raw.is3D()) {
          raw.fill(argb0);
          raw.vertex(pt0[X], pt0[Y], pt0[Z]);
          raw.fill(argb1);
          raw.vertex(pt1[X], pt1[Y], pt1[Z]);
          raw.fill(argb2);
          raw.vertex(pt2[X], pt2[Y], pt2[Z]);
        } else if (raw.is2D()) {
          float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
          float sx1 = g.screenXImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sy1 = g.screenYImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
          float sx2 = g.screenXImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          float sy2 = g.screenYImpl(pt2[0], pt2[1], pt2[2], pt2[3]);
          raw.fill(argb0);
          raw.vertex(sx0, sy0);
          raw.fill(argb1);
          raw.vertex(sx1, sy1);
          raw.fill(argb2);
          raw.vertex(sx2, sy2);
        }
      }
    }
  }
  raw.endShape();
}
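rawPolys handles the filled, and optionally textured, triangles of a shape while the raw recorder is active. The sketch below is a minimal illustration of how textured fill geometry could arrive here; the texture file name, window size and vertex coordinates are assumptions, and it again presumes the processing.pdf library is available.

// Hypothetical sketch: the texture file name, window size and vertex
// coordinates are assumptions; the processing.pdf library is required
// for the PDF raw recorder.
import processing.core.PApplet;
import processing.core.PImage;
import processing.core.PShape;

public class RawPolysExample extends PApplet {
  PImage tex;

  public void settings() {
    size(400, 400, P3D);
  }

  public void setup() {
    tex = loadImage("brick.jpg");     // assumed to exist in the data folder
  }

  public void draw() {
    PShape quad = createShape();
    quad.beginShape(QUAD);
    quad.noStroke();
    quad.texture(tex);
    quad.vertex(50, 50, 0, 0);
    quad.vertex(350, 50, tex.width, 0);
    quad.vertex(350, 350, tex.width, tex.height);
    quad.vertex(50, 350, 0, tex.height);
    quad.endShape();

    beginRaw(PDF, "polys-raw.pdf");   // attach the raw recorder
    shape(quad);                      // tessellated into triangles and replayed
    endRaw();
    noLoop();
  }

  public static void main(String[] args) {
    PApplet.main("RawPolysExample");
  }
}

Because fill color is emitted per vertex, the recorder can reproduce smooth color interpolation across each triangle, and the UV coordinates are passed along whenever a texture is bound.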
Use of processing.core.PGraphics in project hid-serial by rayshobby.
From the class GCustomSlider, the method extendCentreImage:
private void extendCentreImage() {
  int tl = (int) trackLength;
  PGraphics pg = winApp.createGraphics(tl, centre.height, JAVA2D);
  // Number of tiles needed to cover the track (rounded up), and the
  // starting x position so the tiling is centred over the track.
  int rem = tl % centre.width;
  int n = tl / centre.width;
  n = (rem == 0) ? n : n + 1;
  int px = (tl - centre.width * n) / 2;
  pg.beginDraw();
  // Clear to a transparent background, then tile the centre image
  // across the full track length.
  pg.background(winApp.color(255, 0));
  pg.imageMode(CORNER);
  while (px < tl) {
    pg.image(centre, px, 0);
    px += centre.width;
  }
  pg.endDraw();
  // Replace the original centre image with the extended version.
  centre = pg;
}
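The method stretches a small slider-skin segment to the full track length by tiling it into an off-screen PGraphics buffer. The same idea can be reproduced in a standalone sketch; in the version below the tile image name, window size and track length are assumptions rather than values from the library.

// Hypothetical standalone sketch of the same tiling idea: the tile image
// name and the track length are assumptions, not values from the library.
import processing.core.PApplet;
import processing.core.PGraphics;
import processing.core.PImage;

public class TileTrackExample extends PApplet {
  PImage tile;      // small skin segment to repeat
  PImage track;     // extended image, built once in setup()

  public void settings() {
    size(420, 120);
  }

  public void setup() {
    tile = loadImage("centre.png");                       // assumed asset
    int trackLength = 400;
    PGraphics pg = createGraphics(trackLength, tile.height, JAVA2D);
    // Ceiling division gives the tile count; a negative or zero start x
    // centres the tiling so any leftover width is split on both ends.
    int n = (trackLength + tile.width - 1) / tile.width;
    int px = (trackLength - tile.width * n) / 2;
    pg.beginDraw();
    pg.background(color(255, 0));                         // transparent background
    pg.imageMode(CORNER);
    while (px < trackLength) {
      pg.image(tile, px, 0);
      px += tile.width;
    }
    pg.endDraw();
    track = pg;                                           // PGraphics is a PImage
  }

  public void draw() {
    background(240);
    image(track, 10, 40);
  }

  public static void main(String[] args) {
    PApplet.main("TileTrackExample");
  }
}

Rendering the tiling once into a buffer and reusing the result avoids repeating the image loop on every frame, which is presumably why the slider rebuilds its centre image only when the track length changes.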
Use of processing.core.PGraphics in project processing by processing.
From the class PShapeOpenGL, the method rawLines:
protected void rawLines(PGraphicsOpenGL g) {
  PGraphics raw = g.getRaw();
  raw.colorMode(RGB);
  raw.noFill();
  raw.strokeCap(strokeCap);
  raw.strokeJoin(strokeJoin);
  raw.beginShape(LINES);
  float[] vertices = tessGeo.lineVertices;
  int[] color = tessGeo.lineColors;
  float[] attribs = tessGeo.lineDirections;
  short[] indices = tessGeo.lineIndices;
  IndexCache cache = tessGeo.lineIndexCache;
  for (int n = firstLineIndexCache; n <= lastLineIndexCache; n++) {
    int ioffset = cache.indexOffset[n];
    int icount = cache.indexCount[n];
    int voffset = cache.vertexOffset[n];
    for (int ln = ioffset / 6; ln < (ioffset + icount) / 6; ln++) {
      // Each line segment is defined by six indices since it is formed by
      // two triangles; we only need the first and last vertices. This group
      // of vertices could also be bevel triangles, which we detect by
      // looking at the line weight.
      int i0 = voffset + indices[6 * ln + 0];
      int i1 = voffset + indices[6 * ln + 5];
      float sw0 = 2 * attribs[4 * i0 + 3];
      float sw1 = 2 * attribs[4 * i1 + 3];
      // Bevel triangles, skip.
      if (PGraphicsOpenGL.zero(sw0))
        continue;
      float[] src0 = { 0, 0, 0, 0 };
      float[] src1 = { 0, 0, 0, 0 };
      float[] pt0 = { 0, 0, 0, 0 };
      float[] pt1 = { 0, 0, 0, 0 };
      int argb0 = PGL.nativeToJavaARGB(color[i0]);
      int argb1 = PGL.nativeToJavaARGB(color[i1]);
      PApplet.arrayCopy(vertices, 4 * i0, src0, 0, 4);
      PApplet.arrayCopy(vertices, 4 * i1, src1, 0, 4);
      // Any transformation applied to the shape is currently stored in the
      // modelview matrix of the renderer.
      g.modelview.mult(src0, pt0);
      g.modelview.mult(src1, pt1);
      if (raw.is3D()) {
        raw.strokeWeight(sw0);
        raw.stroke(argb0);
        raw.vertex(pt0[X], pt0[Y], pt0[Z]);
        raw.strokeWeight(sw1);
        raw.stroke(argb1);
        raw.vertex(pt1[X], pt1[Y], pt1[Z]);
      } else if (raw.is2D()) {
        float sx0 = g.screenXImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        float sy0 = g.screenYImpl(pt0[0], pt0[1], pt0[2], pt0[3]);
        float sx1 = g.screenXImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
        float sy1 = g.screenYImpl(pt1[0], pt1[1], pt1[2], pt1[3]);
        raw.strokeWeight(sw0);
        raw.stroke(argb0);
        raw.vertex(sx0, sy0);
        raw.strokeWeight(sw1);
        raw.stroke(argb1);
        raw.vertex(sx1, sy1);
      }
    }
  }
  raw.endShape();
}