Use of java.nio.IntBuffer in project MagicCamera by wuhaoyu1990:
class MagicCameraView, method drawPhoto.
// Renders `bitmap` through the beauty filter (plus the currently selected
// `filter`, if any) into an offscreen framebuffer and reads the pixels back
// as a new Bitmap.  The shared filters' display/input sizes are restored to
// the preview dimensions before returning.
//
// @param bitmap    source image; not recycled here
// @param isRotated selects between the two Rotation.NORMAL texture mappings
//                  (vertical flip on/off) — presumably matches the camera
//                  orientation of the capture; TODO confirm against callers
// @return the filtered image, same dimensions as the input
private Bitmap drawPhoto(Bitmap bitmap, boolean isRotated) {
int width = bitmap.getWidth();
int height = bitmap.getHeight();
int[] mFrameBuffers = new int[1];
int[] mFrameBufferTextures = new int[1];
// The beauty filter is created/initialized per call and destroyed at the
// end, so its GL programs are recompiled on every snapshot.
if (beautyFilter == null)
beautyFilter = new MagicBeautyFilter();
beautyFilter.init();
beautyFilter.onDisplaySizeChanged(width, height);
beautyFilter.onInputSizeChanged(width, height);
if (filter != null) {
filter.onInputSizeChanged(width, height);
filter.onDisplaySizeChanged(width, height);
}
// Offscreen render target: an FBO whose color attachment is an RGBA texture
// the size of the photo.
GLES20.glGenFramebuffers(1, mFrameBuffers, 0);
GLES20.glGenTextures(1, mFrameBufferTextures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0], 0);
GLES20.glViewport(0, 0, width, height);
// Upload the source bitmap as a GL texture.
int textureId = OpenGlUtils.loadTexture(bitmap, OpenGlUtils.NO_TEXTURE, true);
// Full-screen quad geometry and texture coordinates; GL requires direct,
// native-order buffers.
FloatBuffer gLCubeBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.CUBE.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
FloatBuffer gLTextureBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.TEXTURE_NO_ROTATION.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
gLCubeBuffer.put(TextureRotationUtil.CUBE).position(0);
// The only difference between the branches is the vertical-flip flag.
if (isRotated)
gLTextureBuffer.put(TextureRotationUtil.getRotation(Rotation.NORMAL, false, false)).position(0);
else
gLTextureBuffer.put(TextureRotationUtil.getRotation(Rotation.NORMAL, false, true)).position(0);
if (filter == null) {
beautyFilter.onDrawFrame(textureId, gLCubeBuffer, gLTextureBuffer);
} else {
// NOTE(review): beautyFilter renders into the FBO whose color attachment is
// mFrameBufferTextures[0], and `filter` then samples that same texture while
// it is still attached.  Sampling a texture attached to the current
// framebuffer is undefined behavior in OpenGL ES — confirm this path on
// target devices (or ping-pong through a second texture).
beautyFilter.onDrawFrame(textureId);
filter.onDrawFrame(mFrameBufferTextures[0], gLCubeBuffer, gLTextureBuffer);
}
// Read the rendered pixels back; copyPixelsFromBuffer consumes raw bytes in
// the same RGBA order glReadPixels produces, so no channel swap is done here.
IntBuffer ib = IntBuffer.allocate(width * height);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
Bitmap result = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
result.copyPixelsFromBuffer(ib);
// Release every GL object created for this snapshot.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glDeleteTextures(1, new int[] { textureId }, 0);
GLES20.glDeleteFramebuffers(mFrameBuffers.length, mFrameBuffers, 0);
GLES20.glDeleteTextures(mFrameBufferTextures.length, mFrameBufferTextures, 0);
beautyFilter.destroy();
beautyFilter = null;
// Restore the on-screen sizes the live preview pipeline expects.
if (filter != null) {
filter.onDisplaySizeChanged(surfaceWidth, surfaceHeight);
filter.onInputSizeChanged(imageWidth, imageHeight);
}
return result;
}
Use of java.nio.IntBuffer in project MagicCamera by wuhaoyu1990:
class EglSurfaceBase, method saveFrame.
/**
 * Saves the EGL surface to a PNG file.
 * <p>
 * Expects that this object's EGL surface is current.
 *
 * @param file destination file; overwritten if it already exists
 * @throws IOException if the file cannot be written
 */
public void saveFrame(File file) throws IOException {
    if (!mEglCore.isCurrent(mEGLSurface)) {
        throw new RuntimeException("Expected EGL context/surface is not current");
    }

    // glReadPixels fills in the buffer with what is essentially big-endian
    // RGBA data (a byte of red, followed by a byte of green...).  The Bitmap
    // "copy pixels" method wants exactly the format GL provides, so the
    // buffer can be handed over directly — the original code's
    // IntBuffer.wrap(ib.array()) re-wrap added nothing.
    //
    // Making this even more interesting is the upside-down nature of GL,
    // which means our output will look upside down relative to what appears
    // on screen if the typical GL conventions are used.
    String filename = file.toString();
    int width = getWidth();
    int height = getHeight();
    IntBuffer ib = IntBuffer.allocate(width * height);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
    OpenGlUtils.checkGlError("glReadPixels");

    // try-with-resources replaces the manual finally/close and also closes
    // the stream when compress() throws.
    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filename))) {
        Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(ib);
        // The quality argument is ignored for the lossless PNG format; kept
        // for signature clarity only.
        bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
        bmp.recycle();
    }
    Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
Use of java.nio.IntBuffer in project XobotOS by xamarin:
class MediaItem, method generateBlankFrame.
/**
 * Generates a 64x64 black frame to be used for generating
 * the begin transition at the first media item in the storyboard
 * or the end transition at the last media item in the storyboard,
 * then points the supplied clip settings at it.
 *
 * The frame is written once per instance (raw RGB-8888 "ghost.rgb" in the
 * project directory); later calls reuse the existing file.
 *
 * @param clipSettings object to populate
 *{@link android.media.videoeditor.MediaArtistNativeHelper.ClipSettings}
 */
void generateBlankFrame(ClipSettings clipSettings) {
    if (!mBlankFrameGenerated) {
        final int frameWidth = 64;
        final int frameHeight = 64;
        // Plain concatenation: the original passed this through
        // String.format(), which would throw if mProjectPath ever contained
        // a '%' character.
        mBlankFrameFilename = mProjectPath + "/" + "ghost.rgb";
        // One all-zero row is 64 black pixels, 4 bytes each; writing the same
        // row frameHeight times produces the identical byte stream the
        // original's per-row IntBuffer round-trip produced.
        final byte[] blackRow = new byte[frameWidth * 4];
        // The original swallowed the FileOutputStream open failure and then
        // crashed with an uncaught NullPointerException on the first write;
        // try-with-resources keeps the best-effort semantics while always
        // closing the stream.
        try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(mBlankFrameFilename))) {
            for (int row = 0; row < frameHeight; row++) {
                dos.write(blackRow);
            }
        } catch (IOException e) {
            /* best effort, matching the original's silent-failure behavior */
        }
        mBlankFrameGenerated = true;
    }
    clipSettings.clipPath = mBlankFrameFilename;
    // NOTE(review): fileType is JPG although the file written is raw RGB —
    // kept as-is; presumably the native layer keys off rgbWidth/rgbHeight.
    clipSettings.fileType = FileType.JPG;
    clipSettings.beginCutTime = 0;
    clipSettings.endCutTime = 0;
    clipSettings.mediaRendering = MediaRendering.RESIZING;
    clipSettings.rgbWidth = 64;
    clipSettings.rgbHeight = 64;
}
Use of java.nio.IntBuffer in project XobotOS by xamarin:
class OverlayFrame, method generateOverlayWithRenderingMode.
/**
 * Re-renders the overlay bitmap into a width x height ARGB frame according
 * to the media item's rendering mode (stretch, black-border letterboxing, or
 * cropping) and rewrites the overlay's raw RGB file.  Skipped when the
 * stored RGB file already has the requested size and exists on disk.
 *
 * @param mediaItemsList media item whose rendering mode is applied
 * @param overlay overlay whose bitmap is rendered and whose file is rewritten
 * @param height export height in pixels
 * @param width export width in pixels
 * @throws FileNotFoundException if the output file cannot be created
 * @throws IOException if writing the RGB data fails
 */
void generateOverlayWithRenderingMode(MediaItem mediaItemsList, OverlayFrame overlay, int height, int width) throws FileNotFoundException, IOException {
    final MediaItem t = mediaItemsList;
    /* get the rendering mode */
    int renderMode = t.getRenderingMode();
    // `overlay` is already declared as OverlayFrame; the original's repeated
    // (OverlayFrame) casts were redundant and have been dropped.
    Bitmap overlayBitmap = overlay.getBitmap();
    /*
     * Check if the resize of Overlay is needed with rendering mode applied
     * because of change in export dimensions
     */
    int resizedRGBFileHeight = overlay.getResizedRGBSizeHeight();
    int resizedRGBFileWidth = overlay.getResizedRGBSizeWidth();
    /* Fall back to the original bitmap size when no resize has happened yet */
    if (resizedRGBFileWidth == 0) {
        resizedRGBFileWidth = overlayBitmap.getWidth();
    }
    if (resizedRGBFileHeight == 0) {
        resizedRGBFileHeight = overlayBitmap.getHeight();
    }
    if (resizedRGBFileWidth != width || resizedRGBFileHeight != height
            || !new File(overlay.getFilename()).exists()) {
        /* Create the canvas bitmap at the export size */
        final Bitmap destBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        final Canvas overlayCanvas = new Canvas(destBitmap);
        final Rect destRect;
        final Rect srcRect;
        switch (renderMode) {
            case MediaItem.RENDERING_MODE_STRETCH: {
                // Ignore aspect ratio: map the whole source onto the canvas.
                destRect = new Rect(0, 0, overlayCanvas.getWidth(), overlayCanvas.getHeight());
                srcRect = new Rect(0, 0, overlayBitmap.getWidth(), overlayBitmap.getHeight());
                break;
            }
            case MediaItem.RENDERING_MODE_BLACK_BORDER: {
                // Letterbox: preserve aspect ratio, center the image, leaving
                // borders on the shorter axis.
                int left, right, top, bottom;
                float aROverlayImage, aRCanvas;
                aROverlayImage = (float) (overlayBitmap.getWidth()) / (float) (overlayBitmap.getHeight());
                aRCanvas = (float) (overlayCanvas.getWidth()) / (float) (overlayCanvas.getHeight());
                if (aROverlayImage > aRCanvas) {
                    // Source is wider: full width, bars top and bottom.
                    int newHeight = ((overlayCanvas.getWidth() * overlayBitmap.getHeight()) / overlayBitmap.getWidth());
                    left = 0;
                    top = (overlayCanvas.getHeight() - newHeight) / 2;
                    right = overlayCanvas.getWidth();
                    bottom = top + newHeight;
                } else {
                    // Source is taller: full height, bars left and right.
                    int newWidth = ((overlayCanvas.getHeight() * overlayBitmap.getWidth()) / overlayBitmap.getHeight());
                    left = (overlayCanvas.getWidth() - newWidth) / 2;
                    top = 0;
                    right = left + newWidth;
                    bottom = overlayCanvas.getHeight();
                }
                destRect = new Rect(left, top, right, bottom);
                srcRect = new Rect(0, 0, overlayBitmap.getWidth(), overlayBitmap.getHeight());
                break;
            }
            case MediaItem.RENDERING_MODE_CROPPING: {
                // Crop: preserve aspect ratio by trimming the source so it
                // fills the whole canvas.
                int left, right, top, bottom;
                float aROverlayImage, aRCanvas;
                aROverlayImage = (float) (overlayBitmap.getWidth()) / (float) (overlayBitmap.getHeight());
                aRCanvas = (float) (overlayCanvas.getWidth()) / (float) (overlayCanvas.getHeight());
                if (aROverlayImage < aRCanvas) {
                    int newHeight = ((overlayBitmap.getWidth() * overlayCanvas.getHeight()) / overlayCanvas.getWidth());
                    left = 0;
                    top = (overlayBitmap.getHeight() - newHeight) / 2;
                    right = overlayBitmap.getWidth();
                    bottom = top + newHeight;
                } else {
                    int newWidth = ((overlayBitmap.getHeight() * overlayCanvas.getWidth()) / overlayCanvas.getHeight());
                    left = (overlayBitmap.getWidth() - newWidth) / 2;
                    top = 0;
                    right = left + newWidth;
                    bottom = overlayBitmap.getHeight();
                }
                srcRect = new Rect(left, top, right, bottom);
                destRect = new Rect(0, 0, overlayCanvas.getWidth(), overlayCanvas.getHeight());
                break;
            }
            default: {
                throw new IllegalStateException("Rendering mode: " + renderMode);
            }
        }
        overlayCanvas.drawBitmap(overlayBitmap, srcRect, destRect, sResizePaint);
        overlayCanvas.setBitmap(null);
        /*
         * Rewrite the dest rgb file from scratch
         */
        String outFileName = overlay.getFilename();
        if (outFileName != null) {
            new File(outFileName).delete();
        }
        // try-with-resources: the original leaked the FileOutputStream when
        // a write threw, since it closed the stream only on the success path.
        try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(outFileName))) {
            /* Populate the rgb file with bitmap data, one row at a time */
            final int[] framingBuffer = new int[width];
            // The buffer's default big-endian order turns each ARGB int into
            // A,R,G,B bytes — presumably the raw layout the native layer
            // expects; identical to the original's output.
            ByteBuffer byteBuffer = ByteBuffer.allocate(framingBuffer.length * 4);
            byte[] array = byteBuffer.array();
            for (int row = 0; row < height; row++) {
                destBitmap.getPixels(framingBuffer, 0, width, 0, row, width, 1);
                // Fresh int view each row: put() advances the view's position.
                byteBuffer.asIntBuffer().put(framingBuffer, 0, width);
                dos.write(array);
            }
            dos.flush();
        }
        /*
         * Remember the resized RGB width and height
         */
        overlay.setResizedRGBSize(width, height);
    }
}
Use of java.nio.IntBuffer in project android_frameworks_base by AOSPA:
class GLLogWrapper, method toByteBuffer.
/**
 * Copies {@code byteCount} bytes' worth of elements (or, when
 * {@code byteCount} is negative, all remaining elements) out of {@code input}
 * into a newly allocated ByteBuffer.  The input's position is left untouched.
 * The result is rewound, and — because the OpenGL API will interpret it in
 * hardware byte order — switched to native order before being returned.
 */
private ByteBuffer toByteBuffer(int byteCount, Buffer input) {
    final boolean convertWholeBuffer = byteCount < 0;
    final ByteBuffer result;
    if (input instanceof ByteBuffer) {
        // Read through a duplicate so the caller's position never moves.
        ByteBuffer src = ((ByteBuffer) input).duplicate();
        if (convertWholeBuffer) {
            byteCount = src.remaining();
        }
        result = ByteBuffer.allocate(byteCount).order(src.order());
        for (int n = byteCount; n > 0; n--) {
            result.put(src.get());
        }
    } else if (input instanceof CharBuffer) {
        CharBuffer src = ((CharBuffer) input).duplicate();
        if (convertWholeBuffer) {
            byteCount = src.remaining() * 2;
        }
        result = ByteBuffer.allocate(byteCount).order(src.order());
        CharBuffer sink = result.asCharBuffer();
        for (int n = byteCount / 2; n > 0; n--) {
            sink.put(src.get());
        }
    } else if (input instanceof ShortBuffer) {
        ShortBuffer src = ((ShortBuffer) input).duplicate();
        if (convertWholeBuffer) {
            byteCount = src.remaining() * 2;
        }
        result = ByteBuffer.allocate(byteCount).order(src.order());
        ShortBuffer sink = result.asShortBuffer();
        for (int n = byteCount / 2; n > 0; n--) {
            sink.put(src.get());
        }
    } else if (input instanceof IntBuffer) {
        IntBuffer src = ((IntBuffer) input).duplicate();
        if (convertWholeBuffer) {
            byteCount = src.remaining() * 4;
        }
        result = ByteBuffer.allocate(byteCount).order(src.order());
        IntBuffer sink = result.asIntBuffer();
        for (int n = byteCount / 4; n > 0; n--) {
            sink.put(src.get());
        }
    } else if (input instanceof FloatBuffer) {
        FloatBuffer src = ((FloatBuffer) input).duplicate();
        if (convertWholeBuffer) {
            byteCount = src.remaining() * 4;
        }
        result = ByteBuffer.allocate(byteCount).order(src.order());
        FloatBuffer sink = result.asFloatBuffer();
        for (int n = byteCount / 4; n > 0; n--) {
            sink.put(src.get());
        }
    } else if (input instanceof DoubleBuffer) {
        DoubleBuffer src = ((DoubleBuffer) input).duplicate();
        if (convertWholeBuffer) {
            byteCount = src.remaining() * 8;
        }
        result = ByteBuffer.allocate(byteCount).order(src.order());
        DoubleBuffer sink = result.asDoubleBuffer();
        for (int n = byteCount / 8; n > 0; n--) {
            sink.put(src.get());
        }
    } else if (input instanceof LongBuffer) {
        LongBuffer src = ((LongBuffer) input).duplicate();
        if (convertWholeBuffer) {
            byteCount = src.remaining() * 8;
        }
        result = ByteBuffer.allocate(byteCount).order(src.order());
        LongBuffer sink = result.asLongBuffer();
        for (int n = byteCount / 8; n > 0; n--) {
            sink.put(src.get());
        }
    } else {
        throw new RuntimeException("Unimplemented Buffer subclass.");
    }
    result.rewind();
    // The OpenGL API will interpret the result in hardware byte order,
    // so we better do that as well:
    result.order(ByteOrder.nativeOrder());
    return result;
}
Aggregations