Use of android.graphics.SurfaceTexture in project chromeview by pwnall.
The class VideoCapture, method allocate.
// Returns true on success, false otherwise.
@CalledByNative
public boolean allocate(int width, int height, int frameRate) {
    Log.d(TAG, "allocate: requested width=" + width + ", height=" + height + ", frameRate=" + frameRate);
    try {
        mCamera = Camera.open(mId);
        Camera.CameraInfo camera_info = new Camera.CameraInfo();
        Camera.getCameraInfo(mId, camera_info);
        mCameraOrientation = camera_info.orientation;
        mCameraFacing = camera_info.facing;
        mDeviceOrientation = getDeviceOrientation();
        Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation + ", camera orientation=" + mCameraOrientation + ", facing=" + mCameraFacing);
        Camera.Parameters parameters = mCamera.getParameters();
        // Calculate fps.
        List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
        int frameRateInMs = frameRate * 1000;
        boolean fpsIsSupported = false;
        int fpsMin = 0;
        int fpsMax = 0;
        Iterator itFpsRange = listFpsRange.iterator();
        while (itFpsRange.hasNext()) {
            int[] fpsRange = (int[]) itFpsRange.next();
            if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
                fpsIsSupported = true;
                fpsMin = fpsRange[0];
                fpsMax = fpsRange[1];
                break;
            }
        }
        if (!fpsIsSupported) {
            Log.e(TAG, "allocate: fps " + frameRate + " is not supported");
            return false;
        }
        mCurrentCapability = new CaptureCapability();
        mCurrentCapability.mDesiredFps = frameRate;
        // Calculate size.
        List<Camera.Size> listCameraSize = parameters.getSupportedPreviewSizes();
        int minDiff = Integer.MAX_VALUE;
        int matchedWidth = width;
        int matchedHeight = height;
        Iterator itCameraSize = listCameraSize.iterator();
        while (itCameraSize.hasNext()) {
            Camera.Size size = (Camera.Size) itCameraSize.next();
            int diff = Math.abs(size.width - width) + Math.abs(size.height - height);
            Log.d(TAG, "allocate: support resolution (" + size.width + ", " + size.height + "), diff=" + diff);
            // Pick the closest supported size whose width is a multiple of 32
            // (i.e., with no padding).
            if (diff < minDiff && (size.width % 32 == 0)) {
                minDiff = diff;
                matchedWidth = size.width;
                matchedHeight = size.height;
            }
        }
        if (minDiff == Integer.MAX_VALUE) {
            Log.e(TAG, "allocate: can not find a resolution whose width is a multiple of 32");
            return false;
        }
        mCurrentCapability.mWidth = matchedWidth;
        mCurrentCapability.mHeight = matchedHeight;
        Log.d(TAG, "allocate: matched width=" + matchedWidth + ", height=" + matchedHeight);
        parameters.setPreviewSize(matchedWidth, matchedHeight);
        parameters.setPreviewFormat(mPixelFormat);
        parameters.setPreviewFpsRange(fpsMin, fpsMax);
        mCamera.setParameters(parameters);
        // Set SurfaceTexture.
        mGlTextures = new int[1];
        // Generate one texture pointer and bind it as an external texture.
        GLES20.glGenTextures(1, mGlTextures, 0);
        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
        // No mip-mapping with camera source.
        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Clamp to edge is the only option for external textures.
        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
        mSurfaceTexture.setOnFrameAvailableListener(null);
        mCamera.setPreviewTexture(mSurfaceTexture);
        int bufSize = matchedWidth * matchedHeight * ImageFormat.getBitsPerPixel(mPixelFormat) / 8;
        for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
            byte[] buffer = new byte[bufSize];
            mCamera.addCallbackBuffer(buffer);
        }
        mExpectedFrameSize = bufSize;
    } catch (IOException ex) {
        Log.e(TAG, "allocate: " + ex);
        return false;
    }
    return true;
}
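The buffers queued with addCallbackBuffer above are only consumed once a preview callback is registered (in the project this happens when capture is started). Below is a minimal hedged sketch of that consuming side using only the standard android.hardware.Camera API; the method name and listener body are illustrative, not the project's actual code.

// Sketch only: drains the buffers queued by allocate() and returns each one
// to the camera after use. Assumes the deprecated android.hardware.Camera API.
private void startPreviewSketch(Camera camera, final int expectedFrameSize) {
    camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera cam) {
            if (data.length == expectedFrameSize) {
                // Hand the frame to the consumer here (copy or enqueue it).
            }
            // Give the buffer back so the camera can fill it again.
            cam.addCallbackBuffer(data);
        }
    });
    camera.startPreview();
}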
Use of android.graphics.SurfaceTexture in project platform_frameworks_base by android.
The class CameraSource, method open.
@Override
public void open(FilterContext context) {
    if (mLogVerbose)
        Log.v(TAG, "Opening");
    // Open camera
    mCamera = Camera.open(mCameraId);
    // Set parameters
    getCameraParameters();
    mCamera.setParameters(mCameraParameters);
    // Create frame formats
    createFormats();
    // Bind it to our camera frame
    mCameraFrame = (GLFrame) context.getFrameManager().newBoundFrame(mOutputFormat, GLFrame.EXTERNAL_TEXTURE, 0);
    mSurfaceTexture = new SurfaceTexture(mCameraFrame.getTextureId());
    try {
        mCamera.setPreviewTexture(mSurfaceTexture);
    } catch (IOException e) {
        throw new RuntimeException("Could not bind camera surface texture: " + e.getMessage() + "!");
    }
    // Connect SurfaceTexture to callback
    mSurfaceTexture.setOnFrameAvailableListener(onCameraFrameAvailableListener);
    // Start the preview
    mNewFrameAvailable = false;
    mCamera.startPreview();
}
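The matching teardown normally undoes these steps in reverse order. A hedged sketch, assuming the same fields that open() populates (this is not the project's actual close() implementation):

// Sketch of the matching teardown for open().
public void closeSketch() {
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.release();          // Return the camera to the system.
        mCamera = null;
    }
    if (mSurfaceTexture != null) {
        mSurfaceTexture.setOnFrameAvailableListener(null);
        mSurfaceTexture.release();  // Free the buffers held by the SurfaceTexture.
        mSurfaceTexture = null;
    }
}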
Use of android.graphics.SurfaceTexture in project platform_frameworks_base by android.
The class CameraDeviceBinderTest, method testCreateStreamTwo.
@SmallTest
public void testCreateStreamTwo() throws Exception {
    // Create first stream
    int streamId = mCameraUser.createStream(mOutputConfiguration);
    assertEquals(0, streamId);
    try {
        mCameraUser.createStream(mOutputConfiguration);
        fail("Created same stream twice");
    } catch (ServiceSpecificException e) {
        assertEquals("Created same stream twice", ICameraService.ERROR_ALREADY_EXISTS, e.errorCode);
    }
    // Create second stream with a different surface.
    SurfaceTexture surfaceTexture = new SurfaceTexture(/* ignored */ 0);
    surfaceTexture.setDefaultBufferSize(640, 480);
    Surface surface2 = new Surface(surfaceTexture);
    OutputConfiguration output2 = new OutputConfiguration(surface2);
    int streamId2 = mCameraUser.createStream(output2);
    assertEquals(1, streamId2);
    // Clean up streams
    mCameraUser.deleteStream(streamId);
    mCameraUser.deleteStream(streamId2);
}
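The second stream shows the general trick this test relies on: a SurfaceTexture wrapped in a Surface gives the camera service an off-screen output target with no UI involved. A small hedged helper capturing that pattern (the name is illustrative; in real code the SurfaceTexture and Surface should be released when the stream is deleted):

// Hedged helper: build an off-screen, camera-compatible Surface from a
// SurfaceTexture, as the test does for its second stream. The texture name 0
// is acceptable here because the texture is never sampled; only the
// SurfaceTexture's buffer queue is used.
static Surface newOffscreenSurface(int width, int height) {
    SurfaceTexture texture = new SurfaceTexture(0);
    texture.setDefaultBufferSize(width, height);
    return new Surface(texture);
}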
Use of android.graphics.SurfaceTexture in project platform_frameworks_base by android.
The class EGL14, method eglCreateWindowSurface.
public static EGLSurface eglCreateWindowSurface(EGLDisplay dpy, EGLConfig config, Object win, int[] attrib_list, int offset) {
    Surface sur = null;
    if (win instanceof SurfaceView) {
        SurfaceView surfaceView = (SurfaceView) win;
        sur = surfaceView.getHolder().getSurface();
    } else if (win instanceof SurfaceHolder) {
        SurfaceHolder holder = (SurfaceHolder) win;
        sur = holder.getSurface();
    } else if (win instanceof Surface) {
        sur = (Surface) win;
    }
    EGLSurface surface;
    if (sur != null) {
        surface = _eglCreateWindowSurface(dpy, config, sur, attrib_list, offset);
    } else if (win instanceof SurfaceTexture) {
        surface = _eglCreateWindowSurfaceTexture(dpy, config, win, attrib_list, offset);
    } else {
        throw new java.lang.UnsupportedOperationException("eglCreateWindowSurface() can only be called with an instance of " + "Surface, SurfaceView, SurfaceTexture or SurfaceHolder at the moment, " + "this will be fixed later.");
    }
    return surface;
}
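From the caller's side, the SurfaceTexture branch is what allows rendering into a SurfaceTexture through EGL14 directly. A hedged usage sketch; it assumes the caller already holds an initialized EGLDisplay, a chosen EGLConfig, and an EGLContext, and the method name is illustrative:

// Sketch: create a window surface backed by a SurfaceTexture and make it current.
static EGLSurface makeSurfaceTextureTarget(EGLDisplay display, EGLConfig config,
        EGLContext context, SurfaceTexture target) {
    int[] surfaceAttribs = { EGL14.EGL_NONE };
    EGLSurface windowSurface =
            EGL14.eglCreateWindowSurface(display, config, target, surfaceAttribs, 0);
    EGL14.eglMakeCurrent(display, windowSurface, windowSurface, context);
    return windowSurface;
}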
Use of android.graphics.SurfaceTexture in project CameraFilter by nekocode.
The class CameraRenderer, method run.
@Override
public void run() {
    initGL(surfaceTexture);
    // Setup camera filters map
    cameraFilterMap.append(R.id.filter0, new OriginalFilter(context));
    cameraFilterMap.append(R.id.filter1, new EdgeDetectionFilter(context));
    cameraFilterMap.append(R.id.filter2, new PixelizeFilter(context));
    cameraFilterMap.append(R.id.filter3, new EMInterferenceFilter(context));
    cameraFilterMap.append(R.id.filter4, new TrianglesMosaicFilter(context));
    cameraFilterMap.append(R.id.filter5, new LegofiedFilter(context));
    cameraFilterMap.append(R.id.filter6, new TileMosaicFilter(context));
    cameraFilterMap.append(R.id.filter7, new BlueorangeFilter(context));
    cameraFilterMap.append(R.id.filter8, new ChromaticAberrationFilter(context));
    cameraFilterMap.append(R.id.filter9, new BasicDeformFilter(context));
    cameraFilterMap.append(R.id.filter10, new ContrastFilter(context));
    cameraFilterMap.append(R.id.filter11, new NoiseWarpFilter(context));
    cameraFilterMap.append(R.id.filter12, new RefractionFilter(context));
    cameraFilterMap.append(R.id.filter13, new MappingFilter(context));
    cameraFilterMap.append(R.id.filter14, new CrosshatchFilter(context));
    cameraFilterMap.append(R.id.filter15, new LichtensteinEsqueFilter(context));
    cameraFilterMap.append(R.id.filter16, new AsciiArtFilter(context));
    cameraFilterMap.append(R.id.filter17, new MoneyFilter(context));
    cameraFilterMap.append(R.id.filter18, new CrackedFilter(context));
    cameraFilterMap.append(R.id.filter19, new PolygonizationFilter(context));
    cameraFilterMap.append(R.id.filter20, new JFAVoronoiFilter(context));
    setSelectedFilter(selectedFilterId);
    // Create texture for camera preview
    cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    cameraSurfaceTexture = new SurfaceTexture(cameraTextureId);
    // Start camera preview
    try {
        camera.setPreviewTexture(cameraSurfaceTexture);
        camera.startPreview();
    } catch (IOException ioe) {
        // Something bad happened
    }
    // Render loop
    while (!Thread.currentThread().isInterrupted()) {
        try {
            // Negative dimensions flag a pending viewport update; apply it once
            // and store the values back as positive.
            if (gwidth < 0 && gheight < 0)
                GLES20.glViewport(0, 0, gwidth = -gwidth, gheight = -gheight);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            // Update the camera preview texture
            synchronized (this) {
                cameraSurfaceTexture.updateTexImage();
            }
            // Draw camera preview
            selectedFilter.draw(cameraTextureId, gwidth, gheight);
            // Flush
            GLES20.glFlush();
            egl10.eglSwapBuffers(eglDisplay, eglSurface);
            Thread.sleep(DRAW_INTERVAL);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
    cameraSurfaceTexture.release();
    GLES20.glDeleteTextures(1, new int[] { cameraTextureId }, 0);
}
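Note that this render loop redraws on a fixed DRAW_INTERVAL rather than reacting to camera frames. The event-driven alternative is to register an OnFrameAvailableListener and only call updateTexImage() when a frame has actually arrived; a hedged sketch follows (field names are illustrative, not from the project):

// Sketch: let the SurfaceTexture signal frame arrival instead of polling.
private volatile boolean frameAvailable = false;

private final SurfaceTexture.OnFrameAvailableListener frameListener =
        new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                // Called on an arbitrary thread; just flag the render thread.
                frameAvailable = true;
            }
        };

// On the render thread, before drawing:
//     if (frameAvailable) {
//         cameraSurfaceTexture.updateTexImage();
//         frameAvailable = false;
//     }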