Example 66 with SurfaceTexture

Use of android.graphics.SurfaceTexture in project chromeview by pwnall.

The class VideoCapture, method allocate:

// Returns true on success, false otherwise.
@CalledByNative
public boolean allocate(int width, int height, int frameRate) {
    Log.d(TAG, "allocate: requested width=" + width + ", height=" + height + ", frameRate=" + frameRate);
    try {
        mCamera = Camera.open(mId);
        Camera.CameraInfo camera_info = new Camera.CameraInfo();
        Camera.getCameraInfo(mId, camera_info);
        mCameraOrientation = camera_info.orientation;
        mCameraFacing = camera_info.facing;
        mDeviceOrientation = getDeviceOrientation();
        Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation + ", camera orientation=" + mCameraOrientation + ", facing=" + mCameraFacing);
        Camera.Parameters parameters = mCamera.getParameters();
        // Calculate fps.
        List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
        int frameRateInMs = frameRate * 1000;
        boolean fpsIsSupported = false;
        int fpsMin = 0;
        int fpsMax = 0;
        Iterator<int[]> itFpsRange = listFpsRange.iterator();
        while (itFpsRange.hasNext()) {
            int[] fpsRange = itFpsRange.next();
            if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
                fpsIsSupported = true;
                fpsMin = fpsRange[0];
                fpsMax = fpsRange[1];
                break;
            }
        }
        if (!fpsIsSupported) {
            Log.e(TAG, "allocate: fps " + frameRate + " is not supported");
            return false;
        }
        mCurrentCapability = new CaptureCapability();
        mCurrentCapability.mDesiredFps = frameRate;
        // Calculate size.
        List<Camera.Size> listCameraSize = parameters.getSupportedPreviewSizes();
        int minDiff = Integer.MAX_VALUE;
        int matchedWidth = width;
        int matchedHeight = height;
        Iterator<Camera.Size> itCameraSize = listCameraSize.iterator();
        while (itCameraSize.hasNext()) {
            Camera.Size size = itCameraSize.next();
            int diff = Math.abs(size.width - width) + Math.abs(size.height - height);
            Log.d(TAG, "allocate: support resolution (" + size.width + ", " + size.height + "), diff=" + diff);
            // Prefer sizes whose width is a multiple of 32 so the GPU can
            // consume the buffers directly (i.e., with no padding).
            if (diff < minDiff && (size.width % 32 == 0)) {
                minDiff = diff;
                matchedWidth = size.width;
                matchedHeight = size.height;
            }
        }
        if (minDiff == Integer.MAX_VALUE) {
            Log.e(TAG, "allocate: can not find a resolution whose width " + "is multiple of 32");
            return false;
        }
        mCurrentCapability.mWidth = matchedWidth;
        mCurrentCapability.mHeight = matchedHeight;
        Log.d(TAG, "allocate: matched width=" + matchedWidth + ", height=" + matchedHeight);
        parameters.setPreviewSize(matchedWidth, matchedHeight);
        parameters.setPreviewFormat(mPixelFormat);
        parameters.setPreviewFpsRange(fpsMin, fpsMax);
        mCamera.setParameters(parameters);
        // Set SurfaceTexture.
        mGlTextures = new int[1];
        // Generate one texture pointer and bind it as an external texture.
        GLES20.glGenTextures(1, mGlTextures, 0);
        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
        // No mip-mapping with camera source.
        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // Clamp to edge is only option.
        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
        // No listener needed: frames are delivered through the camera
        // callback buffers added below, not through the SurfaceTexture.
        mSurfaceTexture.setOnFrameAvailableListener(null);
        mCamera.setPreviewTexture(mSurfaceTexture);
        int bufSize = matchedWidth * matchedHeight * ImageFormat.getBitsPerPixel(mPixelFormat) / 8;
        for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
            byte[] buffer = new byte[bufSize];
            mCamera.addCallbackBuffer(buffer);
        }
        mExpectedFrameSize = bufSize;
    } catch (IOException ex) {
        Log.e(TAG, "allocate: " + ex);
        return false;
    }
    return true;
}
Also used : IOException(java.io.IOException) SurfaceTexture(android.graphics.SurfaceTexture) Iterator(java.util.Iterator) Camera(android.hardware.Camera) CalledByNative(org.chromium.base.CalledByNative)
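
Distilled, the allocation above follows one pattern: generate a GL texture, bind it as an external OES texture, wrap it in a SurfaceTexture, and hand that to the camera. A minimal sketch of just that wiring, assuming a current EGL/GL context and an already-opened Camera (the local names are illustrative, not from the project):

// Minimal sketch; assumes a current GL context and an opened
// android.hardware.Camera named "camera" (illustrative).
int[] tex = new int[1];
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
// External OES textures have no mip levels; linear filtering and
// clamp-to-edge are the standard choices for camera frames.
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
SurfaceTexture previewTexture = new SurfaceTexture(tex[0]);
try {
    camera.setPreviewTexture(previewTexture);  // throws IOException on failure
    camera.startPreview();
} catch (IOException e) {
    Log.e(TAG, "could not attach preview texture", e);
}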

Example 67 with SurfaceTexture

Use of android.graphics.SurfaceTexture in project platform_frameworks_base by android.

The class CameraSource, method open:

@Override
public void open(FilterContext context) {
    if (mLogVerbose)
        Log.v(TAG, "Opening");
    // Open camera
    mCamera = Camera.open(mCameraId);
    // Set parameters
    getCameraParameters();
    mCamera.setParameters(mCameraParameters);
    // Create frame formats
    createFormats();
    // Bind it to our camera frame
    mCameraFrame = (GLFrame) context.getFrameManager().newBoundFrame(mOutputFormat, GLFrame.EXTERNAL_TEXTURE, 0);
    mSurfaceTexture = new SurfaceTexture(mCameraFrame.getTextureId());
    try {
        mCamera.setPreviewTexture(mSurfaceTexture);
    } catch (IOException e) {
        throw new RuntimeException("Could not bind camera surface texture: " + e.getMessage() + "!");
    }
    // Connect SurfaceTexture to callback
    mSurfaceTexture.setOnFrameAvailableListener(onCameraFrameAvailableListener);
    // Start the preview
    mNewFrameAvailable = false;
    mCamera.startPreview();
}
Also used : SurfaceTexture(android.graphics.SurfaceTexture) IOException(java.io.IOException)
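
The listener is the key detail here: onFrameAvailable is delivered on an arbitrary thread, while updateTexImage() must run on the thread that owns the GL context. A hedged sketch of the usual wiring with a GLSurfaceView-based renderer (the helper and its names are illustrative, not part of the example above):

// Sketch: route frame-available callbacks onto the GL thread.
// "glView" is a hypothetical GLSurfaceView that owns the GL context.
static void wireFrameCallback(final GLSurfaceView glView, SurfaceTexture texture) {
    texture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
        @Override
        public void onFrameAvailable(final SurfaceTexture st) {
            // Do not call updateTexImage() here; queue it on the GL thread.
            glView.queueEvent(new Runnable() {
                @Override
                public void run() {
                    st.updateTexImage();    // latch the newest frame into the texture
                    glView.requestRender(); // redraw (with RENDERMODE_WHEN_DIRTY)
                }
            });
        }
    });
}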

Example 68 with SurfaceTexture

Use of android.graphics.SurfaceTexture in project platform_frameworks_base by android.

The class CameraDeviceBinderTest, method testCreateStreamTwo:

@SmallTest
public void testCreateStreamTwo() throws Exception {
    // Create first stream
    int streamId = mCameraUser.createStream(mOutputConfiguration);
    assertEquals(0, streamId);
    try {
        mCameraUser.createStream(mOutputConfiguration);
        fail("Created same stream twice");
    } catch (ServiceSpecificException e) {
        assertEquals("Created same stream twice", ICameraService.ERROR_ALREADY_EXISTS, e.errorCode);
    }
    // Create second stream with a different surface.
    SurfaceTexture surfaceTexture = new SurfaceTexture(/* ignored */ 0);
    surfaceTexture.setDefaultBufferSize(640, 480);
    Surface surface2 = new Surface(surfaceTexture);
    OutputConfiguration output2 = new OutputConfiguration(surface2);
    int streamId2 = mCameraUser.createStream(output2);
    assertEquals(1, streamId2);
    // Clean up streams
    mCameraUser.deleteStream(streamId);
    mCameraUser.deleteStream(streamId2);
}
Also used : ServiceSpecificException(android.os.ServiceSpecificException) SurfaceTexture(android.graphics.SurfaceTexture) OutputConfiguration(android.hardware.camera2.params.OutputConfiguration) Surface(android.view.Surface) SmallTest(android.test.suitebuilder.annotation.SmallTest)
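
The same trick is generally useful in tests: a SurfaceTexture-backed Surface gives camera2 an output target with no UI attached. A minimal sketch of a headless output factory, following the /* ignored */ 0 idiom of the test above (the helper name is illustrative):

// Sketch: a headless output Surface for camera2 tests. The texture name
// does not matter when nothing ever samples the frames.
static OutputConfiguration makeTestOutput(int width, int height) {
    SurfaceTexture texture = new SurfaceTexture(/* texName, ignored */ 0);
    texture.setDefaultBufferSize(width, height);  // size of buffers handed to the producer
    return new OutputConfiguration(new Surface(texture));
}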

Example 69 with SurfaceTexture

Use of android.graphics.SurfaceTexture in project platform_frameworks_base by android.

The class EGL14, method eglCreateWindowSurface:

public static EGLSurface eglCreateWindowSurface(EGLDisplay dpy, EGLConfig config, Object win, int[] attrib_list, int offset) {
    Surface sur = null;
    if (win instanceof SurfaceView) {
        SurfaceView surfaceView = (SurfaceView) win;
        sur = surfaceView.getHolder().getSurface();
    } else if (win instanceof SurfaceHolder) {
        SurfaceHolder holder = (SurfaceHolder) win;
        sur = holder.getSurface();
    } else if (win instanceof Surface) {
        sur = (Surface) win;
    }
    EGLSurface surface;
    if (sur != null) {
        surface = _eglCreateWindowSurface(dpy, config, sur, attrib_list, offset);
    } else if (win instanceof SurfaceTexture) {
        surface = _eglCreateWindowSurfaceTexture(dpy, config, win, attrib_list, offset);
    } else {
        throw new java.lang.UnsupportedOperationException("eglCreateWindowSurface() can only be called with an instance of " + "Surface, SurfaceView, SurfaceTexture or SurfaceHolder at the moment, " + "this will be fixed later.");
    }
    return surface;
}
Also used : SurfaceHolder(android.view.SurfaceHolder) SurfaceTexture(android.graphics.SurfaceTexture) SurfaceView(android.view.SurfaceView) Surface(android.view.Surface)
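
The dispatch order matters: anything that can yield a Surface is unwrapped first, and only a bare SurfaceTexture takes the _eglCreateWindowSurfaceTexture path. Practically, this means a SurfaceTexture can be passed straight in as the native window, as in this sketch (EGLDisplay and EGLConfig initialization are assumed done elsewhere):

// Sketch: create an EGL window surface directly on a SurfaceTexture.
static EGLSurface surfaceFor(EGLDisplay display, EGLConfig config, SurfaceTexture texture) {
    int[] attribs = { EGL14.EGL_NONE };  // no extra surface attributes
    return EGL14.eglCreateWindowSurface(display, config, texture, attribs, 0);
}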

Example 70 with SurfaceTexture

Use of android.graphics.SurfaceTexture in project CameraFilter by nekocode.

The class CameraRenderer, method run:

@Override
public void run() {
    initGL(surfaceTexture);
    // Setup camera filters map
    cameraFilterMap.append(R.id.filter0, new OriginalFilter(context));
    cameraFilterMap.append(R.id.filter1, new EdgeDetectionFilter(context));
    cameraFilterMap.append(R.id.filter2, new PixelizeFilter(context));
    cameraFilterMap.append(R.id.filter3, new EMInterferenceFilter(context));
    cameraFilterMap.append(R.id.filter4, new TrianglesMosaicFilter(context));
    cameraFilterMap.append(R.id.filter5, new LegofiedFilter(context));
    cameraFilterMap.append(R.id.filter6, new TileMosaicFilter(context));
    cameraFilterMap.append(R.id.filter7, new BlueorangeFilter(context));
    cameraFilterMap.append(R.id.filter8, new ChromaticAberrationFilter(context));
    cameraFilterMap.append(R.id.filter9, new BasicDeformFilter(context));
    cameraFilterMap.append(R.id.filter10, new ContrastFilter(context));
    cameraFilterMap.append(R.id.filter11, new NoiseWarpFilter(context));
    cameraFilterMap.append(R.id.filter12, new RefractionFilter(context));
    cameraFilterMap.append(R.id.filter13, new MappingFilter(context));
    cameraFilterMap.append(R.id.filter14, new CrosshatchFilter(context));
    cameraFilterMap.append(R.id.filter15, new LichtensteinEsqueFilter(context));
    cameraFilterMap.append(R.id.filter16, new AsciiArtFilter(context));
    cameraFilterMap.append(R.id.filter17, new MoneyFilter(context));
    cameraFilterMap.append(R.id.filter18, new CrackedFilter(context));
    cameraFilterMap.append(R.id.filter19, new PolygonizationFilter(context));
    cameraFilterMap.append(R.id.filter20, new JFAVoronoiFilter(context));
    setSelectedFilter(selectedFilterId);
    // Create texture for camera preview
    cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    cameraSurfaceTexture = new SurfaceTexture(cameraTextureId);
    // Start camera preview
    try {
        camera.setPreviewTexture(cameraSurfaceTexture);
        camera.startPreview();
    } catch (IOException ioe) {
        // Preview could not be attached; the render loop below still runs,
        // just without camera frames.
    }
    // Render loop
    while (!Thread.currentThread().isInterrupted()) {
        try {
            // Negative dimensions signal a pending resize; apply the new
            // viewport once and flip the stored values back to positive.
            if (gwidth < 0 && gheight < 0)
                GLES20.glViewport(0, 0, gwidth = -gwidth, gheight = -gheight);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            // Update the camera preview texture
            synchronized (this) {
                cameraSurfaceTexture.updateTexImage();
            }
            // Draw camera preview
            selectedFilter.draw(cameraTextureId, gwidth, gheight);
            // Flush
            GLES20.glFlush();
            egl10.eglSwapBuffers(eglDisplay, eglSurface);
            Thread.sleep(DRAW_INTERVAL);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
    cameraSurfaceTexture.release();
    GLES20.glDeleteTextures(1, new int[] { cameraTextureId }, 0);
}
Also used : ChromaticAberrationFilter(cn.nekocode.camerafilter.filter.ChromaticAberrationFilter) ContrastFilter(cn.nekocode.camerafilter.filter.ContrastFilter) NoiseWarpFilter(cn.nekocode.camerafilter.filter.NoiseWarpFilter) TileMosaicFilter(cn.nekocode.camerafilter.filter.TileMosaicFilter) CrosshatchFilter(cn.nekocode.camerafilter.filter.CrosshatchFilter) MoneyFilter(cn.nekocode.camerafilter.filter.MoneyFilter) PixelizeFilter(cn.nekocode.camerafilter.filter.PixelizeFilter) IOException(java.io.IOException) MappingFilter(cn.nekocode.camerafilter.filter.MappingFilter) LegofiedFilter(cn.nekocode.camerafilter.filter.LegofiedFilter) PolygonizationFilter(cn.nekocode.camerafilter.filter.PolygonizationFilter) BlueorangeFilter(cn.nekocode.camerafilter.filter.BlueorangeFilter) EdgeDetectionFilter(cn.nekocode.camerafilter.filter.EdgeDetectionFilter) BasicDeformFilter(cn.nekocode.camerafilter.filter.BasicDeformFilter) CrackedFilter(cn.nekocode.camerafilter.filter.CrackedFilter) SurfaceTexture(android.graphics.SurfaceTexture) RefractionFilter(cn.nekocode.camerafilter.filter.RefractionFilter) TrianglesMosaicFilter(cn.nekocode.camerafilter.filter.TrianglesMosaicFilter) AsciiArtFilter(cn.nekocode.camerafilter.filter.AsciiArtFilter) OriginalFilter(cn.nekocode.camerafilter.filter.OriginalFilter) EMInterferenceFilter(cn.nekocode.camerafilter.filter.EMInterferenceFilter) LichtensteinEsqueFilter(cn.nekocode.camerafilter.filter.LichtensteinEsqueFilter) JFAVoronoiFilter(cn.nekocode.camerafilter.filter.JFAVoronoiFilter)
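
Two details of this loop generalize: updateTexImage() may only be called on the thread holding the EGL context (hence the dedicated render thread and the synchronized block), and each latched frame carries a transform matrix that should be applied to the texture coordinates. A sketch of the per-frame core, with the actual draw elided (the helper name is illustrative):

// Sketch: per-frame step of a SurfaceTexture render loop. Must run on the
// thread that owns the EGL context; drawing itself is elided.
static void latchFrame(SurfaceTexture texture, float[] texMatrix) {
    texture.updateTexImage();              // bind the newest frame to the OES texture
    texture.getTransformMatrix(texMatrix); // 4x4 UV transform for this frame
    // ... draw a full-screen quad sampling samplerExternalOES, transforming
    // texture coordinates by texMatrix ...
}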

Aggregations

SurfaceTexture (android.graphics.SurfaceTexture) 99
Surface (android.view.Surface) 49
SurfaceView (android.view.SurfaceView) 16
IOException (java.io.IOException) 16
SurfaceHolder (android.view.SurfaceHolder) 14
EGLSurface (android.opengl.EGLSurface) 13
OutputConfiguration (android.hardware.camera2.params.OutputConfiguration) 5
ServiceSpecificException (android.os.ServiceSpecificException) 5
SmallTest (android.test.suitebuilder.annotation.SmallTest) 5
Size (android.util.Size) 5
RequiresPermission (android.support.annotation.RequiresPermission) 4
TextureView (android.view.TextureView) 4
PrintWriter (java.io.PrintWriter) 3
StringWriter (java.io.StringWriter) 3
Camera (android.hardware.Camera) 2
WindowManager (android.view.WindowManager) 2
FrameLayout (android.widget.FrameLayout) 2
ByteBuffer (java.nio.ByteBuffer) 2
CalledByNative (org.chromium.base.CalledByNative) 2
Size (android.hardware.Camera.Size) 1