Use of android.filterfw.core.GLEnvironment in project android_frameworks_base by ParanoidAndroid.
The class EffectContext, method initInCurrentGlContext:
private void initInCurrentGlContext() {
    if (!GLEnvironment.isAnyContextActive()) {
        throw new RuntimeException("Attempting to initialize EffectContext with no active "
                + "GL context!");
    }
    GLEnvironment glEnvironment = new GLEnvironment();
    glEnvironment.initWithCurrentContext();
    mFilterContext.initGLEnvironment(glEnvironment);
}
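For context, this method is reached when an application creates an EffectContext on a thread that already owns a GL context, typically from inside GLSurfaceView.Renderer callbacks. A minimal usage sketch, assuming a GLES 2.0 GLSurfaceView; the renderer class and effect choice are illustrative, not part of the source above:

import android.media.effect.Effect;
import android.media.effect.EffectContext;
import android.media.effect.EffectFactory;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class EffectRenderer implements GLSurfaceView.Renderer {
    private EffectContext mEffectContext;
    private Effect mEffect;

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Runs on the GL thread with an active context, which is exactly the
        // precondition initInCurrentGlContext() checks via isAnyContextActive().
        mEffectContext = EffectContext.createWithCurrentGlContext();
        EffectFactory factory = mEffectContext.getFactory();
        mEffect = factory.createEffect(EffectFactory.EFFECT_GRAYSCALE);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) { }

    @Override
    public void onDrawFrame(GL10 gl) {
        // mEffect.apply(inputTexId, width, height, outputTexId) would be called here.
    }
}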
Use of android.filterfw.core.GLEnvironment in project android_frameworks_base by ParanoidAndroid.
The class MediaEncoderFilter, method stopRecording:
private void stopRecording(FilterContext context) {
    if (mLogVerbose) Log.v(TAG, "Stopping recording");
    mRecordingActive = false;
    mNumFramesEncoded = 0;
    GLEnvironment glEnv = context.getGLEnvironment();
    // Unregistering deactivates and destroys the encoder surface, so this has to
    // be called before calling Stop on the mediarecorder.
    if (mLogVerbose) Log.v(TAG, String.format("Unregistering surface %d", mSurfaceId));
    glEnv.unregisterSurfaceId(mSurfaceId);
    try {
        mMediaRecorder.stop();
    } catch (RuntimeException e) {
        throw new MediaRecorderStopException("MediaRecorder.stop() failed!", e);
    }
    mMediaRecorder.release();
    mMediaRecorder = null;
    mScreen.release();
    mScreen = null;
    // The output file is not ready until the MediaRecorder has been stopped and
    // released, so notifying the listener is the last thing that has
    // to be done to finalize media.
    if (mRecordingDoneListener != null) {
        mRecordingDoneListener.onRecordingDone();
    }
}
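The surface id that stopRecording() unregisters is created when recording starts. A minimal sketch of that counterpart, under the assumption that the filter registers the MediaRecorder's input surface with GLEnvironment.registerSurfaceFromMediaRecorder() as in recent versions of this filter; the configuration steps are abbreviated and the error handling is illustrative:

private void startRecording(FilterContext context) {
    GLEnvironment glEnv = context.getGLEnvironment();
    mMediaRecorder = new MediaRecorder();
    // ... configure audio/video sources, output format, output file, frame size, etc. ...
    try {
        mMediaRecorder.prepare();
    } catch (Exception e) {
        throw new RuntimeException("MediaRecorder.prepare() failed!", e);
    }
    // Register the recorder's input surface with the GL environment. The returned
    // id is what process() activates and what stopRecording() unregisters above.
    mSurfaceId = glEnv.registerSurfaceFromMediaRecorder(mMediaRecorder);
    mMediaRecorder.start();
    mRecordingActive = true;
    mNumFramesEncoded = 0;
}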
Use of android.filterfw.core.GLEnvironment in project platform_frameworks_base by android.
The class SurfaceTextureTarget, method process:
@Override
public synchronized void process(FilterContext context) {
    // Surface is not registered. Nothing to render into.
    if (mSurfaceId <= 0) {
        return;
    }
    GLEnvironment glEnv = context.getGLEnvironment();
    // Get input frame
    Frame input = pullInput("frame");
    boolean createdFrame = false;
    float currentAspectRatio =
            (float) input.getFormat().getWidth() / input.getFormat().getHeight();
    if (currentAspectRatio != mAspectRatio) {
        if (mLogVerbose) {
            Log.v(TAG, "Process. New aspect ratio: " + currentAspectRatio
                    + ", previously: " + mAspectRatio + ". Thread: " + Thread.currentThread());
        }
        mAspectRatio = currentAspectRatio;
        updateTargetRect();
    }
    // See if we need to copy to GPU
    Frame gpuFrame = null;
    int target = input.getFormat().getTarget();
    if (target != FrameFormat.TARGET_GPU) {
        gpuFrame = context.getFrameManager().duplicateFrameToTarget(input, FrameFormat.TARGET_GPU);
        createdFrame = true;
    } else {
        gpuFrame = input;
    }
    // Activate our surface
    glEnv.activateSurfaceWithId(mSurfaceId);
    // Process
    mProgram.process(gpuFrame, mScreen);
    glEnv.setSurfaceTimestamp(input.getTimestamp());
    // And swap buffers
    glEnv.swapBuffers();
    if (createdFrame) {
        gpuFrame.release();
    }
}
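The mSurfaceId guarded at the top of process() comes from registering the target SurfaceTexture with the GL environment. A minimal sketch of that register/unregister pairing, assuming GLEnvironment's registerSurfaceTexture(SurfaceTexture, width, height) and unregisterSurfaceId(int); the helper method names and screen-size fields are illustrative:

private void registerTargetSurface(FilterContext context) {
    GLEnvironment glEnv = context.getGLEnvironment();
    if (mSurfaceTexture != null && mSurfaceId <= 0) {
        // A surface id > 0 marks the target as renderable for process().
        mSurfaceId = glEnv.registerSurfaceTexture(mSurfaceTexture, mScreenWidth, mScreenHeight);
    }
}

private void unregisterTargetSurface(FilterContext context) {
    if (mSurfaceId > 0) {
        context.getGLEnvironment().unregisterSurfaceId(mSurfaceId);
        mSurfaceId = -1;
    }
}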
Use of android.filterfw.core.GLEnvironment in project platform_frameworks_base by android.
The class MediaEncoderFilter, method process:
@Override
public void process(FilterContext context) {
    GLEnvironment glEnv = context.getGLEnvironment();
    // Get input frame
    Frame input = pullInput("videoframe");
    // Check if recording needs to start
    if (!mRecordingActive && mRecording) {
        startRecording(context);
    }
    // Check if recording needs to stop
    if (mRecordingActive && !mRecording) {
        stopRecording(context);
    }
    if (!mRecordingActive) return;
    if (mCaptureTimeLapse) {
        if (skipFrameAndModifyTimestamp(input.getTimestamp())) {
            return;
        }
    } else {
        mTimestampNs = input.getTimestamp();
    }
    // Activate our surface
    glEnv.activateSurfaceWithId(mSurfaceId);
    // Process
    mProgram.process(input, mScreen);
    // Set timestamp from input
    glEnv.setSurfaceTimestamp(mTimestampNs);
    // And swap buffers
    glEnv.swapBuffers();
    mNumFramesEncoded++;
}
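The mRecording flag that process() compares against mRecordingActive is exposed as a field port, so a client toggles recording from outside the graph and the change takes effect on the next frame processed on the GL thread. A minimal sketch, assuming the encoder filter is registered in the loaded FilterGraph under the name "recorder" and that the port is named "recording" as in this class; the graph and filter names are illustrative:

Filter recorder = filterGraph.getFilter("recorder");
recorder.setInputValue("recording", true);   // startRecording() runs on the next process()
// ... record for a while ...
recorder.setInputValue("recording", false);  // stopRecording() runs on the next process()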
Use of android.filterfw.core.GLEnvironment in project platform_frameworks_base by android.
The class MffEnvironment, method createGLEnvironment:
/**
 * Create and activate a new GL environment for use in this filter context.
 */
public void createGLEnvironment() {
    GLEnvironment glEnvironment = new GLEnvironment();
    glEnvironment.initWithNewContext();
    setGLEnvironment(glEnvironment);
}
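In practice createGLEnvironment() is used when the MFF environment is driven from a thread that does not already own a GL context, for example through GraphEnvironment, which extends MffEnvironment. A minimal usage sketch; the graph resource and execution mode are illustrative assumptions:

GraphEnvironment graphEnv = new GraphEnvironment();
// Back the filter context with a newly created GL context.
graphEnv.createGLEnvironment();
// Load a graph description (hypothetical raw resource) and run it asynchronously.
int graphId = graphEnv.loadGraph(context, R.raw.my_filter_graph);
GraphRunner runner = graphEnv.getRunner(graphId, GraphEnvironment.MODE_ASYNCHRONOUS);
runner.run();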