Use of android.filterfw.core.GLEnvironment in project android_frameworks_base by crdroidandroid: the MediaEncoderFilter.process method.
@Override
public void process(FilterContext context) {
    GLEnvironment glEnv = context.getGLEnvironment();
    // Get input frame
    Frame input = pullInput("videoframe");
    // Check if recording needs to start
    if (!mRecordingActive && mRecording) {
        startRecording(context);
    }
    // Check if recording needs to stop
    if (mRecordingActive && !mRecording) {
        stopRecording(context);
    }
    if (!mRecordingActive)
        return;
    if (mCaptureTimeLapse) {
        if (skipFrameAndModifyTimestamp(input.getTimestamp())) {
            return;
        }
    } else {
        mTimestampNs = input.getTimestamp();
    }
    // Activate our surface
    glEnv.activateSurfaceWithId(mSurfaceId);
    // Process
    mProgram.process(input, mScreen);
    // Set timestamp from input
    glEnv.setSurfaceTimestamp(mTimestampNs);
    // And swap buffers
    glEnv.swapBuffers();
    mNumFramesEncoded++;
}
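The surface id that process() activates is produced earlier, when recording starts and the MediaRecorder's input surface is registered with the GL environment. A minimal, hedged sketch of that step follows; it assumes GLEnvironment.registerSurfaceFromMediaRecorder and omits the MediaRecorder configuration (sources, profile, output file, frame rate) that the real filter performs.

    // Sketch only: simplified registration of the encoder surface.
    private void startRecording(FilterContext context) {
        GLEnvironment glEnv = context.getGLEnvironment();
        mMediaRecorder = new MediaRecorder();
        // (real code configures sources, profile, output file, frame rate, etc. here)
        try {
            mMediaRecorder.prepare();
        } catch (IOException e) {
            throw new RuntimeException("MediaRecorder.prepare() failed!", e);
        }
        // Register the recorder's input surface with the GL environment; the returned id
        // is what process() later passes to activateSurfaceWithId().
        mSurfaceId = glEnv.registerSurfaceFromMediaRecorder(mMediaRecorder);
        mMediaRecorder.start();
        mRecordingActive = true;
    }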
Use of android.filterfw.core.GLEnvironment in project android_frameworks_base by crdroidandroid: the MediaEncoderFilter.stopRecording method.
private void stopRecording(FilterContext context) {
    if (mLogVerbose)
        Log.v(TAG, "Stopping recording");
    mRecordingActive = false;
    mNumFramesEncoded = 0;
    GLEnvironment glEnv = context.getGLEnvironment();
    // Unregistering the surface switches the GL context away from it and destroys the
    // EGL surface, so it must be called before calling stop() on the MediaRecorder.
    if (mLogVerbose)
        Log.v(TAG, String.format("Unregistering surface %d", mSurfaceId));
    glEnv.unregisterSurfaceId(mSurfaceId);
    try {
        mMediaRecorder.stop();
    } catch (RuntimeException e) {
        throw new MediaRecorderStopException("MediaRecorder.stop() failed!", e);
    }
    mMediaRecorder.release();
    mMediaRecorder = null;
    mScreen.release();
    mScreen = null;
    // Notify the listener that recording is done so the media file can be finalized.
    if (mRecordingDoneListener != null) {
        mRecordingDoneListener.onRecordingDone();
    }
}
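The mRecordingDoneListener invoked above is supplied by the host application. A hedged sketch of wiring such a callback through the standard filter-framework input mechanism follows; the port name "recordingDoneListener", the graph filter name "recorder", and the listener interface name are assumptions, not taken from the snippet.

    // Sketch only: supplying a recording-done callback to the encoder filter.
    Filter encoder = graph.getFilter("recorder"); // assumed name of the MediaEncoderFilter node
    encoder.setInputValue("recordingDoneListener",
            new MediaEncoderFilter.OnRecordingDoneListener() {
                @Override
                public void onRecordingDone() {
                    // Runs after MediaRecorder.stop()/release(), so the output file
                    // can be finalized (e.g. thumbnail generation).
                    Log.v(TAG, "Recording finalized");
                }
            });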
Use of android.filterfw.core.GLEnvironment in project android_frameworks_base by crdroidandroid: the SurfaceTextureTarget.process method.
@Override
public synchronized void process(FilterContext context) {
    // Surface is not registered. Nothing to render into.
    if (mSurfaceId <= 0) {
        return;
    }
    GLEnvironment glEnv = context.getGLEnvironment();
    // Get input frame
    Frame input = pullInput("frame");
    boolean createdFrame = false;
    float currentAspectRatio =
            (float) input.getFormat().getWidth() / input.getFormat().getHeight();
    if (currentAspectRatio != mAspectRatio) {
        if (mLogVerbose) {
            Log.v(TAG, "Process. New aspect ratio: " + currentAspectRatio
                    + ", previously: " + mAspectRatio + ". Thread: " + Thread.currentThread());
        }
        mAspectRatio = currentAspectRatio;
        updateTargetRect();
    }
    // See if we need to copy to GPU
    Frame gpuFrame = null;
    int target = input.getFormat().getTarget();
    if (target != FrameFormat.TARGET_GPU) {
        gpuFrame = context.getFrameManager().duplicateFrameToTarget(input, FrameFormat.TARGET_GPU);
        createdFrame = true;
    } else {
        gpuFrame = input;
    }
    // Activate our surface
    glEnv.activateSurfaceWithId(mSurfaceId);
    // Process
    mProgram.process(gpuFrame, mScreen);
    glEnv.setSurfaceTimestamp(input.getTimestamp());
    // And swap buffers
    glEnv.swapBuffers();
    if (createdFrame) {
        gpuFrame.release();
    }
}
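As in the encoder case, the mSurfaceId checked at the top of this method comes from an earlier registration, here of the target SurfaceTexture. A hedged sketch of that step follows; field names mirror the snippet above, while the helper name and the exact point in the filter's lifecycle where it runs are assumptions.

    // Sketch only: registering the display SurfaceTexture so process() has a render target.
    private synchronized void registerSurface(FilterContext context) {
        if (mSurfaceTexture == null) {
            return; // nothing to render into yet
        }
        GLEnvironment glEnv = context.getGLEnvironment();
        // Assumed API: returns an id usable with activateSurfaceWithId()/unregisterSurfaceId().
        mSurfaceId = glEnv.registerSurfaceTexture(mSurfaceTexture, mScreenWidth, mScreenHeight);
        updateTargetRect(); // recompute the output quad for the surface dimensions
    }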
The AOSPA fork of android_frameworks_base contains identical implementations of MediaEncoderFilter.process and MediaEncoderFilter.stopRecording.