Use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.
The class BackDropperFilter, method process.
public void process(FilterContext context) {
    // Grab inputs and ready intermediate frames and outputs.
    Frame video = pullInput("video");
    Frame background = pullInput("background");
    allocateFrames(video.getFormat(), context);
    // Update learning rate after initial learning period
    if (mStartLearning) {
        if (mLogVerbose)
            Log.v(TAG, "Starting learning");
        mBgUpdateMeanProgram.setHostValue("bg_adapt_rate", mAdaptRateLearning);
        mBgUpdateMeanProgram.setHostValue("fg_adapt_rate", mAdaptRateLearning);
        mBgUpdateVarianceProgram.setHostValue("bg_adapt_rate", mAdaptRateLearning);
        mBgUpdateVarianceProgram.setHostValue("fg_adapt_rate", mAdaptRateLearning);
        mFrameCount = 0;
    }
    // Select correct pingpong buffers
    int inputIndex = mPingPong ? 0 : 1;
    int outputIndex = mPingPong ? 1 : 0;
    mPingPong = !mPingPong;
    // Check relative aspect ratios
    updateBgScaling(video, background, mBackgroundFitModeChanged);
    mBackgroundFitModeChanged = false;
    // Make copies for input frames to GLFrames
    copyShaderProgram.process(video, mVideoInput);
    copyShaderProgram.process(background, mBgInput);
    mVideoInput.generateMipMap();
    mVideoInput.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    mBgInput.generateMipMap();
    mBgInput.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    if (mStartLearning) {
        copyShaderProgram.process(mVideoInput, mBgMean[inputIndex]);
        mStartLearning = false;
    }
    // Process shaders
    Frame[] distInputs = { mVideoInput, mBgMean[inputIndex], mBgVariance[inputIndex] };
    mBgDistProgram.process(distInputs, mDistance);
    mDistance.generateMipMap();
    mDistance.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    mBgMaskProgram.process(mDistance, mMask);
    mMask.generateMipMap();
    mMask.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    Frame[] autoWBInputs = { mVideoInput, mBgInput };
    mAutomaticWhiteBalanceProgram.process(autoWBInputs, mAutoWB);
    if (mFrameCount <= mLearningDuration) {
        // During learning
        pushOutput("video", video);
        if (mFrameCount == mLearningDuration - mLearningVerifyDuration) {
            copyShaderProgram.process(mMask, mMaskVerify[outputIndex]);
            mBgUpdateMeanProgram.setHostValue("bg_adapt_rate", mAdaptRateBg);
            mBgUpdateMeanProgram.setHostValue("fg_adapt_rate", mAdaptRateFg);
            mBgUpdateVarianceProgram.setHostValue("bg_adapt_rate", mAdaptRateBg);
            mBgUpdateVarianceProgram.setHostValue("fg_adapt_rate", mAdaptRateFg);
        } else if (mFrameCount > mLearningDuration - mLearningVerifyDuration) {
            // In the learning verification stage, compute background masks and a weighted average
            // whose weights grow exponentially with time
            Frame[] maskVerifyInputs = { mMaskVerify[inputIndex], mMask };
            mMaskVerifyProgram.process(maskVerifyInputs, mMaskVerify[outputIndex]);
            mMaskVerify[outputIndex].generateMipMap();
            mMaskVerify[outputIndex].setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
        }
        if (mFrameCount == mLearningDuration) {
            // In the last verification frame, check whether the verification mask is almost blank
            // If not, restart learning
            copyShaderProgram.process(mMaskVerify[outputIndex], mMaskAverage);
            ByteBuffer mMaskAverageByteBuffer = mMaskAverage.getData();
            byte[] mask_average = mMaskAverageByteBuffer.array();
            int bi = (int) (mask_average[3] & 0xFF);
            if (mLogVerbose) {
                Log.v(TAG, String.format("Mask_average is %d, threshold is %d", bi, DEFAULT_LEARNING_DONE_THRESHOLD));
            }
            if (bi >= DEFAULT_LEARNING_DONE_THRESHOLD) {
                // Restart learning
                mStartLearning = true;
            } else {
                if (mLogVerbose)
                    Log.v(TAG, "Learning done");
                if (mLearningDoneListener != null) {
                    mLearningDoneListener.onLearningDone(this);
                }
            }
        }
    } else {
        Frame output = context.getFrameManager().newFrame(video.getFormat());
        Frame[] subtractInputs = { video, background, mMask, mAutoWB };
        mBgSubtractProgram.process(subtractInputs, output);
        pushOutput("video", output);
        output.release();
    }
    // Compute mean and variance of the background
    if (mFrameCount < mLearningDuration - mLearningVerifyDuration || mAdaptRateBg > 0.0 || mAdaptRateFg > 0.0) {
        Frame[] meanUpdateInputs = { mVideoInput, mBgMean[inputIndex], mMask };
        mBgUpdateMeanProgram.process(meanUpdateInputs, mBgMean[outputIndex]);
        mBgMean[outputIndex].generateMipMap();
        mBgMean[outputIndex].setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
        Frame[] varianceUpdateInputs = { mVideoInput, mBgMean[inputIndex], mBgVariance[inputIndex], mMask };
        mBgUpdateVarianceProgram.process(varianceUpdateInputs, mBgVariance[outputIndex]);
        mBgVariance[outputIndex].generateMipMap();
        mBgVariance[outputIndex].setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    }
    // Provide debug output to two smaller viewers
    if (mProvideDebugOutputs) {
        Frame dbg1 = context.getFrameManager().newFrame(video.getFormat());
        mCopyOutProgram.process(video, dbg1);
        pushOutput("debug1", dbg1);
        dbg1.release();
        Frame dbg2 = context.getFrameManager().newFrame(mMemoryFormat);
        mCopyOutProgram.process(mMask, dbg2);
        pushOutput("debug2", dbg2);
        dbg2.release();
    }
    mFrameCount++;
    if (mLogVerbose) {
        if (mFrameCount % 30 == 0) {
            if (startTime == -1) {
                context.getGLEnvironment().activate();
                GLES20.glFinish();
                startTime = SystemClock.elapsedRealtime();
            } else {
                context.getGLEnvironment().activate();
                GLES20.glFinish();
                long endTime = SystemClock.elapsedRealtime();
                Log.v(TAG, "Avg. frame duration: " + String.format("%.2f", (endTime - startTime) / 30.) + " ms. Avg. fps: " + String.format("%.2f", 1000. / ((endTime - startTime) / 30.)));
                startTime = endTime;
            }
        }
    }
}
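The inputIndex/outputIndex selection above is a ping-pong double-buffering scheme: each frame reads the background model written on the previous frame and writes the updated model into the other buffer of the pair, then swaps roles. A minimal, self-contained sketch of the same indexing follows; the buffer contents, class name, and toy update rule are illustrative stand-ins, not taken from the source.

// Minimal sketch of the ping-pong indexing used by mBgMean/mBgVariance above.
public class PingPongSketch {
    public static void main(String[] args) {
        float[][] model = new float[2][4];   // two copies of a tiny "background model"
        boolean pingPong = false;
        for (int frame = 0; frame < 4; frame++) {
            int inputIndex = pingPong ? 0 : 1;    // model written on the previous frame
            int outputIndex = pingPong ? 1 : 0;   // model to write on this frame
            pingPong = !pingPong;
            for (int i = 0; i < model[outputIndex].length; i++) {
                // Toy update: blend the previous value toward a new observation.
                model[outputIndex][i] = 0.9f * model[inputIndex][i] + 0.1f * frame;
            }
        }
        System.out.println(java.util.Arrays.toString(model[0]) + " / "
                + java.util.Arrays.toString(model[1]));
    }
}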
Use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.
The class BitmapOverlayFilter, method process.
@Override
public void process(FilterContext context) {
    // Get input frame
    Frame input = pullInput("image");
    FrameFormat inputFormat = input.getFormat();
    // Create output frame
    Frame output = context.getFrameManager().newFrame(inputFormat);
    // Create program if not created already
    if (mProgram == null || inputFormat.getTarget() != mTarget) {
        initProgram(context, inputFormat.getTarget());
    }
    if (mBitmap != null) {
        Frame frame = createBitmapFrame(context);
        // Process
        Frame[] inputs = { input, frame };
        mProgram.process(inputs, output);
        frame.release();
    } else {
        output.setDataFromFrame(input);
    }
    // Push output
    pushOutput("image", output);
    // Release pushed frame
    output.release();
}
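createBitmapFrame(context) is called above but not included in this listing. A plausible sketch follows, assuming the overlay bitmap is wrapped in a GPU frame using the same ImageFormat/FrameManager calls seen in the other snippets; the RGBA/GPU format choice and the recycling of mBitmap are assumptions.

// Hedged sketch of createBitmapFrame: wrap the overlay bitmap in a new frame so it
// can be passed to mProgram alongside the input frame.
private Frame createBitmapFrame(FilterContext context) {
    FrameFormat format = ImageFormat.create(mBitmap.getWidth(),
                                            mBitmap.getHeight(),
                                            ImageFormat.COLORSPACE_RGBA,
                                            FrameFormat.TARGET_GPU);
    Frame frame = context.getFrameManager().newFrame(format);
    frame.setBitmap(mBitmap);   // upload the bitmap contents into the frame
    mBitmap.recycle();          // assumption: the bitmap is only needed once
    mBitmap = null;
    return frame;
}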
Use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.
The class BlackWhiteFilter, method process.
@Override
public void process(FilterContext context) {
    // Get input frame
    Frame input = pullInput("image");
    FrameFormat inputFormat = input.getFormat();
    // Create program if not created already
    if (mProgram == null || inputFormat.getTarget() != mTarget) {
        initProgram(context, inputFormat.getTarget());
    }
    // Create output frame
    Frame output = context.getFrameManager().newFrame(inputFormat);
    // Process
    mProgram.process(input, output);
    // Push output
    pushOutput("image", output);
    // Release pushed frame
    output.release();
}
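Each of these filters builds its ShaderProgram lazily through initProgram(context, target), which the listing does not include. A typical shape for it is sketched below, under the assumption that only GPU frames are supported; mFragmentShader and mTileSize are illustrative field names, not taken from the listing.

// Sketch of a typical initProgram for these GPU-only filters.
private void initProgram(FilterContext context, int target) {
    switch (target) {
        case FrameFormat.TARGET_GPU:
            ShaderProgram shaderProgram = new ShaderProgram(context, mFragmentShader);
            shaderProgram.setMaximumTileSize(mTileSize);
            mProgram = shaderProgram;
            break;
        default:
            throw new RuntimeException("Unsupported frame target: " + target + "!");
    }
    mTarget = target;
}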
Use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.
The class CropRectFilter, method process.
@Override
public void process(FilterContext context) {
    // Get input frame
    Frame input = pullInput("image");
    FrameFormat inputFormat = input.getFormat();
    // Create output frame
    FrameFormat outputFormat = ImageFormat.create(mOutputWidth, mOutputHeight, ImageFormat.COLORSPACE_RGBA, FrameFormat.TARGET_GPU);
    Frame output = context.getFrameManager().newFrame(outputFormat);
    // Create program if not created already
    if (mProgram == null || inputFormat.getTarget() != mTarget) {
        initProgram(context, inputFormat.getTarget());
    }
    // Check if the frame size has changed
    if (inputFormat.getWidth() != mWidth || inputFormat.getHeight() != mHeight) {
        updateSourceRect(inputFormat.getWidth(), inputFormat.getHeight());
    }
    // Process
    mProgram.process(input, output);
    // Push output
    pushOutput("image", output);
    // Release pushed frame
    output.release();
}
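updateSourceRect(width, height) is likewise not shown. Presumably it caches the new input size and re-points the ShaderProgram's source rectangle at the crop region, normalized to the input dimensions; the crop-origin field names below are assumptions.

// Hedged sketch of updateSourceRect: renormalize the crop rectangle whenever the
// input size changes. mXorigin/mYorigin are assumed names for the crop offset.
private void updateSourceRect(int width, int height) {
    mWidth = width;
    mHeight = height;
    ((ShaderProgram) mProgram).setSourceRect(
            (float) mXorigin / mWidth,
            (float) mYorigin / mHeight,
            (float) mOutputWidth / mWidth,
            (float) mOutputHeight / mHeight);
}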
Use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.
The class CrossProcessFilter, method process.
@Override
public void process(FilterContext context) {
    // Get input frame
    Frame input = pullInput("image");
    FrameFormat inputFormat = input.getFormat();
    // Create program if not created already
    if (mProgram == null || inputFormat.getTarget() != mTarget) {
        initProgram(context, inputFormat.getTarget());
    }
    // Create output frame
    Frame output = context.getFrameManager().newFrame(inputFormat);
    // Process
    mProgram.process(input, output);
    // Push output
    pushOutput("image", output);
    // Release pushed frame
    output.release();
}
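All five snippets share the same Frame lifecycle: pull the input, lazily (re)build the program when the frame target changes, allocate an output frame, run the program, push the output, and release the local reference. The skeleton below compresses that pattern into one hypothetical filter; the class name, port names, the setupPorts calls, and the assumption that pushOutput keeps its own reference to the frame are not from the listing.

// Hypothetical skeleton of the shared pull/process/push/release pattern.
public class PassThroughFilter extends Filter {
    private Program mProgram;
    private int mTarget = FrameFormat.TARGET_UNSPECIFIED;

    public PassThroughFilter(String name) {
        super(name);
    }

    @Override
    public void setupPorts() {
        addInputPort("image");
        addOutputBasedOnInput("image", "image");
    }

    @Override
    public void process(FilterContext context) {
        Frame input = pullInput("image");
        FrameFormat inputFormat = input.getFormat();
        if (mProgram == null || inputFormat.getTarget() != mTarget) {
            initProgram(context, inputFormat.getTarget());   // rebuild for the new target
        }
        Frame output = context.getFrameManager().newFrame(inputFormat);
        mProgram.process(input, output);
        pushOutput("image", output);   // the output port holds its own reference
        output.release();              // drop this filter's local reference
    }

    private void initProgram(FilterContext context, int target) {
        switch (target) {
            case FrameFormat.TARGET_GPU:
                mProgram = ShaderProgram.createIdentity(context);  // identity copy shader
                break;
            default:
                throw new RuntimeException("Unsupported frame target: " + target + "!");
        }
        mTarget = target;
    }
}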
Aggregations