Search in sources :

Example 6 with Frame

use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.

In the class CachedFrameManager, the method dropOldestFrame:

// Evicts the frame stored under the smallest key, reclaiming its
// native allocation and shrinking the tracked storage size.
private void dropOldestFrame() {
    final int oldestKey = mAvailableFrames.firstKey();
    // remove() returns the mapping's previous value, so one map
    // operation both detaches and hands us the evicted frame.
    final Frame evicted = mAvailableFrames.remove(oldestKey);
    mStorageSize -= evicted.getFormat().getSize();
    evicted.releaseNativeAllocation();
}
Also used : Frame(android.filterfw.core.Frame)

Example 7 with Frame

use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.

In the class FilterContext, the method tearDown:

// Releases everything owned by this context, in order: stored frames,
// filter graphs, the frame manager, and finally the GL environment.
public synchronized void tearDown() {
    // Drop every frame that was stashed in this context.
    for (Frame stored : mStoredFrames.values()) {
        stored.release();
    }
    mStoredFrames.clear();
    // Tear down each registered graph against this context.
    for (FilterGraph registeredGraph : mGraphs) {
        registeredGraph.tearDown(this);
    }
    mGraphs.clear();
    // Dispose of the frame manager, if one was attached.
    if (mFrameManager != null) {
        mFrameManager.tearDown();
        mFrameManager = null;
    }
    // The GL context goes last, after all GL-backed resources above.
    if (mGLEnvironment != null) {
        mGLEnvironment.tearDown();
        mGLEnvironment = null;
    }
}
Also used : Frame(android.filterfw.core.Frame)

Example 8 with Frame

use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.

In the class StringSource, the method process:

// Emits the configured string as a single frame on the "string" port,
// then closes the port (this source produces exactly one frame).
@Override
public void process(FilterContext env) {
    final Frame out = env.getFrameManager().newFrame(mOutputFormat);
    out.setObjectValue(mString);
    // A static string has no meaningful capture time.
    out.setTimestamp(Frame.TIMESTAMP_UNKNOWN);
    pushOutput("string", out);
    closeOutputPort("string");
}
Also used : Frame(android.filterfw.core.Frame)

Example 9 with Frame

use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.

In the class ToUpperCase, the method process:

// Pulls a string frame from "mixedcase", upper-cases it, and pushes
// the result on "uppercase".
@Override
public void process(FilterContext env) {
    final Frame in = pullInput("mixedcase");
    final String text = (String) in.getObjectValue();
    final Frame out = env.getFrameManager().newFrame(mOutputFormat);
    // NOTE(review): toUpperCase() uses the default locale, so results are
    // locale-sensitive (e.g. Turkish dotless i) — confirm this is intended
    // before switching to toUpperCase(Locale.ROOT).
    out.setObjectValue(text.toUpperCase());
    pushOutput("uppercase", out);
}
Also used : Frame(android.filterfw.core.Frame)

Example 10 with Frame

use of android.filterfw.core.Frame in project android_frameworks_base by ParanoidAndroid.

In the class BackDropperFilter, the method process:

/**
 * Processes one video/background frame pair. During the initial learning
 * period the filter accumulates a per-pixel background mean/variance model
 * (ping-ponging between two buffer sets each frame) and passes the input
 * video through; once learning is verified it outputs the
 * background-substituted frame instead.
 *
 * NOTE(review): statement order is significant throughout — shader passes
 * read the ping-pong buffers written on the previous frame.
 */
public void process(FilterContext context) {
    // Grab inputs and ready intermediate frames and outputs.
    Frame video = pullInput("video");
    Frame background = pullInput("background");
    allocateFrames(video.getFormat(), context);
    // Update learning rate after initial learning period
    if (mStartLearning) {
        if (mLogVerbose)
            Log.v(TAG, "Starting learning");
        // Fast adaptation rates while (re)learning the background model.
        mBgUpdateMeanProgram.setHostValue("bg_adapt_rate", mAdaptRateLearning);
        mBgUpdateMeanProgram.setHostValue("fg_adapt_rate", mAdaptRateLearning);
        mBgUpdateVarianceProgram.setHostValue("bg_adapt_rate", mAdaptRateLearning);
        mBgUpdateVarianceProgram.setHostValue("fg_adapt_rate", mAdaptRateLearning);
        mFrameCount = 0;
    }
    // Select correct pingpong buffers
    // mPingPong flips every frame: last frame's output buffers become
    // this frame's input buffers.
    int inputIndex = mPingPong ? 0 : 1;
    int outputIndex = mPingPong ? 1 : 0;
    mPingPong = !mPingPong;
    // Check relative aspect ratios
    updateBgScaling(video, background, mBackgroundFitModeChanged);
    mBackgroundFitModeChanged = false;
    // Make copies for input frames to GLFrames
    copyShaderProgram.process(video, mVideoInput);
    copyShaderProgram.process(background, mBgInput);
    // Mipmaps are regenerated after each copy so later shader passes can
    // sample downscaled versions — presumably for noise robustness; confirm.
    mVideoInput.generateMipMap();
    mVideoInput.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    mBgInput.generateMipMap();
    mBgInput.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    if (mStartLearning) {
        // Seed the background mean with the very first video frame.
        copyShaderProgram.process(mVideoInput, mBgMean[inputIndex]);
        mStartLearning = false;
    }
    // Process shaders
    // Distance of current frame from the background model (mean + variance).
    Frame[] distInputs = { mVideoInput, mBgMean[inputIndex], mBgVariance[inputIndex] };
    mBgDistProgram.process(distInputs, mDistance);
    mDistance.generateMipMap();
    mDistance.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    // Threshold the distance into a foreground/background mask.
    mBgMaskProgram.process(mDistance, mMask);
    mMask.generateMipMap();
    mMask.setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    Frame[] autoWBInputs = { mVideoInput, mBgInput };
    mAutomaticWhiteBalanceProgram.process(autoWBInputs, mAutoWB);
    if (mFrameCount <= mLearningDuration) {
        // During learning
        // While learning, pass the raw video straight through.
        pushOutput("video", video);
        if (mFrameCount == mLearningDuration - mLearningVerifyDuration) {
            // Entering the verification stage: snapshot the mask and slow
            // the adaptation rates to their steady-state values.
            copyShaderProgram.process(mMask, mMaskVerify[outputIndex]);
            mBgUpdateMeanProgram.setHostValue("bg_adapt_rate", mAdaptRateBg);
            mBgUpdateMeanProgram.setHostValue("fg_adapt_rate", mAdaptRateFg);
            mBgUpdateVarianceProgram.setHostValue("bg_adapt_rate", mAdaptRateBg);
            mBgUpdateVarianceProgram.setHostValue("fg_adapt_rate", mAdaptRateFg);
        } else if (mFrameCount > mLearningDuration - mLearningVerifyDuration) {
            // In the learning verification stage, compute background masks and a weighted average
            //   with weights grow exponentially with time
            Frame[] maskVerifyInputs = { mMaskVerify[inputIndex], mMask };
            mMaskVerifyProgram.process(maskVerifyInputs, mMaskVerify[outputIndex]);
            mMaskVerify[outputIndex].generateMipMap();
            mMaskVerify[outputIndex].setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
        }
        if (mFrameCount == mLearningDuration) {
            // In the last verification frame, verify if the verification mask is almost blank
            // If not, restart learning
            copyShaderProgram.process(mMaskVerify[outputIndex], mMaskAverage);
            ByteBuffer mMaskAverageByteBuffer = mMaskAverage.getData();
            byte[] mask_average = mMaskAverageByteBuffer.array();
            // Reads byte index 3 of the averaged mask — assumes a 4-byte
            // pixel layout with the mask value in that channel; TODO confirm.
            int bi = (int) (mask_average[3] & 0xFF);
            if (mLogVerbose) {
                Log.v(TAG, String.format("Mask_average is %d, threshold is %d", bi, DEFAULT_LEARNING_DONE_THRESHOLD));
            }
            if (bi >= DEFAULT_LEARNING_DONE_THRESHOLD) {
                // Restart learning
                mStartLearning = true;
            } else {
                if (mLogVerbose)
                    Log.v(TAG, "Learning done");
                if (mLearningDoneListener != null) {
                    mLearningDoneListener.onLearningDone(this);
                }
            }
        }
    } else {
        // Learning complete: output the background-substituted frame.
        Frame output = context.getFrameManager().newFrame(video.getFormat());
        Frame[] subtractInputs = { video, background, mMask, mAutoWB };
        mBgSubtractProgram.process(subtractInputs, output);
        pushOutput("video", output);
        // Drop the local reference; presumably the output port retains its
        // own reference to the pushed frame — confirm refcount semantics.
        output.release();
    }
    // Compute mean and variance of the background
    if (mFrameCount < mLearningDuration - mLearningVerifyDuration || mAdaptRateBg > 0.0 || mAdaptRateFg > 0.0) {
        Frame[] meanUpdateInputs = { mVideoInput, mBgMean[inputIndex], mMask };
        mBgUpdateMeanProgram.process(meanUpdateInputs, mBgMean[outputIndex]);
        mBgMean[outputIndex].generateMipMap();
        mBgMean[outputIndex].setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
        Frame[] varianceUpdateInputs = { mVideoInput, mBgMean[inputIndex], mBgVariance[inputIndex], mMask };
        mBgUpdateVarianceProgram.process(varianceUpdateInputs, mBgVariance[outputIndex]);
        mBgVariance[outputIndex].generateMipMap();
        mBgVariance[outputIndex].setTextureParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
    }
    // Provide debug output to two smaller viewers
    if (mProvideDebugOutputs) {
        Frame dbg1 = context.getFrameManager().newFrame(video.getFormat());
        mCopyOutProgram.process(video, dbg1);
        pushOutput("debug1", dbg1);
        dbg1.release();
        Frame dbg2 = context.getFrameManager().newFrame(mMemoryFormat);
        mCopyOutProgram.process(mMask, dbg2);
        pushOutput("debug2", dbg2);
        dbg2.release();
    }
    mFrameCount++;
    if (mLogVerbose) {
        // Every 30 frames, flush the GL pipeline (glFinish) and log the
        // average frame duration over the interval.
        if (mFrameCount % 30 == 0) {
            if (startTime == -1) {
                context.getGLEnvironment().activate();
                GLES20.glFinish();
                startTime = SystemClock.elapsedRealtime();
            } else {
                context.getGLEnvironment().activate();
                GLES20.glFinish();
                long endTime = SystemClock.elapsedRealtime();
                Log.v(TAG, "Avg. frame duration: " + String.format("%.2f", (endTime - startTime) / 30.) + " ms. Avg. fps: " + String.format("%.2f", 1000. / ((endTime - startTime) / 30.)));
                startTime = endTime;
            }
        }
    }
}
Also used : Frame(android.filterfw.core.Frame) GLFrame(android.filterfw.core.GLFrame) ByteBuffer(java.nio.ByteBuffer)

Aggregations

Frame (android.filterfw.core.Frame)414 FrameFormat (android.filterfw.core.FrameFormat)198 GLFrame (android.filterfw.core.GLFrame)73 MutableFrameFormat (android.filterfw.core.MutableFrameFormat)47 NativeFrame (android.filterfw.core.NativeFrame)38 Quad (android.filterfw.geometry.Quad)24 GLEnvironment (android.filterfw.core.GLEnvironment)18 ShaderProgram (android.filterfw.core.ShaderProgram)18 ByteBuffer (java.nio.ByteBuffer)18 Point (android.filterfw.geometry.Point)12 IOException (java.io.IOException)12 FrameManager (android.filterfw.core.FrameManager)6 Bitmap (android.graphics.Bitmap)6 ByteArrayOutputStream (java.io.ByteArrayOutputStream)6 Map (java.util.Map)6 SortedMap (java.util.SortedMap)6 TreeMap (java.util.TreeMap)6 CachedFrameManager (android.filterfw.core.CachedFrameManager)1