Search in sources:

Example 1 with FastImageProcessingView

Use of com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView in project UltimateAndroid by cymcsg.

In class ImageProcessingActivity, method onCreate:

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
    view = new FastImageProcessingView(this);
    pipeline = new FastImageProcessingPipeline();
    view.setPipeline(pipeline);
    setContentView(view);
    usingCamera = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH);
    // The capability check above is overridden here, so the image input branch below always runs.
    usingCamera = false;
    if (usingCamera) {
        input = new CameraPreviewInput(view);
    } else {
        input = new ImageResourceInput(view, this, R.drawable.image_processing_kukulkan);
    }
    filters = new ArrayList<BasicFilter>();
    addFilter(new FlipFilter(FlipFilter.FLIP_HORIZONTAL));
    addFilter(new MosaicFilter(this, R.drawable.image_processing_webcircles, new PointF(0.125f, 0.125f), new PointF(0.025f, 0.025f), 64, true));
    addFilter(new CGAColourSpaceFilter());
    addFilter(new KuwaharaRadius3Filter());
    //addFilter(new KuwaharaFilter(8)); // will not work on devices whose shaders do not support for loops
    addFilter(new VignetteFilter(new PointF(0.5f, 0.5f), new float[] { 0.3f, 0.8f, 0.3f }, 0.3f, 0.75f));
    addFilter(new GlassSphereFilter(new PointF(0.43f, 0.5f), 0.25f, 0.71f, 0.5f));
    addFilter(new SphereRefractionFilter(new PointF(0.43f, 0.5f), 0.25f, 0.71f, 0.5f));
    addFilter(new StretchDistortionFilter(new PointF(0.5f, 0.5f)));
    addFilter(new PinchDistortionFilter(new PointF(0.43f, 0.5f), 0.25f, 0.5f, 0.5f));
    addFilter(new BulgeDistortionFilter(new PointF(0.43f, 0.5f), 0.25f, 0.5f, 0.5f));
    addFilter(new SwirlFilter(new PointF(0.4f, 0.5f), 0.5f, 1f));
    addFilter(new PosterizeFilter(2f));
    addFilter(new EmbossFilter(1.5f));
    addFilter(new SmoothToonFilter(0.25f, 0.5f, 5f));
    addFilter(new ToonFilter(0.4f, 10f));
    addFilter(new ThresholdSketchFilter(0.7f));
    addFilter(new SketchFilter());
    addFilter(new CrosshatchFilter(0.005f, 0.0025f));
    addFilter(new HalftoneFilter(0.01f, 1f));
    addFilter(new PolkaDotFilter(0.9f, 0.03f, 1f));
    addFilter(new PolarPixellateFilter(new PointF(0.4f, 0.5f), new PointF(0.05f, 0.05f)));
    addFilter(new PixellateFilter(0.01f, 1f));
    addFilter(new ZoomBlurFilter(2f, new PointF(0.4f, 0.5f)));
    addFilter(new MotionBlurFilter(2f, 45f));
    addFilter(new OpeningFilter(1));
    addFilter(new OpeningRGBFilter(3));
    addFilter(new ClosingFilter(2));
    addFilter(new ClosingRGBFilter(4));
    addFilter(new ErosionRGBFilter(3));
    addFilter(new ErosionFilter(1));
    addFilter(new DilationRGBFilter(2));
    addFilter(new DilationFilter(4));
    addFilter(new CannyEdgeDetectionFilter(1.0f, 0.1f, 0.4f));
    addFilter(new ThresholdEdgeDetectionFilter(0.6f));
    addFilter(new SobelEdgeDetectionFilter());
    addFilter(new TiltShiftFilter(4f, 0.4f, 0.6f, 0.2f));
    addFilter(new BilateralBlurFilter(1f));
    addFilter(new MedianFilter());
    addFilter(new GaussianBlurPositionFilter(4f, 1.2f, new PointF(0.4f, 0.5f), 0.5f, 0.1f));
    addFilter(new GaussianSelectiveBlurFilter(4f, 1.2f, new PointF(0.4f, 0.5f), 0.5f, 0.1f));
    addFilter(new SingleComponentGaussianBlurFilter(2.3f));
    addFilter(new SingleComponentFastBlurFilter());
    addFilter(new FastBlurFilter());
    addFilter(new UnsharpMaskFilter(2.0f, 0.5f));
    addFilter(new SharpenFilter(1f));
    //addFilter(new LanczosResamplingFilter(256, 128));
    addFilter(new CropFilter(0.25f, 0f, 0.75f, 1f));
    BasicFilter cFilter1 = new CropFilter(0.25f, 0f, 0.75f, 1f);
    cFilter1.rotateClockwise90Degrees(1);
    addFilter(cFilter1);
    BasicFilter cFilter2 = new CropFilter(0.25f, 0f, 0.75f, 1f);
    cFilter2.rotateClockwise90Degrees(2);
    addFilter(cFilter2);
    BasicFilter cFilter3 = new CropFilter(0.25f, 0f, 0.75f, 1f);
    cFilter3.rotateClockwise90Degrees(3);
    addFilter(cFilter3);
    addFilter(new TransformFilter(new float[] { 1f, 0f, 0f, 0f, 0f, 1f, 0f, 0f, 0f, 0f, 1f, 0f, -0.5f, 0f, 0f, 1f }, false, false));
    addFilter(new ChromaKeyFilter(new float[] { 1.0f, 0.3f, 0.0f }, 0.4f, 0.1f));
    addFilter(new AdaptiveThresholdFilter());
    addFilter(new BoxBlurFilter());
    addFilter(new LuminanceThresholdFilter(0.4f));
    addFilter(new OpacityFilter(0.5f));
    addFilter(new SepiaFilter());
    addFilter(new HazeFilter(0.3f, 0.1f));
    addFilter(new FalseColourFilter(new float[] { 0.0f, 0.0f, 0.5f }, new float[] { 1.0f, 0.0f, 0.0f }));
    addFilter(new MonochromeFilter(new float[] { 1.0f, 0.8f, 0.8f }, 1.0f));
    addFilter(new ColourInvertFilter());
    addFilter(new SoftEleganceFilter(this));
    addFilter(new GaussianBlurFilter(2.3f));
    addFilter(new MissEtikateFilter(this));
    addFilter(new AmatorkaFilter(this));
    addFilter(new LookupFilter(this, R.drawable.image_processing_lookup_soft_elegance_1));
    addFilter(new HighlightShadowFilter(0f, 1f));
    Point[] defaultCurve = new Point[] { new Point(128, 128), new Point(64, 0), new Point(192, 255) };
    addFilter(new ToneCurveFilter(defaultCurve, defaultCurve, defaultCurve, defaultCurve));
    addFilter(new HueFilter(3.14f / 6f));
    addFilter(new BrightnessFilter(0.5f));
    addFilter(new ColourMatrixFilter(new float[] { 0.33f, 0f, 0f, 0f, 0f, 0.67f, 0f, 0f, 0f, 0f, 1.34f, 0f, 0.2f, 0.2f, 0.2f, 1.0f }, 0.5f));
    addFilter(new RGBFilter(0.33f, 0.67f, 1.34f));
    addFilter(new GreyScaleFilter());
    addFilter(new ConvolutionFilter(new float[] { 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f, 1 / 25f }, 5, 5));
    addFilter(new ExposureFilter(0.95f));
    addFilter(new ContrastFilter(1.5f));
    addFilter(new SaturationFilter(0.5f));
    addFilter(new GammaFilter(1.75f));
    addFilter(new LevelsFilter(0.2f, 0.8f, 1f, 0f, 1f));
    screen = new ScreenEndpoint(pipeline);
    input.addTarget(screen);
    for (BasicFilter filter : filters) {
        filter.addTarget(screen);
    }
    pipeline.addRootRenderer(input);
    pipeline.startRendering();
    final Context context = this;
    view.setOnTouchListener(new View.OnTouchListener() {

        @Override
        public boolean onTouch(View v, MotionEvent e) {
            if (System.currentTimeMillis() - touchTime > 100) {
                pipeline.pauseRendering();
                touchTime = System.currentTimeMillis();
                if (curFilter == 0) {
                    input.removeTarget(screen);
                } else {
                    input.removeTarget(filters.get(curFilter - 1));
                    pipeline.addFilterToDestroy(filters.get(curFilter - 1));
                }
                curFilter = (curFilter + 1) % (filters.size() + 1);
                if (curFilter == 0) {
                    input.addTarget(screen);
                } else {
                    input.addTarget(filters.get(curFilter - 1));
                    Toast.makeText(context, filters.get(curFilter - 1).getClass().getSimpleName(), Toast.LENGTH_SHORT).show();
                }
                pipeline.startRendering();
                view.requestRender();
            }
            return false;
        }
    });
    Toast.makeText(this, "Tap the screen to change filter.", Toast.LENGTH_LONG).show();
}
Also used: PointF (android.graphics.PointF), Point (android.graphics.Point), Context (android.content.Context), View (android.view.View), MotionEvent (android.view.MotionEvent), ImageResourceInput (com.marshalchen.common.uimodule.imageprocessing.input.ImageResourceInput), CameraPreviewInput (com.marshalchen.common.uimodule.imageprocessing.input.CameraPreviewInput), ScreenEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.ScreenEndpoint), FastImageProcessingPipeline (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingPipeline), FastImageProcessingView (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView), BasicFilter (com.marshalchen.common.uimodule.imageprocessing.filter.BasicFilter)
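
The snippet relies on an addFilter(...) helper that this page does not show. Since onCreate later loops over filters to attach each one to the screen endpoint, the helper most likely only collects the filter; a minimal sketch under that assumption:

// Assumed body of the addFilter helper (not shown on this page). The for-loop in
// onCreate wires every filter to screen, so the helper only needs to collect the
// filters for the tap-to-cycle touch handler.
private void addFilter(BasicFilter filter) {
    filters.add(filter);
}

The touch handler then keeps exactly one filter active at a time: it detaches the current one (queuing its GL resources for cleanup via addFilterToDestroy), attaches the next, and restarts rendering.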

Example 2 with FastImageProcessingView

Use of com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView in project UltimateAndroid by cymcsg.

In class ImageProcessingTwoInputfilterActivity, method onCreate:

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
    view = new FastImageProcessingView(this);
    pipeline = new FastImageProcessingPipeline();
    view.setPipeline(pipeline);
    setContentView(view);
    usingCamera = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH);
    if (usingCamera) {
        input = new CameraPreviewInput(view);
    } else {
        input = new ImageResourceInput(view, this, R.drawable.image_processing_kukulkan);
    }
    input2 = new ImageResourceInput(view, this, R.drawable.test_back1);
    filters = new ArrayList<MultiInputFilter>();
    screen = new ScreenEndpoint(pipeline);
    addFilter(new MaskFilter());
    addFilter(new LinearBurnBlendFilter());
    // TODO: fix on Android 2.2
    addFilter(new LuminosityBlendFilter());
    // TODO: fix on Android 2.2
    addFilter(new SaturationBlendFilter());
    addFilter(new HueBlendFilter());
    // TODO: fix on Android 2.2
    addFilter(new ColourBlendFilter());
    addFilter(new NormalBlendFilter());
    addFilter(new SourceOverBlendFilter());
    addFilter(new SoftLightBlendFilter());
    addFilter(new HardLightBlendFilter());
    addFilter(new DifferenceBlendFilter());
    addFilter(new ExclusionBlendFilter());
    addFilter(new ScreenBlendFilter());
    addFilter(new ColourDodgeBlendFilter());
    addFilter(new ColourBurnBlendFilter());
    addFilter(new LightenBlendFilter());
    addFilter(new DarkenBlendFilter());
    addFilter(new OverlayBlendFilter());
    addFilter(new DivideBlendFilter());
    addFilter(new SubtractBlendFilter());
    // TODO: fix
    addFilter(new AddBlendFilter());
    addFilter(new MultiplyBlendFilter());
    addFilter(new DissolveBlendFilter(0.7f));
    addFilter(new ChromaKeyBlendFilter(new float[] { 1.0f, 0.3f, 0.0f }, 0.4f, 0.1f));
    addFilter(new AlphaBlendFilter(0.9f));
    input.addTarget(filters.get(0));
    input2.addTarget(filters.get(0));
    pipeline.addRootRenderer(input);
    pipeline.addRootRenderer(input2);
    pipeline.startRendering();
    final Context context = this;
    view.setOnTouchListener(new View.OnTouchListener() {

        @Override
        public boolean onTouch(View v, MotionEvent e) {
            if (System.currentTimeMillis() - touchTime > 100) {
                pipeline.pauseRendering();
                touchTime = System.currentTimeMillis();
                input.removeTarget(filters.get(curFilter));
                input2.removeTarget(filters.get(curFilter));
                curFilter = (curFilter + 1) % filters.size();
                input.addTarget(filters.get(curFilter));
                input2.addTarget(filters.get(curFilter));
                Toast.makeText(context, filters.get(curFilter).getClass().getSimpleName(), Toast.LENGTH_SHORT).show();
                pipeline.startRendering();
                view.requestRender();
            }
            return false;
        }
    });
    Toast.makeText(this, "Tap the screen to change filter.", Toast.LENGTH_LONG).show();
}
Also used: Context (android.content.Context), View (android.view.View), MotionEvent (android.view.MotionEvent), ImageResourceInput (com.marshalchen.common.uimodule.imageprocessing.input.ImageResourceInput), CameraPreviewInput (com.marshalchen.common.uimodule.imageprocessing.input.CameraPreviewInput), ScreenEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.ScreenEndpoint), FastImageProcessingPipeline (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingPipeline), FastImageProcessingView (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView), MultiInputFilter (com.marshalchen.common.uimodule.imageprocessing.filter.MultiInputFilter)
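
Here addFilter(...) is again not shown, but unlike Example 1 there is no later loop attaching the filters to screen, and screen is constructed before the addFilter calls; the helper therefore presumably wires each blend filter to the screen endpoint as well as collecting it. A sketch under that assumption:

// Assumed body of this activity's addFilter helper (not shown on this page).
// Nothing else attaches the blend filters to the screen endpoint, so the helper
// presumably does both steps.
private void addFilter(MultiInputFilter filter) {
    filter.addTarget(screen);
    filters.add(filter);
}

Note that the two sources must be attached and detached in lockstep: each MultiInputFilter blends input (camera or image) with input2, so the tap handler always re-targets both.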

Example 3 with FastImageProcessingView

Use of com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView in project UltimateAndroid by cymcsg.

In class ImageProcessingVideotoImageActivity, method onCreate:

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
    view = new FastImageProcessingView(this);
    pipeline = new FastImageProcessingPipeline();
    video = new VideoResourceInput(view, this, R.raw.image_processing_birds);
    edgeDetect = new SobelEdgeDetectionFilter();
    image = new JPGFileEndpoint(this, false, Environment.getExternalStorageDirectory().getAbsolutePath() + "/Pictures/outputImage", false);
    screen = new ScreenEndpoint(pipeline);
    video.addTarget(edgeDetect);
    edgeDetect.addTarget(image);
    edgeDetect.addTarget(screen);
    pipeline.addRootRenderer(video);
    view.setPipeline(pipeline);
    setContentView(view);
    pipeline.startRendering();
    video.startWhenReady();
    view.setOnTouchListener(new OnTouchListener() {

        @Override
        public boolean onTouch(View v, MotionEvent me) {
            if (System.currentTimeMillis() - 100 > touchTime) {
                touchTime = System.currentTimeMillis();
                if (video.isPlaying()) {
                    video.stop();
                } else {
                    video.startWhenReady();
                }
            }
            return true;
        }
    });
}
Also used: View (android.view.View), OnTouchListener (android.view.View.OnTouchListener), MotionEvent (android.view.MotionEvent), VideoResourceInput (com.marshalchen.common.uimodule.imageprocessing.input.VideoResourceInput), SobelEdgeDetectionFilter (com.marshalchen.common.uimodule.imageprocessing.filter.processing.SobelEdgeDetectionFilter), JPGFileEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.JPGFileEndpoint), ScreenEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.ScreenEndpoint), FastImageProcessingPipeline (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingPipeline), FastImageProcessingView (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView)
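
JPGFileEndpoint writes frames under Environment.getExternalStorageDirectory(), which requires the WRITE_EXTERNAL_STORAGE permission (and a runtime grant on API 23 and later). The snippet also never pauses the pipeline when the activity leaves the foreground; a minimal lifecycle sketch, using only methods that already appear above:

// Lifecycle handling (not part of the original snippet): stop playback and pause
// GL rendering when the activity is backgrounded, then resume on return.
@Override
protected void onPause() {
    super.onPause();
    if (video.isPlaying()) {
        video.stop();
    }
    pipeline.pauseRendering();
}

@Override
protected void onResume() {
    super.onResume();
    pipeline.startRendering();
    video.startWhenReady();
}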

Example 4 with FastImageProcessingView

Use of com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView in project UltimateAndroid by cymcsg.

In class ImageProcessingGenericFilterExample, method onCreate:

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
    view = new FastImageProcessingView(this);
    pipeline = new FastImageProcessingPipeline();
    view.setPipeline(pipeline);
    setContentView(view);
    imageIn = new ImageResourceInput(view, this, R.drawable.image_processing_kukulkan);
    generic = setupGenericFilterAsPolkaDot();
    imageOut = new JPGFileEndpoint(this, true, Environment.getExternalStorageDirectory().getAbsolutePath() + "/Pictures/outputImage", true);
    screen = new ScreenEndpoint(pipeline);
    imageIn.addTarget(generic);
    generic.addTarget(imageOut);
    generic.addTarget(screen);
    pipeline.addRootRenderer(imageIn);
    pipeline.startRendering();
}
Also used: ImageResourceInput (com.marshalchen.common.uimodule.imageprocessing.input.ImageResourceInput), JPGFileEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.JPGFileEndpoint), ScreenEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.ScreenEndpoint), FastImageProcessingPipeline (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingPipeline), FastImageProcessingView (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView)
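
setupGenericFilterAsPolkaDot() is not included on this page, so its body (which depends on the library's generic-filter API) is omitted here. If the generic filter needed to be replaced at runtime, the re-wiring would follow the same pause/detach/attach/resume pattern as the tap handlers in Examples 1 and 2; a hypothetical sketch (the swapFilter helper itself is an assumption, but every call inside it appears in the examples above; BasicFilter is the filter base type from Example 1):

// Hypothetical helper (not in the original): swap the active filter at runtime.
private void swapFilter(BasicFilter oldFilter, BasicFilter newFilter) {
    pipeline.pauseRendering();              // never re-wire mid-frame
    imageIn.removeTarget(oldFilter);
    pipeline.addFilterToDestroy(oldFilter); // free the old filter's GL resources
    imageIn.addTarget(newFilter);
    newFilter.addTarget(screen);
    pipeline.startRendering();
    view.requestRender();
}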

Aggregations

FastImageProcessingPipeline (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingPipeline): 4 usages
FastImageProcessingView (com.marshalchen.common.uimodule.imageprocessing.FastImageProcessingView): 4 usages
ScreenEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.ScreenEndpoint): 4 usages
MotionEvent (android.view.MotionEvent): 3 usages
View (android.view.View): 3 usages
ImageResourceInput (com.marshalchen.common.uimodule.imageprocessing.input.ImageResourceInput): 3 usages
Context (android.content.Context): 2 usages
CameraPreviewInput (com.marshalchen.common.uimodule.imageprocessing.input.CameraPreviewInput): 2 usages
JPGFileEndpoint (com.marshalchen.common.uimodule.imageprocessing.outputs.JPGFileEndpoint): 2 usages
Point (android.graphics.Point): 1 usage
PointF (android.graphics.PointF): 1 usage
OnTouchListener (android.view.View.OnTouchListener): 1 usage
BasicFilter (com.marshalchen.common.uimodule.imageprocessing.filter.BasicFilter): 1 usage
MultiInputFilter (com.marshalchen.common.uimodule.imageprocessing.filter.MultiInputFilter): 1 usage
SobelEdgeDetectionFilter (com.marshalchen.common.uimodule.imageprocessing.filter.processing.SobelEdgeDetectionFilter): 1 usage
VideoResourceInput (com.marshalchen.common.uimodule.imageprocessing.input.VideoResourceInput): 1 usage