Use of android.graphics.PointF in project LoadingDrawable by dinuscxj:
class ElectricFanLoadingRenderer, method getPoint1.
/**
 * Returns a random point inside the right half of {@code leafFlyRect}.
 *
 * <p>x is drawn uniformly from [right - width/2, right]; y is drawn uniformly
 * from [bottom - height, bottom]. Assumes the rect is at least 2px wide and
 * 1px tall, otherwise {@code Random.nextInt} throws IllegalArgumentException.
 *
 * @param leafFlyRect the bounds the leaf flies within; must be non-degenerate
 * @return a new PointF inside the right half of the rect
 */
private PointF getPoint1(RectF leafFlyRect) {
    PointF point = new PointF();
    point.x = leafFlyRect.right - mRandom.nextInt((int) (leafFlyRect.width() / 2));
    // No (int) truncation here: keep the fractional part of `bottom`, consistent
    // with how point.x preserves the fractional part of `right` above.
    point.y = leafFlyRect.bottom - mRandom.nextInt((int) leafFlyRect.height());
    return point;
}
Use of android.graphics.PointF in project MyDiary by erttyy8821:
class PageEffectView, method getCross.
/**
 * Computes the intersection point of line P1P2 and line P3P4.
 *
 * <p>Each line is modeled as {@code y = a*x + b}; the intersection solves the
 * two equations simultaneously. If either line is vertical (equal x values)
 * or the lines are parallel, the division produces Infinity/NaN coordinates —
 * callers are expected to pass non-degenerate, non-parallel lines.
 *
 * @return a new PointF holding the crossing coordinates
 */
public PointF getCross(PointF P1, PointF P2, PointF P3, PointF P4) {
    // Line through P1,P2: y = slope1 * x + intercept1
    float slope1 = (P2.y - P1.y) / (P2.x - P1.x);
    float intercept1 = ((P1.x * P2.y) - (P2.x * P1.y)) / (P1.x - P2.x);
    // Line through P3,P4: y = slope2 * x + intercept2
    float slope2 = (P4.y - P3.y) / (P4.x - P3.x);
    float intercept2 = ((P3.x * P4.y) - (P4.x * P3.y)) / (P3.x - P4.x);

    // Equate the two lines and solve for x, then substitute back for y.
    PointF crossing = new PointF();
    crossing.x = (intercept2 - intercept1) / (slope1 - slope2);
    crossing.y = slope1 * crossing.x + intercept1;
    return crossing;
}
Use of android.graphics.PointF in project android_frameworks_base by ParanoidAndroid:
class PagedView, method isFlingingToDelete.
/**
 * Decides whether the current gesture is an upward fling-to-delete.
 *
 * <p>Samples the velocity tracker and, if the vertical velocity is faster
 * (more negative) than the delete threshold, checks that the fling direction
 * is within FLING_TO_DELETE_MAX_FLING_DEGREES of straight up.
 *
 * @return the fling velocity vector when the gesture qualifies, null otherwise
 */
private PointF isFlingingToDelete() {
    ViewConfiguration config = ViewConfiguration.get(getContext());
    mVelocityTracker.computeCurrentVelocity(1000, config.getScaledMaximumFlingVelocity());

    // Not moving upward fast enough — not a delete fling.
    if (mVelocityTracker.getYVelocity() >= mFlingToDeleteThresholdVelocity) {
        return null;
    }

    PointF velocity = new PointF(mVelocityTracker.getXVelocity(), mVelocityTracker.getYVelocity());
    PointF up = new PointF(0f, -1f);
    // Angle between the fling vector and straight up, via the dot product.
    float dot = (velocity.x * up.x) + (velocity.y * up.y);
    float angle = (float) Math.acos(dot / (velocity.length() * up.length()));

    return (angle <= Math.toRadians(FLING_TO_DELETE_MAX_FLING_DEGREES)) ? velocity : null;
}
Use of android.graphics.PointF in project StickerCamera by Skykai521:
class GPUImageFilterTools, method createFilterForType.
/**
 * Creates a {@link GPUImageFilter} instance for the given filter type.
 *
 * <p>ACV_* types load an Adobe curve file from res/raw into a tone-curve
 * filter; BLEND_* types delegate to {@code createBlendFilter}; the remaining
 * types construct the matching GPUImage filter with fixed default parameters.
 *
 * @param context used to resolve raw/drawable resources for curve and lookup filters
 * @param type    the filter type to instantiate
 * @return a freshly constructed filter; never null
 * @throws IllegalStateException if {@code type} is not a known filter type
 */
public static GPUImageFilter createFilterForType(final Context context, final FilterType type) {
    switch (type) {
        case NORMAL:
            return new GPUImageFilter();
        case ACV_AIMEI:
            return toneCurveFromRaw(context, R.raw.aimei);
        case ACV_DANLAN:
            return toneCurveFromRaw(context, R.raw.danlan);
        case ACV_DANHUANG:
            return toneCurveFromRaw(context, R.raw.danhuang);
        case ACV_FUGU:
            return toneCurveFromRaw(context, R.raw.fugu);
        case ACV_GAOLENG:
            return toneCurveFromRaw(context, R.raw.gaoleng);
        case ACV_HUAIJIU:
            return toneCurveFromRaw(context, R.raw.huaijiu);
        case ACV_JIAOPIAN:
            return toneCurveFromRaw(context, R.raw.jiaopian);
        case ACV_KEAI:
            return toneCurveFromRaw(context, R.raw.keai);
        case ACV_LOMO:
            return toneCurveFromRaw(context, R.raw.lomo);
        case ACV_MORENJIAQIANG:
            return toneCurveFromRaw(context, R.raw.morenjiaqiang);
        case ACV_NUANXIN:
            return toneCurveFromRaw(context, R.raw.nuanxin);
        case ACV_QINGXIN:
            return toneCurveFromRaw(context, R.raw.qingxin);
        case ACV_RIXI:
            return toneCurveFromRaw(context, R.raw.rixi);
        case ACV_WENNUAN:
            return toneCurveFromRaw(context, R.raw.wennuan);
        case CONTRAST:
            return new GPUImageContrastFilter(2.0f);
        case GAMMA:
            return new GPUImageGammaFilter(2.0f);
        case INVERT:
            return new GPUImageColorInvertFilter();
        case PIXELATION:
            return new GPUImagePixelationFilter();
        case HUE:
            return new GPUImageHueFilter(90.0f);
        case BRIGHTNESS:
            return new GPUImageBrightnessFilter(1.5f);
        case GRAYSCALE:
            return new GPUImageGrayscaleFilter();
        case SEPIA:
            return new GPUImageSepiaFilter();
        case SHARPEN:
            GPUImageSharpenFilter sharpness = new GPUImageSharpenFilter();
            sharpness.setSharpness(2.0f);
            return sharpness;
        case SOBEL_EDGE_DETECTION:
            return new GPUImageSobelEdgeDetection();
        case THREE_X_THREE_CONVOLUTION:
            GPUImage3x3ConvolutionFilter convolution = new GPUImage3x3ConvolutionFilter();
            // Horizontal Sobel kernel as the default 3x3 convolution.
            convolution.setConvolutionKernel(new float[] { -1.0f, 0.0f, 1.0f, -2.0f, 0.0f, 2.0f, -1.0f, 0.0f, 1.0f });
            return convolution;
        case EMBOSS:
            return new GPUImageEmbossFilter();
        case POSTERIZE:
            return new GPUImagePosterizeFilter();
        case FILTER_GROUP:
            List<GPUImageFilter> filters = new LinkedList<GPUImageFilter>();
            filters.add(new GPUImageContrastFilter());
            filters.add(new GPUImageDirectionalSobelEdgeDetectionFilter());
            filters.add(new GPUImageGrayscaleFilter());
            return new GPUImageFilterGroup(filters);
        case SATURATION:
            return new GPUImageSaturationFilter(1.0f);
        case EXPOSURE:
            return new GPUImageExposureFilter(0.0f);
        case HIGHLIGHT_SHADOW:
            return new GPUImageHighlightShadowFilter(0.0f, 1.0f);
        case MONOCHROME:
            return new GPUImageMonochromeFilter(1.0f, new float[] { 0.6f, 0.45f, 0.3f, 1.0f });
        case OPACITY:
            return new GPUImageOpacityFilter(1.0f);
        case RGB:
            return new GPUImageRGBFilter(1.0f, 1.0f, 1.0f);
        case WHITE_BALANCE:
            return new GPUImageWhiteBalanceFilter(5000.0f, 0.0f);
        case VIGNETTE:
            // Vignette centered on the image (normalized coordinates).
            PointF centerPoint = new PointF();
            centerPoint.x = 0.5f;
            centerPoint.y = 0.5f;
            return new GPUImageVignetteFilter(centerPoint, new float[] { 0.0f, 0.0f, 0.0f }, 0.3f, 0.75f);
        case TONE_CURVE:
            return toneCurveFromRaw(context, R.raw.tone_cuver_sample);
        case BLEND_DIFFERENCE:
            return createBlendFilter(context, GPUImageDifferenceBlendFilter.class);
        case BLEND_SOURCE_OVER:
            return createBlendFilter(context, GPUImageSourceOverBlendFilter.class);
        case BLEND_COLOR_BURN:
            return createBlendFilter(context, GPUImageColorBurnBlendFilter.class);
        case BLEND_COLOR_DODGE:
            return createBlendFilter(context, GPUImageColorDodgeBlendFilter.class);
        case BLEND_DARKEN:
            return createBlendFilter(context, GPUImageDarkenBlendFilter.class);
        case BLEND_DISSOLVE:
            return createBlendFilter(context, GPUImageDissolveBlendFilter.class);
        case BLEND_EXCLUSION:
            return createBlendFilter(context, GPUImageExclusionBlendFilter.class);
        case BLEND_HARD_LIGHT:
            return createBlendFilter(context, GPUImageHardLightBlendFilter.class);
        case BLEND_LIGHTEN:
            return createBlendFilter(context, GPUImageLightenBlendFilter.class);
        case BLEND_ADD:
            return createBlendFilter(context, GPUImageAddBlendFilter.class);
        case BLEND_DIVIDE:
            return createBlendFilter(context, GPUImageDivideBlendFilter.class);
        case BLEND_MULTIPLY:
            return createBlendFilter(context, GPUImageMultiplyBlendFilter.class);
        case BLEND_OVERLAY:
            return createBlendFilter(context, GPUImageOverlayBlendFilter.class);
        case BLEND_SCREEN:
            return createBlendFilter(context, GPUImageScreenBlendFilter.class);
        case BLEND_ALPHA:
            return createBlendFilter(context, GPUImageAlphaBlendFilter.class);
        case BLEND_COLOR:
            return createBlendFilter(context, GPUImageColorBlendFilter.class);
        case BLEND_HUE:
            return createBlendFilter(context, GPUImageHueBlendFilter.class);
        case BLEND_SATURATION:
            return createBlendFilter(context, GPUImageSaturationBlendFilter.class);
        case BLEND_LUMINOSITY:
            return createBlendFilter(context, GPUImageLuminosityBlendFilter.class);
        case BLEND_LINEAR_BURN:
            return createBlendFilter(context, GPUImageLinearBurnBlendFilter.class);
        case BLEND_SOFT_LIGHT:
            return createBlendFilter(context, GPUImageSoftLightBlendFilter.class);
        case BLEND_SUBTRACT:
            return createBlendFilter(context, GPUImageSubtractBlendFilter.class);
        case BLEND_CHROMA_KEY:
            return createBlendFilter(context, GPUImageChromaKeyBlendFilter.class);
        case BLEND_NORMAL:
            return createBlendFilter(context, GPUImageNormalBlendFilter.class);
        case LOOKUP_AMATORKA:
            GPUImageLookupFilter amatorka = new GPUImageLookupFilter();
            amatorka.setBitmap(BitmapFactory.decodeResource(context.getResources(), R.drawable.lookup_amatorka));
            return amatorka;
        case GAUSSIAN_BLUR:
            return new GPUImageGaussianBlurFilter();
        case CROSSHATCH:
            return new GPUImageCrosshatchFilter();
        case BOX_BLUR:
            return new GPUImageBoxBlurFilter();
        case CGA_COLORSPACE:
            return new GPUImageCGAColorspaceFilter();
        case DILATION:
            return new GPUImageDilationFilter();
        case KUWAHARA:
            return new GPUImageKuwaharaFilter();
        case RGB_DILATION:
            return new GPUImageRGBDilationFilter();
        case SKETCH:
            return new GPUImageSketchFilter();
        case TOON:
            return new GPUImageToonFilter();
        case SMOOTH_TOON:
            return new GPUImageSmoothToonFilter();
        case BULGE_DISTORTION:
            return new GPUImageBulgeDistortionFilter();
        case GLASS_SPHERE:
            return new GPUImageGlassSphereFilter();
        case HAZE:
            return new GPUImageHazeFilter();
        case LAPLACIAN:
            return new GPUImageLaplacianFilter();
        case NON_MAXIMUM_SUPPRESSION:
            return new GPUImageNonMaximumSuppressionFilter();
        case SPHERE_REFRACTION:
            return new GPUImageSphereRefractionFilter();
        case SWIRL:
            return new GPUImageSwirlFilter();
        case WEAK_PIXEL_INCLUSION:
            return new GPUImageWeakPixelInclusionFilter();
        case FALSE_COLOR:
            return new GPUImageFalseColorFilter();
        case COLOR_BALANCE:
            return new GPUImageColorBalanceFilter();
        default:
            throw new IllegalStateException("No filter of that type!");
    }
}

/**
 * Builds a tone-curve filter from an Adobe .acv curve file in res/raw.
 *
 * <p>Extracted helper: the original code allocated one shared
 * GPUImageToneCurveFilter unconditionally for every call (wasted allocation
 * for the ~50 non-ACV types) and repeated this three-line pattern fifteen
 * times. The filter is now created lazily, only for tone-curve types.
 *
 * @param context  used to open the raw resource
 * @param rawResId resource id of the .acv curve file
 * @return a tone-curve filter initialized from the given curve file
 */
private static GPUImageToneCurveFilter toneCurveFromRaw(final Context context, final int rawResId) {
    GPUImageToneCurveFilter curveFilter = new GPUImageToneCurveFilter();
    curveFilter.setFromCurveFileInputStream(context.getResources().openRawResource(rawResId));
    return curveFilter;
}
Use of android.graphics.PointF in project Fairphone by Kwamecorp:
class DragController, method onTouchEvent.
/**
 * Call this from a drag source view.
 *
 * Handles the touch stream while a drag or menu-selection gesture is in
 * progress: tracks velocity, drives edge-scrolling, and on finger-up either
 * drops the dragged object (possibly onto a fling-to-delete target) or ends
 * the menu selection. Returns false only when there is nothing to handle
 * (not dragging and state is NORMAL); otherwise consumes the event.
 */
public boolean onTouchEvent(MotionEvent ev) {
if (!mDragging && mCurrentState == MovementState.NORMAL) {
return false;
}
// Update the velocity tracker
acquireVelocityTrackerAndAddMovement(ev);
final int action = ev.getAction();
// Event coordinates clamped to the drag layer's bounds.
final int[] dragLayerPos = getClampedDragLayerPos(ev.getX(), ev.getY());
final int dragLayerX = dragLayerPos[0];
final int dragLayerY = dragLayerPos[1];
switch(action) {
case MotionEvent.ACTION_DOWN:
if (mCurrentState == MovementState.NORMAL) {
// Remember where the motion event started
mMotionDownX = dragLayerX;
mMotionDownY = dragLayerY;
// If the touch started inside a horizontal edge zone, arm delayed
// page scrolling; otherwise mark the touch as outside the zone.
if ((dragLayerX < mScrollZone) || (dragLayerX > mScrollView.getWidth() - mScrollZone)) {
mScrollState = SCROLL_WAITING_IN_ZONE;
mHandler.postDelayed(mScrollRunnable, SCROLL_DELAY);
} else {
mScrollState = SCROLL_OUTSIDE_ZONE;
}
}
break;
case MotionEvent.ACTION_MOVE:
// NORMAL state moves the drag; any other state updates menu selection.
if (mCurrentState == MovementState.NORMAL) {
handleMoveEvent(dragLayerX, dragLayerY);
} else {
updateMenuSelection(ev);
}
break;
case MotionEvent.ACTION_UP:
if (mCurrentState == MovementState.NORMAL) {
// Apply the final position, cancel any pending edge scroll.
handleMoveEvent(dragLayerX, dragLayerY);
mHandler.removeCallbacks(mScrollRunnable);
if (mDragging) {
// Fast upward fling drops onto the delete target; otherwise a
// normal drop at the release position.
PointF vec = isFlingingToDelete(mDragObject.dragSource);
if (vec != null) {
dropOnFlingToDeleteTarget(dragLayerX, dragLayerY, vec);
} else {
drop(dragLayerX, dragLayerY);
}
}
endDrag();
} else {
stopMenuSelection(ev);
}
// Reset gesture state for the next touch sequence.
mCurrentState = MovementState.NORMAL;
canShowMenuInThisMovement = true;
break;
case MotionEvent.ACTION_CANCEL:
if (mCurrentState == MovementState.NORMAL) {
// Gesture aborted by the system: abandon the drag entirely.
mHandler.removeCallbacks(mScrollRunnable);
cancelDrag();
} else {
stopMenuSelection(ev);
}
mCurrentState = MovementState.NORMAL;
canShowMenuInThisMovement = true;
break;
}
return true;
}
Aggregations