Search in sources :

Example 1 with Face

use of com.google.mlkit.vision.face.Face in project react-native-camera by react-native-community.

the class FileFaceDetectionAsyncTask method serializeEventData.

/**
 * Serializes the detected faces plus source-image metadata into a single
 * event map, releases the detector, and resolves the pending promise.
 */
private void serializeEventData(List<Face> faces) {
    // Serialize each face, flipping yaw/roll into the [0, 360) range.
    WritableArray serializedFaces = Arguments.createArray();
    for (Face detectedFace : faces) {
        WritableMap faceMap = FaceDetectorUtils.serializeFace(detectedFace);
        faceMap.putDouble("yawAngle", (-faceMap.getDouble("yawAngle") + 360) % 360);
        faceMap.putDouble("rollAngle", (-faceMap.getDouble("rollAngle") + 360) % 360);
        serializedFaces.pushMap(faceMap);
    }
    // Image metadata lets JS map face coordinates back onto the source file.
    WritableMap imageMap = Arguments.createMap();
    imageMap.putInt("width", mWidth);
    imageMap.putInt("height", mHeight);
    imageMap.putInt("orientation", mOrientation);
    imageMap.putString("uri", mUri);
    WritableMap event = Arguments.createMap();
    event.putArray("faces", serializedFaces);
    event.putMap("image", imageMap);
    mRNFaceDetector.release();
    mPromise.resolve(event);
}
Also used : WritableMap(com.facebook.react.bridge.WritableMap) WritableArray(com.facebook.react.bridge.WritableArray) Face(com.google.mlkit.vision.face.Face)

Example 2 with Face

use of com.google.mlkit.vision.face.Face in project react-native-camera by react-native-community.

the class FileFaceDetectionAsyncTask method doInBackground.

/**
 * Loads the image at {@code mUri}, runs ML Kit face detection on it, and
 * resolves or rejects {@code mPromise} with the result. EXIF orientation is
 * read first so consumers can orient detected face coordinates correctly.
 */
@Override
protected Void doInBackground(Void... voids) {
    if (isCancelled()) {
        return null;
    }
    mRNFaceDetector = detectorForOptions(mOptions, mContext);
    try {
        ExifInterface exif = new ExifInterface(mPath);
        mOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
    } catch (IOException e) {
        // Non-fatal: detection proceeds with ORIENTATION_UNDEFINED.
        Log.e(ERROR_TAG, "Reading orientation from file `" + mPath + "` failed.", e);
    }
    try {
        InputImage image = InputImage.fromFilePath(mContext, Uri.parse(mUri));
        FaceDetector detector = mRNFaceDetector.getDetector();
        detector.process(image).addOnSuccessListener(new OnSuccessListener<List<Face>>() {

            @Override
            public void onSuccess(List<Face> faces) {
                serializeEventData(faces);
            }
        }).addOnFailureListener(new OnFailureListener() {

            @Override
            public void onFailure(Exception e) {
                // Fixed: this is a face-detection task, not text recognition.
                Log.e(ERROR_TAG, "Face detection task failed", e);
                mPromise.reject(ERROR_TAG, "Face detection task failed", e);
            }
        });
    } catch (IOException e) {
        // Fixed: message was missing spaces around the URI; the redundant
        // printStackTrace() is dropped since the throwable is already logged.
        Log.e(ERROR_TAG, "Creating Firebase Image from uri " + mUri + " failed", e);
        mPromise.reject(ERROR_TAG, "Creating Firebase Image from uri " + mUri + " failed", e);
    }
    return null;
}
Also used : RNFaceDetector(org.reactnative.facedetector.RNFaceDetector) FaceDetector(com.google.mlkit.vision.face.FaceDetector) ExifInterface(androidx.exifinterface.media.ExifInterface) List(java.util.List) IOException(java.io.IOException) Face(com.google.mlkit.vision.face.Face) OnSuccessListener(com.google.android.gms.tasks.OnSuccessListener) InputImage(com.google.mlkit.vision.common.InputImage) OnFailureListener(com.google.android.gms.tasks.OnFailureListener) IOException(java.io.IOException)

Example 3 with Face

use of com.google.mlkit.vision.face.Face in project react-native-camera by react-native-community.

the class FaceDetectorAsyncTask method doInBackground.

/**
 * Runs ML Kit face detection on the in-memory YV12 frame and forwards the
 * serialized result (or task completion on failure) to the delegate.
 */
@Override
protected Void doInBackground(Void... ignored) {
    // Nothing to do without a delegate/detector, or if the task was cancelled.
    if (isCancelled() || mDelegate == null || mFaceDetector == null) {
        return null;
    }
    InputImage image = InputImage.fromByteArray(mImageData, mWidth, mHeight, getFirebaseRotation(), InputImage.IMAGE_FORMAT_YV12);
    FaceDetector detector = mFaceDetector.getDetector();
    detector.process(image).addOnSuccessListener(new OnSuccessListener<List<Face>>() {

        @Override
        public void onSuccess(List<Face> faces) {
            WritableArray facesList = serializeEventData(faces);
            mDelegate.onFacesDetected(facesList);
            mDelegate.onFaceDetectingTaskCompleted();
        }
    }).addOnFailureListener(new OnFailureListener() {

        @Override
        public void onFailure(Exception e) {
            // Fixed: wrong label ("Text recognition") for a face-detection
            // task, and the exception is now passed as the throwable argument
            // so the full stack trace is logged instead of being concatenated
            // into the message string.
            Log.e(TAG, "Face detection task failed", e);
            mDelegate.onFaceDetectingTaskCompleted();
        }
    });
    return null;
}
Also used : RNFaceDetector(org.reactnative.facedetector.RNFaceDetector) FaceDetector(com.google.mlkit.vision.face.FaceDetector) WritableArray(com.facebook.react.bridge.WritableArray) List(java.util.List) Face(com.google.mlkit.vision.face.Face) OnSuccessListener(com.google.android.gms.tasks.OnSuccessListener) InputImage(com.google.mlkit.vision.common.InputImage) OnFailureListener(com.google.android.gms.tasks.OnFailureListener)

Example 4 with Face

use of com.google.mlkit.vision.face.Face in project react-native-camera by react-native-community.

the class FileFaceDetectionAsyncTask method onPostExecute.

/**
 * Builds the final event payload (serialized faces + image metadata),
 * releases the detector, and resolves the pending promise.
 */
@Override
protected void onPostExecute(List<Face> faces) {
    super.onPostExecute(faces);
    // Serialize each face, normalizing yaw/roll into the [0, 360) range.
    WritableArray serializedFaces = Arguments.createArray();
    for (Face detectedFace : faces) {
        WritableMap faceMap = FaceDetectorUtils.serializeFace(detectedFace);
        faceMap.putDouble("yawAngle", (-faceMap.getDouble("yawAngle") + 360) % 360);
        faceMap.putDouble("rollAngle", (-faceMap.getDouble("rollAngle") + 360) % 360);
        serializedFaces.pushMap(faceMap);
    }
    // Image metadata lets JS map face coordinates back onto the source file.
    WritableMap imageMap = Arguments.createMap();
    imageMap.putInt("width", mWidth);
    imageMap.putInt("height", mHeight);
    imageMap.putInt("orientation", mOrientation);
    imageMap.putString("uri", mUri);
    WritableMap event = Arguments.createMap();
    event.putArray("faces", serializedFaces);
    event.putMap("image", imageMap);
    mRNFaceDetector.release();
    mPromise.resolve(event);
}
Also used : WritableMap(com.facebook.react.bridge.WritableMap) WritableArray(com.facebook.react.bridge.WritableArray) Face(com.google.mlkit.vision.face.Face)

Example 5 with Face

use of com.google.mlkit.vision.face.Face in project react-native-camera by lwansbrough.

the class FaceDetectorAsyncTask method serializeEventData.

/**
 * Serializes detected faces into a WritableArray, scaling/padding each face
 * to view coordinates and correcting for the camera facing direction.
 */
private WritableArray serializeEventData(List<Face> faces) {
    WritableArray serialized = Arguments.createArray();
    for (Face detectedFace : faces) {
        WritableMap faceMap = FaceDetectorUtils.serializeFace(detectedFace, mScaleX, mScaleY, mWidth, mHeight, mPaddingLeft, mPaddingTop);
        // Front camera frames are mirrored, so flip X; otherwise only the
        // angle direction needs adjusting.
        boolean frontFacing = mImageDimensions.getFacing() == CameraView.FACING_FRONT;
        faceMap = frontFacing
                ? FaceDetectorUtils.rotateFaceX(faceMap, mImageDimensions.getWidth(), mScaleX)
                : FaceDetectorUtils.changeAnglesDirection(faceMap);
        serialized.pushMap(faceMap);
    }
    return serialized;
}
Also used : WritableMap(com.facebook.react.bridge.WritableMap) WritableArray(com.facebook.react.bridge.WritableArray) Face(com.google.mlkit.vision.face.Face)

Aggregations

Face (com.google.mlkit.vision.face.Face)10 WritableArray (com.facebook.react.bridge.WritableArray)8 WritableMap (com.facebook.react.bridge.WritableMap)6 OnFailureListener (com.google.android.gms.tasks.OnFailureListener)4 OnSuccessListener (com.google.android.gms.tasks.OnSuccessListener)4 InputImage (com.google.mlkit.vision.common.InputImage)4 FaceDetector (com.google.mlkit.vision.face.FaceDetector)4 List (java.util.List)4 RNFaceDetector (org.reactnative.facedetector.RNFaceDetector)4 ExifInterface (androidx.exifinterface.media.ExifInterface)2 IOException (java.io.IOException)2