Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Injimob 1540] enhancements for liveness detection in face verification flow #1562

Draft
wants to merge 8 commits into
base: develop
Choose a base branch
from
46 changes: 25 additions & 21 deletions components/FaceScanner/FaceScanner.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import {Theme} from '.././ui/styleUtils';
import {getRandomInt} from '../../shared/commonUtil';
import {
checkBlink,
cropEyeAreaFromFace,
validateLiveness,
faceDetectorConfig,
getFaceBounds,
imageCaptureConfig,
Expand Down Expand Up @@ -60,7 +60,7 @@ export const FaceScanner: React.FC<FaceScannerProps> = props => {
const [screenColor, setScreenColor] = useState('#0000ff');
const [faceToCompare, setFaceToCompare] = useState(null);
const [opacity, setOpacity] = useState(1);
const [picArray, setPicArray] = useState([]);
const [capturedImages, setCapturedImages] = useState([]);

const screenFlashColors = ['#0000FF', '#00FF00', '#FF0000'];
const MAX_COUNTER = 15;
Expand Down Expand Up @@ -88,7 +88,10 @@ export const FaceScanner: React.FC<FaceScannerProps> = props => {
imageCaptureConfig,
);

setPicArray([...picArray, {color: screenColor, image: capturedImage}]);
setCapturedImages([
...capturedImages,
{screenColor: screenColor, capturedImageUri: capturedImage.uri},
]);

if (counter === randomNumToFaceCompare) {
setFaceToCompare(capturedImage);
Expand All @@ -100,27 +103,16 @@ export const FaceScanner: React.FC<FaceScannerProps> = props => {
}

async function handleFacesDetected({faces}) {
checkBlink(faces[0]);

if (counter == MAX_COUNTER) {
setCounter(counter + 1);
cameraRef.pausePreview();

setScreenColor('#ffffff');
setInfoText(t('faceProcessingInfo'));

const result = await cropEyeAreaFromFace(
picArray,
props.vcImage,
faceToCompare,
);
return result ? props.onValid() : props.onInvalid();
} else if (faces.length > 0) {
if (counter < MAX_COUNTER) {
sree96 marked this conversation as resolved.
Show resolved Hide resolved
if (faces.length > 1) {
setInfoText(t('multipleFacesDetectedGuide'));
return;
}
setInfoText(t('livenessCaptureGuide'));
checkBlink(faces[0]);
const [withinXBounds, withinYBounds, withinYawAngle, withinRollAngle] =
getFaceBounds(faces[0]);

setInfoText(t('faceOutGuide'));

if (
withinXBounds &&
withinYBounds &&
Expand All @@ -135,6 +127,18 @@ export const FaceScanner: React.FC<FaceScannerProps> = props => {
setInfoText(t('faceInGuide'));
await captureImage(screenColor);
}
} else {
cameraRef.pausePreview();

setScreenColor('#ffffff');
setInfoText(t('faceProcessingInfo'));

const isLiveImage = await validateLiveness(
capturedImages,
props.vcImage,
faceToCompare,
);
return isLiveImage ? props.onValid() : props.onInvalid();
}
}

Expand Down
216 changes: 129 additions & 87 deletions components/FaceScanner/FaceScannerHelper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,15 @@ import ImageEditor from '@react-native-community/image-editor';
import {ImageType} from 'expo-camera';
import {getColors} from 'react-native-image-colors';
import {faceCompare} from '@iriscan/biometric-sdk-react-native';
import fileStorage from '../../shared/fileStorage';

let FaceCropPicArray: any[] = new Array();
let EyeCropPicArray: any[] = new Array();
let croppedFaceImages: any[] = new Array();
let croppedEyeImages: any[] = new Array();
let predictedColorResults: any[] = new Array();
let facePoints;
let calculatedThreshold;
let faceCompareOuptut;
let capturedFaceImage;
let croppedFaceImage;
let leftEyeWasClosed = false;
let rightEyeWasClosed = false;
let lastBlinkTimestamp = 0;
Expand Down Expand Up @@ -148,110 +149,151 @@ export const getEyeColorPredictionResult = async (
});
};

export const cropEyeAreaFromFace = async (picArray, vcImage, capturedImage) => {
try {
await Promise.all(
picArray.map(async pic => {
facePoints = (
await FaceDetector.detectFacesAsync(pic.image.uri, faceDetectorConfig)
).faces[0];

if (
facePoints.leftEyeOpenProbability > eyeOpenProbability &&
facePoints.rightEyeOpenProbability > eyeOpenProbability
) {
capturedFaceImage = await ImageEditor.cropImage(pic.image.uri, {
offset: {
x: facePoints.bounds.origin.x,
y: facePoints.bounds.origin.y,
},
size: {
width: facePoints.bounds.size.width,
height: facePoints.bounds.size.height,
},
});

FaceCropPicArray.push({color: pic.color, image: capturedFaceImage});
}
}),
);
/**
 * Detects the face in one captured frame and, when both eyes are open,
 * crops the face region out of the frame and queues it (with the screen
 * colour flashed at capture time) for eye-region analysis.
 * The raw capture file is always deleted afterwards.
 */
const cropFacePortionFromCapturedImage = async ({
  screenColor,
  capturedImageUri,
}) => {
  // NOTE(review): `facePoints` is module-level and this function runs
  // concurrently under Promise.all — later calls clobber earlier results,
  // and cropEyePortionsFromCroppedFaceImages reads whatever value was
  // written last. Confirm whether per-image face points should be carried
  // alongside each cropped face instead.
  facePoints = (
    await FaceDetector.detectFacesAsync(capturedImageUri, faceDetectorConfig)
  ).faces[0];

  // Guard: detection can return no faces (subject moved out of frame);
  // the original code would throw reading properties of undefined.
  if (facePoints == null) {
    await fileStorage.removeItemIfExist(capturedImageUri);
    return;
  }

  // Only frames where both eyes are open are useful for the
  // colour-reflection check; blink frames are discarded.
  if (
    facePoints.leftEyeOpenProbability > eyeOpenProbability &&
    facePoints.rightEyeOpenProbability > eyeOpenProbability
  ) {
    croppedFaceImage = await ImageEditor.cropImage(capturedImageUri, {
      offset: {
        x: facePoints.bounds.origin.x,
        y: facePoints.bounds.origin.y,
      },
      size: {
        width: facePoints.bounds.size.width,
        height: facePoints.bounds.size.height,
      },
    });
    croppedFaceImages.push({screenColor, faceImageUri: croppedFaceImage.uri});
  }

  // Always clean up the raw capture — previously frames rejected for
  // closed eyes were never deleted, leaking files into storage.
  await fileStorage.removeItemIfExist(capturedImageUri);
};

await Promise.all(
EyeCropPicArray.map(async pic => {
const leftEyeColors = await getColors(pic.leftEye.uri);
const rightEyeColors = await getColors(pic.rightEye.uri);
/**
 * Crops the left- and right-eye regions out of a cropped face image and
 * queues them (with the associated screen colour) for colour comparison.
 * The intermediate face crop is deleted once both eye crops are saved.
 */
const cropEyePortionsFromCroppedFaceImages = async ({
  screenColor,
  faceImageUri,
}) => {
  // NOTE(review): these eye coordinates come from the module-level
  // `facePoints`, i.e. whichever frame cropFacePortionFromCapturedImage
  // processed last — not necessarily the frame `faceImageUri` was cropped
  // from. Confirm whether per-frame face points should be threaded through.
  let [leftEyeX, leftEyeY, rightEyeX, rightEyeY] =
    getNormalizedFacePoints(facePoints);

  // Fixed-size window around each eye; offsetX/offsetY and
  // eyeCropHeightConst are tuning constants defined in this module.
  const leftCroppedImage = await ImageEditor.cropImage(faceImageUri, {
    offset: {
      x: leftEyeX - offsetX,
      y: leftEyeY - offsetY,
    },
    size: {
      width: offsetX * 2,
      height: offsetY / 2 - eyeCropHeightConst,
    },
  });

  const rightCroppedImage = await ImageEditor.cropImage(faceImageUri, {
    offset: {
      x: rightEyeX - offsetX,
      y: rightEyeY - offsetY,
    },
    size: {
      width: offsetX * 2,
      height: offsetY / 2 - eyeCropHeightConst,
    },
  });

  croppedEyeImages.push({
    screenColor: screenColor,
    leftEyeUri: leftCroppedImage.uri,
    rightEyeUri: rightCroppedImage.uri,
  });

  // The face crop is no longer needed once both eye crops exist.
  await fileStorage.removeItemIfExist(faceImageUri);
};

const rgbColor = hexRgb(pic.color);
await getEyeColorPredictionResult(leftRGBAColors, rgbColor);
await getEyeColorPredictionResult(rightRGBAColors, rgbColor);
}),
);
} catch (err) {
console.error('Unable to crop the images::', err);
return false;
}
/**
 * Extracts the dominant colours from both eye crops, converts them to RGBA,
 * and feeds each set together with the flashed screen colour into the
 * prediction accumulator. Both eye-crop files are deleted afterwards.
 */
const compareEyeColorsWithScreenColor = async ({
  screenColor,
  leftEyeUri,
  rightEyeUri,
}) => {
  // Dominant colours of one eye crop, filtered and converted to RGBA.
  const extractRgbaColors = async eyeUri => {
    const dominantColors = await getColors(eyeUri);
    return Object.values(dominantColors)
      .filter(filterColor)
      .map(hexColor => hexRgb(hexColor));
  };

  const leftEyeRgbaColors = await extractRgbaColors(leftEyeUri);
  const rightEyeRgbaColors = await extractRgbaColors(rightEyeUri);

  const expectedScreenRgb = hexRgb(screenColor);
  await getEyeColorPredictionResult(leftEyeRgbaColors, expectedScreenRgb);
  await getEyeColorPredictionResult(rightEyeRgbaColors, expectedScreenRgb);

  await fileStorage.removeItemIfExist(leftEyeUri);
  await fileStorage.removeItemIfExist(rightEyeUri);
};

const calculateThresholdAndDetectFaceLiveness = async (
vcImage,
randomCapturedImage,
) => {
calculatedThreshold =
predictedColorResults.filter(element => element).length /
predictedColorResults.length;

const matches = rxDataURI.exec(vcImage).groups;
const vcFace = matches.data;

faceCompareOuptut = await faceCompare(vcFace, capturedImage.base64);
faceCompareOuptut = await faceCompare(vcFace, randomCapturedImage.base64);

if (blinkCounter > 0) {
calculatedThreshold = calculatedThreshold + blinkConfidenceScore;
}

if (calculatedThreshold > LIVENESS_THRESHOLD && faceCompareOuptut) {
return true;
} else {
return calculatedThreshold > LIVENESS_THRESHOLD && faceCompareOuptut
? true
: false;
};

export const validateLiveness = async (
capturedImages,
vcImage,
randomCapturedImage,
) => {
try {
await Promise.all(
capturedImages.map(async capturedImage => {
await cropFacePortionFromCapturedImage(capturedImage);
}),
);

await Promise.all(
croppedFaceImages.map(async croppedFaceImage => {
await cropEyePortionsFromCroppedFaceImages(croppedFaceImage);
}),
);
} catch (err) {
console.error('Unable to crop the images::', err);
return false;
}

try {
await Promise.all(
croppedEyeImages.map(async croppedEyeImage => {
compareEyeColorsWithScreenColor(croppedEyeImage);
}),
);
} catch (err) {
console.error(
'Error occured when extracting the colors from eyes and comparing them with screen color::',
err,
);
return false;
}
return calculateThresholdAndDetectFaceLiveness(vcImage, randomCapturedImage);
};

export interface FaceDetectorConfig {
Expand Down
15 changes: 0 additions & 15 deletions components/FaceScanner/LivenessDetection.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import {View, TouchableOpacity} from 'react-native';
import Spinner from 'react-native-spinkit';
import {Column, Text} from '.././ui';
import {Theme} from '.././ui/styleUtils';
import Svg, {Defs, Mask, Rect, Ellipse} from 'react-native-svg';
import testIDProps from '../../shared/commonUtil';
import {FaceDetectorConfig} from './FaceScannerHelper';

Expand Down Expand Up @@ -46,20 +45,6 @@ const LivenessDetection: React.FC<LivenessDetectionProps> = ({
onFacesDetected={handleFacesDetected}
faceDetectorSettings={faceDetectorConfig}
/>
<Svg height="100%" width="100%" style={{position: 'absolute'}}>
<Defs>
<Mask id="mask" x="0" y="0" height="100%" width="100%">
<Rect height="100%" width="100%" fill="#fff" opacity="0.3" />
<Ellipse rx="38%" ry="45%" cx="50%" cy="50%" fill="black" />
</Mask>
</Defs>
<Rect
height="100%"
width="100%"
fill="rgba(0, 0, 0, 0.8)"
mask="url(#mask)"
/>
</Svg>
</View>
</View>
</View>
Expand Down
14 changes: 9 additions & 5 deletions locales/ara.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,12 @@
"FaceScanner": {
"livenessCaptureGuide": "أمسك الهاتف بثبات، وحافظ على تركيز وجهك في المنتصف.",
"faceProcessingInfo": "يرجى الانتظار بينما نقوم بمعالجة البيانات.",
"faceOutGuide": "أبقِ وجهك داخل الشكل البيضاوي!",
"faceInGuide": "جاري الالتقاط!",
"cancel": "يلغي",
"imageCaptureGuide": "أمسك الهاتف بثبات، وحافظ على تركيز وجهك في المنتصف وانقر على ‘التقاط'",
"capture": "يأسر",
"flipCamera": "فليب الكاميرا"
"flipCamera": "فليب الكاميرا",
"multipleFacesDetectedGuide": "تم اكتشاف وجوه متعددة. تأكد من أنك وحدك في إطار التحقق."
},
"OIDcAuth": {
"title": "مصادقة OIDC",
Expand Down Expand Up @@ -725,9 +725,13 @@
}
},
"postFaceCapture": {
"captureSuccessMessage": "تم التحقق من الوجه بنجاح! يتم بدء مشاركة بيانات الاعتماد.",
"captureFailureTitle": "فشل التحقق من الوجه!",
"captureFailureMessage": "يرجى التأكد من أن وجهك مرئي بوضوح وحاول التقاط صورة شخصية مرة أخرى."
"successMessage": "تم التحقق من الوجه بنجاح! يتم بدء مشاركة بيانات الاعتماد.",
"failureTitle": "فشل التحقق من الوجه!",
"failureMessage": "يرجى التأكد من أن وجهك مرئي بوضوح وحاول التقاط صورة شخصية مرة أخرى.",
"LivenessDetection": {
"retryFailureMessage": "لم نتمكن من مشاركة بطاقتك بسبب فشل اكتشاف النشاط. الرجاء النقر فوق إعادة المحاولة لمحاولة مشاركة بطاقتك مرة أخرى، أو النقر فوق الصفحة الرئيسية للخروج من المشاركة.",
"maxRetryFailureMessage": "آسف! لم نتمكن من مشاركة بطاقتك بسبب فشل اكتشاف النشاط. الرجاء معاودة المحاولة في وقت لاحق."
}
},
"rational": {
"title": "قم بتشغيل موقعك",
Expand Down
Loading
Loading