/
face-identity.js
95 lines (90 loc) · 4.42 KB
/
face-identity.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
const cv = require('opencv4nodejs');
// Haar cascade for frontal-face detection; loaded once at module init.
const classifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_ALT2);
const fs = require('fs')
const _ = require('lodash');
const path = require('path');
const glob = require('glob');
const uuid = require('uuid');
// Accepted image file extensions, joined into a comma-separated string so it
// can be dropped straight into a glob brace pattern (see FaceDetectFSSearch).
const imageTypes= ['jpeg','jpg','pjpeg','x-png','png'].join(",")
// One shared, stateful recognizer per algorithm. NOTE(review): these are
// module singletons — each call to train() replaces the previous model, so
// concurrent requests share (and clobber) recognizer state.
const Recognizers = {
lbph: new cv.LBPHFaceRecognizer(),
eigen: new cv.EigenFaceRecognizer(),
fisher: new cv.FisherFaceRecognizer()
}
module.exports = {
FaceDetectFS: (DIR, MatchImageBase64, SelectedRecognizer) => {
const files = fs.readdirSync(DIR);
const personNames = [];
const images = [];
const imageLabel = [];
let matrixArray = [];
let trainingLabelArray = [];
const captureImageFileName = DIR + '/temp.jpeg';
fs.writeFileSync(captureImageFileName, MatchImageBase64.replace(/^data:image\/jpeg;base64,/, ""), 'base64');
_.each(files, (currentFile) => {
const dirCheck = DIR + "/" + currentFile;
if (fs.statSync(dirCheck).isDirectory()) {
const allFiles = fs.readdirSync(dirCheck);
const subDirectory = _.filter(allFiles,file=> _.find(imageTypes.split(","),type=> path.extname(file).toLowerCase()===`.${type}`));
if (subDirectory && subDirectory.length > 0) {
personNames.push(currentFile);
imageLabel.push(subDirectory);
const imagePath = DIR + '/' + currentFile + '/';
images.push(subDirectory
.map(file => path.resolve(imagePath, file))
.map(filePath => cv.imread(filePath))
.map(img => img.bgrToGray())
.map(getFaceImage)
.map(faceImg => faceImg.resize(80, 80)));
}
}
});
_.each(images, (matrix, index) => {
const trainingLabel = imageLabel[index]
.map(file => personNames.findIndex(name => file.includes(name)));
matrixArray = [...matrixArray, ...matrix];
trainingLabelArray = [...trainingLabelArray, ...trainingLabel];
});
const currentImage = getFaceImage(cv.imread(captureImageFileName).bgrToGray()).resize(80, 80);
Recognizers[SelectedRecognizer].train(matrixArray, trainingLabelArray);
const prediction = Recognizers[SelectedRecognizer].predict(currentImage);
fs.unlinkSync(captureImageFileName);
return {
Name: personNames[prediction.label],
ProbabilityPercentage: prediction.confidence
};
},
FaceDetectFSSearch: (SearchDIR, PersonNames, MatchImageBase64, SelectedRecognizer) => {
const images = [];
let trainingLabelArray = [];
const captureImageFileName = SearchDIR + `/${uuid.v1()}.jpeg`;
fs.writeFileSync(captureImageFileName, MatchImageBase64.replace(/^data:image\/jpeg;base64,/, ""), 'base64');
const PNames=PersonNames.sort();
const searchList = [...PNames, ..._.map(PNames, name => name.toLowerCase())].join(",");
const imageTrainingSet = glob.sync(`${SearchDIR}/*{${searchList}}*.{${imageTypes}}`);
for (i = 0; i < PNames.length; i++) {
const trainingLabel = _.filter(imageTrainingSet,item=>item.toLowerCase().includes(PNames[i].toLowerCase()))
trainingLabelArray = [...trainingLabelArray,...Array(trainingLabel.length).fill(i)]
// const trainingLabel = _.filter(imageTrainingSet,item=>item.match(`/${PersonNames[i]}/ig`))
}
images.push(imageTrainingSet.map(filePath => cv.imread(filePath))
.map(img => img.bgrToGray())
.map(getFaceImage)
.map(faceImg => faceImg.resize(80, 80)));
const currentImage = getFaceImage(cv.imread(captureImageFileName).bgrToGray()).resize(80, 80);
Recognizers[SelectedRecognizer].train(images[0], trainingLabelArray);
const prediction = Recognizers[SelectedRecognizer].predict(currentImage);
fs.unlinkSync(captureImageFileName);
return {
Name: PNames[prediction.label],
ProbabilityPercentage: prediction.confidence
};
}
}
// Runs the Haar cascade over a grayscale image and returns the sub-matrix
// covering the first detected face. Throws when no face is found.
const getFaceImage = (grayImg) => {
  const { objects: detectedFaces } = classifier.detectMultiScale(grayImg);
  if (detectedFaces.length === 0) {
    throw new Error('failed to detect faces');
  }
  const [primaryFace] = detectedFaces;
  return grayImg.getRegion(primaryFace);
};