// api.js -- face recognition helpers built on face-api.js with the node-canvas
// backend. Stored face descriptors are converted and cached per course code so
// the conversion work is not repeated on every request.
const faceapi = require("face-api.js");
const { Canvas, Image } = require("canvas");
const canvas = require("canvas");
const newFace = require("../models/newFace");
const cache = require("../ulilities/cache");

// face-api.js expects browser globals; patch in the node-canvas equivalents.
faceapi.env.monkeyPatch({ Canvas, Image });
// Load the detector, landmark, and recognition weights from ./models.
// The exported functions are usable only after this promise resolves.
async function LoadModels() {
  await faceapi.nets.faceRecognitionNet.loadFromDisk(__dirname + "/models");
  await faceapi.nets.faceLandmark68Net.loadFromDisk(__dirname + "/models");
  await faceapi.nets.ssdMobilenetv1.loadFromDisk(__dirname + "/models");
  console.log("loaded");
}

// Kick off model loading on require, surfacing failures instead of
// silently dropping a rejected promise.
LoadModels().catch(console.error);
// Compute one face descriptor per enrollment image. Each entry of `images`
// may be a file path, URL, or Buffer -- anything canvas.loadImage accepts.
async function getDes(images) {
  try {
    const descriptions = [];
    for (let i = 0; i < images.length; i++) {
      const img = await canvas.loadImage(images[i]);
      const detection = await faceapi
        .detectSingleFace(img)
        .withFaceLandmarks()
        .withFaceDescriptor();
      // detectSingleFace resolves to undefined when no face is found.
      if (!detection) {
        throw new Error(`No face detected in image ${i}`);
      }
      descriptions.push(detection.descriptor);
    }
    return descriptions;
  } catch (error) {
    console.log(error);
    return 0; // callers treat 0 as failure
  }
}
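// Usage sketch (hypothetical caller; `student.images` is an assumed field,
// not defined in this module):
//
//   const descriptions = await getDes(student.images);
//   if (descriptions === 0) {
//     // at least one image failed to load or contained no detectable face
//   }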
// Convert stored face records (descriptions persisted as plain objects) into
// faceapi.LabeledFaceDescriptors. The result is cached per course code so the
// Float32Array conversion runs only once for repeated requests; on a cache
// hit the previously converted descriptors are reused.
function getLabeledDescriptors(code, faces) {
  if (cache.code === code) {
    return cache.faces;
  }
  const labeled = faces.map((face) => {
    const descriptions = face.descriptions.map(
      (d) => new Float32Array(Object.values(d))
    );
    return new faceapi.LabeledFaceDescriptors(face.label, descriptions);
  });
  cache.code = code;
  cache.faces = labeled;
  return labeled;
}

// Detect every face in the given image and find the best match for each one
// among the course's known faces (distance threshold 0.6). Each result and
// its raw descriptor are also passed to newFace.addFace for record-keeping.
async function matchFace(code, faces, image) {
  const labeled = getLabeledDescriptors(code, faces);
  const faceMatcher = new faceapi.FaceMatcher(labeled, 0.6);
  const img = await canvas.loadImage(image);
  const detections = await faceapi
    .detectAllFaces(img)
    .withFaceLandmarks()
    .withFaceDescriptors();
  return detections.map((d) => {
    const result = faceMatcher.findBestMatch(d.descriptor);
    newFace.addFace(result, d.descriptor);
    return result;
  });
}
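// Usage sketch (hypothetical names: `course` is a stored course document,
// `photo` an uploaded image Buffer):
//
//   const matches = await matchFace(course.code, course.faces, photo);
//   matches.forEach((m) => console.log(m.label, m.distance));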
// Same matching as matchFace, but for detections whose descriptors were
// already computed (e.g. client-side), skipping image loading and detection.
async function matchFaceDes(code, faces, detections) {
  const labeled = getLabeledDescriptors(code, faces);
  const faceMatcher = new faceapi.FaceMatcher(labeled, 0.6);
  return detections.map((d) => {
    const result = faceMatcher.findBestMatch(d.descriptor);
    newFace.addFace(result, d.descriptor);
    return result;
  });
}
module.exports = { faceapi, matchFace, matchFaceDes, getDes, LoadModels };
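// Usage sketch (a hypothetical Express route; the Course model, router, and
// upload middleware are assumptions, not part of this module):
//
//   const { matchFace } = require("./api");
//
//   router.post("/attendance/:code", upload.single("photo"), async (req, res) => {
//     const course = await Course.findOne({ code: req.params.code });
//     const results = await matchFace(course.code, course.faces, req.file.buffer);
//     res.json(results.map((r) => r.toString())); // "label (distance)"
//   });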