Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Face Detection Cleanup and Options #92

Merged
merged 5 commits into from
Apr 29, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion Gruntfile.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,13 @@ module.exports = function(grunt) {
},
uglify: {
options: {
report: 'gzip',
report: 'min',
preserveComments: 'false',
mangle: {
except: ['clmtrackr']
},
compress: {
drop_console: true
}
},
dist: {
Expand Down
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,14 @@ You can also use the built in function ```draw()``` to draw the tracked facial m

See the complete example [here](https://auduno.github.com/clmtrackr/example.html).

### Development ###

First, install [node.js](http://nodejs.org/) with npm, then install [Grunt](http://gruntjs.com/getting-started).

In the root directory of clmtrackr, run `npm install` then run `grunt`. This will create `clmtrackr.min.js` and `clmtrackr.js`.

To test the examples locally, you need to run a local server. One easy way to do this is to install `http-server`, a small node.js utility: `npm install -g http-server`. Then run `http-server` in the root of clmtrackr and go to `http://localhost:8080/examples` in your browser.

### License ###

**clmtrackr** is distributed under the [MIT License](http://www.opensource.org/licenses/MIT)
160 changes: 80 additions & 80 deletions clmtrackr.js

Large diffs are not rendered by default.

7 changes: 0 additions & 7 deletions clmtrackr.min.js

This file was deleted.

2 changes: 1 addition & 1 deletion examples/clm_image.html
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ <h2>Face tracking in images</h2>
<p>This is an example of precise face-tracking in an image using the javascript library <a href="https://github.com/auduno/clmtrackr"><em>clmtrackr</em></a>. To try it out, simply click start.</p>
<span id="loadimagetext"><p>To try it out with your own image, choose a file above by clicking "choose file". If the tracking has problems, try selecting the face in the image manually by clicking "manually select face", and click and hold to drag a square around the face in the image.</p></span>
</div>
<p>The image is from the <a href="http://www-prima.inrialpes.fr/FGnet/data/01-TalkingFace/talking_face.html">FG-net Talking Face</a> project</p>
<p>The image is from the <a href="http://www-prima.inrialpes.fr/FGnet/data/01-TalkingFace/talking_face.html">FG-net Talking Face</a> project</p>
<a href="https://github.com/auduno/clmtrackr"><img style="position: absolute; top: 0; left: 0; border: 0;" src="https://s3.amazonaws.com/github/ribbons/forkme_left_green_007200.png" alt="Fork me on GitHub"></a>

<script>
Expand Down
12 changes: 6 additions & 6 deletions examples/ext_js/frontalface.js
100644 → 100755

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion examples/ext_js/jsfeat-min.js
100644 → 100755

Large diffs are not rendered by default.

114 changes: 61 additions & 53 deletions examples/ext_js/jsfeat_detect.js
Original file line number Diff line number Diff line change
@@ -1,60 +1,68 @@
// simple wrapper for jsfeat face detector
// requires jsfeat
//
// Reconstructed post-merge version: the diff rendering had interleaved the
// removed (pre-PR) constructor/findFace bodies with the added ones, leaving
// duplicated dead code. This is the coherent merged implementation.

/**
 * Face-detector wrapper around jsfeat's Haar cascade.
 *
 * @param {HTMLVideoElement|HTMLImageElement} video - source element to detect in;
 *     only its .width/.height are read here, frames are drawn per findFace call.
 * @param {number} maxWorkSize - maximum dimension (px) of the internal working
 *     canvas; the frame is downscaled so neither side exceeds this.
 */
var jsfeat_face = function(video, maxWorkSize) {
	var videoWidth = video.width;
	var videoHeight = video.height;

	var classifier = jsfeat.haar.frontalface;

	// Downscale so the longer side fits maxWorkSize; |0 truncates to int.
	var scale = Math.min(maxWorkSize/videoWidth, maxWorkSize/videoHeight);
	var w = (videoWidth*scale)|0;
	var h = (videoHeight*scale)|0;

	// Working buffers are allocated once, at construction, and reused per frame.
	var img_u8 = new jsfeat.matrix_t(w, h, jsfeat.U8_t | jsfeat.C1_t);
	var edg = new jsfeat.matrix_t(w, h, jsfeat.U8_t | jsfeat.C1_t);
	var work_canvas = document.createElement('canvas');
	work_canvas.width = w;
	work_canvas.height = h;
	var work_ctx = work_canvas.getContext('2d');
	// Integral images need one extra row/column.
	var ii_sum = new Int32Array((w+1)*(h+1));
	var ii_sqsum = new Int32Array((w+1)*(h+1));
	var ii_tilted = new Int32Array((w+1)*(h+1));
	var ii_canny = new Int32Array((w+1)*(h+1));

	/**
	 * Run one detection pass over the current frame.
	 *
	 * @param {Object} params - detection options (see clm.js faceDetection
	 *     defaults): equalizeHistogram, useCanny, edgesDensity, scaleFactor,
	 *     minScale.
	 * @returns {Object|boolean} best rectangle {x, y, width, height, ...}
	 *     scaled back to source coordinates, or false if no face was found.
	 */
	this.findFace = function (params) {
		work_ctx.drawImage(video, 0, 0, work_canvas.width, work_canvas.height);
		var imageData = work_ctx.getImageData(0, 0, work_canvas.width, work_canvas.height);

		jsfeat.imgproc.grayscale(imageData.data, work_canvas.width, work_canvas.height, img_u8);

		// possible params
		if (params.equalizeHistogram) {
			jsfeat.imgproc.equalize_histogram(img_u8, img_u8);
		}
		//jsfeat.imgproc.gaussian_blur(img_u8, img_u8, 3);

		jsfeat.imgproc.compute_integral_image(img_u8, ii_sum, ii_sqsum, classifier.tilted ? ii_tilted : null);

		// Optional Canny edge pruning speeds up the cascade by skipping
		// low-edge-density windows.
		if (params.useCanny) {
			jsfeat.imgproc.canny(img_u8, edg, 10, 50);
			jsfeat.imgproc.compute_integral_image(edg, ii_canny, null, null);
		}

		jsfeat.haar.edgesDensity = params.edgesDensity;
		var rects = jsfeat.haar.detect_multi_scale(ii_sum, ii_sqsum, ii_tilted, params.useCanny ? ii_canny : null, img_u8.cols, img_u8.rows, classifier, params.scaleFactor, params.minScale);
		rects = jsfeat.haar.group_rectangles(rects, 1);

		var rl = rects.length;
		if (rl == 0) {
			return false;
		}

		// Pick the rect with most neighbors; break ties by confidence.
		var best = rects[0];
		for (var i = 1; i < rl; i++) {
			if (rects[i].neighbors > best.neighbors) {
				best = rects[i];
			} else if (rects[i].neighbors == best.neighbors) {
				// if (rects[i].width > best.width) best = rects[i]; // use biggest rect
				if (rects[i].confidence > best.confidence) best = rects[i]; // use most confident rect
			}
		}

		// Map the rect from work-canvas coordinates back to source coordinates.
		var sc = videoWidth / img_u8.cols;
		best.x = (best.x*sc)|0;
		best.y = (best.y*sc)|0;
		best.width = (best.width*sc)|0;
		best.height = (best.height*sc)|0;

		return best;
	}

}
36 changes: 14 additions & 22 deletions js/clm.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,14 @@ var clm = {
if (params.stopOnConvergence === undefined) params.stopOnConvergence = false;
if (params.weightPoints === undefined) params.weightPoints = undefined;
if (params.sharpenResponse === undefined) params.sharpenResponse = false;

if (params.faceDetection === undefined) params.faceDetection = {};
if (params.faceDetection.workSize === undefined) params.faceDetection.workSize = 160;
if (params.faceDetection.minScale === undefined) params.faceDetection.minScale = 2;
if (params.faceDetection.scaleFactor === undefined) params.faceDetection.scaleFactor = 1.15;
if (params.faceDetection.useCanny === undefined) params.faceDetection.useCanny = false;
if (params.faceDetection.edgesDensity === undefined) params.faceDetection.edgesDensity = 0.13;
if (params.faceDetection.equalizeHistogram === undefined) params.faceDetection.equalizeHistogram = true;

var numPatches, patchSize, numParameters, patchType;
var gaussianPD;
Expand Down Expand Up @@ -70,7 +78,7 @@ var clm = {

var facecheck_count = 0;

var webglFi, svmFi, mosseCalc;
var webglFi, svmFi, mosseCalc, jf;

var scoringCanvas = document.createElement('canvas');
//document.body.appendChild(scoringCanvas);
Expand Down Expand Up @@ -279,6 +287,8 @@ var clm = {
runnerElement = element;
runnerBox = box;
}
// setup the jsfeat face tracker with the element
jf = new jsfeat_face(element, params.faceDetection.workSize);
// start named timeout function
runnerTimeout = requestAnimFrame(runnerFunction);
}
Expand Down Expand Up @@ -889,32 +899,14 @@ var clm = {

// detect position of face on canvas/video element
//
// Reconstructed post-merge version: the diff rendering had interleaved the
// removed pre-PR body (per-call canvas + jsfeat_face construction, array-style
// comp handling) with the added one; this is the coherent merged code.
//
// Uses the `jf` detector instance and `params.faceDetection` options from the
// enclosing clm.js closure. The new findFace returns a single best rectangle
// (or false), so no candidate loop is needed here.
// NOTE(review): `el` is no longer read — kept for interface compatibility
// with existing callers.
var detectPosition = function(el) {
	var comp = jf.findFace(params.faceDetection);

	if (comp) {
		candidate = comp;
	} else {
		return false;
	}

	return candidate;
}

Expand Down
10 changes: 5 additions & 5 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@
"type": "git",
"url": "https://github.com/auduno/clmtrackr.git"
},
"devDependencies": {
"grunt": "~0.4.2",
"grunt-contrib-concat": "~0.3.0",
"grunt-contrib-uglify": "~0.2.7"
"dependencies": {
"grunt": "^0.4.5",
"grunt-contrib-concat": "^0.3.0",
"grunt-contrib-uglify": "^0.2.7"
}
}
}