Skip to content
Permalink
Browse files

initial commit

  • Loading branch information
georgemandis committed Sep 24, 2019
0 parents commit e6e1471fa40a9fbf5a069747b4b66ddb18e8d788
@@ -0,0 +1,2 @@
node_modules/*
.DS_Store

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Large diffs are not rendered by default.

@@ -0,0 +1,214 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta http-equiv="X-UA-Compatible" content="ie=edge">
  <!-- & must be escaped as &amp; in HTML text content -->
  <title>Face Detection &amp; Expression Detection with face-api.js</title>
  <style>
    * {
      box-sizing: border-box;
    }
    /* hidden but available for debugging (Space key toggles .shown) */
    video, canvas, #expression {
      opacity: 0;
    }
    .shown {
      opacity: .5;
    }
    body {
      font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
      background: #000;
      /* Single transition declaration. The original declared `transition`
         twice (150ms then 350ms); only the later 350ms one ever applied,
         so the dead 150ms declaration is removed. */
      transition: background-color 350ms;
    }
    /* Large emoji centered in the viewport; `content` is supplied by the
       per-expression rules below. */
    body:after {
      position: absolute;
      top: 50%;
      left: 50%;
      transform: translate3d(-50%, -50%, 0);
      font-size: 132px;
    }
    /*
      Possible expressions to detect:
      'neutral' 'happy' 'sad' 'angry' 'fearful' 'disgusted' 'surprised'
    */
    body.neutral {
      background: #aaa;
    }
    body.neutral:after {
      content: "😐";
    }
    body.happy {
      background: yellow;
    }
    body.happy:after {
      content: "😃";
    }
    body.surprised {
      background: #a0a;
    }
    body.surprised:after {
      content: "😮";
    }
    body.disgusted {
      background: greenyellow;
    }
    body.disgusted:after {
      content: "🤢";
    }
    body.sad {
      background: dodgerblue;
    }
    body.sad:after {
      content: "😔";
    }
    body.angry {
      background: firebrick;
    }
    body.angry:after {
      content: "😡";
    }
    body.fearful {
      background: #fff;
    }
    body.fearful:after {
      content: "😱";
    }
  </style>
</head>
<body>
  <!--
    These items are necessary but rendered
    with `opacity:0` to be invisible. Pressing
    the Space key will reveal them to help test/debug
  -->
  <h1 id="expression">&nbsp;</h1>
  <video onplay="onPlay(this)" id="inputVideo" autoplay muted></video>
  <!-- <canvas> is not a void element: a self-closing "<canvas />" leaves the
       tag open and swallows following markup as canvas fallback content,
       so it needs an explicit closing tag. -->
  <canvas id="overlay"></canvas>

  <!--
    This is the audio file it plays when a "sad"
    expression is detected. Found in the public domain
    on Archive.org:
    https://archive.org/details/78_what-can-i-do-to-make-you-happy_charles-dickson-gordon-eddy_gbia0086082b
    If you're testing things locally it's probably better
    to download it once and serve locally.
  -->
  <audio id="cheerUp" src="https://ia800707.us.archive.org/6/items/78_what-can-i-do-to-make-you-happy_charles-dickson-gordon-eddy_gbia0086082b/What%20Can%20I%20Do%20to%20Make%20You%20Happy%20-%20Charles%20Dickson.mp3"></audio>

  <script src="dist/face-api.min.js"></script>
<script>
  // Element handles used throughout the script.
  const video = document.getElementById('inputVideo')
  const cheerUp = document.getElementById('cheerUp')

  // Load the face-api.js models, then start the webcam stream.
  // The video's `onplay` attribute kicks off the detection loop.
  async function run() {
    console.log("loading models...")
    await faceapi.nets.ssdMobilenetv1.load('weights')
    await faceapi.nets.faceExpressionNet.load('weights')
    // NOTE(review): the face-recognition model is loaded but never used
    // below — confirm whether this fetch can be dropped.
    await faceapi.loadFaceRecognitionModel('weights')
    const stream = await navigator.mediaDevices.getUserMedia({ video: {} })
    video.srcObject = stream
  }

  // Per-frame detection loop. Detects a single face, and when the model is
  // essentially certain of an expression, mirrors it onto the page (body
  // class + debug heading) and drives the "cheer up" audio.
  async function onPlay() {
    // Re-poll until the detection model has finished loading.
    if (!isFaceDetectionModelLoaded()) return setTimeout(() => onPlay())
    const options = new faceapi.SsdMobilenetv1Options({ minConfidence: .5 }) // min confidence
    const result = await faceapi.detectSingleFace(video, options).withFaceExpressions()
    if (result) {
      // `expressions` here is the array of { expression, probability }
      // entries exposed by 2019-era face-api.js builds.
      result.expressions.forEach((expression) => {
        // Only react when the probability rounds to 1 (i.e. >= 0.5).
        if (Math.round(expression.probability) === 1) {
          console.log(expression.expression, Math.round(expression.probability))
          document.querySelector('body').className = expression.expression
          document.querySelector('#expression').textContent = expression.expression
          // A sad face starts the song; an angry face stops and rewinds it.
          if (expression.expression === 'sad' && cheerUp.paused === true) {
            cheerUp.play();
          }
          if (expression.expression === 'angry' && cheerUp.paused === false) {
            cheerUp.pause();
            cheerUp.currentTime = 0;
          }
        }
      })
    }
    window.requestAnimationFrame(onPlay);
  }

  // True once the SSD MobileNet detector's weights are in memory.
  function isFaceDetectionModelLoaded() {
    return !!faceapi.nets.ssdMobilenetv1.params
  }

  // Space toggles the debug overlay (video stream + perceived expression).
  // Escape stops the audio in case the demo is going off the rails.
  // Listen on `keydown`, not the deprecated `keypress`: keypress never
  // fires for non-printing keys such as Escape, so that branch was
  // unreachable. preventDefault() is now scoped to the handled keys
  // instead of swallowing every keystroke on the page.
  document.addEventListener('keydown', (event) => {
    switch (event.code) {
      case "Space":
        video.classList.toggle('shown')
        document.querySelector('#expression').classList.toggle('shown')
        event.preventDefault();
        break;
      case "Escape":
        if (cheerUp.paused === false) {
          cheerUp.pause();
          cheerUp.currentTime = 0;
        }
        event.preventDefault();
        break;
    }
  })

  // note: on mobile a user interaction
  // is required to initiate getUserMedia().
  run();
</script>
</body>
</html>
@@ -0,0 +1,28 @@
{
"name": "cheer-me-up-and-sing-me-a-song",
"version": "1.0.0",
"description": "A little project that sings you a song to cheer you up when you're blue.",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/javascriptforartists/cheer-me-up-and-sing-me-a-song.git"
},
"keywords": [
"face-api.js",
"facial",
"recognition",
"art",
"fun"
],
"author": "George Mandis",
"license": "ISC",
"bugs": {
"url": "https://github.com/javascriptforartists/cheer-me-up-and-sing-me-a-song/issues"
},
"homepage": "https://github.com/javascriptforartists/cheer-me-up-and-sing-me-a-song#readme",
"dependencies": {
}
}
Binary file not shown.
@@ -0,0 +1 @@
[{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0057930146946626555,"min":-0.7125408074435067}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32"},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006473719839956246,"min":-0.6408982641556684}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010509579321917366,"min":-1.408283629136927}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32"},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005666389652326995,"min":-0.7252978754978554}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010316079270605948,"min":-1.1760330368490781}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32"},{"name":"dense0/conv3/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0063220320963392074,"min":-0.853474333005793}},{"name":"dense0/conv3/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010322785377502442,"min":-1.4658355236053466}},{"name":"dense0/conv3/bias","shape":[32],"dtype":"float32"},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0042531527724920535,"min":-0.5741756242864272}},{"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010653339647779278,"min":-1.1825207009035}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32"},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005166931012097527,"min":-0.6355325144879957}},{"name":"dense1/conv1/pointwise_fi
lter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011478300188101974,"min":-1.3888743227603388}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32"},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006144821410085641,"min":-0.8479853545918185}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010541967317169788,"min":-1.3809977185492421}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32"},{"name":"dense1/conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005769844849904378,"min":-0.686611537138621}},{"name":"dense1/conv3/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010939095534530341,"min":-1.2689350820055196}},{"name":"dense1/conv3/bias","shape":[64],"dtype":"float32"},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037769308277204924,"min":-0.40790852939381317}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01188667194516051,"min":-1.4382873053644218}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32"},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006497045825509464,"min":-0.8381189114907208}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011632198913424622,"min":-1.3377028750438316}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32"},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005947182225246056,"min":-0.7969224181829715}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],
"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011436844339557722,"min":-1.4524792311238306}},{"name":"dense2/conv2/bias","shape":[128],"dtype":"float32"},{"name":"dense2/conv3/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006665432686899222,"min":-0.8998334127313949}},{"name":"dense2/conv3/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01283421422920975,"min":-1.642779421338848}},{"name":"dense2/conv3/bias","shape":[128],"dtype":"float32"},{"name":"dense3/conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004711699953266218,"min":-0.6737730933170692}},{"name":"dense3/conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010955964817720302,"min":-1.3914075318504784}},{"name":"dense3/conv0/bias","shape":[256],"dtype":"float32"},{"name":"dense3/conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00554193468654857,"min":-0.7149095745647656}},{"name":"dense3/conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016790372250126858,"min":-2.484975093018775}},{"name":"dense3/conv1/bias","shape":[256],"dtype":"float32"},{"name":"dense3/conv2/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006361540626077091,"min":-0.8142772001378676}},{"name":"dense3/conv2/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01777329678628959,"min":-1.7062364914838006}},{"name":"dense3/conv2/bias","shape":[256],"dtype":"float32"},{"name":"dense3/conv3/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006900275922289082,"min":-0.8625344902861353}},{"name":"dense3/conv3/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","q
uantization":{"dtype":"uint8","scale":0.015449936717164282,"min":-1.9003422162112067}},{"name":"dense3/conv3/bias","shape":[256],"dtype":"float32"},{"name":"fc/weights","shape":[256,7],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004834276554631252,"min":-0.7203072066400565}},{"name":"fc/bias","shape":[7],"dtype":"float32"}],"paths":["face_expression_model-shard1"]}]
Binary file not shown.

0 comments on commit e6e1471

Please sign in to comment.
You can’t perform that action at this time.