<!doctype html>
<html lang="en">
<head>
<title>Mood Ring</title>
<meta charset="utf-8">
<link href="./styles/bootstrap.min.css" rel="stylesheet" type="text/css">
<style>
@import url(https://fonts.googleapis.com/css?family=Lato:300italic,700italic,300,700);
body {
font-family: 'Lato';
background-color: #f0f0f0;
margin: 0px auto;
max-width: 1150px;
}
#overlay {
position: absolute;
top: 0px;
left: 0px;
-o-transform : scaleX(-1);
-webkit-transform : scaleX(-1);
transform : scaleX(-1);
-ms-filter : fliph; /*IE*/
filter : fliph; /*IE*/
}
#videoel {
-o-transform : scaleX(-1);
-webkit-transform : scaleX(-1);
transform : scaleX(-1);
-ms-filter : fliph; /*IE*/
filter : fliph; /*IE*/
}
#container {
position : relative;
width : 370px;
/*margin : 0px auto;*/
}
#content {
margin-top : 50px;
margin-left : auto;
margin-right : auto;
max-width: 600px;
}
h2 {
font-weight : 400;
}
.btn {
font-family: 'Lato';
font-size: 16px;
}
#controls {
text-align : center;
}
#emotion_container {
width: 600px;
}
#emotion_icons {
height: 50px;
padding-left: 40px;
}
.emotion_icon {
width : 40px;
height : 40px;
margin-top: 5px;
/*margin-left : 13px;*/
margin-left : 35px;
}
#emotion_chart, #emotion_icons {
margin: 0 auto;
width : 400px;
}
#icon1, #icon2, #icon3, #icon4, #icon5, #icon6 {
visibility : hidden;
}
/* d3 */
.bar {
fill : steelblue;
fill-opacity : .9;
}
</style>
<script>
// getUserMedia only works over https in Chrome 47+, so we redirect to https. Also notify user if running from file.
if (window.location.protocol == "file:") {
alert("You seem to be running this example directly from a file. Note that these examples only work when served from a server or localhost due to canvas cross-domain restrictions.");
} else if (window.location.hostname !== "localhost" && window.location.protocol !== "https:"){
window.location.protocol = "https";
}
</script>
<!-- Commenting out developers google analytics -->
<!-- <script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-32642923-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script> -->
</head>
<body>
<script src="./js/libs/utils.js"></script>
<script src="../build/clmtrackr.js"></script>
<script src="../models/model_pca_20_svm.js"></script>
<script src="./js/libs/Stats.js"></script>
<script src="./js/libs/d3.min.js"></script>
<script src="./js/emotion_classifier.js"></script>
<script src="./js/emotionmodel.js"></script>
<div id="content">
<h2>Mood Ring</h2>
<p>This is a test of emotion detection based on the parameter output from <em>clmtrackr</em>.</p>
<div id="container">
<video id="videoel" width="400" height="300" preload="auto" loop playsinline autoplay>
</video>
<canvas id="overlay" width="400" height="300"></canvas>
</div>
<div id="emotion_container">
<div id="emotion_icons">
<img class="emotion_icon" id="icon1" src="./media/icon_angry.png">
<img class="emotion_icon" id="icon2" src="./media/icon_sad.png">
<img class="emotion_icon" id="icon3" src="./media/icon_surprised.png">
<img class="emotion_icon" id="icon4" src="./media/icon_happy.png">
</div>
<div id='emotion_chart'></div>
</div>
<div id="controls">
<input class="btn" type="button" value="wait, loading video" disabled="disabled" onclick="startVideo()" id="startbutton"></input>
<input class="btn" type="button" value="stop" disabled="disabled" onclick="stopVideo()" id="stopbutton"></input>
</div>
<script>
var vid = document.getElementById('videoel');
var vid_width = vid.width;
var vid_height = vid.height;
var overlay = document.getElementById('overlay');
var overlayCC = overlay.getContext('2d');
var topEmotion = "none";
var emotionDict = {
angry: 1,
sad: 2,
surprised: 3,
happy: 4,
none: 5
};
var absenceCount = 0;
var serialControl = 0;
var noFace = true;
var newFace = false; // set once a strong emotion has been detected and not yet sent
/********** check and set up video/webcam **********/
function enablestart() {
var startbutton = document.getElementById('startbutton');
var stopbutton = document.getElementById('stopbutton');
startbutton.value = "start";
startbutton.disabled = null;
stopbutton.disabled = null;
}
function adjustVideoProportions() {
// resize overlay and video if proportions are different
// keep same height, just change width
var proportion = vid.videoWidth / vid.videoHeight;
vid_width = Math.round(vid_height * proportion);
vid.width = vid_width;
overlay.width = vid_width;
}
function gumSuccess(stream) {
// add camera stream if getUserMedia succeeded
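// srcObject is the modern way to attach a MediaStream; the createObjectURL branch
// below is a fallback for older browsers that don't support it.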
if ("srcObject" in vid) {
vid.srcObject = stream;
} else {
vid.src = (window.URL && window.URL.createObjectURL(stream));
}
vid.onloadedmetadata = function() {
adjustVideoProportions();
vid.play();
}
vid.onresize = function() {
adjustVideoProportions();
if (trackingStarted) {
ctrack.stop();
ctrack.reset();
ctrack.start(vid);
}
}
}
function gumFail() {
alert("There was some problem trying to fetch video from your webcam. If you have a webcam, please make sure to accept when the browser asks for access to your webcam.");
}
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL || window.msURL || window.mozURL;
// check for camera support: prefer the modern mediaDevices.getUserMedia promise API,
// and fall back to the prefixed callback-style navigator.getUserMedia shimmed above
if (navigator.mediaDevices) {
navigator.mediaDevices.getUserMedia({
video: true
}).then(gumSuccess).catch(gumFail);
} else if (navigator.getUserMedia) {
navigator.getUserMedia({
video: true
}, gumSuccess, gumFail);
} else {
alert("This demo depends on getUserMedia, which your browser does not seem to support. :(");
}
vid.addEventListener('canplay', enablestart, false);
/*********** setup of emotion detection *************/
// set eigenvectors 9 and 11 to not be regularized; this helps detect motion of the eyebrows
pModel.shapeModel.nonRegularizedVectors.push(9);
pModel.shapeModel.nonRegularizedVectors.push(11);
var ctrack = new clm.tracker({
useWebGL: true
});
ctrack.init(pModel);
var trackingStarted = false;
function startVideo() {
// start video
vid.play();
// start tracking
ctrack.start(vid);
trackingStarted = true;
// start loop to draw face
drawLoop();
}
function stopVideo() {
vid.pause();
// ctrack.stop();
}
function drawLoop() {
requestAnimFrame(drawLoop);
overlayCC.clearRect(0, 0, vid_width, vid_height);
//psrElement.innerHTML = "score :" + ctrack.getScore().toFixed(4);
if (ctrack.getCurrentPosition()) {
ctrack.draw(overlay);
}
var cp = ctrack.getCurrentParameters();
var er = ec.meanPredict(cp);
if (er) {
updateData(er);
// er is an array of objects, one per emotion, e.g.:
// 0: {emotion: "angry", value: 0.07152128223058615}
// 1: {emotion: "sad", value: 0.7095454875430434}
// 2: {emotion: "surprised", value: 0.08409529802001421}
// 3: {emotion: "happy", value: 0.03446010808748219}
serialControl++;
for (var i = 0; i < er.length; i++) {
// if (er[i] == topEmotion.emotion) {
// topEmotion.value = er[i].value;
// }
if (er[i].value > 0.5) { //threshold value
//reset passive (no face) value
absenceCount = 0;
noFace = false;
newFace = true;
document.getElementById('icon' + (i + 1)).style.visibility = 'visible'; //change the gui icon
if (er[i].emotion != topEmotion) {
if (er[i].emotion == "happy" && er[i].value > 0.7) { //detecting happy too much, upping the threshold
console.log("topEmotion is: " + topEmotion);
topEmotion = er[i].emotion;
} else if (er[i].emotion != "happy") {
console.log("topEmotion is: " + topEmotion);
topEmotion = er[i].emotion;
}
}
} else {
document.getElementById('icon' + (i + 1)).style.visibility = 'hidden';
absenceCount++;
}
}
if (serialControl >= 100 && newFace === true) { // throttle: POST at most once per 100 prediction frames
console.log("sending: " + emotionDict[topEmotion] + "(" + topEmotion + ")");
var xhr = new XMLHttpRequest();
xhr.open("POST", 'http://localhost:3000/', true);
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.send(JSON.stringify({
emotion: topEmotion,
serialVal: emotionDict[topEmotion]
}));
serialControl = 0;
newFace = false;
}
if (absenceCount >= 1000 && noFace === false) {
noFace = true;
topEmotion = 'none';
console.log("no strong emotion detected, switching to neutral color - " + emotionDict[topEmotion]);
var xhr = new XMLHttpRequest();
xhr.open("POST", 'http://localhost:3000/', true);
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.send(JSON.stringify({
emotion: topEmotion,
serialVal: emotionDict[topEmotion]
}));
}
}
}
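// Illustrative helper (not called by drawLoop above): returns the entry with the
// highest score from a prediction array of {emotion, value} objects, or null for an
// empty array. A minimal sketch of the "pick the strongest emotion" idea that the
// threshold logic above implements incrementally; the name is hypothetical.
function strongestEmotion(predictions) {
  var top = null;
  for (var i = 0; i < predictions.length; i++) {
    if (top === null || predictions[i].value > top.value) {
      top = predictions[i];
    }
  }
  return top;
}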
delete emotionModel['disgusted'];
delete emotionModel['fear'];
var ec = new emotionClassifier();
ec.init(emotionModel);
var emotionData = ec.getBlank();
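// emotionData is presumably an array of {emotion, value} entries, one per emotion left
// in the model after the two deletes above; the d3 code below reads exactly those two
// fields from it.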
// console.log(emotionData);
/************ d3 code for barchart *****************/
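// One bar per emotion: x positions the bars by index, y maps the classifier score
// (0 to 1) to bar height, and updateData() below rescales bars and labels each frame.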
var margin = {
top: 20,
right: 20,
bottom: 10,
left: 40
},
width = 400 - margin.left - margin.right,
height = 100 - margin.top - margin.bottom;
var barWidth = 30;
var formatPercent = d3.format(".0%");
var x = d3.scale.linear()
.domain([0, ec.getEmotions().length]).range([margin.left, width + margin.left]);
var y = d3.scale.linear()
.domain([0, 1]).range([0, height]);
var svg = d3.select("#emotion_chart").append("svg")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
svg.selectAll("rect").
data(emotionData).
enter().
append("svg:rect").
attr("x", function(datum, index) {
return x(index);
}).
attr("y", function(datum) {
return height - y(datum.value);
}).
attr("height", function(datum) {
return y(datum.value);
}).
attr("width", barWidth).
attr("fill", "#2d578b");
svg.selectAll("text.labels").
data(emotionData).
enter().
append("svg:text").
attr("x", function(datum, index) {
return x(index) + barWidth;
}).
attr("y", function(datum) {
return height - y(datum.value);
}).
attr("dx", -barWidth / 2).
attr("dy", "1.2em").
attr("text-anchor", "middle").
text(function(datum) {
return datum.value;
}).
attr("fill", "white").
attr("class", "labels");
svg.selectAll("text.yAxis").
data(emotionData).
enter().append("svg:text").
attr("x", function(datum, index) {
return x(index) + barWidth;
}).
attr("y", height).
attr("dx", -barWidth / 2).
attr("text-anchor", "middle").
attr("style", "font-size: 12").
text(function(datum) {
return datum.emotion;
}).
attr("transform", "translate(0, 18)").
attr("class", "yAxis");
function updateData(data) {
// update
var rects = svg.selectAll("rect")
.data(data)
.attr("y", function(datum) {
return height - y(datum.value);
})
.attr("height", function(datum) {
return y(datum.value);
});
var texts = svg.selectAll("text.labels")
.data(data)
.attr("y", function(datum) {
return height - y(datum.value);
})
.text(function(datum) {
return datum.value.toFixed(1);
});
// enter
rects.enter().append("svg:rect");
texts.enter().append("svg:text");
// exit
rects.exit().remove();
texts.exit().remove();
}
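// Since the number of emotions never changes at runtime, the enter()/exit() calls
// above are effectively no-ops; they are kept as part of d3's general update pattern.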
/******** stats ********/
var stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
document.getElementById('container').appendChild(stats.domElement);
// update stats on every iteration
document.addEventListener('clmtrackrIteration', function(event) {
stats.update();
}, false);
</script>
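<!--
The script above POSTs {emotion, serialVal} to http://localhost:3000/ so an external
process can react to the detected mood (the serialVal name suggests something writing
to a serial port). That server is not part of this file; a minimal sketch of a receiver,
assuming Node.js with the express and cors packages (both assumptions, not shown here):

  // server.js (hypothetical receiver for the POSTs sent by drawLoop)
  const express = require('express');
  const cors = require('cors');      // needed because the page is served from another origin/port
  const app = express();
  app.use(cors());
  app.use(express.json());           // parse the JSON body sent by XMLHttpRequest
  app.post('/', function (req, res) {
    console.log('emotion:', req.body.emotion, 'serialVal:', req.body.serialVal);
    res.sendStatus(200);
  });
  app.listen(3000);
-->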
</div>
</body>
</html>