Skip to content

Instantly share code, notes, and snippets.

@lmccart
Last active November 25, 2020 01:55
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 1 You must be signed in to fork a gist
  • Save lmccart/532780549d49e36b5558 to your computer and use it in GitHub Desktop.
face tracking with clmtrackr.js + p5.js
////////////////////////////////////////////////////////////////////
//// clmtrackr
<html>
<head>
<script src="clmtrackr.js"></script>
<script src="models/model_pca_20_svm.js"></script>
<script src="p5.js"></script>
<script src="p5.dom.js"></script>
<script>
var ctracker;
function setup() {
  // Live webcam feed, pinned to the top-left corner of the page.
  var cam = createCapture(VIDEO);
  cam.size(400, 300);
  cam.position(0, 0);

  // Drawing surface layered directly over the video at the same size.
  var cnv = createCanvas(400, 300);
  cnv.position(0, 0);

  // Start the clmtrackr face tracker against the raw <video> element.
  ctracker = new clm.tracker();
  ctracker.init(pModel);
  ctracker.start(cam.elt);

  noStroke();
}
function draw() {
  clear();

  // getCurrentPosition() yields an array of [x, y] face landmarks
  // (empty until the tracker has locked onto a face).
  var points = ctracker.getCurrentPosition();

  for (var i = 0; i < points.length; i++) {
    var x = points[i][0];
    var y = points[i][1];

    // Tint each marker by its on-screen position: the red channel tracks
    // x across the middle third of the width, green tracks y likewise,
    // and blue stays fixed at full.
    fill(
      map(x, width * 0.33, width * 0.66, 0, 255),
      map(y, height * 0.33, height * 0.66, 0, 255),
      255
    );

    // One small dot per landmark.
    ellipse(x, y, 8, 8);
  }
}
</script>
<style> body { padding: 0; margin: 0} </style>
</head>
<body>
</body>
</html>
////////////////////////////////////////////////////////////////////
//// sound / mouth
// Declared up front so mouthSound is an explicit global rather than an
// implicit one created by the bare assignment inside preload().
var mouthSound;

// p5 preload hook: loads the sample synchronously before setup() runs,
// so mouthSound is ready for playback by the time draw() uses it.
function preload() {
  mouthSound = loadSound('scream.mp3');
}
// setup
// Start the sample looping but silent; draw() drives the volume.
mouthSound.loop();
mouthSound.setVolume(0);
// draw
if (positions.length > 0) {
  // Vertical gap between two lip landmarks, normalized by overall face
  // height so the ratio is independent of how close the face is to the
  // camera. (Indices 57/60 and 7/33 are clmtrackr model points —
  // presumably inner lips and chin/nose bridge; verify against the model.)
  var lipGap = positions[57][1] - positions[60][1];
  var faceSpan = positions[7][1] - positions[33][1];
  var mouthR = lipGap / faceSpan;
  // Map the open-mouth ratio onto a 0..10 volume, clamped at both ends.
  var v = constrain(map(mouthR, 0.05, 0.25, 0, 10), 0, 10);
  mouthSound.setVolume(v);
}
////////////////////////////////////////////////////////////////////
//// mouth / tts
// While the mouth is held open past the threshold, queue text t for
// speech; otherwise stop any utterance already in progress.
if (mouthR > 0.1) {
  var utterance = new SpeechSynthesisUtterance(t);
  speechSynthesis.speak(utterance);
} else {
  speechSynthesis.cancel();
}
////////////////////////////////////////////////////////////////////
//// emotion / tts
// setup
// Emotion classifier layered on top of the clmtrackr model parameters.
ec = new emotionClassifier();
ec.init(emotionModel);
emotionData = ec.getBlank();
// draw
// Feed the tracker's current model parameters through the classifier and
// print each emotion label with its score (2 decimal places), one per row.
var params = ctracker.getCurrentParameters();
var predictions = ec.meanPredict(params);
for (var idx = 0; idx < predictions.length; idx++) {
  text(predictions[idx].emotion + ' ' + nfc(predictions[idx].value, 2), 20, (idx + 1) * 30);
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment