Skip to content

Instantly share code, notes, and snippets.

@apple502j
Last active July 5, 2023 09:35
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save apple502j/553b5c6c2afe334a3c7c2a19914620ad to your computer and use it in GitHub Desktop.
class Scratch3FaceSensingBlocks {
    /**
     * Block package for face sensing. Runs the Blazeface face-detection model
     * on frames from the video io device and exposes reporter/command/hat
     * blocks driven by the most recently detected face.
     * @param {Runtime} runtime - the runtime instantiating this block package.
     */
    constructor (runtime) {
        /**
         * The runtime instantiating this block package.
         * @type {Runtime}
         */
        this.runtime = runtime;

        // Show a loading indicator until the model yields its first result.
        this.runtime.emit('EXTENSION_DATA_LOADING', true);

        // Most recent model output: the full list of detected faces, and the
        // primary (first) face that the reporter/command blocks read from.
        // `currentFace` is undefined while no face is detected.
        this.allFaces = [];
        this.currentFace = undefined;

        // Set once the first detection result arrives, so the loading
        // indicator is cleared exactly one time.
        this.firstTime = false;

        Blazeface.load().then(model => {
            this.blazeface = model;
            if (this.runtime.ioDevices) {
                // Kick off looping the analysis logic.
                this._loop();
            }
        });

        // Last reported size/tilt, returned while no face is visible so the
        // reporters hold steady when detection momentarily drops out.
        this.cachedSize = 100;
        this.cachedTilt = 90;

        // Array of recent boolean values for whether or not a face was detected
        this.isDetectedArrayLength = 5;
        this.isDetectedArray = new Array(this.isDetectedArrayLength);
        this.isDetectedArray.fill(false, 0, this.isDetectedArrayLength);

        // Smoothed value for whether or not a face was detected
        this.smoothedIsDetected = false;

        // Bind once so the same function reference is registered (and could be
        // unregistered) on the runtime event emitter.
        this._clearAttachments = this._clearAttachments.bind(this);
        this.runtime.on('PROJECT_STOP_ALL', this._clearAttachments);
    }

    /**
     * Default per-target state for sprites attached to a face part.
     * @returns {object} initial face sensing state.
     */
    static get DEFAULT_FACE_SENSING_STATE () {
        return ({
            attachedToPartNumber: null,
            offsetDirection: 0,
            offsetSize: 0,
            offsetX: 0,
            offsetY: 0,
            prevDirection: 0,
            prevSize: 100,
            prevX: 0,
            prevY: 0
        });
    }

    /**
     * Dimensions (width, height) of the video frame given to the model.
     * @returns {Array.<number>} frame dimensions in pixels.
     */
    static get DIMENSIONS () {
        return [480, 360];
    }

    /**
     * Minimum interval between analysis passes, in milliseconds (~15 fps).
     * @returns {number} interval in ms.
     */
    static get INTERVAL () {
        return 1000 / 15;
    }

    /**
     * Key used to store face sensing state on each target's custom state.
     * @returns {string} custom state key.
     */
    static get STATE_KEY () {
        return 'Scratch.faceSensing';
    }

    /**
     * Detach every target from its face part. Bound in the constructor and
     * invoked on PROJECT_STOP_ALL.
     */
    _clearAttachments () {
        this.runtime.targets.forEach(target => {
            const state = this._getFaceSensingState(target);
            state.attachedToPartNumber = null;
        });
    }

    /**
     * Analysis loop: grab a video frame, run face detection on it, and record
     * the result. Reschedules itself at the slower of the runtime's current
     * step time and INTERVAL.
     */
    _loop () {
        setTimeout(this._loop.bind(this), Math.max(this.runtime.currentStepTime, Scratch3FaceSensingBlocks.INTERVAL));
        const frame = this.runtime.ioDevices.video.getFrame({
            format: Video.FORMAT_IMAGE_DATA,
            dimensions: Scratch3FaceSensingBlocks.DIMENSIONS,
            cacheTimeout: this.runtime.currentStepTime
        });
        if (frame) {
            this.blazeface.estimateFaces(frame, false).then(faces => {
                if (faces) {
                    if (!this.firstTime) {
                        this.firstTime = true;
                        this.runtime.emit('EXTENSION_DATA_LOADING', false);
                    }
                    // Record the full list (used by numberOfFaces) and the
                    // primary face. When no face was found, `faces[0]` is
                    // undefined, which updateIsDetected relies on.
                    this.allFaces = faces;
                    this.currentFace = faces[0];
                    this.updateIsDetected();
                }
            });
        }
    }

    /**
     * Push the latest detection result into a fixed-length history and update
     * the smoothed flag: it flips only when the entire recent history agrees,
     * which debounces single-frame detection glitches.
     */
    updateIsDetected () {
        this.isDetectedArray.push(!!this.currentFace);
        if (this.isDetectedArray.length > this.isDetectedArrayLength) {
            this.isDetectedArray.shift();
        }
        // if every recent detection is false, set to false
        if (this.isDetectedArray.every(item => item === false)) {
            this.smoothedIsDetected = false;
        }
        // if every recent detection is true, set to true
        if (this.isDetectedArray.every(item => item === true)) {
            this.smoothedIsDetected = true;
        }
        // if there's a mix of true and false values, do not change the result
    }

    /**
     * Fetch (creating on first use) the face sensing state for a target.
     * @param {Target} target - the target to query.
     * @returns {object} the target's face sensing state.
     */
    _getFaceSensingState (target) {
        let faceSensingState = target.getCustomState(Scratch3FaceSensingBlocks.STATE_KEY);
        if (!faceSensingState) {
            faceSensingState = Clone.simple(Scratch3FaceSensingBlocks.DEFAULT_FACE_SENSING_STATE);
            target.setCustomState(Scratch3FaceSensingBlocks.STATE_KEY, faceSensingState);
        }
        return faceSensingState;
    }

    /**
     * Extension definition consumed by the Scratch VM.
     * @returns {object} extension metadata, blocks and menus.
     */
    getInfo () {
        // Enable the video layer
        this.runtime.ioDevices.video.enableVideo();
        // Return extension definition. Note: attachToPart, partX, partY,
        // probability and numberOfFaces are implemented below but not
        // currently exposed in the palette.
        return {
            id: 'faceSensing',
            name: formatMessage({
                id: 'faceSensing.categoryName',
                default: 'Face Sensing',
                description: 'Name of face sensing extension'
            }),
            blockIconURI: blockIconURI,
            menuIconURI: menuIconURI,
            blocks: [{
                opcode: 'goToPart',
                text: formatMessage({
                    id: 'faceSensing.goToPart',
                    default: 'go to [PART]',
                    description: ''
                }),
                blockType: BlockType.COMMAND,
                arguments: {
                    PART: {
                        type: ArgumentType.STRING,
                        menu: 'PART',
                        defaultValue: '2'
                    }
                },
                filter: [TargetType.SPRITE]
            }, {
                opcode: 'pointInFaceTiltDirection',
                text: formatMessage({
                    id: 'faceSensing.pointInFaceTiltDirection',
                    default: 'point in direction of face tilt',
                    description: ''
                }),
                blockType: BlockType.COMMAND,
                filter: [TargetType.SPRITE]
            }, {
                opcode: 'setSizeToFaceSize',
                text: formatMessage({
                    id: 'faceSensing.setSizeToFaceSize',
                    default: 'set size to face size',
                    description: ''
                }),
                blockType: BlockType.COMMAND,
                filter: [TargetType.SPRITE]
            }, '---', {
                opcode: 'whenTilted',
                text: formatMessage({
                    id: 'faceSensing.whenTilted',
                    default: 'when face tilts [DIRECTION]',
                    description: ''
                }),
                blockType: BlockType.HAT,
                arguments: {
                    DIRECTION: {
                        type: ArgumentType.STRING,
                        menu: 'TILT',
                        defaultValue: 'left'
                    }
                }
            }, {
                opcode: 'whenSpriteTouchesPart',
                text: formatMessage({
                    id: 'faceSensing.whenSpriteTouchesPart',
                    default: 'when this sprite touches a[PART]',
                    description: ''
                }),
                arguments: {
                    PART: {
                        type: ArgumentType.STRING,
                        menu: 'PART',
                        defaultValue: '2'
                    }
                },
                blockType: BlockType.HAT,
                filter: [TargetType.SPRITE]
            }, {
                opcode: 'whenFaceDetected',
                text: formatMessage({
                    id: 'faceSensing.whenFaceDetected',
                    default: 'when a face is detected',
                    description: ''
                }),
                blockType: BlockType.HAT
            }, '---', {
                opcode: 'faceIsDetected',
                text: formatMessage({
                    id: 'faceSensing.faceDetected',
                    default: 'a face is detected?',
                    description: ''
                }),
                blockType: BlockType.BOOLEAN
            }, {
                opcode: 'faceTilt',
                text: formatMessage({
                    id: 'faceSensing.faceTilt',
                    default: 'face tilt',
                    description: ''
                }),
                blockType: BlockType.REPORTER
            }, {
                opcode: 'faceSize',
                text: formatMessage({
                    id: 'faceSensing.faceSize',
                    default: 'face size',
                    description: ''
                }),
                blockType: BlockType.REPORTER
            }],
            menus: {
                // Menu values are stringified indices into Blazeface's
                // landmarks array; 6 and 7 are derived points computed by
                // getBetweenEyesPosition / getTopOfHeadPosition.
                PART: [{
                    text: 'nose',
                    value: '2'
                }, {
                    text: 'mouth',
                    value: '3'
                }, {
                    text: 'left eye',
                    value: '0'
                }, {
                    text: 'right eye',
                    value: '1'
                }, {
                    text: 'between eyes',
                    value: '6'
                }, {
                    text: 'left ear',
                    value: '4'
                }, {
                    text: 'right ear',
                    value: '5'
                }, {
                    text: 'top of head',
                    value: '7'
                }],
                TILT: [{
                    text: 'left',
                    value: 'left'
                }, {
                    text: 'right',
                    value: 'right'
                }]
            }
        };
    }

    /**
     * @returns {object} scratch-coordinate midpoint of the line between the
     * two eyes ({x, y}).
     */
    getBetweenEyesPosition () {
        // center point of a line between the eyes
        const leftEye = this.getPartPosition(0);
        const rightEye = this.getPartPosition(1);
        const betweenEyes = {
            x: 0,
            y: 0
        };
        betweenEyes.x = leftEye.x + (rightEye.x - leftEye.x) / 2;
        betweenEyes.y = leftEye.y + (rightEye.y - leftEye.y) / 2;
        return betweenEyes;
    }

    /**
     * @returns {object} estimated top-of-head point ({x, y}) in scratch
     * coordinates.
     */
    getTopOfHeadPosition () {
        // Estimated top of the head point:
        // Make a line perpendicular to the line between the eyes, through
        // its center, and move upward along it the distance from the point
        // between the eyes to the mouth.
        const leftEyePos = this.getPartPosition(0);
        const rightEyePos = this.getPartPosition(1);
        const mouthPos = this.getPartPosition(3);
        const dx = rightEyePos.x - leftEyePos.x;
        const dy = rightEyePos.y - leftEyePos.y;
        const directionRads = Math.atan2(dy, dx) + Math.PI / 2;
        const betweenEyesPos = this.getBetweenEyesPosition();
        const mouthDistance = this.distance(betweenEyesPos, mouthPos);
        const topOfHeadPosition = {
            x: 0,
            y: 0
        };
        topOfHeadPosition.x = betweenEyesPos.x + mouthDistance * Math.cos(directionRads);
        topOfHeadPosition.y = betweenEyesPos.y + mouthDistance * Math.sin(directionRads);
        return topOfHeadPosition;
    }

    /**
     * Euclidean distance between two points.
     * @param {object} pointA - first point {x, y}.
     * @param {object} pointB - second point {x, y}.
     * @returns {number} the distance.
     */
    distance (pointA, pointB) {
        const dx = pointA.x - pointB.x;
        const dy = pointA.y - pointB.y;
        return Math.sqrt(dx * dx + dy * dy);
    }

    /**
     * Hat predicate: is the sprite touching the chosen face part's point?
     * @param {object} args - block arguments; args.PART is the part number.
     * @param {object} util - block utility; util.target is the sprite.
     * @returns {boolean} true if the sprite touches the part position.
     */
    whenSpriteTouchesPart (args, util) {
        if (!this.currentFace) return false;
        if (!this.currentFace.landmarks) return false;
        const pos = this.getPartPosition(args.PART);
        return util.target.isTouchingScratchPoint(pos.x, pos.y);
    }

    /**
     * Hat predicate: smoothed face-detected flag.
     * @returns {boolean} whether a face is (smoothed) detected.
     */
    whenFaceDetected () {
        return this.smoothedIsDetected;
    }

    /**
     * Boolean reporter: smoothed face-detected flag.
     * @returns {boolean} whether a face is (smoothed) detected.
     */
    faceIsDetected () {
        return this.smoothedIsDetected;
    }

    /**
     * @returns {number} how many faces were found in the last analyzed frame
     * (0 before any detection has run).
     */
    numberOfFaces () {
        // Guard: allFaces is empty until the first estimateFaces result.
        return this.allFaces ? this.allFaces.length : 0;
    }

    /**
     * @returns {number} detection confidence of the current face as a
     * percentage (0 when no face is detected).
     */
    probability () {
        if (this.currentFace) {
            return Math.round(this.currentFace.probability * 100);
        }
        return 0;
    }

    /**
     * @returns {number} width of the current face's bounding box in pixels;
     * returns the cached value while no face is visible.
     */
    faceSize () {
        if (!this.currentFace) return this.cachedSize;
        const size = Math.round(this.currentFace.bottomRight[0] - this.currentFace.topLeft[0]);
        this.cachedSize = size;
        return size;
    }

    /**
     * Scratch-coordinate position of a face part.
     * @param {string|number} part - part number (landmark index, or 6/7 for
     * the derived between-eyes / top-of-head points).
     * @returns {object} position {x, y}; {x: 0, y: 0} when unavailable.
     */
    getPartPosition (part) {
        const defaultPos = {
            x: 0,
            y: 0
        };
        if (!this.currentFace) return defaultPos;
        if (!this.currentFace.landmarks) return defaultPos;
        if (Number(part) === 6) {
            return this.getBetweenEyesPosition();
        }
        if (Number(part) === 7) {
            return this.getTopOfHeadPosition();
        }
        const result = this.currentFace.landmarks[Number(part)];
        if (result) {
            return this.toScratchCoords(result);
        }
        return defaultPos;
    }

    /**
     * Convert a model-space [x, y] pixel pair (origin top-left, 480x360) to
     * scratch stage coordinates (origin center, y up).
     * @param {Array.<number>} position - [x, y] in frame pixels.
     * @returns {object} position {x, y} in scratch coordinates.
     */
    toScratchCoords (position) {
        return {
            x: position[0] - 240,
            y: 180 - position[1]
        };
    }

    /**
     * Reporter: x position of a face part.
     * @param {object} args - block arguments; args.PART is the part number.
     * @returns {number} x coordinate.
     */
    partX (args) {
        return this.getPartPosition(args.PART).x;
    }

    /**
     * Reporter: y position of a face part.
     * @param {object} args - block arguments; args.PART is the part number.
     * @returns {number} y coordinate.
     */
    partY (args) {
        return this.getPartPosition(args.PART).y;
    }

    /**
     * Hat predicate: is the face tilted past the threshold in the chosen
     * direction? 90 is level; smaller is left, larger is right.
     * @param {object} args - block arguments; args.DIRECTION is 'left'/'right'.
     * @returns {boolean} whether the tilt exceeds the threshold.
     */
    whenTilted (args) {
        const TILT_THRESHOLD = 10;
        if (args.DIRECTION === 'left') {
            return this.faceTilt() < 90 - TILT_THRESHOLD;
        }
        if (args.DIRECTION === 'right') {
            return this.faceTilt() > 90 + TILT_THRESHOLD;
        }
        return false;
    }

    /**
     * Command: move the sprite to the chosen face part.
     * @param {object} args - block arguments; args.PART is the part number.
     * @param {object} util - block utility; util.target is the sprite.
     */
    goToPart (args, util) {
        if (!this.currentFace) return;
        const pos = this.getPartPosition(args.PART);
        util.target.setXY(pos.x, pos.y);
    }

    /**
     * Command: point the sprite in the face's tilt direction.
     * @param {object} args - block arguments (unused).
     * @param {object} util - block utility; util.target is the sprite.
     */
    pointInFaceTiltDirection (args, util) {
        if (!this.currentFace) return;
        util.target.setDirection(this.faceTilt());
    }

    /**
     * Command: set the sprite's size to the face size.
     * @param {object} args - block arguments (unused).
     * @param {object} util - block utility; util.target is the sprite.
     */
    setSizeToFaceSize (args, util) {
        if (!this.currentFace) return;
        util.target.setSize(this.faceSize());
    }

    /**
     * Command (not currently in the palette): attach the sprite to a face
     * part so updateAttachments moves it with the face, preserving any
     * user-made offsets.
     * @param {object} args - block arguments; args.PART is the part number.
     * @param {object} util - block utility; util.target is the sprite.
     */
    attachToPart (args, util) {
        const state = this._getFaceSensingState(util.target);
        state.attachedToPartNumber = args.PART;
        state.offsetX = 0;
        state.offsetY = 0;
        state.prevX = util.target.x;
        state.prevY = util.target.y;
        state.offsetDirection = 0;
        state.prevDirection = util.target.direction;
        state.offsetSize = 0;
        state.prevSize = util.target.size;
    }

    /**
     * Move every attached target to follow its face part. Changes made to a
     * target since the last update (drag, set direction/size) are folded into
     * per-target offsets so they persist relative to the face.
     */
    updateAttachments () {
        this.runtime.targets.forEach(target => {
            const state = this._getFaceSensingState(target);
            if (state.attachedToPartNumber) {
                const partPos = this.getPartPosition(state.attachedToPartNumber);
                if (target.x !== state.prevX) {
                    state.offsetX += target.x - state.prevX;
                }
                if (target.y !== state.prevY) {
                    state.offsetY += target.y - state.prevY;
                }
                if (target.direction !== state.prevDirection) {
                    state.offsetDirection += target.direction - state.prevDirection;
                }
                if (target.size !== state.prevSize) {
                    state.offsetSize += target.size - state.prevSize;
                }
                target.setXY(partPos.x + state.offsetX, partPos.y + state.offsetY);
                target.setDirection(this.faceTilt() + state.offsetDirection);
                target.setSize(this.faceSize() + state.offsetSize);
                state.prevX = target.x;
                state.prevY = target.y;
                state.prevDirection = target.direction;
                state.prevSize = target.size;
            }
        });
    }

    /**
     * Detach every target from its face part.
     */
    clearAttachments () {
        // Delegates to the bound private handler to avoid duplicated logic.
        this._clearAttachments();
    }

    /**
     * Reporter: face tilt as a scratch direction. 90 means level; values
     * below/above 90 mean the face leans left/right. Returns the cached tilt
     * while no face is visible.
     * @returns {number} tilt in degrees, rounded.
     */
    faceTilt () {
        if (!this.currentFace) return this.cachedTilt;
        const leftEyePos = this.getPartPosition(0);
        const rightEyePos = this.getPartPosition(1);
        const dx = rightEyePos.x - leftEyePos.x;
        const dy = rightEyePos.y - leftEyePos.y;
        const direction = 90 - MathUtil.radToDeg(Math.atan2(dy, dx));
        const tilt = Math.round(direction);
        this.cachedTilt = tilt;
        return tilt;
    }
}
@VatsalChowdhary
Copy link

What are the necessary imports that have to be made to run this code on my computer? Also, what is Clone?

@VatsalChowdhary
Copy link

What is TargetType, and what is MathUtil?

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment