Skip to content

Instantly share code, notes, and snippets.

@AzureRain1
Created August 19, 2021 07:38
Show Gist options
  • Save AzureRain1/c4fea9fd2eb16b38b22229ea0b3d09cc to your computer and use it in GitHub Desktop.
import DeviceDetector from "https://cdn.skypack.dev/device-detector-js@2.2.10";
// Usage: testSupport({client?: string, os?: string}[])
// Client and os are regular expressions.
// See: https://cdn.jsdelivr.net/npm/device-detector-js@2.2.10/README.md for
// legal values for client and os
// Warn the user unless the detected browser client is Chrome.
const supportedClients = [{client: 'Chrome'}];
testSupport(supportedClients);
/**
 * Checks whether the current browser/OS combination matches any entry of
 * `supportedDevices` and, if not, warns the user with an alert.
 *
 * @param supportedDevices Each entry's `client` / `os` fields are regular
 *     expression sources matched (anchored with `^...$`) against the names
 *     reported by device-detector-js. An omitted field matches anything.
 */
function testSupport(supportedDevices: {client?: string; os?: string;}[]) {
const deviceDetector = new DeviceDetector();
const detectedDevice = deviceDetector.parse(navigator.userAgent);
// device-detector-js reports `client`/`os` as null when detection fails;
// fall back to '' so the regex tests and the alert message stay safe.
const clientName = detectedDevice.client?.name ?? '';
const osName = detectedDevice.os?.name ?? '';
let isSupported = false;
for (const device of supportedDevices) {
if (device.client !== undefined) {
// Anchored so e.g. 'Chrome' does not match 'Chrome Mobile'.
const re = new RegExp(`^${device.client}$`);
if (!re.test(clientName)) {
continue;
}
}
if (device.os !== undefined) {
const re = new RegExp(`^${device.os}$`);
if (!re.test(osName)) {
continue;
}
}
isSupported = true;
break;
}
if (!isSupported) {
alert(`This demo, running on ${clientName}/${osName}, ` +
`is not well supported at this time, continue at your own risk.`);
}
}
// Options
// Element that receives the FPS readout; getElementById may return null if
// the page lacks an element with id "fps-output".
const outputEl = document.getElementById('fps-output');
// Number of decimal places kept in the FPS estimate.
const decimalPlaces = 2;
// How often (in seconds) the FPS estimate is recomputed.
const updateEachSecond = 1;
// Cache values
// Precomputed 10^decimalPlaces, used to round to `decimalPlaces` decimals.
const decimalPlacesRatio = Math.pow(10, decimalPlaces);
// Frame timestamps (performance.now(), ms) collected by the animate() loop
// and by onResults() respectively; cleared each time an estimate is emitted.
let timeMeasurements1 = [];
let timeMeasurements2 = [];
// Final output
// Most recent FPS value computed by animate().
let fps1 = 0;
/**
 * Self-rescheduling requestAnimationFrame loop that estimates the page's
 * frame rate and writes it into the #fps-output element.
 *
 * @param canvas Canvas threaded through to each frame (currently unused here).
 * @param context 2D context threaded through to each frame (currently unused).
 */
const animate = (canvas, context) => {
timeMeasurements1.push(performance.now());
const msPassed = timeMeasurements1[timeMeasurements1.length - 1] - timeMeasurements1[0];
if (msPassed >= updateEachSecond * 1000) {
// frames / elapsed seconds, rounded to `decimalPlaces` decimals.
fps1 = Math.round(timeMeasurements1.length / msPassed * 1000 * decimalPlacesRatio) / decimalPlacesRatio;
timeMeasurements1 = [];
}
// innerText expects a string, and outputEl may be null when the page has
// no #fps-output element — the original dereferenced it unconditionally.
if (outputEl !== null) {
outputEl.innerText = String(fps1);
}
requestAnimationFrame(function() {
animate(canvas, context);
});
};
/**
 * @fileoverview Demonstrates a minimal use case for MediaPipe face tracking.
 */
// The MediaPipe scripts loaded by the page attach their APIs (FPS,
// ControlPanel, FaceDetection, drawRectangle, ...) directly to window;
// these aliases only name the roles each set of globals plays.
const controls = window;
const drawingUtils = window;
const mpFaceDetection = window;
// Our input frames will come from here.
const videoElement =
document.getElementsByClassName('input_video')[0] as HTMLVideoElement;
// Detection results are rendered into this canvas.
const canvasElement =
document.getElementsByClassName('output_canvas')[0] as HTMLCanvasElement;
// Container that hosts the ControlPanel widgets built further below.
const controlsElement =
document.getElementsByClassName('control-panel')[0] as HTMLDivElement;
const canvasCtx = canvasElement.getContext('2d')!;
// We'll add this to our control panel later, but we'll save it here so we can
// call tick() each time the graph runs.
const fpsControl = new controls.FPS();
// Optimization: Turn off animated spinner after its hiding animation is done.
const spinner = document.querySelector('.loading')! as HTMLDivElement;
spinner.ontransitionend = () => {
spinner.style.display = 'none';
};
// Kick off the requestAnimationFrame-driven FPS loop defined above.
animate(canvasElement, canvasCtx);
// Final output
// Most recent FPS value computed inside onResults().
let fps2 = 0;
/**
 * Per-result callback from MediaPipe FaceDetection: updates the FPS
 * estimate, then redraws the camera frame and detection overlays.
 *
 * @param results Detection output containing the source image and any
 *     detected faces (bounding boxes + landmarks).
 */
function onResults(results: mpFaceDetection.Results): void {
// Hide the spinner.
document.body.classList.add('loaded');
// Update the frame rate.
fpsControl.tick();
timeMeasurements2.push(performance.now());
const msPassed = timeMeasurements2[timeMeasurements2.length - 1] - timeMeasurements2[0];
if (msPassed >= updateEachSecond * 1000) {
// frames / elapsed seconds, rounded to `decimalPlaces` decimals.
fps2 = Math.round(timeMeasurements2.length / msPassed * 1000 * decimalPlacesRatio) / decimalPlacesRatio;
timeMeasurements2 = [];
}
// Draw the overlays.
canvasCtx.save();
canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
canvasCtx.drawImage(
results.image, 0, 0, canvasElement.width, canvasElement.height);
if (results.detections.length > 0) {
// Only the first detection is visualized.
drawingUtils.drawRectangle(
canvasCtx, results.detections[0].boundingBox,
{color: 'blue', lineWidth: 4, fillColor: '#00000000'});
drawingUtils.drawLandmarks(canvasCtx, results.detections[0].landmarks, {
color: 'red',
radius: 5,
});
}
// fps
canvasCtx.font = '48px serif';
canvasCtx.fillStyle = "#ffffff";
// fillText takes a string; the original passed the number fps2 directly,
// which is a type error under strict TypeScript.
canvasCtx.fillText(String(fps2), 10, 50);
canvasCtx.restore();
}
// Create the detector; locateFile redirects model/wasm asset fetches to the
// versioned jsDelivr CDN package.
const faceDetection = new mpFaceDetection.FaceDetection({
  locateFile: (file) =>
      `https://cdn.jsdelivr.net/npm/@mediapipe/face_detection@0.4/${file}`,
});
faceDetection.onResults(onResults);
// Present a control panel through which the user can manipulate the solution
// options.
new controls
.ControlPanel(controlsElement, {
// Initial option values; pushed into faceDetection via .on() below.
selfieMode: true,
model: 'short',
minDetectionConfidence: 0.5,
})
.add([
new controls.StaticText({title: 'MediaPipe Face Detection'}),
// Created earlier so onResults() can call tick() on every graph run.
fpsControl,
new controls.Toggle({title: 'Selfie Mode', field: 'selfieMode'}),
new controls.SourcePicker({
onSourceChanged: () => {
// Reset internal detector state when the camera/video source changes.
faceDetection.reset();
},
onFrame:
async (input: controls.InputImage, size: controls.Rectangle) => {
// Size the canvas to fill the window while preserving the frame's
// aspect ratio (letterboxing on the shorter window dimension).
const aspect = size.height / size.width;
let width: number, height: number;
if (window.innerWidth > window.innerHeight) {
height = window.innerHeight;
width = height / aspect;
} else {
width = window.innerWidth;
height = width * aspect;
}
canvasElement.width = width;
canvasElement.height = height;
// Feed the frame to the detector; results arrive via onResults().
await faceDetection.send({image: input});
},
examples: {
images: [],
videos: [],
},
}),
new controls.Slider({
title: 'Model Selection',
field: 'model',
discrete: {'short': 'Short-Range', 'full': 'Full-Range'},
}),
new controls.Slider({
title: 'Min Detection Confidence',
field: 'minDetectionConfidence',
range: [0, 1],
step: 0.01
}),
])
.on(x => {
// Fires whenever a panel option changes: mirror selfie mode onto the raw
// <video> element and push the new options into the detector.
const options = x as mpFaceDetection.Options;
// options.useCpuInference = true;
videoElement.classList.toggle('selfie', options.selfieMode);
faceDetection.setOptions(options);
});
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment