@jefBinomed
Created October 27, 2020 16:58
2020-10-push-the-limit-detector
export class Detector {
  /**
   * Creates the right detector object according to the type.
   * @param type: the type of detector to use -> see the TYPES constant for the correct values
   * @param options: each detector works with specific options that can be overridden. Check the desired constant. Note that the text detector doesn't need options
   **/
  constructor(type, options) {
    this.type = type;
    switch (type) {
      case TYPES.face:
        this.detectorEngine = new FaceDetector(options);
        break;
      case TYPES.barcode:
        this.detectorEngine = new BarcodeDetector(options);
        break;
      case TYPES.text:
        this.detectorEngine = new TextDetector();
        break;
      default:
    }
  }

  /**
   * Where the magic happens ;)
   * @param image: the source image or canvas where we want to detect something
   * @returns a Promise with the result of the detection
   **/
  detect(image) {
    if (this.detectorEngine) {
      return this.detectorEngine.detect(image);
    }
  }
}
/**
 * Helper function to check if the browser supports the feature
 * @return true or false according to your browser support
 **/
export function isAvailable(type) {
  let detectorFeature = undefined;
  switch (type) {
    case TYPES.face:
      detectorFeature = "FaceDetector";
      break;
    case TYPES.barcode:
      detectorFeature = "BarcodeDetector";
      break;
    case TYPES.text:
      detectorFeature = "TextDetector";
      break;
  }
  return detectorFeature !== undefined && detectorFeature in window;
}
/**
 * The managed types
 */
export const TYPES = {
  face: "face",
  text: "text",
  barcode: "barcode"
};
/**
 * The options for face detection
 * You can override the number of detected faces
 * fastMode should be used when you want a quick result. The result will be less precise if fastMode is set to true
 */
export const OPTIONS_FACE = {
  maxDetectedFaces: 1,
  fastMode: false
};
/**
 * The list of barcode formats to check. You can reduce this list if you want to speed up the detection process
 **/
export const OPTIONS_BARCODE = {
  formats: [
    "aztec",
    "code_128",
    "code_39",
    "code_93",
    "codabar",
    "data_matrix",
    "ean_13",
    "ean_8",
    "itf",
    "pdf417",
    "qr_code",
    "upc_a",
    "upc_e"
  ]
};
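
// A minimal usage sketch for the detector module above, assuming a browser that
// implements the Shape Detection API (FaceDetector, BarcodeDetector, TextDetector).
// The "#photo" image element is hypothetical and not part of the original gist.
import { Detector, isAvailable, TYPES, OPTIONS_FACE } from "./detector.js";

async function detectFaces() {
  // Always feature-check first: the Shape Detection API is not widely supported.
  if (!isAvailable(TYPES.face)) {
    console.warn("FaceDetector is not supported in this browser");
    return;
  }
  // Override the default options: up to 5 faces, trading precision for speed.
  const detector = new Detector(TYPES.face, {
    ...OPTIONS_FACE,
    maxDetectedFaces: 5,
    fastMode: true
  });
  const image = document.querySelector("#photo");
  const faces = await detector.detect(image);
  // Each detected face exposes a boundingBox (x, y, width, height).
  faces.forEach((face) => console.log(face.boundingBox));
}
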
import { Detector } from "./detector.js";

export class UserMediaHelper {
  /**
   * @param canvas: the dom element corresponding to the canvas
   * @param video: the dom element corresponding to the video
   * @param videoArea: the dom element around the canvas (to fix the size of the output video)
   **/
  constructor(canvas, video, videoArea) {
    this.stopDraw = false;
    this.canvas = canvas;
    this.video = video;
    this.videoArea = videoArea;
    this.context = undefined;
    this.ratio = undefined;
    this.callbackDraw = undefined;
    this.isDetecting = false;
    this.detectedObjects = [];
  }
  /**
   * Method to call to receive the results of detection
   * @param type: the type of detector to use -> see the Detector helper for more information
   * @param options: the options passed to the underlying detector. If you want to use TextDetector, pass null as the value
   * @param callback: the callback function that will receive the results of the detection. The function should look like (detectedObjects) => {}
   **/
  addDetectorCallback(type, options, callback) {
    this.detector = new Detector(type, options);
    this.callbackDetector = callback;
  }
  /**
   * Method to call if you want to draw something on the canvas based on the detection
   * @param callBackFunction: the method called on every frame to draw on the canvas. The function should look like (context, video, canvas) => {}
   **/
  addCallbackDraw(callBackFunction) {
    this.callbackDraw = callBackFunction;
  }
  /**
   * This method should be called to stop the capture and detection process
   **/
  stop() {
    const stream = this.video.srcObject;
    const tracks = stream.getTracks();
    tracks.forEach(function (track) {
      track.stop();
    });
    this.video.pause();
    this.stopDraw = true;
    this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
  }
  /**
   * Call this method to start the capture and detection process
   **/
  async getUserMedia() {
    // Grab the camera stream.
    const constraints = {
      video: {
        facingMode: "user", // To be sure to use the front camera on smartphones!
        frameRate: 60 // To be sure to have a high frame rate
      }
    };
    this.video.srcObject = await navigator.mediaDevices.getUserMedia(
      constraints
    );
    // Start the video
    await this.video.play();
    // The canvas takes the size of the video area
    this.canvas.height = this.videoArea.getBoundingClientRect().height;
    this.canvas.width = this.videoArea.getBoundingClientRect().width;
    // HACK: FaceDetector doesn't accept a canvas whose width is odd.
    if (this.canvas.width % 2 === 1) {
      this.canvas.width += 1;
    }
    this.context = this.canvas.getContext("2d");
    // Ratio used to determine the rendering of the video in the canvas
    // We take the max ratio and apply it to the canvas afterwards
    // The width of the camera and the screen can be different!
    this.ratio = Math.max(
      this.canvas.width / this.video.videoWidth,
      this.canvas.height / this.video.videoHeight
    );
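    // Worked example (illustrative numbers, not from the original gist): with a
    // 1280x720 camera feed and a 360x640 canvas, the ratios are 360/1280 = 0.28
    // and 640/720 = 0.89, so ratio = 0.89. The video is then drawn about 1138x640,
    // overflowing horizontally; the negative x offset computed in _draw() centers
    // it, giving a "cover"-style crop instead of letterboxing.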
    this.stopDraw = false;
    this._draw();
  }
  /**
   * PRIVATE METHOD
   **/
  /**
   * Private method with the draw process
   */
  _draw() {
    if (this.stopDraw) {
      return;
    }
    this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
    // To be sure to have the minimum delay between frames
    requestAnimationFrame(this._draw.bind(this));
    // Draw the video frame.
    this.context.drawImage(
      this.video, // Source
      (this.canvas.width - this.video.videoWidth * this.ratio) / 2, // x dest in canvas
      // => used to manage portrait vs landscape
      0, // y dest in canvas
      this.video.videoWidth * this.ratio, // width of the video in the canvas
      this.video.videoHeight * this.ratio // height of the video in the canvas
    );
    // Detector part
    if (this.callbackDetector) {
      if (!this.isDetecting) {
        this.isDetecting = true;
        this.detector.detect(this.canvas).then((detectedObjects) => {
          this.callbackDetector(detectedObjects);
          this.isDetecting = false;
        });
      }
    }
    // Draw part
    if (this.callbackDraw) {
      this.callbackDraw(this.context, this.video, this.canvas);
    }
  }
}
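
// A minimal usage sketch for UserMediaHelper, assuming a page with <canvas id="canvas">,
// <video id="video"> and a wrapper <div id="videoArea"> (these ids are hypothetical),
// and a browser that supports FaceDetector.
import { UserMediaHelper } from "./userMediaHelper.js"; // assumed file name for the class above
import { TYPES, OPTIONS_FACE, isAvailable } from "./detector.js";

const helper = new UserMediaHelper(
  document.querySelector("#canvas"),
  document.querySelector("#video"),
  document.querySelector("#videoArea")
);

if (isAvailable(TYPES.face)) {
  // Called each time a detection pass on the canvas resolves.
  helper.addDetectorCallback(TYPES.face, OPTIONS_FACE, (faces) => {
    console.log(`${faces.length} face(s) detected`);
  });
}

// Called on every frame, after the video has been drawn onto the canvas.
helper.addCallbackDraw((context, video, canvas) => {
  context.strokeStyle = "red";
  context.strokeRect(10, 10, canvas.width - 20, canvas.height - 20);
});

// Start the camera capture and detection loop; call helper.stop() to release the camera.
helper.getUserMedia();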