Last active
September 27, 2018 05:33
-
-
Save fragm3/cbbcdb1eb8b5036d3d3297bb80bd317b to your computer and use it in GitHub Desktop.
Code for webcam
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!DOCTYPE html>
<html lang="en">
  <head>
    <!-- charset must appear within the first 1024 bytes of the document,
         so it comes before any other head content (it was after <style>). -->
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
    <meta name="theme-color" content="#000000">
    <style>
      body {
        overflow: hidden;
      }
    </style>
    <!--
      manifest.json provides metadata used when your web app is added to the
      homescreen on Android. See https://developers.google.com/web/fundamentals/engage-and-retain/web-app-manifest/
    -->
    <link rel="manifest" href="%PUBLIC_URL%/manifest.json">
    <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
    <!--
      Notice the use of %PUBLIC_URL% in the tags above.
      It will be replaced with the URL of the `public` folder during the build.
      Only files inside the `public` folder can be referenced from the HTML.

      Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
      work correctly both with client-side routing and a non-root public URL.
      Learn how to configure a non-root public URL by running `npm run build`.
    -->
    <title>AI Playground</title>
    <!-- Affectiva emotion-detection SDK; the Webcam component expects
         window.affdex to exist at module-evaluation time. -->
    <script src="https://download.affectiva.com/js/3.2.1/affdex.js"></script>
  </head>
  <body>
    <noscript>
      You need to enable JavaScript to run this app.
    </noscript>
    <div id="root"></div>
    <!--
      This HTML file is a template.
      If you open it directly in the browser, you will see an empty page.

      You can add webfonts, meta tags, or analytics to this file.
      The build step will place the bundled scripts into the <body> tag.

      To begin the development, run `npm start` or `yarn start`.
      To create a production bundle, use `npm run build` or `yarn build`.
    -->
  </body>
</html>
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* Layout for the Webcam component: a flex column holding the (hidden)
   source <video>, a hidden capture canvas, and the visible result canvas. */

.cam-container {
  height: 100%;
  width: 100%;
  border-left: 1px solid rgba(255, 255, 255, 0.1);
}

.contain {
  width: 100%;
  height: 100%;
  /* Vertical centering is done by the flex properties below. The original
     `vertical-align: center` was invalid (`center` is not a valid value and
     the property does not apply here), so browsers ignored it; removed. */
  display: -ms-flexbox; /* IE10/11 fallback */
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: space-around;
}

.contain-sm {
  height: calc(100% - 300px) !important;
  width: 80%;
  margin-left: 10%;
  position: relative;
}

/* Hidden capture canvas: frames are drawn into it and read back,
   but it is never shown. */
.front {
  z-index: 10000;
  margin: auto;
  border-radius: 8px;
  box-shadow: 0 19px 51px 0 rgba(0, 0, 0, 0.16), 0 14px 19px 0 rgba(0, 0, 0, 0.07);
  visibility: hidden;
  display: none;
}

/* Visible canvas showing the processed frame plus feature points. */
.response-canvas {
  z-index: 10000;
  margin: 0 auto;
  border-radius: 8px;
  box-shadow: 0 19px 51px 0 rgba(0, 0, 0, 0.16), 0 14px 19px 0 rgba(0, 0, 0, 0.07);
  order: 2;
  display: block;
  width: 80%;
}

.cam-container video {
  z-index: 0;
  right: 0;
  border-radius: 8px;
  overflow: hidden;
  -webkit-box-shadow: 0 19px 51px 0 rgba(0, 0, 0, 0.16), 0 14px 19px 0 rgba(0, 0, 0, 0.07);
  box-shadow: 0 19px 51px 0 rgba(0, 0, 0, 0.16), 0 14px 19px 0 rgba(0, 0, 0, 0.07);
  -o-object-fit: fill;
  object-fit: fill;
  order: 1;
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import React, { Component } from 'react'; | |
import PropTypes from 'prop-types'; | |
import './Webcam.css'; | |
import { DH_UNABLE_TO_CHECK_GENERATOR } from 'constants'; | |
/**
 * Reports whether any form of the camera/microphone capture API is available.
 *
 * Fix: `navigator.mediaDevices` is undefined on old browsers and on insecure
 * (non-HTTPS) origins — exactly the environments this check exists to detect —
 * and the original unguarded access threw a TypeError there. Guard it before
 * dereferencing `getUserMedia`.
 *
 * @returns {boolean} true when a standard or vendor-prefixed getUserMedia exists.
 */
function hasGetUserMedia() {
  return !!(
    (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia
  );
}
// String values accepted by a MediaTrackConstraints entry: either a single
// string or a list of candidate strings.
const constrainStringValue = PropTypes.oneOfType([
  PropTypes.string,
  PropTypes.arrayOf(PropTypes.string)
]);

// A "ConstrainDOMString" per the Media Capture spec: the bare value(s), or
// the value(s) wrapped in an { exact } or { ideal } requirement object.
const constrainStringType = PropTypes.oneOfType([
  constrainStringValue,
  PropTypes.shape({ exact: constrainStringValue }),
  PropTypes.shape({ ideal: constrainStringValue })
]);
// A "ConstrainBoolean" per the Media Capture spec: a boolean wrapped in an
// { exact } (hard requirement) or { ideal } (preference) object.
const constrainBooleanType = PropTypes.oneOfType([
  PropTypes.shape({ exact: PropTypes.bool }),
  PropTypes.shape({ ideal: PropTypes.bool })
]);
// Single module-level Affectiva detector shared by every mounted Webcam
// instance; getCanvas() feeds it one ImageData frame at a time via
// detector.process(). Requires the affdex.js <script> to have loaded and
// defined window.affdex before this module is evaluated.
// (Dead commented-out FrameDetector experiment removed.)
const detector = new window.affdex.PhotoDetector();
// A "ConstrainLong" per the Media Capture spec: a bare number, or a range
// object with any of exact / ideal / min / max.
const constrainLongType = PropTypes.oneOfType([
  PropTypes.number,
  PropTypes.shape({
    exact: PropTypes.number,
    ideal: PropTypes.number,
    min: PropTypes.number,
    max: PropTypes.number
  })
]);

// "ConstrainDouble" has the same shape as "ConstrainLong".
const constrainDoubleType = constrainLongType;
// Validator for the `audioConstraints` prop — a MediaTrackConstraints object
// restricted to the audio-relevant members.
const audioConstraintType = PropTypes.shape({
  // device selection
  deviceId: constrainStringType,
  groupId: constrainStringType,
  // processing toggles
  autoGainControl: constrainBooleanType,
  noiseSuppression: constrainBooleanType,
  // signal parameters
  channelCount: constrainLongType,
  latency: constrainDoubleType,
  sampleRate: constrainLongType,
  sampleSize: constrainLongType,
  volume: constrainDoubleType
});
// Validator for the `videoConstraints` prop — a MediaTrackConstraints object
// restricted to the video-relevant members.
const videoConstraintType = PropTypes.shape({
  // device selection
  deviceId: constrainStringType,
  groupId: constrainStringType,
  // picture geometry and rate
  aspectRatio: constrainDoubleType,
  facingMode: constrainStringType,
  frameRate: constrainDoubleType,
  height: constrainLongType,
  width: constrainLongType
});
export default class Webcam extends Component { | |
static defaultProps = { | |
audio: true, | |
className: '', | |
height: 420, | |
onUserMedia: () => {}, | |
onUserMediaError: () => {}, | |
screenshotFormat: 'image/webp', | |
width: 580, | |
screenshotQuality: 0.92 | |
}; | |
static propTypes = { | |
audio: PropTypes.bool, | |
onUserMedia: PropTypes.func, | |
onUserMediaError: PropTypes.func, | |
height: PropTypes.oneOfType([PropTypes.number, PropTypes.string]), | |
width: PropTypes.oneOfType([PropTypes.number, PropTypes.string]), | |
screenshotFormat: PropTypes.oneOf([ | |
'image/webp', | |
'image/png', | |
'image/jpeg' | |
]), | |
style: PropTypes.object, | |
className: PropTypes.string, | |
screenshotQuality: PropTypes.number, | |
screenshotWidth: PropTypes.number, | |
audioConstraints: audioConstraintType, | |
videoConstraints: videoConstraintType | |
}; | |
static mountedInstances = []; | |
static userMediaRequested = false; | |
constructor() { | |
super(); | |
this.state = { | |
hasUserMedia: false, | |
isPlaying: false | |
}; | |
} | |
componentDidMount() { | |
if (!hasGetUserMedia()) return; | |
Webcam.mountedInstances.push(this); | |
if (!this.state.hasUserMedia && !Webcam.userMediaRequested) { | |
this.requestUserMedia(); | |
} | |
this.startAffectiva(); | |
} | |
UNSAFE_componentWillUpdate(nextProps) { | |
if ( | |
JSON.stringify(nextProps.audioConstraints) !== | |
JSON.stringify(this.props.audioConstraints) || | |
JSON.stringify(nextProps.videoConstraints) !== | |
JSON.stringify(this.props.videoConstraints) | |
) { | |
this.requestUserMedia(); | |
} | |
} | |
componentWillUnmount() { | |
const index = Webcam.mountedInstances.indexOf(this); | |
Webcam.mountedInstances.splice(index, 1); | |
if (Webcam.mountedInstances.length === 0 && this.state.hasUserMedia) { | |
if (this.stream.stop) { | |
this.stream.stop(); | |
} else { | |
if (this.stream.getVideoTracks) { | |
this.stream.getVideoTracks().map(track => track.stop()); | |
} | |
if (this.stream.getAudioTracks) { | |
this.stream.getAudioTracks().map(track => track.stop()); | |
} | |
} | |
Webcam.userMediaRequested = false; | |
window.URL.revokeObjectURL(this.state.src); | |
} | |
} | |
getScreenshot() { | |
if (!this.state.hasUserMedia) return null; | |
const canvas = this.getCanvas(); | |
return ( | |
canvas && | |
canvas.toDataURL( | |
this.props.screenshotFormat, | |
this.props.screenshotQuality | |
) | |
); | |
} | |
drawToCanvas2 = (imgData, faces) => { | |
console.log("faces, ", faces); | |
if (!this.ctx2) { | |
const aspectRatio = this.props.width / this.props.height | |
this.canvas2.width = this.props.width; | |
this.canvas2.height = this.props.height; | |
this.ctx2 = this.canvas2.getContext('2d'); | |
} | |
const { ctx2, canvas2 } = this; | |
// Draw the image | |
ctx2.putImageData(imgData, 0, 0); | |
this.drawFeaturePoints(imgData, faces[0].featurePoints); | |
//ctx2.drawImage(img, 0, 0, img.width, img.height); | |
//this.drawTriangle(); | |
} | |
getCanvas = () => { | |
this.setState({isPlaying: true}) | |
if (!this.state.hasUserMedia || !this.video.videoHeight) return null; | |
if (!this.ctx) { | |
const aspectRatio = this.props.width / this.props.height; | |
this.canvas.width = this.props.width; | |
this.canvas.height = this.props.height; | |
this.ctx = this.canvas.getContext('2d'); | |
} | |
const { ctx, canvas } = this; | |
ctx.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height); | |
if (detector && detector.isRunning) { | |
detector.process(ctx.getImageData(0, 0, canvas.width, canvas.height), 0); | |
} | |
//window.requestAnimationFrame(this.getCanvas); | |
setTimeout(this.getCanvas, 500); | |
return canvas; | |
} | |
drawTriangle = () => { | |
var canvas = this.canvas; | |
if (canvas.getContext) { | |
var ctx = canvas.getContext('2d'); | |
ctx.beginPath(); | |
ctx.moveTo(175, 200); | |
ctx.lineTo(200, 225); | |
ctx.lineTo(200, 175); | |
ctx.fill(); | |
} | |
} | |
startAffectiva = () => { | |
detector.detectAllEmotions(); | |
detector.detectAllExpressions(); | |
detector.detectAllEmojis(); | |
detector.detectAllAppearance(); | |
detector.addEventListener("onInitializeSuccess", function() { | |
console.log('#logs', "The detector reports initialized"); | |
}); | |
detector.addEventListener("onImageResultsSuccess", (faces, image, timestamp) => { | |
console.log("image", image) | |
//console.log("faces", faces) | |
if(faces && faces[0]){ | |
//this.drawFeaturePoints(image, faces[0].featurePoints); | |
this.drawToCanvas2(image, faces); | |
//window.requestAnimationFrame(this.getCanvas); | |
} | |
}); | |
detector.addEventListener("onImageResultsFailure", function (image, timestamp, err_detail) { | |
//console.log("Images failed", image) | |
}); | |
detector.start() | |
} | |
stopAffectiva = () => { | |
detector.removeEventListener(); | |
detector.stop(); | |
} | |
resetAffectiva = () => { | |
detector.reset(); | |
} | |
requestUserMedia() { | |
navigator.getUserMedia = | |
navigator.mediaDevices.getUserMedia || | |
navigator.webkitGetUserMedia || | |
navigator.mozGetUserMedia || | |
navigator.msGetUserMedia; | |
const sourceSelected = (audioConstraints, videoConstraints) => { | |
const constraints = { | |
video: videoConstraints || true | |
}; | |
if (this.props.audio) { | |
constraints.audio = audioConstraints || true; | |
} | |
navigator.mediaDevices | |
.getUserMedia(constraints) | |
.then(stream => { | |
Webcam.mountedInstances.forEach(instance => | |
instance.handleUserMedia(null, stream) | |
); | |
}) | |
.catch(e => { | |
Webcam.mountedInstances.forEach(instance => | |
instance.handleUserMedia(e) | |
); | |
}); | |
}; | |
if ('mediaDevices' in navigator) { | |
sourceSelected(this.props.audioConstraints, this.props.videoConstraints); | |
} else { | |
const optionalSource = id => ({ optional: [{ sourceId: id }] }); | |
const constraintToSourceId = constraint => { | |
const deviceId = (constraint || {}).deviceId; | |
if (typeof deviceId === 'string') { | |
return deviceId; | |
} else if (Array.isArray(deviceId) && deviceId.length > 0) { | |
return deviceId[0]; | |
} else if (typeof deviceId === 'object' && deviceId.ideal) { | |
return deviceId.ideal; | |
} | |
return null; | |
}; | |
MediaStreamTrack.getSources(sources => { | |
let audioSource = null; | |
let videoSource = null; | |
sources.forEach(source => { | |
if (source.kind === 'audio') { | |
audioSource = source.id; | |
} else if (source.kind === 'video') { | |
videoSource = source.id; | |
} | |
}); | |
const audioSourceId = constraintToSourceId(this.props.audioConstraints); | |
if (audioSourceId) { | |
audioSource = audioSourceId; | |
} | |
const videoSourceId = constraintToSourceId(this.props.videoConstraints); | |
if (videoSourceId) { | |
videoSource = videoSourceId; | |
} | |
sourceSelected( | |
optionalSource(audioSource), | |
optionalSource(videoSource) | |
); | |
}); | |
} | |
Webcam.userMediaRequested = true; | |
} | |
handleUserMedia(err, stream) { | |
if (err) { | |
this.setState({ hasUserMedia: false }); | |
this.props.onUserMediaError(err); | |
return; | |
} | |
this.stream = stream; | |
try { | |
this.video.srcObject = stream; | |
this.setState({ hasUserMedia: true }); | |
} catch (error) { | |
this.setState({ | |
hasUserMedia: true, | |
src: window.URL.createObjectURL(stream) | |
}); | |
} | |
this.props.onUserMedia(); | |
} | |
onVideoPlay = () => { | |
this.getCanvas(); | |
this.setState({isPlaying: true}) | |
} | |
drawFeaturePoints = (img, featurePoints) => { | |
var contxt = this.canvas2.getContext('2d'); | |
//debugger; | |
var hRatio = contxt.canvas.width / img.width; | |
var vRatio = contxt.canvas.height / img.height; | |
var ratio = Math.min(hRatio, vRatio); | |
contxt.strokeStyle = "#FFFFFF"; | |
for (var id in featurePoints) { | |
contxt.beginPath(); | |
contxt.arc(featurePoints[id].x, | |
featurePoints[id].y, 2, 0, 2 * Math.PI); | |
contxt.stroke(); | |
} | |
} | |
render() { | |
return ( | |
<div className="cam-container"> | |
<div className="contain"> | |
<video | |
autoPlay | |
width="276" | |
height="200" | |
src={this.state.src} | |
muted={this.props.audio} | |
className={`${this.props.className} ${this.state.isPlaying === true ? 'disable': 'visible'}`} | |
playsInline | |
ref={ref => { | |
this.video = ref; | |
}} | |
onPlaying={this.getCanvas} | |
/> | |
<canvas | |
ref={ref => { | |
this.canvas = ref; | |
}} | |
className="front" | |
/> | |
<canvas | |
ref={ref => { | |
this.canvas2 = ref; | |
}} | |
style={{width: "500", height: "400"}} | |
className="response-canvas" | |
/> | |
</div> | |
</div> | |
); | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment