@michaelbromley
Created March 25, 2015 19:16
ViennaJS Talk on Web Audio & Video Capture
<!doctype html>
<html lang="en-US">
<head>
<meta charset="UTF-8">
<title>Audio & Video In JavaScript</title>
<script>
/**
__ __ _ _ _ _____ _____
\ \ / / | | /\ | (_) /\ | __ \_ _|
\ \ /\ / /__| |__ / \ _ _ __| |_ ___ / \ | |__) || |
\ \/ \/ / _ \ '_ \ / /\ \| | | |/ _` | |/ _ \ / /\ \ | ___/ | |
\ /\ / __/ |_) | / ____ \ |_| | (_| | | (_) | / ____ \| | _| |_
\/ \/ \___|_.__/ /_/ \_\__,_|\__,_|_|\___/ /_/ \_\_| |_____|
It's not the HTML <audio> element...
IT'S A GRAPH!
(source)->(node)->(node)->(destination) ♫ ♫ ♫
(node) = some type of transformation / signal processing
*/
var context = null; // the AudioContext (the boss)
/**
* Check for browser support
*/
(function init() {
    try {
        // Fix up for prefixing
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        context = new AudioContext();
    }
    catch (e) {
        alert('Web Audio API is not supported in this browser');
    }
})();
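/**
 * Note (not part of the original talk): current browsers often start an
 * AudioContext in a "suspended" state until a user gesture, so playback code
 * may also need something like this minimal sketch:
 */
/*if (context.state === 'suspended') {
    context.resume();
}*/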
/**
* Load an audio file.
*/
var audioBuffer;
function loadAudioFile(url) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    // Decode asynchronously
    request.onload = function() {
        context.decodeAudioData(request.response, function(buffer) {
            audioBuffer = buffer;
            playSound(audioBuffer);
        });
    };
    request.send();
}
loadAudioFile('assets/funky_drummer.mp3');
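/**
 * Note (not part of the original talk): newer browsers also support fetch()
 * and a promise-based decodeAudioData(), so the same load could be sketched
 * roughly like this (hypothetical alternative, not used by the demo):
 */
/*function loadAudioFileWithFetch(url) {
    fetch(url)
        .then(function (response) { return response.arrayBuffer(); })
        .then(function (data) { return context.decodeAudioData(data); })
        .then(function (buffer) {
            audioBuffer = buffer;
            playSound(audioBuffer);
        });
}*/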
/**
* Once loaded, use the file as the source node of our graph.
*/
var source = context.createBufferSource(); // creates a sound source
function playSound(audioBuffer) {
    source.buffer = audioBuffer; // tell the source which sound to play
    source.loop = true;          // set it to loop
    source.start(0);             // play the source now
}
/**
* The simplest graph:
*
* Connect the source to the context's destination (the speakers)
*
* (source) -> (destination)
*/
source.connect(context.destination);
/**
* Let's add a node!
* Gain == volume
*
* (source) -> (gainNode) -> (destination)
*/
/*var gainNode = context.createGain();
gainNode.gain.value = 0.1;
source.connect(gainNode);
gainNode.connect(context.destination);*/
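/**
 * Note (not in the original talk): gain is an AudioParam, so it can also be
 * automated over time. A minimal sketch of a 3-second fade-out, assuming the
 * gainNode above has been created and connected:
 */
/*gainNode.gain.setValueAtTime(1, context.currentTime);
gainNode.gain.linearRampToValueAtTime(0, context.currentTime + 3);*/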
/**
* There are many built-in node types for different
* kinds of signal processing!
*
* Let's try some interesting ones:
*/
/**
* Let's add some reverb using a convolver node
*
* (source) -> (convolverNode) -> (destination)
*/
/*var convolverNode = context.createConvolver();
loadImpulseResponse();
source.connect(convolverNode);
convolverNode.connect(context.destination);*/
// a convolver needs another audio sample (an impulse response) to work with.
function loadImpulseResponse() {
    var request = new XMLHttpRequest();
    request.open('GET', 'assets/church.wav', true);
    request.responseType = 'arraybuffer';
    request.onload = function () {
        context.decodeAudioData(request.response, function (buffer) {
            convolverNode.buffer = buffer;
        });
    };
    request.send();
}
/**
 * Add distortion with a waveShaperNode
 *
 * (source) -> (waveShaperNode) -> (destination)
 */
/*var waveShaperNode = context.createWaveShaper();
waveShaperNode.overSample = '4x';
waveShaperNode.curve = makeDistortionCurve(500);
source.connect(waveShaperNode);
waveShaperNode.connect(context.destination);
*/
// Don't ask me what is going on here.
// I just stole it from MDN...
function makeDistortionCurve(amount) {
    var k = amount,
        n_samples = 44100,
        curve = new Float32Array(n_samples),
        deg = Math.PI / 180,
        i = 0,
        x;
    for ( ; i < n_samples; ++i ) {
        x = i * 2 / n_samples - 1;
        curve[i] = ( 3 + k ) * x * 20 * deg / ( Math.PI + k * Math.abs(x) );
    }
    return curve;
}
/**
* Let's put all of those together to make some beautiful music!
*
* (source) -> (waveShaperNode) -> (convolverNode) -> (gainNode) -> (destination)
*/
/*var waveShaperNode = context.createWaveShaper();
waveShaperNode.overSample = '4x';
waveShaperNode.curve = makeDistortionCurve(500);
var convolverNode = context.createConvolver();
loadImpulseResponse();
var gainNode = context.createGain();
gainNode.gain.value = 0.5;
// wire it up
source.connect(waveShaperNode);
waveShaperNode.connect(convolverNode);
convolverNode.connect(gainNode);
gainNode.connect(context.destination);*/
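/**
 * One more built-in node type worth knowing about (not shown in the talk):
 * a biquadFilterNode, used here as a simple low-pass filter. A rough sketch:
 *
 * (source) -> (biquadFilterNode) -> (destination)
 */
/*var filterNode = context.createBiquadFilter();
filterNode.type = 'lowpass';
filterNode.frequency.value = 800; // cut everything above ~800 Hz
source.connect(filterNode);
filterNode.connect(context.destination);*/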
</script>
</head>
<body>
</body>
</html>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Audio & Video In JavaScript</title>
<script>
/**
__ ___ _ _____ _
\ \ / (_) | | / ____| | |
\ \ / / _ __| | ___ ___ | | __ _ _ __ | |_ _ _ _ __ ___
\ \/ / | |/ _` |/ _ \/ _ \ | | / _` | '_ \| __| | | | '__/ _ \
\ / | | (_| | __/ (_) | | |___| (_| | |_) | |_| |_| | | | __/
\/ |_|\__,_|\___|\___/ \_____\__,_| .__/ \__|\__,_|_| \___|
| |
|_|
Using getUserMedia()!
*/
/**
* Again, we need to check for browser support.
*/
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);
if (!navigator.getUserMedia) {
    alert('Sorry, the browser you are using doesn\'t support getUserMedia');
}
/**
* BTW, What is "navigator"???
*
*
*
*
*
*
*
*
*
*
* "It allows scripts to query it and to register themselves to carry on some activities." - MDN
*
*/
</script>
</head>
<body>
<!-- Here is an HTML5 <video> element, and a button that we'll use to kick things off. -->
<video id="video" width="640" height="480"></video>
<button id="start">Start Capturing Video</button>
<script>
var videoElement = document.querySelector('#video');
var startButton = document.querySelector('#start');
</script>
<script>
/**
* When the button is clicked, we'll use the getUserMedia() API to start capturing video.
*/
startButton.addEventListener('click', startCapture);
function startCapture() {
    // Request access to video only
    var config = {
        video: true,
        audio: false
    };
    navigator.getUserMedia(config, success, error);
}
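/**
 * Note (not part of the original talk): newer browsers also expose a
 * promise-based version of this API on navigator.mediaDevices.
 * A minimal sketch of the same request:
 */
/*navigator.mediaDevices.getUserMedia({ video: true, audio: false })
    .then(success)
    .catch(error);*/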
/**
* `stream` is a "local media stream" - part of the WebRTC family of APIs.
*
* URL.createObjectURL takes a file/blob and returns a URL referencing it.
*
*/
function success(stream) {
    videoElement.src = URL.createObjectURL(stream);
    videoElement.play();
}
function error(error) {
    alert('Something went wrong. (error code ' + error.code + ')');
}
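/**
 * Note (not part of the original talk): current browsers deprecate passing a
 * MediaStream to URL.createObjectURL(); the stream can be attached directly
 * instead. A minimal sketch:
 */
/*function success(stream) {
    videoElement.srcObject = stream;
    videoElement.play();
}*/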
</script>
<br>
<!-- Now we will add a <canvas> element and use it to capture a video frame -->
<canvas id="canvas1" width="640" height="480"></canvas>
<button id="captureButton1">Capture a frame</button>
<script>
var canvasElement1 = document.querySelector('#canvas1'),
canvas1 = canvasElement1.getContext('2d'),
captureButton1 = document.querySelector('#captureButton1');
captureButton1.addEventListener('click', captureFrame);
/**
* .drawImage() takes an image source such as:
*
* - <img> element
* - <video> element
* - another <canvas> element
*
* followed by origin x,y coordinates and width & height.
*/
function captureFrame() {
    canvas1.drawImage(videoElement, 0, 0, videoElement.width, videoElement.height);
}
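/**
 * Note (not in the original talk): once a frame is on the canvas it can be
 * exported, e.g. as a PNG data URL. A minimal sketch (hypothetical helper):
 */
/*function exportFrame() {
    var dataUrl = canvasElement1.toDataURL('image/png');
    console.log(dataUrl); // could be used as the src of an <img>, downloaded, etc.
}*/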
</script>
<br>
<!-- Now we know how to get the video stream into a canvas,
we can do some even cooler stuff! -->
<canvas id="canvas2" width="640" height="480"></canvas>
<button id="captureButton2">Process image data</button>
<script>
var canvasElement2 = document.querySelector('#canvas2'),
canvas2 = canvasElement2.getContext('2d'),
captureButton2 = document.querySelector('#captureButton2');
captureButton2.addEventListener('click', processImageData);
/**
* getImageData() returns an object containing a byte array.
*
* This array contains 4 values per pixel, corresponding to
* the red, green, blue, and alpha channels for that pixel,
* each a value between 0 and 255.
*
* Therefore, our 640 x 480 image should contain
* 640 * 480 * 4 = 1,228,800 values.
*
*/
function processImageData() {
    var sourceData = canvas1.getImageData(0, 0, 640, 480);
    var processedData = canvas2.createImageData(640, 480);
    for (var i = 0; i < sourceData.data.length; i++) {
        var val = sourceData.data[i];
        // make it red
        /*if (i % 4 === 0) {
            val = 255;
        }*/
        // invert the colours (but not the alpha channel)
        if ((i - 3) % 4 !== 0) {
            val = 255 - val;
        }
        processedData.data[i] = val;
    }
    canvas2.putImageData(processedData, 0, 0);
}
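/**
 * Note (not in the original talk): other per-pixel effects follow the same
 * pattern. A minimal grayscale sketch, averaging each pixel's R, G and B:
 */
/*function toGrayscale() {
    var frame = canvas1.getImageData(0, 0, 640, 480);
    for (var i = 0; i < frame.data.length; i += 4) {
        var avg = (frame.data[i] + frame.data[i + 1] + frame.data[i + 2]) / 3;
        frame.data[i] = frame.data[i + 1] = frame.data[i + 2] = avg;
        // frame.data[i + 3] (alpha) is left untouched
    }
    canvas2.putImageData(frame, 0, 0);
}*/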
</script>
</body>
</html>