Skip to content

Instantly share code, notes, and snippets.

@indefinit
Last active August 29, 2015 14:01
Show Gist options
  • Save indefinit/ae23f5f4fbcc2f9bde2d to your computer and use it in GitHub Desktop.
Save indefinit/ae23f5f4fbcc2f9bde2d to your computer and use it in GitHub Desktop.
Generic Sound module for using the Web Audio API. A work in progress — no guarantees :)

## To use: in your main script file (or inline HTML)

    //create new sound object
    var context = new SONO.ctx();
    var buffer = new SONO.BufferLoader(context, urls, onBufferLoaded);
    
    function onBufferLoaded(buffers){
      source = context.createBufferSource();
      source.buffer = buffer.bufferList[0]; //here only assigning 1 buffer to source
      source.connect(context.destination);
    source[source.start ? 'start': 'noteOn'](0); //backwards compatibility for the start and noteOn methods
    }

    buffer.load();
/* ___ ___ ___ ___ ___ ___
___ /\__\ /\ \ /\ \ /\ \ ___ /\__\ ___ /\ \
/\ \ /::| | /::\ \ /::\ \ /::\ \ /\ \ /::| | /\ \ \:\ \
\:\ \ /:|:| | /:/\:\ \ /:/\:\ \ /:/\:\ \ \:\ \ /:|:| | \:\ \ \:\ \
/::\__\ /:/|:| |__ /:/ \:\__\ /::\~\:\ \ /::\~\:\ \ /::\__\ /:/|:| |__ /::\__\ /::\ \
__/:/\/__/ /:/ |:| /\__\ /:/__/ \:|__| /:/\:\ \:\__\ /:/\:\ \:\__\ __/:/\/__/ /:/ |:| /\__\ __/:/\/__/ /:/\:\__\
/\/:/ / \/__|:|/:/ / \:\ \ /:/ / \:\~\:\ \/__/ \/__\:\ \/__/ /\/:/ / \/__|:|/:/ / /\/:/ / /:/ \/__/
\::/__/ |:/:/ / \:\ /:/ / \:\ \:\__\ \:\__\ \::/__/ |:/:/ / \::/__/ /:/ /
\:\__\ |::/ / \:\/:/ / \:\ \/__/ \/__/ \:\__\ |::/ / \:\__\ \/__/
\/__/ /:/ / \::/__/ \:\__\ \/__/ /:/ / \/__/
\/__/ ~~ \/__/ \/__/
* http://studioindefinit.com
* SONO - An Indefinit Web Audio API Library by Studio Indefinit
*/
var SONO = { REV : '1'};
/**
 * Bootstrap: normalize the AudioContext constructor (webkit prefix
 * fallback) and try to create one shared context.
 * Sets SONO.ctx (the shared AudioContext), SONO.has (capability flag)
 * and SONO._ready.
 */
(function(){
var self = this; // the global object when invoked unqualified in sloppy mode
var ctx;
// Force polyfill for Web Audio
// by @jonobr1 / http://jonobr1.com/
self.AudioContext = self.AudioContext || self.webkitAudioContext;
SONO._ready = true;
try {
SONO.ctx = ctx = new self.AudioContext();
SONO.has = true;
} catch (e) {
// No Web Audio support: expose no stale context, flag capability off.
delete SONO.ctx;
SONO.has = false;
}
})();
// http://paulirish.com/2011/requestanimationframe-for-smart-animating/
// http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
// requestAnimationFrame polyfill by Erik Möller
// fixes from Paul Irish and Tino Zijdel
// using 'self' instead of 'window' for compatibility with both NodeJS and IE10.
( function () {
// Install requestAnimationFrame/cancelAnimationFrame on the global,
// preferring vendor-prefixed implementations and falling back to a
// ~60fps setTimeout shim.
var previousFrameTime = 0;
var prefixes = [ 'ms', 'moz', 'webkit', 'o' ];
for ( var i = 0; i < prefixes.length; ++ i ) {
if ( self.requestAnimationFrame ) { break; }
self.requestAnimationFrame = self[ prefixes[ i ] + 'RequestAnimationFrame' ];
self.cancelAnimationFrame = self[ prefixes[ i ] + 'CancelAnimationFrame' ] || self[ prefixes[ i ] + 'CancelRequestAnimationFrame' ];
}
if ( self.requestAnimationFrame === undefined && self['setTimeout'] !== undefined ) {
self.requestAnimationFrame = function ( cb ) {
var now = Date.now();
// Schedule so consecutive frames land roughly 16ms apart.
var delay = Math.max( 0, 16 - ( now - previousFrameTime ) );
var handle = self.setTimeout( function () { cb( now + delay ); }, delay );
previousFrameTime = now + delay;
return handle;
};
}
if ( self.cancelAnimationFrame === undefined && self['clearTimeout'] !== undefined ) {
self.cancelAnimationFrame = function ( handle ) { self.clearTimeout( handle ) };
}
}() );
/**
 * Asynchronous loader for a list of audio file URLs.
 * @param {AudioContext} audioCtx - Web Audio context used for decoding.
 * @param {Array} urlList - URLs of audio files to fetch.
 * @param {Function} callback - Invoked with the decoded buffer list once
 *   every URL has finished decoding.
 */
SONO.BufferLoader = function(audioCtx, urlList, callback) {
this.audioCtx = audioCtx;
this.urlList = urlList;
this.onload = callback;
this.bufferList = []; // decoded AudioBuffers, indexed to match urlList
this.loadCount = 0;   // how many URLs have finished decoding so far
};
SONO.BufferLoader.prototype = {
/**
 * Fetch one URL as an ArrayBuffer and decode it into an AudioBuffer.
 * When the last pending URL finishes, this.onload fires with the full
 * buffer list.
 * @param {string} url - Audio file URL.
 * @param {number} index - Slot in this.bufferList for the decoded buffer.
 */
loadBuffer : function(url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this; // keep a reference for the XHR callbacks below
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.audioCtx.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
// Fire the user callback only once every URL has decoded.
if (++loader.loadCount === loader.urlList.length) {
loader.onload(loader.bufferList);
}
},
function(error) {
console.error('decodeAudioData error', error);
}
);
};
request.onerror = function() {
alert('BufferLoader: XHR error');
};
request.send();
},
/**
 * Kick off loading of every URL in this.urlList.
 */
load : function() {
for (var i = 0; i < this.urlList.length; ++i) {
this.loadBuffer(this.urlList[i], i);
}
}
};
/**
 * Frequency/time-domain visualizer for a decoded audio buffer.
 * @param {Array} bufferList - Decoded AudioBuffers; only the first is played.
 * @param {number} [fftSize=2048] - FFT size for the analyser (power of two).
 * @param {number} [smoothing=0.8] - Analyser smoothing time constant (0..1).
 */
SONO.Visualizer = function(bufferList, fftSize, smoothing) {
this.analyser = SONO.ctx.createAnalyser();
this.smoothing = smoothing || 0.8;
this.fftSize = fftSize || 2048;
// BUGFIX: apply the requested settings to the analyser right away so
// frequencyBinCount (and the arrays sized from it below) match the
// requested fftSize instead of the analyser's default. analyze() used
// to be the first place these were applied.
this.analyser.fftSize = this.fftSize;
this.analyser.smoothingTimeConstant = this.smoothing;
this.analyser.connect(SONO.ctx.destination);
this.analyser.minDecibels = -140;
this.analyser.maxDecibels = 0;
this.freqs = new Uint8Array(this.analyser.frequencyBinCount); // byte data, 0-255 per bin; silence is 0
this.times = new Uint8Array(this.analyser.frequencyBinCount); // byte data, 0-255 per bin; silence is 128
this.buffer = bufferList[0];
this.isPlaying = false;
this.startTime = 0;    // ctx.currentTime when playback last started
this.startOffset = 0;  // play-head position preserved across pause/resume
this.levelsCount = 16; // should be a factor of frequencyBinCount
this.levelBins = Math.floor(this.analyser.frequencyBinCount / this.levelsCount);
this.gotBeat = false;
this.beatTime = 0;
};
SONO.Visualizer.prototype = {
/**
 * Start playback from the saved offset, or pause and remember the
 * play-head position. Starts the analysis loop on play.
 * @returns {SONO.Visualizer} this, for chaining.
 */
togglePlayback : function() {
if (this.isPlaying) {
// Stop playback (noteOff on legacy implementations).
this.source[this.source.stop ? 'stop': 'noteOff'](0);
// Save the position of the play head.
this.startOffset += SONO.ctx.currentTime - this.startTime;
console.log('paused at', this.startOffset);
} else {
this.startTime = SONO.ctx.currentTime;
console.log('started at', this.startOffset);
// A BufferSource is single-use, so create a fresh one each play.
this.source = SONO.ctx.createBufferSource();
// Connect graph
this.source.connect(this.analyser);
this.source.buffer = this.buffer;
this.source.loop = true;
// Start playback, but make sure we stay in bound of the buffer.
this.source[this.source.start ? 'start' : 'noteOn'](0, this.startOffset % this.buffer.duration);
// Start visualizer.
requestAnimationFrame(this.analyze.bind(this));
}
this.isPlaying = !this.isPlaying;
return this;
},
/**
 * Pull the latest frequency/time-domain snapshots into this.freqs and
 * this.times, rescheduling itself while playback continues.
 * @returns {SONO.Visualizer} this, for chaining.
 */
analyze : function(){
this.analyser.smoothingTimeConstant = this.smoothing;
this.analyser.fftSize = this.fftSize;
// Get the frequency data from the currently playing music
this.analyser.getByteFrequencyData(this.freqs);
this.analyser.getByteTimeDomainData(this.times);
if (this.isPlaying) {
requestAnimationFrame(this.analyze.bind(this));
}
return this;
},
/**
 * Render a full-window frequency bar chart onto the page's first
 * <canvas>. Reschedules itself while playback continues.
 * @returns {SONO.Visualizer} this, for chaining.
 */
drawDebug : function() {
var canvas = document.querySelector('canvas');
var drawContext = canvas.getContext('2d');
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
// Draw the frequency domain chart.
for (var i = 0; i < this.analyser.frequencyBinCount; i++) {
var value = this.freqs[i];
var percent = value / 256;
var height = canvas.height * percent;
var offset = canvas.height - height - 1;
var barWidth = canvas.width / this.analyser.frequencyBinCount;
var hue = i / this.analyser.frequencyBinCount * 360;
drawContext.fillStyle = 'hsl(' + hue + ', 100%, 50%)';
drawContext.fillRect(i * barWidth, offset, barWidth, height);
}
//@TODO draw the time-domain chart from this.times as well.
if (this.isPlaying) {
// BUGFIX: was this.draw.bind(this), but no draw() method exists —
// the loop would throw after the first frame.
requestAnimationFrame(this.drawDebug.bind(this));
}
return this;
},
/**
 * Byte level (0-255) of the FFT bin closest to a frequency in Hz.
 * @param {number} freq - Frequency in Hz.
 * @returns {number} Byte magnitude for that bin.
 */
getFrequencyLevel : function(freq) {
var nyquist = SONO.ctx.sampleRate / 2;
var index = Math.round(freq / nyquist * this.freqs.length);
return this.freqs[index];
},
//@TODO need to create custom event here
onBeat : function(){
this.gotBeat = true;
},
//@TODO this method needs some math love
detectBeat : function(){
// A beat is a level spike above both the adaptive cutoff and the floor.
if (this.aveLevel > this.beatCutOff && this.aveLevel > this.BEAT_MIN){
this.onBeat();
this.beatCutOff = this.aveLevel * 1.1;
this.beatTime = 0;
}
// else {
//   if (beatTime <= ControlsHandler.audioParams.beatHoldTime){
//     beatTime++;
//   } else {
//     beatCutOff *= ControlsHandler.audioParams.beatDecayRate;
//     beatCutOff = Math.max(this.beatCutOff, this.BEAT_MIN);
//   }
// }
// bpmTime = (new Date().getTime() - bpmStart)/msecsAvg;
}
};
Object.defineProperties(SONO.Visualizer.prototype, {
// Average normalized loudness across this.levelsCount frequency bands
// of the current snapshot in this.freqs.
aveLevel : {
get : function(){
var bandLevels = new Array(this.levelsCount);
var band, bin, acc;
// Normalize each band from the byte frequency data (maxes at 256).
for (band = 0; band < this.levelsCount; band++) {
acc = 0;
for (bin = 0; bin < this.levelBins; bin++) {
acc += this.freqs[(band * this.levelBins) + bin];
}
bandLevels[band] = acc / this.levelBins / 256;
// Lower bands are perceived more quietly; a weighting such as
// bandLevels[band] *= 1 + (band / this.levelsCount) / 2 could
// compensate, but is intentionally left off here.
}
//TODO - cap levels at 1?
// Average the per-band levels.
acc = 0;
for (band = 0; band < this.levelsCount; band++) {
acc += bandLevels[band];
}
return acc / this.levelsCount;
}
},
// Adaptive threshold raised after each detected beat.
beatCutOff : {
value : 0,
writable : true,
configurable : true,
enumerable : true
},
// Levels quieter than this are never treated as a beat.
BEAT_MIN : {
value : 0.15,
writable : true,
configurable : true,
enumerable : true
}
});
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment