Web Audio Spectrogram
<script>
console.clear();

if (!window.AudioContext) {
    if (!window.webkitAudioContext) {
        alert('AudioContext not supported!');
    }
    window.AudioContext = window.webkitAudioContext;
}
if (!navigator.mediaDevices.getUserMedia) {
    alert('getUserMedia not supported!');
}

//A cascade of moving averages (MAs): the first MA smooths the raw data,
//and each subsequent MA smooths the previous MA's output.
class MAGraph {
    constructor(periods) {
        this.periods = periods.map(Math.round);
        this.data = [];
        //Output of the final smoothing stage (assigned on first add()):
        this.ma = null;
        this._mas = this.periods.map(p => ({
            slidingSum: 0,
            values: [],
        }));
    }
    get length() {
        return this.data.length;
    }
    add(x) {
        if(Array.isArray(x) || ArrayBuffer.isView(x)) {
            x.forEach(a => this.add(a));
            return;
        }
        this.data.push(x);
        this.periods.forEach((p, i) => {
            //Stage 0 reads the raw data; stage i reads stage i-1's output:
            const data = i ? this._mas[i - 1].values
                           : this.data;
            MAGraph._calcMA(data, p, this._mas[i]);
        });
        if(!this.ma) {
            this.ma = this._mas[this.periods.length - 1].values;
        }
    }
    //Update a p-period moving average with the newest value in `data`,
    //keeping a sliding sum so each update is O(1):
    static _calcMA(data, p, maState) {
        const len = data.length,
              x = data[len - 1];
        let sum = maState.slidingSum + x,
            maVal;
        if(len > p) {
            //The window is full: drop the oldest value:
            sum -= data[len - p - 1];
            maVal = sum/p;
        }
        else {
            maVal = sum/len;
        }
        maState.values.push(maVal);
        maState.slidingSum = sum;
    }
}
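
//Example (editor's sketch): two cascaded stages, a 4-sample MA whose output
//feeds a 2-sample MA. `ma` exposes the final (smoothest) stage:
//  const g = new MAGraph([4, 2]);
//  g.add([1, 1, 1, 5]);    //accepts single values or (typed) arrays
//  g.length;               //4 raw samples stored in g.data
//  g.ma[3];                //1.5: the mean of the last two stage-0 averages (1 and 2)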
</script>
<script src="//unpkg.com/abo-utils@0.3"></script>
<script>//ABOUtils.Debug.alertErrors();</script>
<script src="//unpkg.com/chroma-js@1"></script>
<script src="//unpkg.com/stats.js@0.17"></script>
<script src="//unpkg.com/vue@2"></script>
<main id="app">
    <ul class="tabitems">
        <li><button @click="view.tuner = true" :class="{ active: view.tuner }">Tuner (WIP)</button></li>
        <li><button @click="view.tuner = false" :class="{ active: !view.tuner }">Unisons</button></li>
    </ul>

    <div id="tuner" class="tab" v-show="view.tuner">
        <h2>Time (waveform)</h2>
        <canvas id="kept-wave" height="0"></canvas>
        <div class="tools">
            <label id="tuner-target">
                <span>Target frequency</span>
                <input type="number" v-model.number="tuner.freq" />
            </label>
            <button id="keep-wave" @click="keepWave">Keep</button>
            <button id="restart" @click="restart">Restart</button>
        </div>
        <canvas id="tuner-wave"></canvas>

        <h2>Frequency (FFT)</h2>
        <canvas id="c2"></canvas>
    </div>

    <div id="unisons" class="tab" v-show="!view.tuner">
        <div class="tools">
            <button id="keep">Keep</button>
            <button id="restart" @click="restart">Restart</button>
            <label>
                <input type="checkbox" v-model="view.loudness" />
                Loudness mask
            </label>
            <label>
                <input type="checkbox" v-model="plot.squaredMagn" />
                Squared magnitude
            </label>
        </div>
        <div id="container">
            <canvas id="kept"></canvas>
            <canvas id="live"></canvas>
            <canvas v-show="view.loudness" id="loudness"></canvas>
        </div>
        <h2>
            For more information on how this is done, look at
            <a href="http://www.smartjava.org/content/exploring-html5-web-audio-visualizing-sound">this article</a>.
        </h2>
    </div>
</main>
//http://www.smartjava.org/content/exploring-html5-web-audio-visualizing-sound
//https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API/Visualizations_with_Web_Audio_API
(function() {
"use strict";

const ud = ABOUtils.DOM,
      uc = ABOUtils.Canvas,
      [$, $$] = ud.selectors();

const _state = {
    tuner: {
        freq: 1300,
        magn: 200,
        //Sample start times and length, in seconds:
        sampleTimes: [0, .03, .1, .30, 1, 3],
        sampleLen: .01,
    },
    plot: {
        approxFPS: 200,
        //Pixels per frequency bin (x) and per frame (y):
        zoom: /**/ [8, 2], //**/ [1,1],
        maxWidth: 1500,
        historySec: 3,
        smoothing: 0, //0-1
        //https://en.wikipedia.org/wiki/Short-time_Fourier_transform
        //"The magnitude squared of the STFT yields the spectrogram representation (...)"
        //squaredMagn: true,
        deciRange: /**/ [-120, -20], //*/[-150, -50],
        //https://gka.github.io/chroma.js/
        colorScale: chroma.scale(['#000', '#a0a', '#66f', '#0f0', '#ff0', '#f00', '#fff'])
                          .mode('lrgb')
                          .colors(256, 'rgb'),
    },
    reset: {
        volume: {
            //Compared against the sum of all bin bytes (each 0-255):
            min: 10000,
            factor: 1.5,
        },
        loop: false,
        clear: true,
    },
    view: {
        loudness: true,
        tuner: true,
    }
};
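
//Editor's note: chroma.js' .colors(256, 'rgb') should yield 256 [r, g, b]
//triplets, so a byte magnitude (0-255) from the analyser can index straight
//into the scale:
//  const [r, g, b] = _state.plot.colorScale[255]; //loudest value maps to white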
new Vue({
    el: '#app',
    data: _state,
    methods: {
        restart() {
            uiRestart = true;
        },
        keepWave() {
            //Snapshot the live tuner waveform onto the "kept" canvas:
            const canv = $('#kept-wave'),
                  ctx = canv.getContext('2d');
            canv.width = canWave.width;
            canv.height = canWave.height;
            ctx.drawImage(canWave, 0,0);
        }
    },
});
//Set up the audio graph (microphone -> analyser -> script processor):
let audioContext, analyser, buffer, fps, freqStep;
setupAudioNodes();

//Init UI
const canWave = $('#tuner-wave'),
      ctxWave = canWave.getContext('2d');

const plt = _state.plot,
      uiWidth = Math.min(analyser.frequencyBinCount * plt.zoom[0], plt.maxWidth),
      uiHistory = Math.round(fps * plt.historySec * plt.zoom[1]), //Height of the spectrogram
      uiLegend = 20,
      freqMarkers = [],
      canvas = new uc.CanvasPixelBuffer($('#live'), uiWidth, uiHistory + uiLegend);

let uiStale = false,
    uiRestart = false;
let stats;
function initUI() {
    //Map a frequency (Hz) to an x pixel coordinate:
    function getUIX(hz) {
        return Math.round(hz * plt.zoom[0] / freqStep);
    }

    //https://github.com/mrdoob/stats.js/
    stats = new Stats();
    stats.showPanel(0); // 0: fps, 1: ms, 2: mb, 3+: custom
    document.body.appendChild(stats.dom).id = 'stats';

    canWave.width = 1200;
    canWave.height = 200;

    //Draw legend (color scale and kHz):
    _state.plot.colorScale.forEach((c, i) => {
        for(let y = 0; y < uiLegend/2; y++) {
            canvas.setPixel(i, y, c);
        }
    });
    //Tick marks every kHz; taller every 5 and 10 kHz:
    const step = 1000;
    let mark = 1, x = 0;
    while(x < uiWidth) {
        x = getUIX(mark * step);
        if(x > uiWidth) { break; }
        const h = .1 + (mark%5 ? 0 : .4) + (mark%10 ? 0 : .5);
        freqMarkers.push([x, h]);
        mark++;
    }
    freqMarkers.forEach(m => {
        let [x, h] = m;
        h *= uiLegend/2;
        for(let y = 0; y < h; y++) {
            canvas.setPixel(x, uiLegend/2 + y, [255, 255, 255]);
        }
    });

    //Paint the "Equal-loudness contour":
    //https://en.wikipedia.org/wiki/Equal-loudness_contour
    const loud = $('#loudness');
    const ctx = loud.getContext('2d');
    loud.width = canvas.w;
    loud.height = canvas.h;
    //Horizontal gradients of semi-transparent black, masking the frequency
    //ranges the ear is less sensitive to:
    function createGrad(hz1, hz2, color1, color2) {
        const x1 = getUIX(hz1),
              x2 = getUIX(hz2),
              grad = ctx.createLinearGradient(x1,0, x2,0);
        grad.addColorStop(0, color1);
        grad.addColorStop(1, color2);
        ctx.fillStyle = grad;
        ctx.fillRect(x1,uiLegend, (x2-x1),canvas.h);
    }
    createGrad(    0,   200, '#000f', '#0008');
    createGrad(  200,  1000, '#0008', '#0004');
    createGrad( 1000,  1500, '#0004', '#0006');
    createGrad( 1500,  3500, '#0006', '#0000');
    createGrad( 3500, 10000, '#0000', '#000a');
}
initUI();
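
//Editor's note (assuming sr = 48000, so freqStep = 187.5 Hz, and zoom[0] = 8):
//  getUIX(1000) = Math.round(1000 * 8 / 187.5) = 43
//i.e. one kHz tick roughly every 43 pixels.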
function setupAudioNodes() {
    const plt = _state.plot;

    // create the audio context
    const context = audioContext = new AudioContext({
        //https://developer.mozilla.org/en-US/docs/Web/API/AudioContextOptions#Browser_compatibility
        //"Connecting AudioNodes from AudioContexts with different sample-rate is currently not supported."
        // sampleRate: 8000,
    });

    //https://stackoverflow.com/questions/44502536/determining-frequencies-in-js-audiocontext-analysernode
    const sr = context.sampleRate;
    let samplesPerFrame = sr / plt.approxFPS;
    //Round the sample size to the nearest power of 2 (required for .fftSize):
    samplesPerFrame = Math.pow(2, Math.round(Math.log2(samplesPerFrame)));
    fps = sr/samplesPerFrame;
    console.log('spf', samplesPerFrame, 'fps', fps);

    //The ScriptProcessorNode fires onaudioprocess once per `samplesPerFrame`
    //samples, which drives the spectrogram's update rate:
    const processor = context.createScriptProcessor(samplesPerFrame, 1, 1);
    processor.onaudioprocess = loop;
    processor.connect(context.destination);
    //The rest of the .connect() is done in the getUserMedia() promise below (when we get microphone data).

    // setup an analyzer
    analyser = context.createAnalyser();
    analyser.smoothingTimeConstant = plt.smoothing;
    //https://stackoverflow.com/questions/14789283/what-does-the-fft-data-in-the-web-audio-api-correspond-to
    analyser.fftSize = samplesPerFrame;
    analyser.minDecibels = plt.deciRange[0];
    analyser.maxDecibels = plt.deciRange[1];

    const bins = analyser.frequencyBinCount;
    buffer = new Uint8Array(bins);
    //The bins cover 0 Hz up to the Nyquist frequency (half the sample rate):
    const maxFreq = sr/2;
    freqStep = maxFreq / bins;
    console.log('Freq', 0, maxFreq, 'Step', freqStep, 'Bins', bins);

    navigator.mediaDevices.getUserMedia({
        audio: {
            //https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints#Browser_compatibility
            //sampleRate: 8000,
        }
    })
    .then(stream => {
        //Mic stream
        const source = context.createMediaStreamSource(stream);
        source.connect(analyser);
        analyser.connect(processor);
    })
    .catch(err => {
        console.error('The following gUM error occurred: ' + err);
    });
}
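
//Worked example (editor's note, assuming a typical 48 kHz microphone):
//  samplesPerFrame = 2^round(log2(48000 / 200)) = 2^8 = 256
//  fps             = 48000 / 256                = 187.5 frames/s
//  bins            = 256 / 2                    = 128
//  freqStep        = (48000 / 2) / 128          = 187.5 Hz per bin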
//Once per audio frame: collect waveform samples and draw one spectrogram row.
function loop(e) {
    //if(_state.view.tuner) { return; }
    stats.begin();
    collectWave(e);
    analyser.getByteFrequencyData(buffer);
    drawSpectrogram(buffer);
    stats.end();
}
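
//Editor's note: getByteFrequencyData() maps each bin's dB level linearly onto
//0-255 between analyser.minDecibels and analyser.maxDecibels (here -120/-20 dB),
//clamping anything outside that range:
//  byte ≈ 255 * (dB - minDecibels) / (maxDecibels - minDecibels)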
const wavesMaxLen = audioContext.sampleRate * 3.5,
      wavePeriod = Math.round(audioContext.sampleRate / _state.tuner.freq),
      waveMAPeriods = [wavePeriod/2, wavePeriod/3, wavePeriod/5, wavePeriod/7];
console.log('MA samples', wavePeriod);

let waveBuffer = new MAGraph(waveMAPeriods),
    collectingWaves = false;

function collectWave(e) {
    //Collect waveform data until the buffer is full (3.5s worth of samples):
    let waveData = e.inputBuffer.getChannelData(0);

    //It takes a little time before we actually get microphone data..
    if(!collectingWaves) { collectingWaves = (waveData[0] !== 0); }

    if(collectingWaves && (waveBuffer.length < wavesMaxLen)) {
        waveBuffer.add(waveData);

        //First batch of wave data.
        //Normalize the start of a wave to have consistent rendering (avoid jumping):
        if((waveBuffer.length > wavePeriod) && !waveBuffer.__sampleOffset) {
            //Scan the smoothed waveform for an upward zero crossing:
            const data = waveBuffer.ma;
            let x = wavePeriod,
                y1, y2;
            for(; x < data.length; x++) {
                y1 = data[x-1];
                y2 = data[x];
                if((y1 < 0) && (y2 >= 0)) { break; }
            }
            if(x >= data.length) {
                //No usable data yet:
                return;
            }
            else {
                waveBuffer.__sampleOffset = x;
            }
        }
        if(waveBuffer.__sampleOffset) {
            drawStrobe(waveBuffer);
        }
    }
}
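
//Editor's note: anchoring at an upward zero crossing means every strobe redraw
//starts at the same phase of the tone. E.g. if the smoothed samples around the
//scan were [-.2, -.1, .1, .3], the crossing (-.1 to .1) fixes __sampleOffset there.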
function clear() {
    //Clear everything below the legend:
    canvas.targetContext.clearRect(0, uiLegend, canvas.w,canvas.h);
    canvas.sync();
}

let row = 0,
    volPrev = 0;
const colorSeparator = [0, 0, 0];

function drawSpectrogram(array) {
    const plt = _state.plot,
          res = _state.reset,
          volTrigger = res.volume,
          [pxW, pxH] = plt.zoom;

    //Auto-reset by volume: restart the plot when the summed magnitude spikes
    //above both an absolute minimum and `factor` times the smoothed previous level:
    const volCurr = array.reduce((a, b) => a + b, 0);
    if((volCurr > volTrigger.min) && (volCurr > (volPrev * volTrigger.factor))) {
        uiRestart = true;
        console.log('Reset by volume', Math.round(volPrev), volCurr);
    }
    volPrev = (volCurr + 2*volPrev)/3;

    //Loop display?
    if((row >= uiHistory) && !uiRestart) {
        if(res.loop) { uiRestart = true; }
        else { return; }
    }
    if(uiRestart) {
        row = 0;
        uiRestart = false;
        waveBuffer = new MAGraph(waveMAPeriods);
        if(res.clear) {
            clear();
        }
    }

    function drawRow(y, color) {
        let x = 0, pxColor;
        for (let i = 0; i < array.length; i++) {
            if(x >= uiWidth) { break; }
            // draw each pixel with the specific color
            if(color) { pxColor = color; }
            else {
                //Optionally square the magnitude (scaled back into 0-255):
                const magn = plt.squaredMagn ? Math.round(array[i] * array[i] / 255) : array[i];
                pxColor = plt.colorScale[magn];
            }
            for(let xx = 0; xx < pxW; xx++) {
                canvas.setPixel(x, y, pxColor);
                x++;
            }
        }
    }
    //Each frame becomes `pxH` rows of pixels, plus a black separator line:
    for(let yy = 0; yy < pxH; yy++) {
        drawRow(uiLegend + row + yy);
    }
    drawRow(uiLegend + row + pxH, colorSeparator);

    row = (row + pxH);
    uiStale = true;
}
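
//Editor's note: with zoom = [8, 2], each bin is drawn 8px wide and each frame
//2px tall. Using the 48 kHz example numbers: 128 bins * 8 = 1024px of width
//(capped at maxWidth), and 187.5 fps * 3 s * 2 ≈ 1125 rows of history.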
const c2 = $('#c2'),
      ctx2 = c2.getContext('2d'),
      h2 = 255.5;
let b2 = new Float32Array(analyser.frequencyBinCount);
c2.width = uiWidth;
c2.height = h2 + 5;

function drawFreq() {
    const plt = _state.plot,
          [pxW, pxH] = plt.zoom;

    ctx2.clearRect(0,0, uiWidth, h2 + 999);

    //Mark every kHz:
    freqMarkers.forEach(m => {
        const x = m[0] + .5,
              h = m[1] * h2;
        ctx2.beginPath();
        ctx2.strokeStyle = 'white';
        ctx2.moveTo(x, 0);
        ctx2.lineTo(x, h);
        ctx2.stroke();
    });

    //(Logarithmic) decibels from -255 to 0?
    //Same as getByteFrequencyData with min/maxDecibels -255/0
    analyser.getFloatFrequencyData(b2);
    ctx2.strokeStyle = 'lime';
    drawGraph(ctx2, b2, x => x * pxW, y => -y);

    //Decibels within min/maxDecibels
    analyser.getByteFrequencyData(buffer);
    ctx2.strokeStyle = 'hotpink';
    drawGraph(ctx2, buffer, x => x * pxW, y => h2 - y);
}
//TODO..
function drawStrobe(waveform) {
    const sr = audioContext.sampleRate,
          tun = _state.tuner,
          sampleBatch = Math.round(tun.sampleLen * sr),
          sampleStarts = tun.sampleTimes.map(x => Math.round(x * sr));
    const sampleGraphW = canWave.width / tun.sampleTimes.length,
          zoomX = sampleGraphW / sampleBatch,
          centerY = Math.round(canWave.height/2) + .5,
          multY = tun.magn;

    //Clear the graph, and draw a 0 line and target wave length:
    ctxWave.clearRect(0,0, canWave.width, canWave.height);
    ctxWave.strokeStyle = 'gray';
    drawGraph(ctxWave, [centerY, centerY], x => x * canWave.width);
    ctxWave.strokeStyle = 'yellow';
    drawGraph(ctxWave, [centerY*1.1, centerY*1.1], x => x * zoomX * wavePeriod);

    sampleStarts.forEach((ss, i) => {
        const sampleStart = waveform.__sampleOffset + ss;
        const data = waveform.data.slice(sampleStart, sampleStart + sampleBatch),
              ma = waveform.ma.slice(sampleStart, sampleStart + sampleBatch);
        //Not enough data yet for this sample batch:
        if(data.length === 0) { return; }

        const xStart = Math.round(i * sampleGraphW) + .5;
        //Raw waveform in green, smoothed (moving average) in yellow:
        ctxWave.strokeStyle = 'green';
        drawGraph(ctxWave, data, x => xStart + (x * zoomX), y => y*multY + centerY);
        ctxWave.strokeStyle = 'yellow';
        drawGraph(ctxWave, ma, x => xStart + (x * zoomX), y => y*multY + centerY);

        //Divider line between batches:
        if(i) {
            ctxWave.strokeStyle = 'silver';
            drawGraph(ctxWave, [0, canWave.height], x => xStart);
        }
    });
}
//Plot `data` as a polyline, with optional x and y coordinate transforms:
function drawGraph(ctx, data, modX, modY) {
    if(modY) {
        data = data.map(modY);
    }
    ctx.beginPath();
    let x;
    for (let i = 0, len = data.length; i < len; i++) {
        x = modX ? modX(i) : i;
        (i === 0) ? ctx.moveTo(x, data[i])
                  : ctx.lineTo(x, data[i]);
    }
    ctx.stroke();
}
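
//Example (editor's sketch): draw [0, 1, 0] as a triangle spanning a
//50px-wide, 50px-tall area (y flipped so positive values point up):
//  drawGraph(ctx2, [0, 1, 0], x => x * 25, y => 50 - y*50);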
//Redraw at most once per animation frame, and only when new data has arrived:
let renders = 0;
(function render() {
    if(uiStale) {
        canvas.render();
        //if(renders % 4 === 0)
        drawFreq();
        uiStale = false;
        renders++;
    }
    requestAnimationFrame(render);
})();

//"Keep": snapshot the top strip of the live spectrogram onto the background canvas:
$('#keep').onclick = function(e) {
    const kept = $('#kept'),
          w = kept.width = canvas.w,
          h = kept.height = uiLegend - 2; //canvas.h;
    kept.getContext('2d').drawImage($('#live'), 0,uiLegend, w,h, 0,0, w,h);
    uiRestart = true;
};
})();
$color-bg: black;
$color-tab: #333;

body {
    font-family: Georgia, serif;
    background: $color-bg;
    color: #eee;

    ul {
        margin: 0;
        padding: 0;
        list-style: none;
    }
    button, input {
        font: inherit;
    }
    button {
        cursor: pointer;
        padding: .3em .6em;
    }
    canvas {
        display: block;
    }
}

.tabitems {
    display: flex;

    li:not(:first-child) {
        margin-left: .5em;
    }
    button {
        padding: 1em;
        border: none;
        background: rgba($color-tab, .5);
        color: #eee;

        &:focus {
            outline: .2em dashed $color-tab;
        }
        &:hover, &.active {
            background: $color-tab;
        }
    }
}

.tab {
    padding: 1em;
    background: $color-tab;
    color: #eee;

    > :first-child {
        margin-top: 0;
    }
    > :last-child {
        margin-bottom: 0;
    }
}

#tuner {
    canvas {
        background: black;
    }
}

.tools {
    margin-bottom: .5em;

    input[type="number"] {
        width: 6ch;
        text-align: right;
    }
}

//Stacking order: kept snapshot at the bottom, live spectrogram above it,
//loudness mask on top:
#container {
    position: relative;

    #kept {
        position: absolute;
        top: 0; left: 0;
        //opacity: .35;
    }
    #live {
        position: relative;
        z-index: 1;
    }
    #loudness {
        position: absolute;
        top: 0; left: 0;
        z-index: 2;
    }
}

#stats {
    //position: absolute !important;
    left: auto !important;
    right: 0;
}