@feymartynov
Created December 1, 2019 03:11
WebAudio simple single-track sequencer


How to run

  1. Create four 1-bar WAV samples at 120 BPM and 44100 Hz, named 1.wav through 4.wav (see the sketch after this list for one way to generate them).
  2. Run a web server that serves static files from the project directory, e.g. python -m http.server, and open http://localhost:8000. Just opening index.html in the browser won't work because of the CORS policy.
  3. Press Play and you should hear your files playing in sequence.
  4. Try stopping and resuming playback, seeking (while playing or idle) by moving the Position slider, and changing the Tempo.
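
If you don't have suitable samples handy, the sketch below can generate placeholder ones. It is not part of the gist: the file name generate_samples.js and the sine pitches are made up, and it assumes Node.js is installed. It writes four 2-second mono 16-bit WAV files at 44100 Hz, i.e. one 4/4 bar at 120 BPM each. Run it in the project directory with node generate_samples.js.

// generate_samples.js (hypothetical helper, not part of the gist).
// Writes 1.wav..4.wav: one 4/4 bar at 120 BPM (2 seconds) of a sine tone,
// mono, 16-bit PCM, 44100 Hz.
const fs = require('fs');

const SAMPLE_RATE = 44100;
const SECONDS = 2; // 4 beats at 120 BPM = 4 * 0.5 s.
const PITCHES = [261.63, 329.63, 392.00, 523.25]; // C4, E4, G4, C5 -- arbitrary.

function writeWav(path, frequency) {
  const numSamples = SAMPLE_RATE * SECONDS;
  const dataSize = numSamples * 2; // 16-bit mono.
  const buf = Buffer.alloc(44 + dataSize);

  // RIFF/WAVE header for 16-bit PCM mono.
  buf.write('RIFF', 0);
  buf.writeUInt32LE(36 + dataSize, 4);
  buf.write('WAVE', 8);
  buf.write('fmt ', 12);
  buf.writeUInt32LE(16, 16);              // fmt chunk size
  buf.writeUInt16LE(1, 20);               // audio format: PCM
  buf.writeUInt16LE(1, 22);               // channels: mono
  buf.writeUInt32LE(SAMPLE_RATE, 24);     // sample rate
  buf.writeUInt32LE(SAMPLE_RATE * 2, 28); // byte rate
  buf.writeUInt16LE(2, 32);               // block align
  buf.writeUInt16LE(16, 34);              // bits per sample
  buf.write('data', 36);
  buf.writeUInt32LE(dataSize, 40);

  // Sine tone at half volume.
  for (let i = 0; i < numSamples; i++) {
    const sample = 0.5 * Math.sin(2 * Math.PI * frequency * i / SAMPLE_RATE);
    buf.writeInt16LE(Math.round(sample * 32767), 44 + i * 2);
  }

  fs.writeFileSync(path, buf);
}

PITCHES.forEach((freq, i) => writeWav(`${i + 1}.wav`, freq));

Any other 1-bar 44100 Hz WAV files recorded at 120 BPM will work just as well.
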
clock.js

class Clock {
  constructor(interval, callback) {
    this.interval = interval;
    this.callback = callback;
    this.timerHandle = null;
  }

  start() {
    this.timerHandle = setInterval(this.callback, this.interval);
    postMessage('started');
  }

  stop() {
    if (this.timerHandle) clearInterval(this.timerHandle);
  }

  setInterval(interval) {
    this.interval = interval;

    if (this.timerHandle) {
      this.stop();
      this.start();
    }
  }
}

const clock = new Clock(1000, () => postMessage('tick'));

self.onmessage = evt => {
  switch (evt.data.command) {
    case 'start':
      clock.start();
      break;
    case 'stop':
      clock.stop();
      break;
    case 'setInterval':
      clock.setInterval(evt.data.interval);
      break;
  }
};
index.html

<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="utf-8" />
    <title>Single track sequencer</title>
    <link rel="stylesheet" type="text/css" href="style.css" />
  </head>
  <body>
    <div>
      <label for="tempoSlider">Tempo</label>
      <br />
      <input id="tempoSlider" class="slider" type="range" min="30.0" max="240.0" step="1" value="120" />
      <span id="tempoDisplay">120</span>
    </div>
    <div>
      <label for="positionSlider">Position</label>
      <br />
      <input id="positionSlider" class="slider" type="range" min="0" max="64" step="1" value="0" />
      <span id="positionDisplay">0</span>
    </div>
    <br />
    <div>
      <button id="playButton">Play</button>
    </div>
    <script src="main.js"></script>
  </body>
</html>
main.js

class Sample {
  constructor(audioContext) {
    this.audioContext = audioContext;
    this.buffer = null;
  }

  async load(url) {
    let response = await fetch(url);
    if (!response.ok) throw new Error(`Expected status 200, got ${response.status}`);

    let buffer = await this.audioContext.decodeAudioData(await response.arrayBuffer());
    if (!buffer) throw new Error(`Error decoding file data: ${url}`);

    this.buffer = buffer;
  }

  getBuffer() {
    if (this.buffer) {
      return this.buffer;
    } else {
      throw new Error('Sample not loaded');
    }
  }
}
///////////////////////////////////////////////////////////////////////////////
class Clip {
  constructor(audioContext, id, sample, tempo) {
    this.audioContext = audioContext;
    this.id = id;
    this.buffer = sample.getBuffer();
    this.tempo = tempo;
    this.source = null;
    this.playing = false;
  }

  getDuration() {
    // Use the decoded buffer so the duration is available even before the clip is scheduled.
    return this.buffer.duration;
  }

  playAt(time, offset, duration, tempo) {
    this.source = this.audioContext.createBufferSource();
    this.source.buffer = this.buffer;
    this.source.playbackRate.value = tempo / this.tempo;
    this.source.connect(this.audioContext.destination);
    this.source.start(time, offset, duration);
  }

  stopAt(time) {
    if (this.source) this.source.stop(time);
  }

  onEnded(callback) {
    if (this.source) {
      this.source.onended = callback;
    } else {
      throw new Error('Source not set');
    }
  }
}
///////////////////////////////////////////////////////////////////////////////
class Track {
  constructor() {
    this.timeline = [];
  }

  addClip(clip, start, length) {
    this.timeline.push({ clip, start, length });
  }

  getTimeline() {
    return this.timeline;
  }
}
///////////////////////////////////////////////////////////////////////////////
class Sequencer extends EventTarget {
  constructor(audioContext, track) {
    super();
    this.audioContext = audioContext;
    this.track = track;
    this.audioUnlocked = false; // See `_unlockAudio` method.

    // Tick in a separate thread for better synchronization.
    this.clockWorker = new Worker('clock.js');
    this.clockWorker.onmessage = evt => evt.data === 'tick' && this._onTick();

    this.scheduledClips = {};
    this.counter = 0; // Counter for faster access to scheduled clips.
    this.playing = false;
    this.nextQuant = null; // Next time to increment `position` and send a tick event.
    this.playingStoppedAt = 0; // Audio context time when stopped for the last time.
    this.position = 0; // Current position in seconds.

    // Set the initial tempo last: `setTempo` uses the clock worker and `position`.
    this.setTempo(120.0);
  }

  isPlaying() {
    return this.playing;
  }

  // Get tempo in BPM.
  getTempo() {
    return this.tempo;
  }

  // Set tempo in BPM.
  setTempo(value) {
    if (this.tempo) this.position *= this.tempo / value; // Rescale position to the new tempo.
    this.tempo = value; // 1/4th notes per minute.
    this.quant = 60.0 / value / 4; // Duration of a 1/16th note in seconds.
    this.clockWorker.postMessage({ command: 'setInterval', interval: this.quant * 1000 });
    this._restart(); // Restart to reschedule clips with new settings.
  }

  // Set position in 1/16th note steps.
  setPosition(value) {
    this.position = value * this.quant;
    this._restart(); // Restart to reschedule clips with new settings.
  }

  _restart() {
    if (!this.playing) return;
    this.stop();
    this.play();
  }
  play() {
    if (this.playing) return;
    if (!this.audioUnlocked) this._unlockAudio();

    // Schedule a little bit into the future to prevent unsync.
    let playingStart = this.audioContext.currentTime + this.quant;
    let translationDelta = playingStart - this.position;

    // Schedule the arrangement.
    for (let { clip, start, length } of this.track.getTimeline()) {
      let absStart = start * this.quant + translationDelta;
      let offset = playingStart - absStart;
      let secondsLength = length * this.quant;
      let scheduled = true;

      if (absStart >= playingStart) {
        // Clip is in the future. Schedule it from the beginning.
        clip.playAt(absStart, 0, secondsLength, this.tempo);
      } else if (offset < secondsLength) {
        // Clip is in the middle. Start with the offset.
        clip.playAt(playingStart, offset, secondsLength - offset, this.tempo);
      } else {
        // Clip is in the past. Ignore it.
        scheduled = false;
      }

      if (scheduled) {
        // Remember that the clip is scheduled. It is used when stopping playback.
        let counter = this.counter++;
        this.scheduledClips[counter] = clip;
        clip.onEnded(() => delete this.scheduledClips[counter]);
      }
    }

    // Check whether scheduling took too long to warn about possible unsync.
    let delta = this.audioContext.currentTime - playingStart;

    if (delta > 0) {
      console.error(`Unsync detected. Scheduling didn't finish in time. Delta is ${delta}`);
    } else if (delta > -0.1) {
      console.warn(`About to unsync. Delta is ${delta}`);
    }

    this.nextQuant = playingStart;
    this.playing = true;
    this.clockWorker.postMessage({ command: 'start' });
  }
  stop() {
    if (!this.playing) return;

    // Schedule stopping clips a little bit into the future to prevent unsync.
    let stopTime = this.audioContext.currentTime + this.quant;

    // Schedule stop of all scheduled clips.
    for (let clip of Object.values(this.scheduledClips)) clip.stopAt(stopTime);

    this.scheduledClips = {};
    this.playing = false;
    this.clockWorker.postMessage({ command: 'stop' });
  }
  _unlockAudio() {
    // Play a silent buffer once to unlock the audio.
    // This prevents freezing when nothing is scheduled right at the beginning.
    let buffer = this.audioContext.createBuffer(1, 1, 44100);
    let node = this.audioContext.createBufferSource();
    node.buffer = buffer;
    node.connect(this.audioContext.destination);
    node.start(0);
    this.audioUnlocked = true;
  }
  _onTick() {
    if (!this.playing || this.audioContext.currentTime <= this.nextQuant) return;

    this.nextQuant += this.quant;
    this.position += this.quant;

    let time = Math.trunc(this.position / this.quant);
    let event = new CustomEvent('tick', { detail: { time } });
    setTimeout(() => this.dispatchEvent(event), 0);
  }
}
///////////////////////////////////////////////////////////////////////////////
async function composeTrack(audioContext) {
  let track = new Track();

  // Put 1.wav, 2.wav, 3.wav & 4.wav sequentially in 4 bars.
  for (let i of new Array(4).keys()) {
    let sample = new Sample(audioContext);
    await sample.load(`${i + 1}.wav`);
    let clip = new Clip(audioContext, i, sample, 120.0);
    track.addClip(clip, i * 16, 16);
  }

  return track;
}
async function init() {
  // Sequencer initialization.
  let audioContext = new AudioContext();
  if (audioContext.state === 'suspended') audioContext.resume();

  let track = await composeTrack(audioContext);
  let sequencer = new Sequencer(audioContext, track);

  // DOM elements.
  let tempoSliderEl = document.getElementById('tempoSlider');
  let tempoDisplayEl = document.getElementById('tempoDisplay');
  let positionSliderEl = document.getElementById('positionSlider');
  let positionDisplayEl = document.getElementById('positionDisplay');
  let playButtonEl = document.getElementById('playButton');

  // Tempo slider and display.
  tempoSliderEl.addEventListener('input', evt => {
    sequencer.setTempo(evt.target.value);
    tempoDisplayEl.innerText = evt.target.value;
  });

  // Position slider and display.
  positionSliderEl.addEventListener('input', evt => {
    sequencer.setPosition(evt.target.value);
    positionDisplayEl.innerText = evt.target.value;
  });

  sequencer.addEventListener('tick', evt => {
    positionSliderEl.value = evt.detail.time;
    positionDisplayEl.innerText = evt.detail.time;
  });

  // Play/stop button.
  playButtonEl.addEventListener('click', () => {
    // Autoplay policy: the context can only be resumed in response to a user gesture.
    if (audioContext.state === 'suspended') audioContext.resume();

    if (sequencer.isPlaying()) {
      sequencer.stop();
      playButtonEl.innerText = 'Play';
    } else {
      sequencer.play();
      playButtonEl.innerText = 'Stop';
    }
  });
}
window.addEventListener('load', () => init().catch(err => console.error(err)));
style.css

#playButton {
  font-size: 2em;
}

.slider {
  height: 20px;
  width: 200px;
}