@ededejr · Created January 25, 2022 02:06
Generate a waveform on a Canvas element by passing in a URL. Extracted from an early draft of de's personal site.
import React, { useEffect, useRef, useState } from "react";
import useAudioContext from "./useAudioContext";
import { AppColor, de } from "../../stylesheet";
import { useStylesheet } from "../../stylesheet/hooks";

const Canvas = de.createMotion.canvas`
  width: 100%;
  height: 100%;
`;
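
/**
 * Render an audio file's waveform onto a canvas element.
 *
 * @param source The url of the audio file to visualize
 * @param color The theme color used to stroke the waveform
 * @param complexWaveform Sample the audio more densely for a more detailed waveform
 * @returns The decoded audio data, the rendered canvas element and a ref to it
 */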
export default function useWaveform(
  source: string,
  color: AppColor = "primary",
  complexWaveform?: boolean
) {
  const stylesheet = useStylesheet();
  const audioContext = useAudioContext();
  const [audioData, setAudioData] = useState<AudioData>();
  const canvasRef = useRef<HTMLCanvasElement>(null);

  useEffect(() => {
    fetchAudioVisualizationData(audioContext, source, complexWaveform).then(
      (data) => {
        if (canvasRef.current) {
          draw(
            data.waveformData,
            canvasRef.current,
            stylesheet.theme.colors[color]
          );
          setAudioData(data);
        }
      }
    );
    // `source` is included so a new url triggers a fresh fetch and redraw
  }, [source, color, complexWaveform]);

  const canvas = <Canvas ref={canvasRef} />;
  return { audioData, canvas, canvasRef };
}

interface AudioData {
  audioBuffer: AudioBuffer;
  duration: number;
  url: string;
  waveformData: number[];
}
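
// Cache decoded AudioBuffers by url so repeated renders of the
// same source skip the fetch and decode steps.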
const AudioBufferCache = new Map<string, AudioBuffer>();

/**
 * Fetch and decode an audio file, returning its normalized waveform data.
 * @param audioContext The audio context to use
 * @param url The source url
 * @param complexWaveform Sample the audio more densely
 * @returns The decoded buffer, its duration and the normalized waveform data
 */
async function fetchAudioVisualizationData(
  audioContext: AudioContext,
  url: string,
  complexWaveform?: boolean
): Promise<AudioData> {
  let audioBuffer: AudioBuffer;

  if (AudioBufferCache.has(url)) {
    // The non-null assertion is safe: the `has` check above
    // guarantees an entry exists for this url.
    audioBuffer = AudioBufferCache.get(url)!;
  } else {
    const response = await fetch(url);
    const arrayBuffer = await response.arrayBuffer();
    audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
    AudioBufferCache.set(url, audioBuffer);
  }

  return {
    audioBuffer,
    duration: audioBuffer.duration,
    url,
    waveformData: filterAndNormalizeData(audioBuffer, complexWaveform),
  };
}

/**
 * Filter and normalize data in an AudioBuffer.
 *
 * @param audioBuffer AudioBuffer
 * @param complexWaveform Display data as a complex waveform
 * @returns The averaged samples, normalized to the range [0, 0.95]
 */
function filterAndNormalizeData(
  audioBuffer: AudioBuffer,
  complexWaveform?: boolean
) {
  // We only need to work with one channel of data
  const rawData = audioBuffer.getChannelData(0);
  // Number of samples we want to have in our final data set
  const samples = complexWaveform
    ? WAVEFORM_RATIOS.complex(rawData.length)
    : WAVEFORM_RATIOS.simple(rawData.length);
  // The number of samples in each subdivision
  const blockSize = Math.floor(rawData.length / samples);
  const filteredData = [];

  for (let i = 0; i < samples; i++) {
    // The location of the first sample in the block
    const blockStart = blockSize * i;
    let sum = 0;
    // Find the sum of all the samples in the block
    for (let j = 0; j < blockSize; j++) {
      sum = sum + Math.abs(rawData[blockStart + j]);
    }
    // Divide the sum by the block size to get the average
    filteredData.push(sum / blockSize);
  }

  // Scale so the loudest block maps to 1, then clamp to 0.95
  // to leave some vertical headroom when drawing.
  const multiplier = Math.pow(Math.max(...filteredData), -1);
  return filteredData.map((n) => Math.min(n * multiplier, 0.95));
}
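
/**
 * Draw the normalized waveform data onto a canvas,
 * scaled for the device's pixel ratio.
 *
 * @param normalizedData The waveform data produced by filterAndNormalizeData
 * @param canvas The target canvas element
 * @param strokeStyle The stroke color for the waveform
 */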
function draw(
  normalizedData: number[],
  canvas: HTMLCanvasElement,
  strokeStyle: string
) {
  // Set up the canvas
  const { clientHeight, clientWidth } = canvas;
  const dpr = window.devicePixelRatio || 1;
  canvas.width = clientWidth * dpr;
  canvas.height = clientHeight * dpr;
  const yOrigin = clientHeight * 0.5;
  const ctx = canvas.getContext("2d");

  if (ctx) {
    ctx.scale(dpr, dpr);
    // Set Y = 0 to be in the middle of the canvas, instead of top left
    ctx.translate(0, yOrigin);
    // The context is already scaled by dpr, so segment widths are
    // computed in CSS pixels (clientWidth), not device pixels.
    const dataPointWidth = clientWidth / normalizedData.length;
    // Draw the line segments, alternating above and below the midline
    for (let i = 0; i < normalizedData.length; i++) {
      const dataPoint = normalizedData[i];
      const yCoordinate = Math.min(0.92, dataPoint) * yOrigin;
      const xCoordinate = i * dataPointWidth;
      drawLineSegment(
        ctx,
        xCoordinate,
        yCoordinate,
        dataPointWidth,
        Boolean((i + 1) % 2),
        strokeStyle
      );
    }
  }
}
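
/**
 * Draw a single waveform segment: a vertical line up (or down)
 * from the midline, capped with a semicircular arc.
 *
 * @param ctx The 2d rendering context
 * @param x The x coordinate of the segment's left edge
 * @param y The segment's height above (or below) the midline
 * @param width The width of the segment
 * @param isEven Whether the segment points up or down
 * @param strokeStyle The stroke color
 */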
function drawLineSegment(
  ctx: CanvasRenderingContext2D,
  x: number,
  y: number,
  width: number,
  isEven: boolean,
  strokeStyle: string
) {
  ctx.lineWidth = 1;
  ctx.lineCap = "round";
  ctx.strokeStyle = strokeStyle;
  ctx.beginPath();
  y = isEven ? y : -y;
  ctx.moveTo(x, 0);
  ctx.lineTo(x, y);
  ctx.arc(x + width / 2, y, width / 2, Math.PI, 0, isEven);
  ctx.lineTo(x + width, 0);
  ctx.stroke();
}
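
// Build a function that reduces a raw sample count to a target
// number of blocks by applying a fixed ratio.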
const createRatioFn = (ratio: number) => (value: number) =>
  Math.floor(ratio * value);

const WAVEFORM_RATIOS = {
  // Derived by observing a given sample of length 2048960,
  // targeting 2000 output samples:
  // 2000 / (2000 + 2048960)
  complex: createRatioFn(0.0009751530990365487),
  // Derived by observing a given sample of length 2048960,
  // targeting 500 output samples:
  // 500 / (500 + 2048960)
  simple: createRatioFn(0.00024396670342431665),
};
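
// Example usage (a minimal sketch): the hook returns a ready-to-render
// canvas element alongside the decoded audio data. The `TrackWaveform`
// component and "/audio/track.mp3" url below are hypothetical and only
// for illustration.
//
// function TrackWaveform() {
//   const { audioData, canvas } = useWaveform("/audio/track.mp3", "primary");
//   return (
//     <figure>
//       {canvas}
//       {audioData && <figcaption>{audioData.duration.toFixed(1)}s</figcaption>}
//     </figure>
//   );
// }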