Skip to content

Instantly share code, notes, and snippets.

@ddrscott
Created June 13, 2023 21:05
Show Gist options
  • Save ddrscott/080a9b411ad406e1e5d7e6f3d36d6b48 to your computer and use it in GitHub Desktop.
Save ddrscott/080a9b411ad406e1e5d7e6f3d36d6b48 to your computer and use it in GitHub Desktop.

Prompt

I want to create an FFT visualization in a single-page web app. Please separate the HTML, CSS, and JavaScript. The canvas should fill up the entire bottom of the screen. Provide a button at the top to start and stop recording. The FFT should be based on Web Audio, taking microphone recordings.

Code

<!DOCTYPE html>
<html>
  <head>
    <meta charset="UTF-8">
    <title>FFT Visualization</title>
    <style>
      /* Remove default page margins so the canvas can reach the edges. */
      body {
        margin: 0;
        padding: 0;
      }
      /* Stretch the canvas over the whole viewport.
         NOTE(review): this scales the canvas with CSS only — the canvas
         element's width/height attributes (its drawing-buffer resolution)
         are expected to be set from script; confirm script.js does so. */
      canvas {
        position: absolute;
        bottom: 0;
        width: 100%;
        height: 100%;
      }
      /* Keep the record button on top of the absolutely-positioned canvas. */
      button {
        z-index: 1;
        position: absolute;
        top: 10px;
        left: 10px;
      }
    </style>
  </head>
  <body>
    <!-- Toggles microphone capture; label is swapped between
         "Record" and "Stop" by script.js. -->
    <button id="recordButton">Record</button>
    <!-- Drawing surface for the FFT bar visualization. -->
    <canvas id="canvas"></canvas>
    <!-- All behavior lives in the separate script file. -->
    <script src="script.js"></script>
  </body>
</html>
// DOM references for the visualizer.
const canvas = document.getElementById('canvas');
const ctx = canvas.getContext('2d');
const recordButton = document.getElementById('recordButton');

// Web Audio state, created lazily on the first "Record" click.
let audioContext;
let analyser;
let dataArray;
let bufferLength;

// Current drawing-buffer size, kept in sync with the window.
// Without explicitly setting canvas.width/height, the canvas keeps its
// default 300x150 resolution and the 100%-sized CSS merely stretches it,
// producing a blurry, distorted visualization.
let WIDTH;
let HEIGHT;

function resizeCanvas() {
  canvas.width = window.innerWidth;
  canvas.height = window.innerHeight;
  WIDTH = canvas.width;
  HEIGHT = canvas.height;
}

resizeCanvas();
window.addEventListener('resize', resizeCanvas);

/**
 * Creates the AudioContext/AnalyserNode pair, requests microphone access,
 * and starts the draw loop once a stream is connected.
 * On failure (permission denied, no input device) the partially-built
 * context is closed and the button is reset so the UI is not left stuck
 * on a dead "Stop" label.
 */
function setupAudio() {
  // Safari still ships only the prefixed constructor.
  const AudioCtx = window.AudioContext || window.webkitAudioContext;
  audioContext = new AudioCtx();
  analyser = audioContext.createAnalyser();
  analyser.fftSize = 2048; // 1024 frequency bins via frequencyBinCount
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  navigator.mediaDevices.getUserMedia({ audio: true })
    .then((stream) => {
      const source = audioContext.createMediaStreamSource(stream);
      source.connect(analyser);
      draw();
    })
    .catch((error) => {
      console.error(error);
      // Tear down so a later click retries setup from scratch.
      audioContext.close();
      audioContext = undefined;
      recordButton.textContent = 'Record';
    });
}

/**
 * Render loop: paints one frame of the frequency-bar visualization and
 * schedules the next via requestAnimationFrame.
 * Reads the live canvas size every frame instead of the module-level
 * WIDTH/HEIGHT snapshot, so the bars stay correct after a window resize
 * (the snapshot is captured once at load and can go stale).
 */
function draw() {
  requestAnimationFrame(draw);
  const width = canvas.width;
  const height = canvas.height;
  analyser.getByteFrequencyData(dataArray);
  // Clear to black each frame.
  ctx.fillStyle = 'rgb(0, 0, 0)';
  ctx.fillRect(0, 0, width, height);
  // 2.5x widening shows roughly the lower 40% of bins, where most
  // musically interesting energy lives.
  const barWidth = (width / bufferLength) * 2.5;
  let x = 0;
  for (let i = 0; i < bufferLength; i++) {
    const barHeight = dataArray[i]; // 0..255 magnitude for this bin
    // Louder bins shade toward brighter red.
    ctx.fillStyle = `rgb(${barHeight + 100},50,50)`;
    // Anchor bars to the bottom edge, at half-scale height.
    ctx.fillRect(x, height - barHeight / 2, barWidth, barHeight / 2);
    x += barWidth + 1; // 1px gap between bars
  }
}

// Toggle recording: the first click builds the audio graph; subsequent
// clicks suspend/resume the existing AudioContext and swap the label.
recordButton.addEventListener('click', () => {
  if (!audioContext) {
    setupAudio();
    recordButton.textContent = 'Stop';
    return;
  }
  switch (audioContext.state) {
    case 'running':
      audioContext.suspend();
      recordButton.textContent = 'Record';
      break;
    case 'suspended':
      audioContext.resume();
      recordButton.textContent = 'Stop';
      break;
    default:
      // 'closed' (or any future state): nothing to toggle.
      break;
  }
});
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment