Skip to content

Instantly share code, notes, and snippets.

@leviyehonatan
Last active December 28, 2019 10:44
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save leviyehonatan/4ae7d4711f70b08089fbffe9e0bc9077 to your computer and use it in GitHub Desktop.
import React, { useEffect, useState, useCallback, useRef } from 'react';
import Axios from 'axios';
import WaveformCanvas from './WaveformCanvas';
import Timeline from './Timeline';
import { useAnimationFrame } from '../../helpers/helper';
import { useKeyPress } from '../../hooks/keyPressHooks';
export default function Waveform({ src }) {
const audioContextRef = useRef();
const audioBufferRef = useRef();
const bufferSourceRef = useRef();
const [msPerPixel, setMsPerPixel] = useState(50);
const [playPosition, setPlayPosition] = useState(0);
const [playStartTime, setPlayStartTime] = useState(null);
const [isPlaying, setIsPlaying] = useState();
const playPressed = useKeyPress(' ');
console.log('render');
const playPausePressed = useCallback(() => {
console.log('playPause');
if (!isPlaying) {
console.log('playing');
setPlayStartTime(audioContextRef.current.currentTime);
bufferSourceRef.current.start();
} else {
console.log('stopping');
bufferSourceRef.current.stop();
}
setIsPlaying(c => !c);
}, [isPlaying]);
useEffect(() => {
if (playPressed) {
console.log('playPressed');
playPausePressed();
}
}, [, playPausePressed]);
useAnimationFrame(deltaTime => {
if (isPlaying) {
const lengthSeconds = audioBuffer.length / audioBuffer.samplerate;
const secondsFromStart =
(audioContextRef.current.currentTime - playStartTime) * 1000;
setPlayPosition(secondsFromStart / lengthSeconds);
}
});
useEffect(() => {
const AudioContext = window.AudioContext || window.webkitAudioContext;
audioContextRef.current = new AudioContext();
}, []);
useEffect(() => {
if (audioContextRef.current == null) return;
console.log('loadFromUrl');
const loadAudio = async () => {
const response = await Axios({
method: 'get',
url: src,
responseType: 'arraybuffer',
});
const decodedBuffer = await audioContextRef.current.decodeAudioData(
response.data,
);
audioBufferRef.current = decodedBuffer;
bufferSourceRef.current = audioContextRef.current.createBufferSource();
bufferSourceRef.current.buffer = audioBufferRef.current;
bufferSourceRef.current.connect(
audioContextRef.current.destination,
);
setIsPlaying(false);
};
loadAudio();
}, [src]);
const onDown = useCallback(
event => {
if (event.code === 'Space') {
playPausePressed();
}
},
[playPausePressed],
);
const onUp = useCallback(event => {
console.log(event);
});
useEffect(() => {
window.addEventListener('keydown', onDown);
window.addEventListener('keyup', onUp);
// Remove event listeners on cleanup
return () => {
window.removeEventListener('keydown', onDown);
window.removeEventListener('keyup', onUp);
};
}, [onDown, onUp]); // Empty array ensures that effect is only run on mount and unmount
if (audioBufferRef.current == null) {
return <div className="App">loading</div>;
}
const audioBuffer = audioBufferRef.current;
const samplesPerPixel = (msPerPixel / 1000) * audioBuffer.sampleRate;
const canvases = [];
for (let i = 0; i < audioBuffer.numberOfChannels; i++) {
canvases.push(
<WaveformCanvas
key={i}
audioBuffer={audioBuffer}
channelIndex={i}
samplesPerPixel={samplesPerPixel}
/>,
);
}
const width = audioBuffer.length / samplesPerPixel;
return (
<div style={{ width: '100vw', overflow: 'scroll' }}>
<div
width="1px"
height="100%"
style={{ backgroundColor: 'red', left: playPosition * width }}
/>
<Timeline
audioBuffer={audioBuffer}
samplesPerPixel={samplesPerPixel}
/>
{canvases}
</div>
);
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment