@jgentes
Created June 21, 2023 03:26
archive multisync logic
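For reference, the handlers below depend on a MultiSyncTrack type that isn't included in this gist. The following is only a hypothetical sketch of its shape, inferred from how multiSync and updatePosition use it; Track, Stem, and WaveSurfer are assumed to come from the app's own types and from wavesurfer.js.

// Hypothetical sketch only — the real MultiSyncTrack definition in the project may differ
interface MultiSyncTrack {
  trackId: Track['id']
  duration: number
  mixpointTime?: number
  waveforms: {
    waveform: WaveSurfer
    stem?: Stem // set only for stem waveforms
    analyserNode: AnalyserNode
  }[]
}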
play: async (trackId?: Track['id']) => {
  let tracks
  if (!trackId) {
    // pull players from audioState to play all
    ;[tracks] = getAudioState()
    tracks = Object.keys(tracks) as Track['id'][]
  } else tracks = [trackId]

  for (const trackId of tracks) {
    if (trackId) setAudioState[trackId as number].playing(true)
  }

  // synchronize playback of all tracks
  audioEvents.multiSync(tracks.filter((id): id is number => !!id))
},
multiSync: async (trackIds: Track['id'][]) => {
  // Sync all waveforms to the same position
  let [syncTimer] = getAudioState.syncTimer()
  if (syncTimer) clearInterval(syncTimer)

  // Collect audio data to use for sync
  const tracks: MultiSyncTrack[] = []

  for (const [index, trackId] of trackIds.entries()) {
    const { mixpointTime } = await getTrackPrefs(trackId)
    const { duration = 1 } = (await db.tracks.get(trackId)) || {}
    const [{ waveform, stems, analyserNode }] = getAudioState[trackId]()

    if (!waveform) continue

    // if we have stems, mute the main waveform
    if (stems) waveform.setVolume(0)

    // add tracks to sync loop
    tracks.push({
      trackId,
      duration,
      mixpointTime,
      waveforms: [{ waveform, analyserNode }]
    })

    if (stems) {
      for (const [stem, { waveform, analyserNode }] of Object.entries(stems)) {
        if (waveform) {
          tracks[index].waveforms.push({ waveform, stem, analyserNode })
        }
      }
    }
  }
  // setup analyser buffer for volume metering
  const bufferLength = 2048 // fftSize
  const dataArray = new Float32Array(bufferLength)

  // setup sync loop
  syncTimer = setInterval(() => {
    for (const track of tracks) {
      const volumes: number[] = [] // to aggregate for main volume meter
      const [time = 0] = getAudioState[track.trackId].time()

      const syncTime = track.waveforms.reduce<number>((pos, audio) => {
        let position = pos
        const waveform = audio.waveform

        if (!waveform.isPlaying()) {
          position = Math.max(
            pos,
            waveform.getCurrentTime() + (track.mixpointTime || 0)
          )
        }

        // unrelated to syncTime, but leverage the reduce loop to perform the volume analysis:
        // getFloatTimeDomainData fills the buffer with samples in [-1, 1], and the meter
        // value is the largest sample in the current frame
        audio.analyserNode.getFloatTimeDomainData(dataArray)
        const vol = Math.max(...dataArray)
        volumes.push(vol)

        if (audio.stem)
          setAudioState[track.trackId].stems[audio.stem].volumeMeter(vol)

        return position
      }, time)

      // aggregate stem volumes for main volume meter
      setAudioState[track.trackId].volumeMeter(Math.max(...volumes))

      if (syncTime > time) {
        audioEvents.updatePosition(track, syncTime)
      }
    }
  }, 15)

  setAudioState.syncTimer(syncTimer)

  // start playback of every waveform (main + stems)
  for (const track of tracks) {
    for (const audio of track.waveforms) {
      audio.waveform.play()
    }
  }
},
updatePosition: (track: MultiSyncTrack, syncTime: number) => {
  const precisionSeconds = 0.1

  const [{ playing, time = 0 }] = getAudioState[track.trackId]()

  // only write the shared time when it has drifted noticeably
  if (Math.abs(syncTime - time) > 0.05) {
    setAudioState[track.trackId].time(syncTime)
  }

  // Update the current time of each audio
  for (const { waveform } of track.waveforms) {
    const newTime = syncTime - (track.mixpointTime || 0)

    if (Math.abs(waveform.getCurrentTime() - newTime) > precisionSeconds) {
      waveform.setTime(newTime)
    }

    // If the position is out of the track bounds, pause it
    if (!playing || newTime < 0 || newTime > track.duration) {
      waveform.isPlaying() && waveform.pause()
    } else if (playing) {
      // If the position is in the track bounds, play it
      !waveform.isPlaying() && waveform.play()
    }
  }
},
pause: async (trackId?: Track['id']) => {
  // this needs to pause all stems as well, so it requires a bit of logic
  let waveforms
  let trackIds

  const [syncTimer] = getAudioState.syncTimer()
  if (syncTimer) clearInterval(syncTimer)

  if (trackId) {
    const [waveform] = getAudioState[trackId].waveform()
    waveforms = [waveform]
    trackIds = [trackId]
  } else {
    // no trackId given: pause everything in audioState
    waveforms = _getAllWaveforms()
    const [tracks] = getAudioState()
    trackIds = Object.keys(tracks)
  }

  const stopWaveform = (waveform: WaveSurfer) => waveform.pause()

  for (const waveform of waveforms) {
    if (waveform) stopWaveform(waveform)
  }

  for (const id of trackIds) {
    const [stems] = getAudioState[Number(id)].stems()
    if (stems) {
      for (const [stem, { waveform }] of Object.entries(stems)) {
        // set volume meter to zero for the stem
        setAudioState[Number(id)].stems[stem as Stem].volumeMeter(0)
        if (waveform) stopWaveform(waveform)
      }
    }

    setAudioState[Number(id)].playing(false)
    setAudioState[Number(id)].volumeMeter(0)
  }
},
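A minimal usage sketch, assuming these handlers are exposed on the audioEvents object that the internal calls (audioEvents.multiSync, audioEvents.updatePosition) already reference; the trackId value is a placeholder:

// Hypothetical usage — names follow the internal calls above, not a confirmed public API
await audioEvents.play()        // play all tracks in audioState, kept aligned by multiSync
await audioEvents.play(trackId) // or start a single track
await audioEvents.pause()       // pause every track and stem, zeroing the volume meters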