Fix: webaudio shim compatibility
katspaugh committed Dec 28, 2023
1 parent 2fe4f81 commit 352afbc
Showing 2 changed files with 73 additions and 97 deletions.
17 changes: 11 additions & 6 deletions src/multitrack.ts
@@ -148,9 +148,12 @@ class MultiTrack extends EventEmitter<MultitrackEvents> {
return Math.max(max, track.startPosition + durations[index])
}, 0)

const placeholderAudio = this.audios[this.audios.length - 1] as HTMLAudioElement & { duration: number }
placeholderAudio.duration = this.maxDuration
this.durations[this.durations.length - 1] = this.maxDuration
const placeholderAudioIndex = this.audios.findIndex((a) => a.src === PLACEHOLDER_TRACK.url)
const placeholderAudio = this.audios[placeholderAudioIndex]
if (placeholderAudio) {
;(placeholderAudio as WebAudioPlayer & { duration: number }).duration = this.maxDuration
this.durations[placeholderAudioIndex] = this.maxDuration
}

this.rendering.setMainWidth(durations, this.maxDuration)
}
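
For context, a minimal standalone sketch (not the library's code) of the recalculation this hunk adjusts: the mix length is the largest startPosition + duration across tracks, and the placeholder entry is then located by its URL rather than by assuming it is the last element. The track shape and placeholder URL below are illustrative assumptions.

// Illustrative sketch only; TrackStub and PLACEHOLDER_URL are hypothetical stand-ins.
interface TrackStub {
  startPosition: number // seconds into the mix where this track begins
  url: string
}

const PLACEHOLDER_URL = 'placeholder.mp3'

function recalcMaxDuration(tracks: TrackStub[], durations: number[]): number {
  // The mix is as long as the latest-ending track.
  const maxDuration = tracks.reduce((max, track, index) => Math.max(max, track.startPosition + durations[index]), 0)

  // Find the placeholder by URL instead of assuming it is the last entry.
  const placeholderIndex = tracks.findIndex((t) => t.url === PLACEHOLDER_URL)
  if (placeholderIndex !== -1) {
    durations[placeholderIndex] = maxDuration
  }

  return maxDuration
}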
@@ -259,11 +262,13 @@ class MultiTrack extends EventEmitter<MultitrackEvents> {
color: this.options.trackBackground,
drag: false,
})
introRegion.element.querySelector('[data-resize="left"]')?.remove()
introRegion.element.querySelector('[part*="region-handle-left"]')?.remove()
;(introRegion.element.parentElement as HTMLElement).style.mixBlendMode = 'plus-lighter'
if (track.intro.color) {
;(introRegion.element.querySelector('[data-resize="right"]') as HTMLElement).style.borderColor =
track.intro.color
const rightHandle = introRegion.element.querySelector('[part*="region-handle-right"]') as HTMLElement
if (rightHandle) {
rightHandle.style.borderColor = track.intro.color
}
}

this.subscriptions.push(
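As an aside, a small illustrative helper (assumed, not part of the commit) showing the new part-based handle selectors that replace the old data-resize attributes in the hunk above:

// Illustrative sketch: style a region's resize handles via the part attributes
// targeted above. regionElement and color are hypothetical inputs.
function styleIntroRegion(regionElement: HTMLElement, color?: string): void {
  // The intro region is not resizable from the left, so drop that handle.
  regionElement.querySelector('[part*="region-handle-left"]')?.remove()

  // Tint the right handle if an intro color is configured.
  const rightHandle = regionElement.querySelector<HTMLElement>('[part*="region-handle-right"]')
  if (rightHandle && color) {
    rightHandle.style.borderColor = color
  }
}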
153 changes: 62 additions & 91 deletions src/webaudio.ts
@@ -1,82 +1,94 @@
import EventEmitter from 'wavesurfer.js/dist/event-emitter.js'

type WebAudioPlayerEvents = {
loadedmetadata: []
canplay: []
play: []
pause: []
seeking: []
timeupdate: []
volumechange: []
emptied: []
ended: []
}

/**
* A Web Audio buffer player emulating the behavior of an HTML5 Audio element.
* Web Audio buffer player emulating the behavior of an HTML5 Audio element.
*/
class WebAudioPlayer extends EventEmitter<WebAudioPlayerEvents> {
class WebAudioPlayer {
private audioContext: AudioContext
private gainNode: GainNode
private bufferNode: AudioBufferSourceNode | null = null
private listeners: Map<string, Set<() => void>> = new Map()
private autoplay = false
private playStartTime = 0
private playedDuration = 0
private _src = ''
private _duration = 0
private _muted = false
private buffer: AudioBuffer | null = null
public currentSrc = ''
public paused = true
public crossOrigin: string | null = null

constructor(audioContext = new AudioContext()) {
super()
this.audioContext = audioContext

this.gainNode = this.audioContext.createGain()
this.gainNode.connect(this.audioContext.destination)
}

/** Subscribe to an event. Returns an unsubscribe function. */
addEventListener = this.on
addEventListener(event: string, listener: () => void, options?: { once?: boolean }) {
if (!this.listeners.has(event)) {
this.listeners.set(event, new Set())
}
this.listeners.get(event)?.add(listener)

/** Unsubscribe from an event */
removeEventListener = this.un
if (options?.once) {
const onOnce = () => {
this.removeEventListener(event, onOnce)
this.removeEventListener(event, listener)
}
this.addEventListener(event, onOnce)
}
}

async load() {
return
removeEventListener(event: string, listener: () => void) {
if (this.listeners.has(event)) {
this.listeners.get(event)?.delete(listener)
}
}

private emitEvent(event: string) {
this.listeners.get(event)?.forEach((listener) => listener())
}

get src() {
return this.currentSrc
return this._src
}

set src(value: string) {
this.currentSrc = value
this._src = value

if (!value) {
this.buffer = null
this.emit('emptied')
this._duration = 0
this.emitEvent('emptied')
return
}

fetch(value)
.then((response) => response.arrayBuffer())
.then((arrayBuffer) => {
if (this.currentSrc !== value) return null
if (this.src !== value) return null
return this.audioContext.decodeAudioData(arrayBuffer)
})
.then((audioBuffer) => {
if (this.currentSrc !== value) return
if (this.src !== value || !audioBuffer) return null

this.buffer = audioBuffer
this._duration = audioBuffer.duration

this.emit('loadedmetadata')
this.emit('canplay')
this.emitEvent('loadedmetadata')
this.emitEvent('canplay')

if (this.autoplay) this.play()
if (this.autoplay) {
this.play()
}
})
}

private _play() {
getChannelData() {
const channelData = this.buffer?.getChannelData(0)
return channelData ? [channelData] : undefined
}

async play() {
if (!this.paused) return
this.paused = false

@@ -85,50 +97,22 @@ class WebAudioPlayer extends EventEmitter<WebAudioPlayerEvents> {
this.bufferNode.buffer = this.buffer
this.bufferNode.connect(this.gainNode)

if (this.playedDuration >= this.duration) {
this.playedDuration = 0
}
const offset = this.playedDuration > 0 ? this.playedDuration : 0
const start =
this.playedDuration > 0 ? this.audioContext.currentTime : this.audioContext.currentTime - this.playedDuration

this.bufferNode.start(this.audioContext.currentTime, this.playedDuration)
this.bufferNode.start(start, offset)
this.playStartTime = this.audioContext.currentTime

this.bufferNode.onended = () => {
if (this.currentTime >= this.duration) {
this.pause()
this.emit('ended')
}
}
this.emitEvent('play')
}

private _pause() {
pause() {
if (this.paused) return
this.paused = true

this.bufferNode?.stop()
this.playedDuration += this.audioContext.currentTime - this.playStartTime
}

async play() {
this._play()
this.emit('play')
}

pause() {
this._pause()
this.emit('pause')
}

stopAt(timeSeconds: number) {
const delay = timeSeconds - this.currentTime
this.bufferNode?.stop(this.audioContext.currentTime + delay)

this.bufferNode?.addEventListener(
'ended',
() => {
this.bufferNode = null
this.pause()
},
{ once: true },
)
this.emitEvent('pause')
}

async setSinkId(deviceId: string) {
@@ -149,29 +133,32 @@ class WebAudioPlayer extends EventEmitter<WebAudioPlayerEvents> {
return this.paused ? this.playedDuration : this.playedDuration + this.audioContext.currentTime - this.playStartTime
}
set currentTime(value) {
this.emit('seeking')
this.emitEvent('seeking')

if (this.paused) {
this.playedDuration = value
} else {
this._pause()
this.pause()
this.playedDuration = value
this._play()
this.play()
}

this.emit('timeupdate')
this.emitEvent('timeupdate')
}

get duration() {
return this.buffer?.duration || 0
return this._duration
}
set duration(value: number) {
this._duration = value
}

get volume() {
return this.gainNode.gain.value
}
set volume(value) {
this.gainNode.gain.value = value
this.emit('volumechange')
this.emitEvent('volumechange')
}

get muted() {
Expand All @@ -187,22 +174,6 @@ class WebAudioPlayer extends EventEmitter<WebAudioPlayerEvents> {
this.gainNode.connect(this.audioContext.destination)
}
}

/** Get the GainNode used to play the audio. Can be used to attach filters. */
public getGainNode(): GainNode {
return this.gainNode
}

/** Get decoded audio */
public getChannelData(): Float32Array[] {
const channels: Float32Array[] = []
if (!this.buffer) return channels
const numChannels = this.buffer.numberOfChannels
for (let i = 0; i < numChannels; i++) {
channels.push(this.buffer.getChannelData(i))
}
return channels
}
}

export default WebAudioPlayer
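
A minimal usage sketch of the shim after this change (in a consumer module; the audio URL is a placeholder): media-element-style events are now delivered through the class's own addEventListener/removeEventListener pair instead of wavesurfer's EventEmitter.

// Illustrative usage only.
import WebAudioPlayer from './webaudio'

const player = new WebAudioPlayer(new AudioContext())

player.addEventListener(
  'canplay',
  () => {
    player.play() // decoding has finished, safe to start playback
  },
  { once: true },
)

player.addEventListener('pause', () => {
  console.log('paused at', player.currentTime)
})

player.src = 'https://example.com/audio.mp3' // setting src triggers fetch + decodeAudioData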
