Skip to content

Commit

Permalink
added audio e2e metric
Browse files Browse the repository at this point in the history
  • Loading branch information
vpalmisano committed May 22, 2024
1 parent a28dfba commit e83efac
Show file tree
Hide file tree
Showing 6 changed files with 245 additions and 14 deletions.
201 changes: 201 additions & 0 deletions scripts/e2e-audio-stats.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,201 @@
/* global log, enabledForSession, ggwave_factory, MeasuredStats */

/**
 * Audio end-to-end delay stats, aggregated over a sliding window
 * (samples older than the 15 s ttl are discarded by MeasuredStats).
 * Values are pushed in seconds by recognizeAudioTimestampWatermark.
 * @type MeasuredStats
 */
const audioEndToEndDelay = new MeasuredStats({ ttl: 15 })

/**
 * Exposes the current mean audio end-to-end delay (seconds) so the
 * harness can collect it via page.evaluate.
 * @returns {number|undefined} mean of the collected samples, as computed
 *   by MeasuredStats.mean().
 */
window.collectAudioEndToEndDelayStats = () => audioEndToEndDelay.mean()

/**
 * Reinterprets the raw bytes of one typed array as another typed-array
 * type. The bytes are copied into a fresh ArrayBuffer, so the result does
 * not share storage with the source.
 * @param src source typed array.
 * @param type target typed-array constructor (e.g. Float32Array).
 * @returns a new typed array of the requested type over the copied bytes.
 */
function convertTypedArray(src, type) {
  const bytes = new ArrayBuffer(src.byteLength)
  const mirror = new src.constructor(bytes)
  mirror.set(src)
  return new type(bytes)
}

// ggwave module handle; stays null until the factory resolves (or when the
// audio watermark feature is disabled for this session).
let ggwave = null

// Load the ggwave codec (used to encode/decode the audio timestamp
// watermark) once the document is ready, only when the feature is enabled.
if (enabledForSession(window.PARAMS?.timestampWatermarkAudio)) {
  document.addEventListener('DOMContentLoaded', async () => {
    try {
      ggwave = await ggwave_factory()
    } catch (e) {
      log(`ggwave error: ${e}`)
    }
  })
}

// Shared Web Audio state for the watermark sender; created lazily by
// initAudioTimestampWatermarkSender().
/** @type AudioContext */
let audioContext = null
/** @type MediaStreamAudioDestinationNode */
let audioDestination = null

// Interval (ms) between two consecutive watermark transmissions.
const SEND_PERIOD = 5000

/**
 * Lazily creates the shared AudioContext and MediaStream destination and
 * starts a periodic task that encodes the current wall-clock timestamp
 * with ggwave and plays the waveform into the destination every
 * SEND_PERIOD ms. Subsequent calls are no-ops.
 */
function initAudioTimestampWatermarkSender() {
  if (audioContext) return
  log(`initAudioTimestampWatermarkSender with interval ${SEND_PERIOD}ms`)

  const AudioContext = window.AudioContext || window.webkitAudioContext
  audioContext = new AudioContext({
    latencyHint: 'interactive',
    sampleRate: 48000,
  })
  audioDestination = audioContext.createMediaStreamDestination()

  // Configure a ggwave transmitter matched to the context sample rate.
  const txParameters = ggwave.getDefaultParameters()
  txParameters.sampleRateInp = audioContext.sampleRate
  txParameters.sampleRateOut = audioContext.sampleRate
  txParameters.operatingMode =
    ggwave.GGWAVE_OPERATING_MODE_TX | ggwave.GGWAVE_OPERATING_MODE_USE_DSS
  const txInstance = ggwave.init(txParameters)

  setInterval(() => {
    // Encode the current timestamp (ms since epoch) as an audible payload.
    const timestamp = Date.now()
    const waveform = ggwave.encode(
      txInstance,
      timestamp.toString(),
      ggwave.ProtocolId.GGWAVE_PROTOCOL_AUDIBLE_FAST,
      10,
    )
    // Reinterpret the encoded bytes as 32-bit float PCM and play it once
    // into the shared destination node.
    const samples = convertTypedArray(waveform, Float32Array)
    const audioBuffer = audioContext.createBuffer(
      1,
      samples.length,
      audioContext.sampleRate,
    )
    audioBuffer.copyToChannel(samples, 0)
    const bufferSource = audioContext.createBufferSource()
    bufferSource.buffer = audioBuffer
    bufferSource.connect(audioDestination)
    bufferSource.start()
  }, SEND_PERIOD)
}

/**
 * Mixes the timestamp watermark signal into the audio of the given
 * stream, returning a new stream whose audio track carries both the
 * (attenuated) original audio and the watermark.
 * Streams without an audio track are returned unchanged.
 * @param {MediaStream} mediaStream stream captured from getUserMedia.
 * @returns {MediaStream} new stream with the watermarked audio track plus
 *   the original video tracks, or the input stream when it has no audio.
 */
window.applyAudioTimestampWatermark = mediaStream => {
  const audioTracks = mediaStream.getAudioTracks()
  if (audioTracks.length === 0) {
    return mediaStream
  }
  // Create the shared context/destination on first use.
  if (!audioDestination) {
    initAudioTimestampWatermarkSender()
  }
  const srcTrack = audioTracks[0]
  log(
    `AudioTimestampWatermark tx overrideGetUserMediaStream`,
    srcTrack.id,
    '->',
    audioDestination.stream.getAudioTracks()[0].id,
  )

  // Route the original audio into the watermark destination, strongly
  // attenuated (gain 0.005), so it mixes with the watermark signal.
  const micSource = audioContext.createMediaStreamSource(
    new MediaStream([srcTrack]),
  )
  const attenuator = audioContext.createGain()
  attenuator.gain.value = 0.005
  micSource.connect(attenuator)
  attenuator.connect(audioDestination)

  // Detach the graph nodes when the source track ends.
  srcTrack.addEventListener('ended', () => {
    micSource.disconnect(attenuator)
    attenuator.disconnect(audioDestination)
  })

  // Hand back a clone of the mixed destination track together with the
  // untouched video tracks.
  return new MediaStream([
    audioDestination.stream.getAudioTracks()[0].clone(),
    ...mediaStream.getVideoTracks(),
  ])
}

// Number of audio tracks currently being analyzed for watermarks; used to
// cap the number of concurrent ggwave receivers.
let processingAudioTracks = 0

/**
 * Attaches a ggwave receiver to the given audio track and measures the
 * audio end-to-end delay by decoding the timestamp watermark embedded in
 * the received samples. Plausible delays (0..5000 ms) are pushed to
 * audioEndToEndDelay in seconds. At most 5 tracks are processed at once.
 * @param {MediaStreamTrack} track received audio track.
 */
window.recognizeAudioTimestampWatermark = track => {
  // Concurrency cap: skip additional tracks while 5 are being processed.
  if (processingAudioTracks > 4) {
    return
  }
  processingAudioTracks += 1

  // The decoder consumes fixed windows of samplesPerFrame float samples.
  const samplesPerFrame = 1024
  const buf = new Float32Array(samplesPerFrame)
  let bufIndex = 0
  // ggwave receiver handle: null until the first frame arrives,
  // negative when initialization failed.
  let instance = null

  const writableStream = new window.WritableStream(
    {
      async write(frame) {
        const { numberOfFrames, sampleRate } = frame
        if (instance === null) {
          // Lazily create the receiver once the track sample rate is known.
          const parameters = ggwave.getDefaultParameters()
          parameters.sampleRateInp = sampleRate
          parameters.sampleRateOut = sampleRate
          parameters.samplesPerFrame = samplesPerFrame
          parameters.operatingMode =
            ggwave.GGWAVE_OPERATING_MODE_RX |
            ggwave.GGWAVE_OPERATING_MODE_USE_DSS
          instance = ggwave.init(parameters)
          if (instance < 0) {
            log(`AudioTimestampWatermark rx init failed: ${instance}`)
          }
          // NOTE: processingAudioTracks was already incremented when this
          // track was accepted; incrementing it again here (as the code
          // previously did) leaked the counter, since close() decrements
          // it exactly once.
        }
        // A failed init leaves a negative handle: skip decoding instead of
        // passing the invalid handle to ggwave.decode().
        if (instance < 0) return

        try {
          const tmp = new Float32Array(numberOfFrames)
          frame.copyTo(tmp, { planeIndex: 0 })

          // Append as many samples as still fit into the current window.
          const addedFrames = Math.min(
            numberOfFrames,
            samplesPerFrame - bufIndex,
          )
          buf.set(tmp.slice(0, addedFrames), bufIndex)
          bufIndex += numberOfFrames

          // Wait until a full window has been accumulated.
          if (bufIndex < samplesPerFrame) return

          const now = Date.now()
          // ggwave expects the raw bytes of the float samples.
          const res = ggwave.decode(instance, convertTypedArray(buf, Int8Array))
          // Carry the overflow samples over into the next window.
          buf.set(tmp.slice(addedFrames), 0)
          bufIndex = numberOfFrames - addedFrames

          if (res && res.length > 0) {
            const data = new TextDecoder('utf-8').decode(res)
            try {
              // Payload is the sender's Date.now() in milliseconds.
              const ts = parseInt(data, 10)
              // Compensate for the decoder buffering; the +4 extra frames
              // are presumably a safety margin — TODO confirm.
              const rxFrames = ggwave.rxDurationFrames(instance) + 4
              const rxFramesDuration =
                (rxFrames * 1000 * samplesPerFrame) / sampleRate
              const delay = now - ts - rxFramesDuration
              log(
                `AudioTimestampWatermark rx delay: ${delay}ms rxFrames: ${rxFrames} rxFramesDuration: ${rxFramesDuration}ms`,
              )
              // Discard NaN, negative, or implausible (>5 s) measurements.
              if (isFinite(delay) && delay > 0 && delay < 5000) {
                audioEndToEndDelay.push(now, delay / 1000)
              }
            } catch (e) {
              log(
                `AudioTimestampWatermark rx failed to parse ${data}: ${e.message}`,
              )
            }
          }
        } catch (err) {
          log(`AudioTimestampWatermark error: ${err.message}`)
        }
      },
      close() {
        processingAudioTracks -= 1
        // Free only a successfully created receiver. The non-negative check
        // matches the `instance < 0` failure test above (a truthiness check
        // would free a failed negative handle and skip a valid handle 0).
        if (instance !== null && instance >= 0) ggwave.free(instance)
      },
      abort(err) {
        log('AudioTimestampWatermark error:', err)
      },
    },
    new CountQueuingStrategy({ highWaterMark: 100 }),
  )
  const trackProcessor = new window.MediaStreamTrackProcessor({ track })
  trackProcessor.readable.pipeTo(writableStream)
}
8 changes: 4 additions & 4 deletions scripts/e2e-video-stats.js
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ window.collectVideoEndToEndDelayStats = () => {
* @param {MediaStream} mediaStream
* @returns {MediaStream}
*/
window.applyTimestampWatermark = mediaStream => {
window.applyVideoTimestampWatermark = mediaStream => {
if (
!('MediaStreamTrackProcessor' in window) ||
!('MediaStreamTrackGenerator' in window)
Expand Down Expand Up @@ -145,11 +145,11 @@ async function loadTesseract() {
}

/**
* recognizeTimestampWatermark
* recognizeVideoTimestampWatermark
* @param {MediaStreamTrack} videoTrack
* @param {number} measureInterval
*/
window.recognizeTimestampWatermark = async (
window.recognizeVideoTimestampWatermark = async (
videoTrack,
measureInterval = 5,
) => {
Expand Down Expand Up @@ -205,7 +205,7 @@ window.recognizeTimestampWatermark = async (
}
}
} catch (err) {
log(`recognizeTimestampWatermark error: ${err.message}`)
log(`recognizeVideoTimestampWatermark error: ${err.message}`)
}
}
videoFrame.close()
Expand Down
8 changes: 6 additions & 2 deletions scripts/get-user-media.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/* global log, sleep, applyTimestampWatermark, enabledForSession */
/* global log, sleep, applyAudioTimestampWatermark, applyVideoTimestampWatermark, enabledForSession */

const applyOverride = (constraints, override) => {
if (override) {
Expand Down Expand Up @@ -165,8 +165,12 @@ if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
log(`collectMediaTracks error:`, err)
}

if (enabledForSession(window.PARAMS?.timestampWatermarkAudio)) {
mediaStream = applyAudioTimestampWatermark(mediaStream)
}

if (enabledForSession(window.PARAMS?.timestampWatermarkVideo)) {
mediaStream = applyTimestampWatermark(mediaStream)
mediaStream = applyVideoTimestampWatermark(mediaStream)
}

return mediaStream
Expand Down
7 changes: 5 additions & 2 deletions scripts/peer-connection.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/* global log, PeerConnections, handleTransceiverForInsertableStreams, handleTransceiverForPlayoutDelayHint, recognizeTimestampWatermark, saveMediaTrack, enabledForSession, watchObjectProperty */
/* global log, PeerConnections, handleTransceiverForInsertableStreams, handleTransceiverForPlayoutDelayHint, recognizeAudioTimestampWatermark, recognizeVideoTimestampWatermark, saveMediaTrack, enabledForSession, watchObjectProperty */

const timestampInsertableStreams = !!window.PARAMS?.timestampInsertableStreams

Expand Down Expand Up @@ -171,13 +171,16 @@ window.RTCPeerConnection = function (conf, options) {
}
if (receiver.track.kind === 'video') {
if (enabledForSession(window.PARAMS?.timestampWatermarkVideo)) {
recognizeTimestampWatermark(receiver.track)
recognizeVideoTimestampWatermark(receiver.track)
}

if (enabledForSession(window.PARAMS?.saveRecvVideoTrack)) {
await saveMediaTrack(receiver.track, 'recv')
}
} else if (receiver.track.kind === 'audio') {
if (window.PARAMS?.timestampWatermarkAudio) {
recognizeAudioTimestampWatermark(receiver.track)
}
if (enabledForSession(window.PARAMS?.saveRecvAudioTrack)) {
await saveMediaTrack(receiver.track, 'recv')
}
Expand Down
3 changes: 3 additions & 0 deletions src/rtcstats.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,9 @@ export enum PageStatsNames {
/** The page HTTP receive latency. */
httpRecvLatency = 'httpRecvLatency',

/** The audio end to end total delay. */
audioEndToEndDelay = 'audioEndToEndDelay',

/** The video end to end total delay. */
videoEndToEndDelay = 'videoEndToEndDelay',
/**
Expand Down
32 changes: 26 additions & 6 deletions src/session.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ declare global {
signalingHost?: string
participantName?: string
}>
let collectAudioEndToEndDelayStats: () => number
let collectVideoEndToEndDelayStats: () => number
let collectVideoEndToEndNetworkDelayStats: () => number
let collectHttpResourcesStats: () => {
Expand Down Expand Up @@ -926,14 +927,28 @@ window.SERVER_USE_HTTPS = ${this.serverUseHttps};
process.env.EXTERNAL_PEER_CONNECTION === 'true' ? '-external' : ''
}.js`,
'scripts/e2e-network-stats.js',
'https://raw.githubusercontent.com/ggerganov/ggwave/master/bindings/javascript/ggwave.js',
'scripts/e2e-audio-stats.js',
'scripts/e2e-video-stats.js',
'scripts/playout-delay-hint.js',
'scripts/page-stats.js',
'scripts/save-tracks.js',
]) {
const filePath = resolvePackagePath(name)
log.debug(`loading ${name} script from: ${filePath}`)
await page.evaluateOnNewDocument(fs.readFileSync(filePath, 'utf8'))
if (name.startsWith('http')) {
log.debug(`loading ${name} script`)
const res = await downloadUrl(name)
if (!res?.data) {
throw new Error(`Failed to download script from: ${name}`)
}
await page.evaluateOnNewDocument(res.data)
} else {
const filePath = resolvePackagePath(name)
if (!fs.existsSync(filePath)) {
throw new Error(`${name} script not found: ${filePath}`)
}
log.debug(`loading ${name} script from: ${filePath}`)
await page.evaluateOnNewDocument(fs.readFileSync(filePath, 'utf8'))
}
}

// Execute external script(s).
Expand Down Expand Up @@ -964,9 +979,7 @@ window.SERVER_USE_HTTPS = ${this.serverUseHttps};
continue
}
log.debug(`loading custom script from file: ${filePath}`)
await page.evaluateOnNewDocument(
await fs.readFileSync(filePath, 'utf8'),
)
await page.evaluateOnNewDocument(fs.readFileSync(filePath, 'utf8'))
}
}
}
Expand Down Expand Up @@ -1491,6 +1504,7 @@ window.SERVER_USE_HTTPS = ${this.serverUseHttps};

const pages: Record<string, number> = {}
const peerConnections: Record<string, number> = {}
const audioEndToEndDelayStats: Record<string, number> = {}
const videoEndToEndDelayStats: Record<string, number> = {}
const videoEndToEndNetworkDelayStats: Record<string, number> = {}
const httpRecvBytesStats: Record<string, number> = {}
Expand All @@ -1515,11 +1529,13 @@ window.SERVER_USE_HTTPS = ${this.serverUseHttps};
// Collect stats from page.
const {
peerConnectionStats,
audioEndToEndDelay,
videoEndToEndDelay,
videoEndToEndNetworkDelay,
httpResourcesStats,
} = await page.evaluate(async () => ({
peerConnectionStats: await collectPeerConnectionStats(),
audioEndToEndDelay: collectAudioEndToEndDelayStats(),
videoEndToEndDelay: collectVideoEndToEndDelayStats(),
videoEndToEndNetworkDelay: collectVideoEndToEndNetworkDelayStats(),
httpResourcesStats: collectHttpResourcesStats(),
Expand Down Expand Up @@ -1562,6 +1578,9 @@ window.SERVER_USE_HTTPS = ${this.serverUseHttps};
peerConnections[hostKey] += activePeerConnections

// E2E stats.
if (audioEndToEndDelay) {
audioEndToEndDelayStats[pageKey] = audioEndToEndDelay
}
if (videoEndToEndDelay) {
videoEndToEndDelayStats[pageKey] = videoEndToEndDelay
}
Expand Down Expand Up @@ -1682,6 +1701,7 @@ window.SERVER_USE_HTTPS = ${this.serverUseHttps};
collectedStats.errors = this.pageErrors
collectedStats.warnings = this.pageWarnings
collectedStats.peerConnections = peerConnections
collectedStats.audioEndToEndDelay = audioEndToEndDelayStats
collectedStats.videoEndToEndDelay = videoEndToEndDelayStats
collectedStats.videoEndToEndNetworkDelay = videoEndToEndNetworkDelayStats
collectedStats.httpRecvBytes = httpRecvBytesStats
Expand Down

0 comments on commit e83efac

Please sign in to comment.