web: fix audio on firefox
Build Mumble Web 2 / windows_build (push) Successful in 2m39s
Build Mumble Web 2 / linux_build (push) Successful in 4m57s

This commit is contained in:
2025-12-05 01:54:01 -07:00
parent d6b482528f
commit 37613a65c4
5 changed files with 109 additions and 64 deletions
+7 -5
View File
@@ -30,8 +30,6 @@ web-sys = { version = "^0.3.72", features = [
"EncodedAudioChunkInit", "EncodedAudioChunkInit",
"EncodedAudioChunkType", "EncodedAudioChunkType",
"CodecState", "CodecState",
"MediaStreamTrackGenerator",
"MediaStreamTrackGeneratorInit",
"AudioContext", "AudioContext",
"AudioContextOptions", "AudioContextOptions",
"MediaStream", "MediaStream",
@@ -42,6 +40,7 @@ web-sys = { version = "^0.3.72", features = [
"AudioWorkletNode", "AudioWorkletNode",
"AudioWorklet", "AudioWorklet",
"AudioWorkletProcessor", "AudioWorkletProcessor",
"MessagePort",
"MediaStreamConstraints", "MediaStreamConstraints",
"WorkletOptions", "WorkletOptions",
"AudioEncoder", "AudioEncoder",
@@ -88,7 +87,7 @@ tracing = "^0.1.40"
color-eyre = "^0.6.3" color-eyre = "^0.6.3"
crossbeam-queue = "^0.3.11" crossbeam-queue = "^0.3.11"
lol_html = "^2.2.0" lol_html = "^2.2.0"
rfd = { git = "https://github.com/samsartor/rfd.git", version = "^0.16.0", default-features = false } rfd = { git = "https://github.com/samsartor/rfd.git", version = "^0.16.0", default-features = false }
base64 = "^0.22" base64 = "^0.22"
mime_guess = "^2.0.5" mime_guess = "^2.0.5"
async_cell = "^0.2.3" async_cell = "^0.2.3"
@@ -97,7 +96,9 @@ dioxus-asset-resolver = "0.7.1"
# Denoising # Denoising
# ========= # =========
deep_filter = { git = "https://github.com/Rikorose/DeepFilterNet.git", rev = "d375b2d8309e0935d165700c91da9de862a99c31", features = ["tract"] } deep_filter = { git = "https://github.com/Rikorose/DeepFilterNet.git", rev = "d375b2d8309e0935d165700c91da9de862a99c31", features = [
"tract",
] }
crossbeam = "0.8.4" crossbeam = "0.8.4"
[patch.crates-io] [patch.crates-io]
@@ -130,4 +131,5 @@ desktop = [
"dasp_ring_buffer", "dasp_ring_buffer",
"rfd/xdg-portal", "rfd/xdg-portal",
"rfd/tokio", "rfd/tokio",
] ]
@@ -1,7 +1,7 @@
const SAMPLE_RATE = 48000; const SAMPLE_RATE = 48000;
const PACKET_SAMPLES = 960; const PACKET_SAMPLES = 960;
class RustWorklet extends AudioWorkletProcessor { class RustMicWorklet extends AudioWorkletProcessor {
constructor(options) { constructor(options) {
super(); super();
this.module = options.processorOptions; this.module = options.processorOptions;
@@ -31,7 +31,7 @@ class RustWorklet extends AudioWorkletProcessor {
} }
this.buffer_offset -= PACKET_SAMPLES; this.buffer_offset -= PACKET_SAMPLES;
this.timestamp = null; this.timestamp = null;
} }
process(inputs) { process(inputs) {
//console.log(inputs); //console.log(inputs);
@@ -60,4 +60,44 @@ class RustWorklet extends AudioWorkletProcessor {
} }
}; };
registerProcessor("rust_mic_worklet", RustWorklet);
// Plays back decoded PCM posted from the main thread over the worklet's
// MessagePort. Used on browsers (e.g. Firefox) that lack
// MediaStreamTrackGenerator support.
class RustSpeakerWorklet extends AudioWorkletProcessor {
    constructor() {
        super();
        // FIFO of messages shaped { samples: Float32Array } awaiting playback.
        this.queue = [];
        // Read position within queue[0].samples.
        this.readIndex = 0;
        this.port.onmessage = (event) => {
            this.queue.push(event.data);
        };
    }
    // Realtime render callback: fill the output quantum from the queue.
    // NOTE: no logging/allocation here — this runs on the audio thread
    // every ~128 frames, so any console I/O causes glitches.
    process(inputs, outputs) {
        const output = outputs[0];
        for (let i = 0; i < output[0].length; i++) {
            if (!this.queue.length) {
                // Underrun: output channels are zero-initialized by the
                // engine, so returning early leaves the rest silent.
                return true;
            }
            const current = this.queue[0];
            // Mono source: duplicate the sample into every output channel.
            for (let ch = 0; ch < output.length; ch++) {
                output[ch][i] = current.samples[this.readIndex];
            }
            this.readIndex++;
            if (this.readIndex >= current.samples.length) {
                this.queue.shift();
                this.readIndex = 0;
            }
        }
        // Keep the processor alive while idle so later packets still play.
        return true;
    }
}
registerProcessor("rust_mic_worklet", RustMicWorklet);
registerProcessor("rust_speaker_worklet", RustSpeakerWorklet);
+1 -1
View File
@@ -43,7 +43,7 @@ const PACKET_SAMPLES: u32 = 960;
type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>; type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>;
impl AudioSystem { impl AudioSystem {
pub fn new() -> Result<Self, Error> { pub async fn new() -> Result<Self, Error> {
// TODO // TODO
let host = cpal::default_host(); let host = cpal::default_host();
let name = host.id(); let name = host.id();
+57 -54
View File
@@ -15,7 +15,6 @@ use tracing::{debug, error, info, instrument};
use wasm_bindgen::prelude::*; use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::JsFuture; use wasm_bindgen_futures::JsFuture;
use web_sys::js_sys::{Promise, Reflect, Uint8Array}; use web_sys::js_sys::{Promise, Reflect, Uint8Array};
use web_sys::AudioContext;
use web_sys::AudioContextOptions; use web_sys::AudioContextOptions;
use web_sys::AudioData; use web_sys::AudioData;
use web_sys::AudioDecoder; use web_sys::AudioDecoder;
@@ -30,14 +29,13 @@ use web_sys::EncodedAudioChunkInit;
use web_sys::EncodedAudioChunkType; use web_sys::EncodedAudioChunkType;
use web_sys::MediaStream; use web_sys::MediaStream;
use web_sys::MediaStreamConstraints; use web_sys::MediaStreamConstraints;
use web_sys::MediaStreamTrackGenerator;
use web_sys::MediaStreamTrackGeneratorInit;
use web_sys::MessageEvent; use web_sys::MessageEvent;
use web_sys::WebTransport; use web_sys::WebTransport;
use web_sys::WebTransportBidirectionalStream; use web_sys::WebTransportBidirectionalStream;
use web_sys::WebTransportOptions; use web_sys::WebTransportOptions;
use web_sys::WorkletOptions; use web_sys::WorkletOptions;
use web_sys::{console, window}; use web_sys::{console, window};
use web_sys::{AudioContext, AudioDataCopyToOptions};
pub use wasm_bindgen_futures::spawn_local as spawn; pub use wasm_bindgen_futures::spawn_local as spawn;
@@ -78,12 +76,41 @@ pub struct AudioSystem {
processors: AudioProcessorSender, processors: AudioProcessorSender,
} }
/// Loads the shared audio worklet module (mic + speaker processors) into
/// `audio_context`, forwarding the compiled wasm module through
/// `processorOptions` so the worklet thread can instantiate it.
///
/// The speaker-side worklet is only needed on browsers (e.g. Firefox)
/// that don't support `MediaStreamTrackGenerator`.
async fn attach_worklet(audio_context: &AudioContext) -> Result<(), Error> {
    let options = WorkletOptions::new();
    // Hand the wasm module to the worklet via processorOptions; the
    // worklet-side JS reads it back in its constructor.
    Reflect::set(
        &options,
        &"processorOptions".into(),
        &wasm_bindgen::module(),
    )
    .ey()?;
    let module = asset!("assets/rust_audio_worklet.js").to_string();
    info!("loading audio worklet module from {module:?}");
    audio_context
        .audio_worklet()
        .ey()?
        .add_module_with_options(&module, &options)
        .ey()?
        .into_future()
        .await
        .ey()?;
    Ok(())
}
impl AudioSystem { impl AudioSystem {
pub fn new() -> Result<Self, Error> { pub async fn new() -> Result<Self, Error> {
// Create MediaStreams to playback decoded audio // Create MediaStreams to playback decoded audio
// The audio context is used to reproduce audio. // The audio context is used to reproduce audio.
let webctx = configure_audio_context(); let webctx = configure_audio_context();
attach_worklet(&webctx).await?;
let processors = AudioProcessorSender::default(); let processors = AudioProcessorSender::default();
Ok(AudioSystem { webctx, processors }) Ok(AudioSystem { webctx, processors })
} }
@@ -104,18 +131,10 @@ impl AudioSystem {
} }
pub fn create_player(&mut self) -> Result<AudioPlayer, Error> { pub fn create_player(&mut self) -> Result<AudioPlayer, Error> {
let audio_stream_generator = let sink_node = AudioWorkletNode::new(&self.webctx, "rust_speaker_worklet").ey()?;
MediaStreamTrackGenerator::new(&MediaStreamTrackGeneratorInit::new("audio")).ey()?;
// Create MediaStream from MediaStreamTrackGenerator // Connect worklet to destination
let js_tracks = web_sys::js_sys::Array::new(); sink_node
js_tracks.push(&audio_stream_generator);
let media_stream = MediaStream::new_with_tracks(&js_tracks).ey()?;
// Create MediaStreamAudioSourceNode
let audio_source = self.webctx.create_media_stream_source(&media_stream).ey()?;
// Connect output of audio_source to audio_context (browser audio)
audio_source
.connect_with_audio_node(&self.webctx.destination()) .connect_with_audio_node(&self.webctx.destination())
.ey()?; .ey()?;
@@ -124,28 +143,31 @@ impl AudioSystem {
error!("error decoding audio {:?}", e); error!("error decoding audio {:?}", e);
}) as Box<dyn FnMut(JsValue)>); }) as Box<dyn FnMut(JsValue)>);
// This knows what MediaStreamTrackGenerator to use as it closes around it let sink_port = sink_node.port().ey()?;
let output = Closure::wrap(Box::new(move |audio_data: AudioData| { let output = Closure::wrap(Box::new(move |audio_data: AudioData| {
let writable = audio_stream_generator.writable(); // Extract planar PCM from AudioData into an ArrayBuffer or Float32Array
if writable.locked() { // Here we assume f32 samples, 1 channel for brevity.
return; let number_of_frames = audio_data.number_of_frames();
}
if let Err(e) = writable.get_writer().map(|writer| { let js_buffer = Float32Array::new_with_length(number_of_frames);
spawn(async move {
if let Err(e) = JsFuture::from(writer.ready()).await.ey() { let audio_data_copy_to_options = &AudioDataCopyToOptions::new(0);
error!("write chunk ready error {:?}", e); audio_data_copy_to_options.set_format(web_sys::AudioSampleFormat::F32);
}
if let Err(e) = JsFuture::from(writer.write_with_chunk(&audio_data)) if let Err(e) = audio_data
.await .copy_to_with_buffer_source(&js_buffer.buffer(), &audio_data_copy_to_options)
.ey() {
{ error!("could not copy audio data to array {:?}", e);
error!("write chunk error {:?}", e);
};
writer.release_lock();
});
}) {
error!("error writing audio data {:?}", e);
} }
// Post to the worklet; include sampleRate and channel count if needed.
let msg = js_sys::Object::new();
js_sys::Reflect::set(&msg, &"samples".into(), &js_buffer).unwrap();
sink_port.post_message(&msg).unwrap();
audio_data.close();
}) as Box<dyn FnMut(AudioData)>); }) as Box<dyn FnMut(AudioData)>);
let audio_decoder = AudioDecoder::new(&AudioDecoderInit::new( let audio_decoder = AudioDecoder::new(&AudioDecoderInit::new(
@@ -234,25 +256,6 @@ async fn run_encoder_worklet(
.map_err(|e| JsError::new(&format!("not a stream: {e:?}"))) .map_err(|e| JsError::new(&format!("not a stream: {e:?}")))
.ey()?; .ey()?;
let options = WorkletOptions::new();
Reflect::set(
&options,
&"processorOptions".into(),
&wasm_bindgen::module(),
)
.ey()?;
let module = asset!("assets/rust_mic_worklet.js").to_string();
info!("loading mic worklet from {module:?}");
audio_context
.audio_worklet()
.ey()?
.add_module_with_options(&module, &options)
.ey()?
.into_future()
.await
.ey()?;
let source = audio_context.create_media_stream_source(&stream).ey()?; let source = audio_context.create_media_stream_source(&stream).ey()?;
let worklet_node = AudioWorkletNode::new(audio_context, "rust_mic_worklet").ey()?; let worklet_node = AudioWorkletNode::new(audio_context, "rust_mic_worklet").ey()?;
+1 -1
View File
@@ -113,7 +113,7 @@ pub async fn network_loop<R: imp::ImpRead, W: imp::ImpWrite>(
}); });
} }
let mut audio = imp::AudioSystem::new()?; let mut audio = imp::AudioSystem::new().await?;
{ {
let send_chan = send_chan.clone(); let send_chan = send_chan.clone();
let mut sequence_num = 0; let mut sequence_num = 0;