//! Browser/WASM platform layer: microphone capture and Opus playback via
//! WebCodecs (`AudioEncoder`/`AudioDecoder` + an `AudioWorklet`), and the
//! server connection via WebTransport.
use std::time::Duration;

use color_eyre::eyre::{bail, eyre, Error};
use dioxus::prelude::*;
use futures::{AsyncRead, AsyncWrite};
use futures_channel::mpsc::{UnboundedReceiver, UnboundedSender};
use gloo_timers::future::TimeoutFuture;
use mumble_protocol::control::{ClientControlCodec, ControlPacket};
use mumble_protocol::voice::{VoicePacket, VoicePacketPayload};
use mumble_protocol::Serverbound;
use mumble_web2_common::GuiConfig;
use tracing::{debug, error, info, instrument};
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::JsFuture;
use web_sys::js_sys::{Promise, Reflect, Uint8Array};
use web_sys::{
    console, window, AudioContext, AudioContextOptions, AudioData, AudioDecoder,
    AudioDecoderConfig, AudioDecoderInit, AudioEncoder, AudioEncoderConfig, AudioEncoderInit,
    AudioWorkletNode, EncodedAudioChunk, EncodedAudioChunkInit, EncodedAudioChunkType,
    MediaStream, MediaStreamConstraints, MediaStreamTrackGenerator, MediaStreamTrackGeneratorInit,
    MessageEvent, WebTransport, WebTransportBidirectionalStream, WebTransportOptions,
    WorkletOptions,
};

use crate::app::Command;
use crate::CONFIG;
|
|
|
|
// On the wasm target tasks run on the browser event loop; re-export
// `spawn_local` under a uniform name so the rest of the crate can call
// `platform::spawn` regardless of target.
pub use wasm_bindgen_futures::spawn_local as spawn;
|
|
|
|
/// Alias trait bundling the bounds required of an async reader here:
/// `Unpin` + `'static` so it can be moved into spawned wasm tasks.
pub trait ImpRead: AsyncRead + Unpin + 'static {}

// Blanket impl: anything satisfying the bounds is an `ImpRead`.
impl<T: AsyncRead + Unpin + 'static> ImpRead for T {}
|
|
|
|
/// Alias trait bundling the bounds required of an async writer here:
/// `Unpin` + `'static` so it can be moved into spawned wasm tasks.
pub trait ImpWrite: AsyncWrite + Unpin + 'static {}

// Blanket impl: anything satisfying the bounds is an `ImpWrite`.
impl<T: AsyncWrite + Unpin + 'static> ImpWrite for T {}
|
|
|
|
pub async fn sleep(d: Duration) {
|
|
TimeoutFuture::new(d.as_millis() as u32).await
|
|
}
|
|
|
|
/// Adapter to convert JavaScript-side errors (`JsValue`/`JsError`) into
/// `color_eyre::Error` so they flow through the crate's usual
/// `Result<_, Error>` plumbing.
trait ResultExt<T> {
    /// Map the error side into a `color_eyre::Error`.
    fn ey(self) -> Result<T, Error>;
}
|
|
|
|
impl<T> ResultExt<T> for Result<T, JsValue> {
|
|
fn ey(self) -> Result<T, Error> {
|
|
match self {
|
|
Ok(x) => Ok(x),
|
|
Err(e) => match e.dyn_into::<js_sys::Error>() {
|
|
Ok(e) => Err(eyre!("{}: {}", e.name(), e.message())),
|
|
Err(e) => Err(eyre!("{:?}", e)),
|
|
},
|
|
}
|
|
}
|
|
}
|
|
|
|
impl<T> ResultExt<T> for Result<T, JsError> {
|
|
fn ey(self) -> Result<T, Error> {
|
|
self.map_err(|e| JsValue::from(e)).ey()
|
|
}
|
|
}
|
|
/// Owns the browser `AudioContext` shared by both the playback and
/// capture pipelines.
pub struct AudioSystem(AudioContext);

impl AudioSystem {
    /// Create the audio system around a 48 kHz `AudioContext`
    /// (see `configure_audio_context`).
    pub fn new() -> Result<Self, Error> {
        // Create MediaStreams to playback decoded audio
        // The audio context is used to reproduce audio.
        let audio_context = configure_audio_context();
        Ok(AudioSystem(audio_context))
    }

    /// Start microphone capture; `each` is called with every encoded
    /// packet produced by the encoder worklet pipeline.
    ///
    /// Setup runs in a spawned task, so failures after this point are
    /// logged rather than returned — the `Result` here is currently
    /// always `Ok`.
    pub fn start_recording(&mut self, each: impl FnMut(Vec<u8>) + 'static) -> Result<(), Error> {
        // `AudioContext` is a JS handle; cloning it clones the handle,
        // not the underlying context.
        let audio_context_worklet = self.0.clone();
        spawn(async move {
            match run_encoder_worklet(&audio_context_worklet, each).await {
                Ok(node) => info!("created encoder worklet: {:?}", &node),
                Err(err) => error!("could not create encoder worklet: {err}"),
            }
        });
        Ok(())
    }

    /// Build the playback pipeline: an Opus `AudioDecoder` whose decoded
    /// frames are written into a `MediaStreamTrackGenerator`, which feeds
    /// a `MediaStream` connected to the context's destination (speakers).
    pub fn create_player(&mut self) -> Result<AudioPlayer, Error> {
        let audio_context = &self.0;

        let audio_stream_generator =
            MediaStreamTrackGenerator::new(&MediaStreamTrackGeneratorInit::new("audio")).ey()?;

        // Create MediaStream from MediaStreamTrackGenerator
        let js_tracks = web_sys::js_sys::Array::new();
        js_tracks.push(&audio_stream_generator);
        let media_stream = MediaStream::new_with_tracks(&js_tracks).ey()?;

        // Create MediaStreamAudioSourceNode
        let audio_source = audio_context
            .create_media_stream_source(&media_stream)
            .ey()?;
        // Connect output of audio_source to audio_context (browser audio)
        audio_source
            .connect_with_audio_node(&audio_context.destination())
            .ey()?;

        // Create callback functions for AudioDecoder
        let decoder_error = Closure::wrap(Box::new(move |e: JsValue| {
            error!("error decoding audio {:?}", e);
        }) as Box<dyn FnMut(JsValue)>);

        // This knows what MediaStreamTrackGenerator to use as it closes around it
        let output = Closure::wrap(Box::new(move |audio_data: AudioData| {
            let writable = audio_stream_generator.writable();
            // If a previous write still holds the stream's lock, drop this
            // frame rather than block or error.
            if writable.locked() {
                return;
            }
            if let Err(e) = writable.get_writer().map(|writer| {
                // The actual write is async; run it as a detached task.
                spawn(async move {
                    if let Err(e) = JsFuture::from(writer.ready()).await.ey() {
                        error!("write chunk ready error {:?}", e);
                    }
                    if let Err(e) = JsFuture::from(writer.write_with_chunk(&audio_data))
                        .await
                        .ey()
                    {
                        error!("write chunk error {:?}", e);
                    };
                    // Release the lock so the next decoded frame can write.
                    writer.release_lock();
                });
            }) {
                error!("error writing audio data {:?}", e);
            }
        }) as Box<dyn FnMut(AudioData)>);

        let audio_decoder = AudioDecoder::new(&AudioDecoderInit::new(
            decoder_error.as_ref().unchecked_ref(),
            output.as_ref().unchecked_ref(),
        ))
        .ey()?;

        // Mono Opus at 48 kHz — matches the encoder config in
        // `run_encoder_worklet` and the context sample rate.
        audio_decoder.configure(&AudioDecoderConfig::new("opus", 1, 48000));
        info!("created audio decoder");

        // This is required to prevent these from being deallocated:
        // the JS decoder keeps calling them, so leak them intentionally.
        decoder_error.forget();
        output.forget();

        Ok(AudioPlayer(audio_decoder))
    }
}
|
|
|
|
/// Handle for feeding received Opus packets into the `AudioDecoder`
/// built by `AudioSystem::create_player`.
pub struct AudioPlayer(AudioDecoder);

impl AudioPlayer {
    /// Queue one Opus packet for decoding; decoded frames are played via
    /// the decoder's output callback (see `AudioSystem::create_player`).
    ///
    /// NOTE(review): every chunk is submitted with timestamp 0.0 and type
    /// `Key` — presumably the decoder does not rely on either for Opus
    /// here; confirm against WebCodecs behavior.
    pub fn play_opus(&mut self, payload: &[u8]) {
        let js_audio_payload = Uint8Array::from(payload);
        // Decode errors surface through the decoder's error callback;
        // the synchronous result is deliberately ignored.
        let _ = self.0.decode(
            &EncodedAudioChunk::new(&EncodedAudioChunkInit::new(
                &js_audio_payload.into(),
                0.0,
                EncodedAudioChunkType::Key,
            ))
            .unwrap(),
        );
    }
}
|
|
|
|
// Borrowed from
|
|
// https://github.com/security-union/videocall-rs/blob/main/videocall-client/src/decode/config.rs#L6
|
|
fn configure_audio_context() -> AudioContext {
|
|
let mut audio_context_options = AudioContextOptions::new();
|
|
audio_context_options.sample_rate(48000 as f32);
|
|
let audio_context = AudioContext::new_with_context_options(&audio_context_options).unwrap();
|
|
audio_context
|
|
}
|
|
|
|
/// Convenience for converting a JS `Promise` into an awaitable `JsFuture`
/// at the end of a method chain.
trait PromiseExt {
    fn into_future(self) -> JsFuture;
}
|
|
|
|
impl PromiseExt for Promise {
|
|
fn into_future(self) -> JsFuture {
|
|
self.into()
|
|
}
|
|
}
|
|
|
|
/// Set up the microphone capture pipeline:
/// `getUserMedia` → `AudioWorkletNode` (`rust_mic_worklet`) → `AudioEncoder`
/// (Opus, mono, 48 kHz). Every encoded chunk is copied into a `Vec<u8>`
/// and handed to `each`.
///
/// Returns the worklet node so the caller can keep/inspect it; the
/// encoder and its callbacks are intentionally leaked (`forget`) because
/// JS keeps invoking them for the lifetime of the page.
async fn run_encoder_worklet(
    audio_context: &AudioContext,
    mut each: impl FnMut(Vec<u8>) + 'static,
) -> Result<AudioWorkletNode, Error> {
    // Ask the browser for an audio-only MediaStream (prompts the user
    // for mic permission on first use).
    let stream = window()
        .unwrap()
        .navigator()
        .media_devices()
        .ey()?
        .get_user_media_with_constraints(MediaStreamConstraints::new().audio(&JsValue::TRUE))
        .ey()?
        .into_future()
        .await
        .ey()?
        .dyn_into()
        .map_err(|e| JsError::new(&format!("not a stream: {e:?}")))
        .ey()?;

    // Pass this wasm module to the worklet via processorOptions so the
    // worklet-side code can instantiate it.
    let options = WorkletOptions::new();
    Reflect::set(
        &options,
        &"processorOptions".into(),
        &wasm_bindgen::module(),
    )
    .ey()?;

    // `asset!` resolves the bundled worklet script to its served URL.
    let module = asset!("assets/rust_mic_worklet.js").to_string();
    info!("loading mic worklet from {module:?}");
    audio_context
        .audio_worklet()
        .ey()?
        .add_module_with_options(&module, &options)
        .ey()?
        .into_future()
        .await
        .ey()?;

    let source = audio_context.create_media_stream_source(&stream).ey()?;
    // Name must match the processor registered by rust_mic_worklet.js.
    let worklet_node = AudioWorkletNode::new(audio_context, "rust_mic_worklet").ey()?;

    let encoder_error: Closure<dyn FnMut(JsValue)> =
        Closure::new(|e| error!("error encoding audio {:?}", e));

    // This knows what MediaStreamTrackGenerator to use as it closes around it
    let output: Closure<dyn FnMut(EncodedAudioChunk)> =
        Closure::new(move |audio_data: EncodedAudioChunk| {
            // Copy the encoded bytes out of the JS chunk and forward them.
            let mut array = vec![0u8; audio_data.byte_length() as usize];
            audio_data.copy_to_with_u8_slice(&mut array);
            each(array);
        });

    let audio_encoder = AudioEncoder::new(&AudioEncoderInit::new(
        encoder_error.as_ref().unchecked_ref(),
        output.as_ref().unchecked_ref(),
    ))
    .unwrap();

    // This is required to prevent these from being deallocated
    encoder_error.forget();
    output.forget();
    // Mono Opus at 48 kHz / 72 kbit/s — mirrors the decoder config in
    // `AudioSystem::create_player`.
    let encoder_config = AudioEncoderConfig::new("opus");
    encoder_config.set_number_of_channels(1);
    encoder_config.set_sample_rate(48000);
    encoder_config.set_bitrate(72_000.0);

    audio_encoder.configure(&encoder_config);
    info!("created audio encoder");

    // Debug aid: accumulates raw PCM so it could be dumped to a file
    // (see the commented-out download_data calls below).
    let download_buffer = std::cell::RefCell::new(Vec::new());

    // The worklet posts raw audio back over its MessagePort; wrap each
    // message in an `AudioData` and feed it to the encoder.
    let onmessage: Closure<dyn FnMut(MessageEvent)> = Closure::new(move |event: MessageEvent| {
        match AudioData::new(event.data().unchecked_ref()) {
            Ok(data) => {
                let x = web_sys::AudioDataCopyToOptions::new(0);
                x.set_format(web_sys::AudioSampleFormat::F32);
                let mut sub_buffer = vec![0; data.allocation_size(&x).unwrap() as usize];
                data.copy_to_with_u8_slice(&mut sub_buffer, &x);
                download_buffer.borrow_mut().append(&mut sub_buffer);
                // Cap the debug buffer at ~10 s of f32 mono @ 48 kHz.
                if download_buffer.borrow().len() > 48000 * 10 * 4 {
                    //pub fn download_data(data: Vec<u8>, filename: &str) -> Result<(), JsValue> {
                    //download_data(download_buffer.borrow().to_vec(), "download_buffer.pcm32");
                    download_buffer.borrow_mut().clear();
                }

                audio_encoder.encode(&data);
            }
            Err(err) => {
                error!(
                    "error creating AudioData object {:?} during event {:?}",
                    err, event,
                );
            }
        }
    });
    // Assign `port.onmessage` via Reflect — presumably because the typed
    // MessagePort accessor is unavailable here; TODO confirm.
    Reflect::set(
        &Reflect::get(&worklet_node, &"port".into()).ey()?,
        &"onmessage".into(),
        onmessage.as_ref(),
    )
    .ey()?;
    onmessage.forget();

    // Wire mic source → worklet → destination.
    source.connect_with_audio_node(&worklet_node).ey()?;
    worklet_node
        .connect_with_audio_node(&audio_context.destination())
        .ey()?;

    Ok(worklet_node)
}
|
|
|
|
/// Open a WebTransport connection to `address`, frame it with the Mumble
/// control codec, and hand the reader/writer pair to
/// `crate::network_loop`, which runs until the connection ends.
///
/// `event_rx` carries UI commands into the network loop.
#[instrument]
pub async fn network_connect(
    address: String,
    username: String,
    event_rx: &mut UnboundedReceiver<Command>,
) -> Result<(), Error> {
    info!("Rust via WASM!");

    // Build one entry for WebTransport's serverCertificateHashes:
    // { algorithm: "sha-256", value: <hash bytes> }.
    let object = web_sys::js_sys::Object::new();

    Reflect::set(
        &object,
        &JsValue::from_str("algorithm"),
        &JsValue::from_str("sha-256"),
    )
    .ey()?;

    // Pin the server certificate when a hash is configured; without it
    // the entry has no `value` and the browser falls back to normal
    // certificate validation — presumably intentional; TODO confirm.
    if let Some(server_hash) = &CONFIG.cert_hash {
        let hash = web_sys::js_sys::Uint8Array::from(server_hash.as_slice());
        web_sys::js_sys::Reflect::set(&object, &"value".into(), &hash).ey()?;
    }

    let array = web_sys::js_sys::Array::new();
    array.push(&object);

    debug!("created option object: {:?}", &object);

    let mut options = WebTransportOptions::new();
    options.set_server_certificate_hashes(&array);

    debug!("created WebTransportOptions");
    console::log_1(&options.clone().into());

    let transport = WebTransport::new_with_options(&address, &options).ey()?;
    debug!("created WebTransport connection object");
    console::log_1(&transport.clone().into());

    // `ready()` resolves once the QUIC/TLS handshake completes.
    if let Err(e) = wasm_bindgen_futures::JsFuture::from(transport.ready())
        .await
        .ey()
    {
        bail!("could not connect to transport: {e}");
    }

    info!("transport is ready");

    // One bidirectional stream carries the Mumble control channel.
    let stream: WebTransportBidirectionalStream =
        wasm_bindgen_futures::JsFuture::from(transport.create_bidirectional_stream())
            .await
            .ey()?
            .into();

    // Adapt the JS streams into Rust AsyncRead/AsyncWrite.
    let wasm_stream_readable = wasm_streams::ReadableStream::from_raw(stream.readable().into());
    let wasm_stream_writable = wasm_streams::WritableStream::from_raw(stream.writable().into());

    let read_codec = ClientControlCodec::new();
    let write_codec = ClientControlCodec::new();

    let reader =
        asynchronous_codec::FramedRead::new(wasm_stream_readable.into_async_read(), read_codec);
    let writer =
        asynchronous_codec::FramedWrite::new(wasm_stream_writable.into_async_write(), write_codec);

    crate::network_loop(username, event_rx, reader, writer).await
}
|
|
|
|
pub fn set_default_username(username: &str) -> Option<()> {
|
|
web_sys::window()?
|
|
.local_storage()
|
|
.ok()??
|
|
.set_item("username", username)
|
|
.ok()
|
|
}
|
|
|
|
pub fn load_username() -> Option<String> {
|
|
web_sys::window()
|
|
.unwrap()
|
|
.local_storage()
|
|
.ok()??
|
|
.get_item("username")
|
|
.ok()?
|
|
}
|
|
|
|
fn load_config_from_window() -> Option<GuiConfig> {
|
|
serde_wasm_bindgen::from_value(Reflect::get(window()?.as_ref(), &"config".into()).ok()?).ok()
|
|
}
|
|
|
|
fn load_config_from_env() -> Option<GuiConfig> {
|
|
serde_json::from_str(option_env!("MUMBLE_WEB2_GUI_CONFIG")?).ok()?
|
|
}
|
|
|
|
pub fn load_config() -> Option<GuiConfig> {
|
|
load_config_from_window().or_else(load_config_from_env)
|
|
}
|
|
|
|
pub fn init_logging() {
|
|
// copied from tracing_web example usage
|
|
|
|
use tracing_subscriber::fmt::format::Pretty;
|
|
use tracing_subscriber::prelude::*;
|
|
use tracing_web::{performance_layer, MakeWebConsoleWriter};
|
|
|
|
let fmt_layer = tracing_subscriber::fmt::layer()
|
|
.with_ansi(false) // Only partially supported across browsers
|
|
.without_time() // std::time is not available in browsers
|
|
.with_writer(MakeWebConsoleWriter::new()); // write events to the console
|
|
let perf_layer = performance_layer().with_details_from_fields(Pretty::default());
|
|
|
|
tracing_subscriber::registry()
|
|
.with(fmt_layer)
|
|
.with(perf_layer)
|
|
.init();
|
|
}
|