Refactor the imp/gui boundary to use real traits #18

Merged
liamwarfield merged 8 commits from gui-platform-boundary-refactor into main 2026-02-18 04:53:41 +00:00
12 changed files with 621 additions and 292 deletions
-1
View File
@@ -146,7 +146,6 @@ desktop = [
"rfd/xdg-portal", "rfd/xdg-portal",
"etcetera", "etcetera",
] ]
mobile = [ mobile = [
"dioxus/mobile", "dioxus/mobile",
"tokio", "tokio",
+9 -9
View File
@@ -6,7 +6,7 @@ use mumble_web2_common::{ClientConfig, ServerStatus};
use ordermap::OrderSet; use ordermap::OrderSet;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use crate::imp; use crate::imp::{Platform, PlatformInterface as _};
pub type ChannelId = u32; pub type ChannelId = u32;
pub type UserId = u32; pub type UserId = u32;
@@ -690,12 +690,12 @@ pub fn LoginView(config: Resource<ClientConfig>) -> Element {
use_resource(move || async move { use_resource(move || async move {
let client = reqwest::Client::new(); let client = reqwest::Client::new();
loop { loop {
*last_status.write_unchecked() = Some(imp::get_status(&client).await); *last_status.write_unchecked() = Some(Platform::get_status(&client).await);
imp::sleep(std::time::Duration::from_secs_f32(1.0)).await; Platform::sleep(std::time::Duration::from_secs_f32(1.0)).await;
} }
}); });
let mut address_input = use_signal(|| imp::load_server_url()); let mut address_input = use_signal(|| Platform::load_server_url());
let address = use_memo(move || { let address = use_memo(move || {
if let Some(addr) = address_input() { if let Some(addr) = address_input() {
addr.clone() addr.clone()
@@ -706,14 +706,14 @@ pub fn LoginView(config: Resource<ClientConfig>) -> Element {
} }
}); });
let previous_username = imp::load_username(); let previous_username = Platform::load_username();
let mut username = use_signal(|| previous_username.unwrap_or(String::new())); let mut username = use_signal(|| previous_username.unwrap_or(String::new()));
let do_connect = move |_| { let do_connect = move |_| {
//let _ = set_default_username(&username.read()); //let _ = set_default_username(&username.read());
let _ = imp::set_default_username(&username.read()); let _ = Platform::set_default_username(&username.read());
if config.read().as_ref().is_some_and(|cfg| cfg.any_server) { if config.read().as_ref().is_some_and(|cfg| cfg.any_server) {
imp::set_default_server(&address.read()); Platform::set_default_server(&address.read());
} }
net.send(Connect { net.send(Connect {
address: address.read().clone(), address: address.read().clone(),
@@ -860,13 +860,13 @@ pub fn app() -> Element {
use_coroutine(|rx: UnboundedReceiver<Command>| super::network_entrypoint(rx)); use_coroutine(|rx: UnboundedReceiver<Command>| super::network_entrypoint(rx));
let config = use_resource(|| async move { let config = use_resource(|| async move {
match imp::load_config().await { match Platform::load_config().await {
Ok(config) => config, Ok(config) => config,
Err(_) => ClientConfig::default(), Err(_) => ClientConfig::default(),
} }
}); });
imp::request_permissions(); Platform::request_permissions();
rsx!( rsx!(
document::Link{ rel: "stylesheet", href: "https://fonts.googleapis.com/css2?family=Nunito:ital,wght@0,200..1000;1,200..1000&display=swap" } document::Link{ rel: "stylesheet", href: "https://fonts.googleapis.com/css2?family=Nunito:ital,wght@0,200..1000;1,200..1000&display=swap" }
+11 -9
View File
@@ -7,7 +7,7 @@ use std::cell::RefCell;
use std::sync::Arc; use std::sync::Arc;
use tracing::{error, info}; use tracing::{error, info};
use crate::imp; use crate::imp::SpawnHandle;
static DF_MODEL: Asset = asset!("/assets/DeepFilterNet3_ll_onnx.tar.gz"); static DF_MODEL: Asset = asset!("/assets/DeepFilterNet3_ll_onnx.tar.gz");
// TODO: make this user configurable. // TODO: make this user configurable.
@@ -32,10 +32,7 @@ enum DenoisingModelState {
Availible(Box<DfTract>), Availible(Box<DfTract>),
} }
fn with_denoising_model<O>( fn with_denoising_model<O>(spawn: &SpawnHandle, func: impl FnOnce(&mut DfTract) -> O) -> Option<O> {
spawn: &imp::SpawnHandle,
func: impl FnOnce(&mut DfTract) -> O,
) -> Option<O> {
// Using a thread local is super gross, but DfTract is not Send (so it can never leave the current // Using a thread local is super gross, but DfTract is not Send (so it can never leave the current
// thread) while AudioProcessing itself might change threads whenever. // thread) while AudioProcessing itself might change threads whenever.
thread_local! { thread_local! {
@@ -89,7 +86,7 @@ fn with_denoising_model<O>(
pub struct AudioProcessor { pub struct AudioProcessor {
denoise: bool, denoise: bool,
spawn: imp::SpawnHandle, spawn: SpawnHandle,
buffer: Vec<f32>, buffer: Vec<f32>,
noise_floor: f32, noise_floor: f32,
/// Whether we were transmitting in the previous frame /// Whether we were transmitting in the previous frame
@@ -102,7 +99,7 @@ impl AudioProcessor {
pub fn new_plain() -> Self { pub fn new_plain() -> Self {
AudioProcessor { AudioProcessor {
denoise: false, denoise: false,
spawn: imp::SpawnHandle::current(), spawn: SpawnHandle::current(),
buffer: Vec::new(), buffer: Vec::new(),
noise_floor: DEFAULT_NOISE_FLOOR, noise_floor: DEFAULT_NOISE_FLOOR,
was_transmitting: false, was_transmitting: false,
@@ -113,7 +110,7 @@ impl AudioProcessor {
pub fn new_denoising() -> Self { pub fn new_denoising() -> Self {
AudioProcessor { AudioProcessor {
denoise: true, denoise: true,
spawn: imp::SpawnHandle::current(), spawn: SpawnHandle::current(),
buffer: Vec::new(), buffer: Vec::new(),
noise_floor: DEFAULT_NOISE_FLOOR, noise_floor: DEFAULT_NOISE_FLOOR,
was_transmitting: false, was_transmitting: false,
@@ -123,7 +120,12 @@ impl AudioProcessor {
} }
impl AudioProcessor { impl AudioProcessor {
pub fn process(&mut self, audio: &[f32], channels: usize, output: &mut Vec<f32>) -> TransmitState { pub fn process(
&mut self,
audio: &[f32],
channels: usize,
output: &mut Vec<f32>,
) -> TransmitState {
let mut include_raw = true; let mut include_raw = true;
if self.denoise { if self.denoise {
with_denoising_model(&self.spawn, |df| { with_denoising_model(&self.spawn, |df| {
+5
View File
@@ -108,3 +108,8 @@ pub async fn network_connect(
pub async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> { pub async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
bail!("status not supported on desktop yet") bail!("status not supported on desktop yet")
} }
#[allow(unused)]
pub use tokio::spawn;
#[allow(unused)]
pub type SpawnHandle = tokio::runtime::Handle;
+77 -51
View File
@@ -1,12 +1,83 @@
use crate::app::Command;
Outdated
Review

Add a doc comment here for what this file is, here and elsewhere in imp

Add a doc comment here for what this file is, here and elsewhere in imp
Outdated
Review

I'm not sure if this is actually needed.

I'm not sure if this is actually needed.
use color_eyre::eyre::Error;
use dioxus::hooks::UnboundedReceiver;
use etcetera::{choose_app_strategy, AppStrategy, AppStrategyArgs}; use etcetera::{choose_app_strategy, AppStrategy, AppStrategyArgs};
use mumble_web2_common::ClientConfig; use mumble_web2_common::{ClientConfig, ServerStatus};
use std::collections::HashMap; use std::collections::HashMap;
pub use tokio::runtime::Handle as SpawnHandle; use std::time::Duration;
pub use tokio::task::spawn;
pub use tokio::time::sleep;
pub use super::connect::*; /// Desktop platform implementation using Tokio and native audio.
liamwarfield marked this conversation as resolved Outdated
Outdated
Review

Delete this bar

Delete this bar
pub use super::native_audio::*; pub struct DesktopPlatform;
// Desktop implementation of the platform trait: Tokio-backed sleep, a simple
// key/value config file (via load_config_map/save_config_map defined later in
// this file) for the persisted username/server, and tracing-based logging.
impl super::PlatformInterface for DesktopPlatform {
// Desktop shares the cpal-backed native audio system with mobile.
type AudioSystem = super::native_audio::NativeAudioSystem;
/// Async sleep backed by the Tokio timer.
async fn sleep(duration: Duration) {
tokio::time::sleep(duration).await;
}
/// Desktop has no proxy overrides: returns a fixed config with no proxy URL
/// or cert hash, and `any_server: true` (the user may enter any address).
async fn load_config() -> color_eyre::Result<ClientConfig> {
Ok(ClientConfig {
proxy_url: None,
cert_hash: None,
any_server: true,
})
}
/// Read the saved username from the on-disk config map ("username" key).
fn load_username() -> Option<String> {
let config = load_config_map();
config.get("username").cloned()
}
/// Read the saved server URL from the on-disk config map ("server" key).
fn load_server_url() -> Option<String> {
let config = load_config_map();
config.get("server").cloned()
}
/// Persist the username; `None` if the config file could not be written.
fn set_default_username(username: &str) -> Option<()> {
let mut config = load_config_map();
config.insert("username".to_string(), username.to_string());
save_config_map(&config).ok()
}
/// Persist the server URL; `None` if the config file could not be written.
fn set_default_server(server: &str) -> Option<()> {
let mut config = load_config_map();
config.insert("server".to_string(), server.to_string());
save_config_map(&config).ok()
}
/// Delegate to the shared native connection loop in `imp::connect`.
async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
gui_config: &ClientConfig,
) -> Result<(), Error> {
super::connect::network_connect(address, username, event_rx, gui_config).await
}
/// Delegate to `imp::connect` (which currently bails: status is not yet
/// supported on desktop).
async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
super::connect::get_status(client).await
}
/// Install a tracing-subscriber fmt logger: defaults to INFO, overridable
/// through the standard env filter (RUST_LOG-style), with target and level
/// included in each event.
fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
/// Desktop needs no runtime permission prompts.
fn request_permissions() {
// No-op on desktop
}
}
fn get_config_path() -> std::path::PathBuf { fn get_config_path() -> std::path::PathBuf {
let strategy = choose_app_strategy(AppStrategyArgs { let strategy = choose_app_strategy(AppStrategyArgs {
@@ -35,48 +106,3 @@ fn save_config_map(config: &HashMap<String, String>) -> color_eyre::Result<()> {
std::fs::write(&config_path, contents)?; std::fs::write(&config_path, contents)?;
Ok(()) Ok(())
} }
pub fn set_default_username(username: &str) -> Option<()> {
let mut config = load_config_map();
config.insert("username".to_string(), username.to_string());
save_config_map(&config).ok()
}
pub fn set_default_server(server: &str) -> Option<()> {
let mut config = load_config_map();
config.insert("server".to_string(), server.to_string());
save_config_map(&config).ok()
}
pub fn load_username() -> Option<String> {
let config = load_config_map();
config.get("username").cloned()
}
pub fn load_server_url() -> Option<String> {
let config = load_config_map();
config.get("server").cloned()
}
pub async fn load_config() -> color_eyre::Result<ClientConfig> {
Ok(ClientConfig {
proxy_url: None,
cert_hash: None,
any_server: true,
})
}
pub fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
3
+74 -50
View File
@@ -1,61 +1,85 @@
use android_permissions::{PermissionManager, RECORD_AUDIO}; use crate::app::Command;
use jni::{objects::JObject, JavaVM}; use color_eyre::eyre::Error;
use mumble_web2_common::ClientConfig; use dioxus::hooks::UnboundedReceiver;
use mumble_web2_common::{ClientConfig, ServerStatus};
use std::future::Future;
use std::time::Duration;
use std::collections::HashMap; /// Mobile platform implementation using Tokio, native audio, and Android permissions.
pub use tokio::runtime::Handle as SpawnHandle; pub struct MobilePlatform;
pub use tokio::task::spawn;
pub use tokio::time::sleep;
pub use super::connect::*; impl super::PlatformInterface for MobilePlatform {
pub use super::native_audio::*; type AudioSystem = super::native_audio::NativeAudioSystem;
pub fn set_default_username(username: &str) -> Option<()> { async fn load_config() -> color_eyre::Result<ClientConfig> {
None Ok(ClientConfig {
proxy_url: None,
cert_hash: None,
any_server: true,
})
}
fn load_username() -> Option<String> {
None
}
fn load_server_url() -> Option<String> {
None
}
fn set_default_username(_username: &str) -> Option<()> {
None
}
fn set_default_server(server: &str) -> Option<()> {
None
}
async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
gui_config: &ClientConfig,
) -> Result<(), Error> {
super::connect::network_connect(address, username, event_rx, gui_config).await
}
async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
super::connect::get_status(client).await
}
fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
fn request_permissions() {
request_recording_permission();
}
async fn sleep(duration: Duration) {
tokio::time::sleep(duration).await;
}
} }
pub fn set_default_server(server: &str) -> Option<()> { #[cfg(not(target_os = "android"))]
None pub fn request_recording_permission() {}
}
pub fn load_username() -> Option<String> {
None
}
pub fn load_server_url() -> Option<String> {
None
}
pub async fn load_config() -> color_eyre::Result<ClientConfig> {
Ok(ClientConfig {
proxy_url: None,
cert_hash: None,
any_server: true,
})
}
pub fn init_logging() {
use tracing::level_filters::LevelFilter;
use tracing_subscriber::filter::EnvFilter;
let env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy();
tracing_subscriber::fmt()
.with_target(true)
.with_level(true)
.with_env_filter(env_filter)
.init();
}
#[cfg(feature = "mobile")]
pub fn request_permissions() {
request_recording_permission();
}
#[cfg(target_os = "android")] #[cfg(target_os = "android")]
pub fn request_recording_permission() { pub fn request_recording_permission() {
use android_permissions::{PermissionManager, RECORD_AUDIO};
use jni::{objects::JObject, JavaVM};
let ctx = ndk_context::android_context(); let ctx = ndk_context::android_context();
let vm = unsafe { JavaVM::from_raw(ctx.vm().cast()).unwrap() }; let vm = unsafe { JavaVM::from_raw(ctx.vm().cast()).unwrap() };
let activity = unsafe { JObject::from_raw(ctx.context().cast()) }; let activity = unsafe { JObject::from_raw(ctx.context().cast()) };
+154 -17
View File
@@ -1,29 +1,166 @@
#[cfg(feature = "web")] //! Platform abstraction layer
mod web; //!
//! This module defines traits that each platform (web, desktop, mobile) must implement.
//! The traits make the platform boundary explicit and provide compile-time verification.
#![allow(async_fn_in_trait)]
use crate::{app::Command, effects::AudioProcessor};
use color_eyre::eyre::Error;
use dioxus::hooks::UnboundedReceiver;
use mumble_web2_common::{ClientConfig, ServerStatus};
use std::future::Future;
use std::time::Duration;
// ============================================================================
// Trait Definitions
// ============================================================================
/// Platform-specific audio subsystem for capturing microphone input and creating playback streams.
liamwarfield marked this conversation as resolved Outdated
Outdated
Review

We need to add a doc comment for the overall trait here.

We need to add a doc comment for the overall trait here.
Outdated
Review

Done

Done
///
/// The audio system handles Opus encoding internally - callers receive encoded frames
/// ready for network transmission.
pub trait AudioSystemInterface: Sized {
/// The player type returned by [`create_player`](Self::create_player).
type AudioPlayer: AudioPlayerInterface;
/// Initialize the audio system.
async fn new() -> Result<Self, Error>;
/// Set the processor for the microphone input, mainly noise cancellation settings.
fn set_processor(&self, processor: AudioProcessor);
/// Begin listening to microphone input, calling the `each` function with
/// encoded opus frames.
// NOTE(review): the second (bool) argument passed to `each` is undocumented —
// presumably a transmit/voice-activity flag; confirm against the
// implementations and document it at the trait level.
fn start_recording(
&mut self,
each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error>;
/// Begin playback of an audio stream, returning an object that can be passed opus frames.
fn create_player(&mut self) -> Result<Self::AudioPlayer, Error>;
}
/// A handle to an active audio playback stream for a single remote user.
///
/// Each connected user gets their own `AudioPlayer` instance, which decodes
/// incoming Opus frames and outputs PCM audio to the platform's audio device.
/// The player manages its own decoder state and output buffer.
pub trait AudioPlayerInterface {
/// Decode and play an Opus-encoded audio frame.
fn play_opus(&mut self, payload: &[u8]);
}
/// This is the main trait that each platform must implement. It combines all
/// platform-specific functionality into a single interface, providing compile-time
/// verification that all platforms implement the required functionality.
pub trait PlatformInterface {
/// The platform's audio backend; see [`AudioSystemInterface`].
type AudioSystem: AudioSystemInterface;
/// Initialize logging for the platform.
fn init_logging();
/// Request runtime permissions (Android audio recording, etc.).
fn request_permissions();
/// Establish a connection to the Mumble server and run the network loop.
fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
gui_config: &ClientConfig,
) -> impl Future<Output = Result<(), Error>>;
/// Get server status (user count, version, etc.).
fn get_status(
client: &reqwest::Client,
) -> impl Future<Output = color_eyre::Result<ServerStatus>>;
/// Load the proxy overrides (proxy URL, cert hash, etc.).
fn load_config() -> impl Future<Output = color_eyre::Result<ClientConfig>>;
/// Load saved username.
fn load_username() -> Option<String>;
/// Load saved server URL.
fn load_server_url() -> Option<String>;
/// Save the default username.
fn set_default_username(username: &str) -> Option<()>;
/// Save the default server URL.
fn set_default_server(server: &str) -> Option<()>;
/// Async sleep for the given duration.
fn sleep(duration: Duration) -> impl Future<Output = ()>;
}
// ============================================================================
// Platform Modules
// ============================================================================
#[cfg(any(feature = "desktop", feature = "mobile"))] #[cfg(any(feature = "desktop", feature = "mobile"))]
mod connect; mod connect;
#[cfg(any(feature = "desktop", feature = "mobile"))]
mod native_audio;
#[cfg(feature = "desktop")] #[cfg(feature = "desktop")]
mod desktop; mod desktop;
#[cfg(feature = "mobile")] #[cfg(feature = "mobile")]
mod mobile; mod mobile;
#[cfg(any(feature = "desktop", feature = "mobile"))]
mod native_audio;
mod stub;
#[cfg(feature = "web")]
mod web;
#[cfg(feature = "desktop")] // ============================================================================
pub use desktop::*; // Platform Type Alias
#[cfg(feature = "mobile")] // ============================================================================
pub use mobile::*;
#[cfg(feature = "mobile")] #[cfg(feature = "web")]
pub use mobile::request_permissions; pub type Platform = web::WebPlatform;
#[cfg(any(feature = "desktop", feature = "web"))] #[cfg(all(feature = "desktop", not(feature = "web")))]
pub fn request_permissions() {} pub type Platform = desktop::DesktopPlatform;
#[cfg(all(feature = "web", not(any(feature = "desktop", feature = "mobile"))))] #[cfg(all(feature = "mobile", not(feature = "web"), not(feature = "desktop")))]
pub use web::*; pub type Platform = mobile::MobilePlatform;
#[cfg(any(feature = "desktop"))] #[cfg(all(
pub use desktop::*; not(feature = "mobile"),
not(feature = "web"),
not(feature = "desktop")
))]
pub type Platform = stub::StubPlatform;
pub type AudioSystem = <Platform as PlatformInterface>::AudioSystem;
pub type AudioPlayer = <AudioSystem as AudioSystemInterface>::AudioPlayer;
// ========================
// Platform Async Runtime
// ========================
// Note: these cannot be part of the Platform because they differ in Send requirements
#[cfg(all(any(feature = "desktop", feature = "mobile"), not(feature = "web")))]
pub use connect::{spawn, SpawnHandle};
#[cfg(all(
not(feature = "desktop"),
not(feature = "mobile"),
not(feature = "web")
))]
pub use stub::{spawn, SpawnHandle};
#[cfg(feature = "web")]
pub use web::{spawn, SpawnHandle};
// =======================
// Compile-time Assertions
// =======================
const _: () = {
fn assert_platform<T: PlatformInterface>() {}
// Check each implementation, and prevent warnings that the implementations are unused.
#[cfg(feature = "web")]
let _ = assert_platform::<web::WebPlatform>;
#[cfg(feature = "desktop")]
let _ = assert_platform::<desktop::DesktopPlatform>;
#[cfg(feature = "mobile")]
let _ = assert_platform::<mobile::MobilePlatform>;
let _ = assert_platform::<stub::StubPlatform>;
};
+34 -37
View File
@@ -1,19 +1,12 @@
use crate::effects::{AudioProcessor, AudioProcessorSender, TransmitState}; use crate::effects::{AudioProcessor, AudioProcessorSender, TransmitState};
use color_eyre::eyre::{eyre, Error}; use color_eyre::eyre::{eyre, Error};
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _}; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
use futures::io::{AsyncRead, AsyncWrite};
use std::mem::replace; use std::mem::replace;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use tracing::{error, info, warn}; use tracing::{error, info, warn};
pub trait ImpRead: AsyncRead + Unpin + Send + 'static {} pub struct NativeAudioSystem {
impl<T: AsyncRead + Unpin + Send + 'static> ImpRead for T {}
pub trait ImpWrite: AsyncWrite + Unpin + Send + 'static {}
impl<T: AsyncWrite + Unpin + Send + 'static> ImpWrite for T {}
pub struct AudioSystem {
output: cpal::Device, output: cpal::Device,
input: cpal::Device, input: cpal::Device,
processors: AudioProcessorSender, processors: AudioProcessorSender,
@@ -52,28 +45,7 @@ fn encode_and_send(
type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>; type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>;
impl AudioSystem { impl NativeAudioSystem {
pub async fn new() -> Result<Self, Error> {
// TODO
let host = cpal::default_host();
let name = host.id();
let processors = AudioProcessorSender::default();
Ok(AudioSystem {
output: host
.default_output_device()
.ok_or(eyre!("no output devices from {name:?}"))?,
input: host
.default_input_device()
.ok_or(eyre!("no input devices from {name:?}"))?,
processors,
recording_stream: None,
})
}
pub fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
fn choose_config( fn choose_config(
&self, &self,
configs: impl Iterator<Item = cpal::SupportedStreamConfigRange>, configs: impl Iterator<Item = cpal::SupportedStreamConfigRange>,
@@ -103,8 +75,32 @@ impl AudioSystem {
.cloned() .cloned()
.ok_or(eyre!("no supported stream configs")) .ok_or(eyre!("no supported stream configs"))
} }
}
pub fn start_recording( impl super::AudioSystemInterface for NativeAudioSystem {
type AudioPlayer = NativeAudioPlayer;
async fn new() -> Result<Self, Error> {
let host = cpal::default_host();
let name = host.id();
let processors = AudioProcessorSender::default();
Ok(NativeAudioSystem {
output: host
.default_output_device()
.ok_or(eyre!("no output devices from {name:?}"))?,
input: host
.default_input_device()
.ok_or(eyre!("no input devices from {name:?}"))?,
processors,
recording_stream: None,
})
}
fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
fn start_recording(
&mut self, &mut self,
mut each: impl FnMut(Vec<u8>, bool) + Send + 'static, mut each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error> { ) -> Result<(), Error> {
@@ -124,7 +120,8 @@ impl AudioSystem {
if let Some(new_processor) = processors.take() { if let Some(new_processor) = processors.take() {
current_processor = new_processor; current_processor = new_processor;
} }
let state = current_processor.process(frame, config.channels as usize, &mut output_buffer); let state =
current_processor.process(frame, config.channels as usize, &mut output_buffer);
encode_and_send(state, &mut output_buffer, &mut encoder, &mut each); encode_and_send(state, &mut output_buffer, &mut encoder, &mut each);
}; };
@@ -144,7 +141,7 @@ impl AudioSystem {
} }
} }
pub fn create_player(&mut self) -> Result<AudioPlayer, Error> { fn create_player(&mut self) -> Result<NativeAudioPlayer, Error> {
let config = self.choose_config(self.output.supported_output_configs()?)?; let config = self.choose_config(self.output.supported_output_configs()?)?;
info!( info!(
"creating player on {:?} with {:#?}", "creating player on {:?} with {:#?}",
@@ -182,7 +179,7 @@ impl AudioSystem {
)? )?
}; };
stream.play()?; stream.play()?;
Ok(AudioPlayer { Ok(NativeAudioPlayer {
decoder, decoder,
stream, stream,
buffer, buffer,
@@ -191,15 +188,15 @@ impl AudioSystem {
} }
} }
pub struct AudioPlayer { pub struct NativeAudioPlayer {
decoder: opus::Decoder, decoder: opus::Decoder,
stream: cpal::Stream, stream: cpal::Stream,
buffer: Buffer, buffer: Buffer,
tmp: Vec<i16>, tmp: Vec<i16>,
} }
impl AudioPlayer { impl super::AudioPlayerInterface for NativeAudioPlayer {
pub fn play_opus(&mut self, payload: &[u8]) { fn play_opus(&mut self, payload: &[u8]) {
let len = match self.decoder.decode(payload, &mut self.tmp, false) { let len = match self.decoder.decode(payload, &mut self.tmp, false) {
Ok(l) => l, Ok(l) => l,
Err(e) => { Err(e) => {
+119
View File
@@ -0,0 +1,119 @@
//! Stub implementation of the platform interface, so that we can
//! `cargo check` without any --feature flags.
use crate::effects::AudioProcessor;
use color_eyre::eyre::Error;
use dioxus::hooks::UnboundedReceiver;
use mumble_web2_common::{ClientConfig, ServerStatus};
use std::future::Future;
/// Placeholder platform selected only when no platform feature flag is
/// enabled, so the crate still type-checks; every method panics at runtime.
pub struct StubPlatform;
// All methods unconditionally panic: this platform exists to be compiled
// against, never to be run.
impl super::PlatformInterface for StubPlatform {
type AudioSystem = StubAudioSystem;
fn init_logging() {
panic!("stubbed platform")
}
fn request_permissions() {
panic!("stubbed platform")
}
fn network_connect(
_address: String,
_username: String,
_event_rx: &mut UnboundedReceiver<crate::app::Command>,
_gui_config: &ClientConfig,
) -> impl Future<Output = Result<(), Error>> {
async { panic!("stubbed platform") }
}
fn get_status(
_client: &reqwest::Client,
) -> impl Future<Output = color_eyre::Result<ServerStatus>> {
async { panic!("stubbed platform") }
}
fn load_config() -> impl Future<Output = color_eyre::Result<ClientConfig>> {
async { panic!("stubbed platform") }
}
fn load_username() -> Option<String> {
panic!("stubbed platform")
}
fn load_server_url() -> Option<String> {
panic!("stubbed platform")
}
fn set_default_username(_username: &str) -> Option<()> {
panic!("stubbed platform")
}
fn set_default_server(_server: &str) -> Option<()> {
panic!("stubbed platform")
}
fn sleep(_duration: std::time::Duration) -> impl Future<Output = ()> {
async { panic!("stubbed platform") }
}
}
/// Audio-system stub paired with [`StubPlatform`]; every method panics.
pub struct StubAudioSystem;
impl super::AudioSystemInterface for StubAudioSystem {
type AudioPlayer = StubAudioPlayer;
async fn new() -> Result<Self, Error> {
panic!("stubbed platform")
}
fn set_processor(&self, _processor: AudioProcessor) {
panic!("stubbed platform")
}
fn start_recording(
&mut self,
_each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error> {
panic!("stubbed platform")
}
fn create_player(&mut self) -> Result<Self::AudioPlayer, Error> {
panic!("stubbed platform")
}
}
/// Audio-player stub; playback is unsupported and panics.
pub struct StubAudioPlayer;
impl super::AudioPlayerInterface for StubAudioPlayer {
fn play_opus(&mut self, _payload: &[u8]) {
panic!("stubbed platform")
}
}
/// Stub spawn handle mirroring the API surface of the Tokio/web handles.
#[allow(unused)]
pub struct SpawnHandle;
impl SpawnHandle {
/// Panics: no executor exists in the stub build.
#[allow(unused)]
pub fn spawn<F>(&self, _future: F)
where
F: Future<Output = ()> + 'static,
{
panic!("stubbed platform")
}
/// Returns a handle without panicking, so types that merely hold a
/// SpawnHandle can still be constructed; only `spawn` itself panics.
#[allow(unused)]
pub fn current() -> Self {
SpawnHandle
}
}
/// Free-function task spawn; panics in the stub build.
#[allow(unused)]
pub fn spawn<F>(_future: F)
where
F: Future<Output = ()> + 'static,
{
panic!("stubbed platform")
}
+122 -108
View File
@@ -3,7 +3,6 @@ use crate::effects::{AudioProcessor, AudioProcessorSender, TransmitState};
use color_eyre::eyre::{bail, eyre, Error}; use color_eyre::eyre::{bail, eyre, Error};
use crossbeam::atomic::AtomicCell; use crossbeam::atomic::AtomicCell;
use dioxus::prelude::*; use dioxus::prelude::*;
use futures::{AsyncRead, AsyncWrite};
use gloo_timers::future::TimeoutFuture; use gloo_timers::future::TimeoutFuture;
use js_sys::Float32Array; use js_sys::Float32Array;
use mumble_protocol::control::ClientControlCodec; use mumble_protocol::control::ClientControlCodec;
@@ -29,7 +28,6 @@ use web_sys::AudioWorkletNode;
use web_sys::EncodedAudioChunk; use web_sys::EncodedAudioChunk;
use web_sys::EncodedAudioChunkInit; use web_sys::EncodedAudioChunkInit;
use web_sys::EncodedAudioChunkType; use web_sys::EncodedAudioChunkType;
use web_sys::MediaStream;
use web_sys::MediaStreamConstraints; use web_sys::MediaStreamConstraints;
use web_sys::MessageEvent; use web_sys::MessageEvent;
use web_sys::WebTransport; use web_sys::WebTransport;
@@ -39,16 +37,119 @@ use web_sys::WorkletOptions;
use web_sys::{console, window}; use web_sys::{console, window};
use web_sys::{AudioContext, AudioDataCopyToOptions}; use web_sys::{AudioContext, AudioDataCopyToOptions};
#[allow(unused)]
pub use wasm_bindgen_futures::spawn_local as spawn; pub use wasm_bindgen_futures::spawn_local as spawn;
pub trait ImpRead: AsyncRead + Unpin + 'static {} #[allow(unused)]
impl<T: AsyncRead + Unpin + 'static> ImpRead for T {} #[derive(Clone)]
pub struct SpawnHandle;
pub trait ImpWrite: AsyncWrite + Unpin + 'static {} impl SpawnHandle {
impl<T: AsyncWrite + Unpin + 'static> ImpWrite for T {} pub fn spawn<F>(&self, future: F)
where
F: Future<Output = ()> + 'static,
{
wasm_bindgen_futures::spawn_local(future);
}
pub async fn sleep(d: Duration) { pub fn current() -> Self {
TimeoutFuture::new(d.as_millis() as u32).await SpawnHandle
}
}
/// Web platform implementation using WebTransport and Web Audio API.
pub struct WebPlatform;
impl super::PlatformInterface for WebPlatform {
type AudioSystem = WebAudioSystem;
fn init_logging() {
// copied from tracing_web example usage
use tracing_subscriber::fmt::format::Pretty;
use tracing_subscriber::prelude::*;
use tracing_web::{performance_layer, MakeWebConsoleWriter};
let fmt_layer = tracing_subscriber::fmt::layer()
.with_ansi(false) // Only partially supported across browsers
.without_time() // std::time is not available in browsers
.with_writer(MakeWebConsoleWriter::new()) // write events to the console
.with_filter(LevelFilter::DEBUG);
let perf_layer = performance_layer().with_details_from_fields(Pretty::default());
tracing_subscriber::registry()
.with(fmt_layer)
.with(perf_layer)
.init();
info!("logging initiated");
}
fn request_permissions() {
// No-op on web
}
async fn load_config() -> color_eyre::Result<ClientConfig> {
let config_url = match option_env!("MUMBLE_WEB2_GUI_CONFIG_URL") {
Some(url) => Url::parse(url)?,
None => absolute_url("config")?,
};
info!("loading config from {}", config_url);
let config = reqwest::get(config_url)
.await?
.json::<ClientConfig>()
.await?;
Ok(config)
}
fn load_username() -> Option<String> {
web_sys::window()
.unwrap()
.local_storage()
.ok()??
.get_item("username")
.ok()?
}
fn load_server_url() -> Option<String> {
None
}
fn set_default_username(username: &str) -> Option<()> {
web_sys::window()?
.local_storage()
.ok()??
.set_item("username", username)
.ok()
}
fn set_default_server(_server: &str) -> Option<()> {
None
}
async fn network_connect(
address: String,
username: String,
event_rx: &mut UnboundedReceiver<Command>,
gui_config: &ClientConfig,
) -> Result<(), Error> {
network_connect(address, username, event_rx, gui_config).await
}
async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
Ok(client
.get(absolute_url("status")?)
.send()
.await?
.json::<ServerStatus>()
.await?)
}
async fn sleep(duration: Duration) {
TimeoutFuture::new(duration.as_millis() as u32).await;
}
} }
trait ResultExt<T> { trait ResultExt<T> {
@@ -73,7 +174,7 @@ impl<T> ResultExt<T> for Result<T, JsError> {
} }
} }
pub struct AudioSystem { pub struct WebAudioSystem {
webctx: AudioContext, webctx: AudioContext,
processors: AudioProcessorSender, processors: AudioProcessorSender,
} }
@@ -104,8 +205,10 @@ async fn attach_worklet(audio_context: &AudioContext) -> Result<(), Error> {
Ok(()) Ok(())
} }
impl AudioSystem { impl super::AudioSystemInterface for WebAudioSystem {
pub async fn new() -> Result<Self, Error> { type AudioPlayer = WebAudioPlayer;
async fn new() -> Result<Self, Error> {
// Create MediaStreams to playback decoded audio // Create MediaStreams to playback decoded audio
// The audio context is used to reproduce audio. // The audio context is used to reproduce audio.
let webctx = configure_audio_context(); let webctx = configure_audio_context();
@@ -113,17 +216,14 @@ impl AudioSystem {
let processors = AudioProcessorSender::default(); let processors = AudioProcessorSender::default();
Ok(AudioSystem { webctx, processors }) Ok(WebAudioSystem { webctx, processors })
} }
pub fn set_processor(&self, processor: AudioProcessor) { fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor)) self.processors.store(Some(processor))
} }
pub fn start_recording( fn start_recording(&mut self, each: impl FnMut(Vec<u8>, bool) + 'static) -> Result<(), Error> {
&mut self,
each: impl FnMut(Vec<u8>, bool) + 'static,
) -> Result<(), Error> {
let audio_context_worklet = self.webctx.clone(); let audio_context_worklet = self.webctx.clone();
let processors = self.processors.clone(); let processors = self.processors.clone();
spawn(async move { spawn(async move {
@@ -135,7 +235,7 @@ impl AudioSystem {
Ok(()) Ok(())
} }
pub fn create_player(&mut self) -> Result<AudioPlayer, Error> { fn create_player(&mut self) -> Result<WebAudioPlayer, Error> {
let sink_node = AudioWorkletNode::new(&self.webctx, "rust_speaker_worklet").ey()?; let sink_node = AudioWorkletNode::new(&self.webctx, "rust_speaker_worklet").ey()?;
// Connect worklet to destination // Connect worklet to destination
@@ -188,14 +288,14 @@ impl AudioSystem {
decoder_error.forget(); decoder_error.forget();
output.forget(); output.forget();
Ok(AudioPlayer(audio_decoder)) Ok(WebAudioPlayer(audio_decoder))
} }
} }
pub struct AudioPlayer(AudioDecoder); pub struct WebAudioPlayer(AudioDecoder);
impl AudioPlayer { impl super::AudioPlayerInterface for WebAudioPlayer {
pub fn play_opus(&mut self, payload: &[u8]) { fn play_opus(&mut self, payload: &[u8]) {
let js_audio_payload = Uint8Array::from(payload); let js_audio_payload = Uint8Array::from(payload);
let _ = self.0.decode( let _ = self.0.decode(
&EncodedAudioChunk::new(&EncodedAudioChunkInit::new( &EncodedAudioChunk::new(&EncodedAudioChunkInit::new(
@@ -418,94 +518,8 @@ pub async fn network_connect(
crate::network_loop(username, event_rx, reader, writer).await crate::network_loop(username, event_rx, reader, writer).await
} }
pub fn set_default_username(username: &str) -> Option<()> {
web_sys::window()?
.local_storage()
.ok()??
.set_item("username", username)
.ok()
}
pub fn set_default_server(username: &str) -> Option<()> {
None
}
pub fn load_username() -> Option<String> {
web_sys::window()
.unwrap()
.local_storage()
.ok()??
.get_item("username")
.ok()?
}
pub fn load_server_url() -> Option<String> {
None
}
pub fn absolute_url(path: &str) -> Result<Url, Error> { pub fn absolute_url(path: &str) -> Result<Url, Error> {
let window: web_sys::Window = web_sys::window().expect("no global `window` exists"); let window: web_sys::Window = web_sys::window().expect("no global `window` exists");
let location = window.location(); let location = window.location();
Ok(Url::parse(&location.href().ey()?)?.join(path)?) Ok(Url::parse(&location.href().ey()?)?.join(path)?)
} }
pub async fn load_config() -> color_eyre::Result<ClientConfig> {
let config_url = match option_env!("MUMBLE_WEB2_GUI_CONFIG_URL") {
Some(url) => Url::parse(url)?,
None => absolute_url("config")?,
};
info!("loading config from {}", config_url);
let config = reqwest::get(config_url)
.await?
.json::<ClientConfig>()
.await?;
Ok(config)
}
pub async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
Ok(client
.get(absolute_url("status")?)
.send()
.await?
.json::<ServerStatus>()
.await?)
}
pub fn init_logging() {
// copied from tracing_web example usage
use tracing_subscriber::fmt::format::Pretty;
use tracing_subscriber::prelude::*;
use tracing_web::{performance_layer, MakeWebConsoleWriter};
let fmt_layer = tracing_subscriber::fmt::layer()
.with_ansi(false) // Only partially supported across browsers
.without_time() // std::time is not available in browsers
.with_writer(MakeWebConsoleWriter::new()) // write events to the console
.with_filter(LevelFilter::DEBUG);
let perf_layer = performance_layer().with_details_from_fields(Pretty::default());
tracing_subscriber::registry()
.with(fmt_layer)
.with(perf_layer)
.init();
info!("logging initiated");
}
pub struct SpawnHandle;
impl SpawnHandle {
pub fn current() -> Self {
SpawnHandle
}
pub fn spawn<F>(&self, future: F)
where
F: Future<Output = ()> + 'static,
{
spawn(future);
}
}
+14 -8
View File
@@ -7,11 +7,12 @@ use asynchronous_codec::FramedWrite;
use color_eyre::eyre::{bail, Error}; use color_eyre::eyre::{bail, Error};
use dioxus::prelude::*; use dioxus::prelude::*;
use futures::select; use futures::select;
use futures::AsyncRead;
use futures::AsyncWrite;
use futures::FutureExt as _; use futures::FutureExt as _;
use futures::SinkExt as _; use futures::SinkExt as _;
use futures::StreamExt as _; use futures::StreamExt as _;
use futures_channel::mpsc::UnboundedSender; use futures_channel::mpsc::UnboundedSender;
pub use imp::spawn;
use msghtml::process_message_html; use msghtml::process_message_html;
use mumble_protocol::control::msgs; use mumble_protocol::control::msgs;
use mumble_protocol::control::ControlCodec; use mumble_protocol::control::ControlCodec;
@@ -27,7 +28,10 @@ use tracing::error;
use tracing::info; use tracing::info;
use crate::effects::AudioProcessor; use crate::effects::AudioProcessor;
use crate::imp::AudioSystem; use crate::imp::{
AudioPlayer, AudioPlayerInterface as _, AudioSystem, AudioSystemInterface as _, Platform,
PlatformInterface as _,
};
pub mod app; pub mod app;
mod effects; mod effects;
@@ -47,7 +51,9 @@ pub async fn network_entrypoint(mut event_rx: UnboundedReceiver<Command>) {
*STATE.server.write() = Default::default(); *STATE.server.write() = Default::default();
*STATE.status.write() = ConnectionState::Connecting; *STATE.status.write() = ConnectionState::Connecting;
if let Err(error) = imp::network_connect(address, username, &mut event_rx, &config).await { if let Err(error) =
Platform::network_connect(address, username, &mut event_rx, &config).await
{
error!("could not connect {:?}", error); error!("could not connect {:?}", error);
*STATE.status.write() = ConnectionState::Failed(error.to_string()); *STATE.status.write() = ConnectionState::Failed(error.to_string());
} else { } else {
@@ -56,7 +62,7 @@ pub async fn network_entrypoint(mut event_rx: UnboundedReceiver<Command>) {
} }
} }
pub async fn network_loop<R: imp::ImpRead, W: imp::ImpWrite>( pub async fn network_loop<R: AsyncRead + Unpin + 'static, W: AsyncWrite + Unpin + 'static>(
username: String, username: String,
event_rx: &mut UnboundedReceiver<Command>, event_rx: &mut UnboundedReceiver<Command>,
mut reader: FramedRead<R, ControlCodec<Serverbound, Clientbound>>, mut reader: FramedRead<R, ControlCodec<Serverbound, Clientbound>>,
@@ -105,12 +111,12 @@ pub async fn network_loop<R: imp::ImpRead, W: imp::ImpWrite>(
break; break;
} }
imp::sleep(Duration::from_millis(3000)).await; Platform::sleep(Duration::from_millis(3000)).await;
} }
}); });
} }
let mut audio = imp::AudioSystem::new().await?; let mut audio = AudioSystem::new().await?;
{ {
let send_chan = send_chan.clone(); let send_chan = send_chan.clone();
let mut sequence_num = 0; let mut sequence_num = 0;
@@ -296,8 +302,8 @@ fn accept_command(
fn accept_packet( fn accept_packet(
msg: ControlPacket<mumble_protocol::Clientbound>, msg: ControlPacket<mumble_protocol::Clientbound>,
audio_context: &mut imp::AudioSystem, audio_context: &mut AudioSystem,
player_map: &mut HashMap<u32, imp::AudioPlayer>, player_map: &mut HashMap<u32, AudioPlayer>,
) -> Result<(), Error> { ) -> Result<(), Error> {
match msg { match msg {
ControlPacket::UDPTunnel(u) => { ControlPacket::UDPTunnel(u) => {
+2 -2
View File
@@ -1,6 +1,6 @@
use mumble_web2_gui::{app, imp}; use mumble_web2_gui::{app, imp::Platform, imp::PlatformInterface as _};
pub fn main() { pub fn main() {
imp::init_logging(); Platform::init_logging();
dioxus::launch(app::app); dioxus::launch(app::app);
} }