Refactor the imp/gui boundary to use real traits #18

Merged
liamwarfield merged 8 commits from gui-platform-boundary-refactor into main 2026-02-18 04:53:41 +00:00
4 changed files with 63 additions and 50 deletions
Showing only changes of commit 09985e6031 - Show all commits
+26 -5
View File
@@ -2,8 +2,9 @@
//!
//! This module defines traits that each platform (web, desktop, mobile) must implement.
//! The traits make the platform boundary explicit and provide compile-time verification.
#![allow(async_fn_in_trait)]
use crate::app::Command;
use crate::{app::Command, effects::AudioProcessor};
use color_eyre::eyre::Error;
use dioxus::hooks::UnboundedReceiver;
use mumble_web2_common::{ClientConfig, ServerStatus};
@@ -14,11 +15,31 @@ use std::time::Duration;
// Trait Definitions
// ============================================================================
pub trait AudioSystemInterface {
pub trait AudioSystemInterface: Sized {
liamwarfield marked this conversation as resolved Outdated
Outdated
Review

We need to add a doc comment for the overall trait here.

We need to add a doc comment for the overall trait here.
Outdated
Review

Done

Done
type AudioPlayer: AudioPlayerInterface;
/// Initialize the audio system, including relevant state
async fn new() -> Result<Self, Error>;
/// Set the processor for the microphone input, mainly noise cancellation settings.
fn set_processor(&self, processor: AudioProcessor);
/// Begin listening to microphone input, calling the `each` function with
/// encoded opus frames.
fn start_recording(
&mut self,
each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error>;
/// Begin playback of an audio stream, returning an object that can be passed
/// with opus frames.
fn create_player(&mut self) -> Result<Self::AudioPlayer, Error>;
}
pub trait AudioPlayerInterface {}
pub trait AudioPlayerInterface {
/// Playback an opus frame.
fn play_opus(&mut self, payload: &[u8]);
}
/// This is the main trait that each platform must implement. It combines all
/// platform-specific functionality into a single interface, providing compile-time
@@ -72,9 +93,9 @@ pub trait PlatformInterface {
pub mod web;
#[cfg(any(feature = "desktop", feature = "mobile"))]
mod connect;
pub mod connect;
#[cfg(any(feature = "desktop", feature = "mobile"))]
mod native_audio;
pub mod native_audio;
#[cfg(feature = "desktop")]
pub mod desktop;
+29 -31
View File
@@ -56,27 +56,6 @@ fn encode_and_send(
type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>;
impl NativeAudioSystem {
pub async fn new() -> Result<Self, Error> {
// TODO
let host = cpal::default_host();
let name = host.id();
let processors = AudioProcessorSender::default();
Ok(NativeAudioSystem {
output: host
.default_output_device()
.ok_or(eyre!("no output devices from {name:?}"))?,
input: host
.default_input_device()
.ok_or(eyre!("no input devices from {name:?}"))?,
processors,
recording_stream: None,
})
}
pub fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
fn choose_config(
&self,
configs: impl Iterator<Item = cpal::SupportedStreamConfigRange>,
@@ -106,8 +85,33 @@ impl NativeAudioSystem {
.cloned()
.ok_or(eyre!("no supported stream configs"))
}
}
pub fn start_recording(
impl super::AudioSystemInterface for NativeAudioSystem {
type AudioPlayer = NativeAudioPlayer;
async fn new() -> Result<Self, Error> {
// TODO
let host = cpal::default_host();
let name = host.id();
let processors = AudioProcessorSender::default();
Ok(NativeAudioSystem {
output: host
.default_output_device()
.ok_or(eyre!("no output devices from {name:?}"))?,
input: host
.default_input_device()
.ok_or(eyre!("no input devices from {name:?}"))?,
processors,
recording_stream: None,
})
}
fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
fn start_recording(
&mut self,
mut each: impl FnMut(Vec<u8>, bool) + Send + 'static,
) -> Result<(), Error> {
@@ -148,7 +152,7 @@ impl NativeAudioSystem {
}
}
pub fn create_player(&mut self) -> Result<NativeAudioPlayer, Error> {
fn create_player(&mut self) -> Result<NativeAudioPlayer, Error> {
let config = self.choose_config(self.output.supported_output_configs()?)?;
info!(
"creating player on {:?} with {:#?}",
@@ -195,10 +199,6 @@ impl NativeAudioSystem {
}
}
impl super::AudioSystemInterface for NativeAudioSystem {
type AudioPlayer = NativeAudioPlayer;
}
pub struct NativeAudioPlayer {
decoder: opus::Decoder,
stream: cpal::Stream,
@@ -206,8 +206,8 @@ pub struct NativeAudioPlayer {
tmp: Vec<i16>,
}
impl NativeAudioPlayer {
pub fn play_opus(&mut self, payload: &[u8]) {
impl super::AudioPlayerInterface for NativeAudioPlayer {
fn play_opus(&mut self, payload: &[u8]) {
let len = match self.decoder.decode(payload, &mut self.tmp, false) {
Ok(l) => l,
Err(e) => {
@@ -228,5 +228,3 @@ impl NativeAudioPlayer {
}
}
}
impl super::AudioPlayerInterface for NativeAudioPlayer {}
+6 -13
View File
@@ -177,10 +177,8 @@ async fn attach_worklet(audio_context: &AudioContext) -> Result<(), Error> {
impl super::AudioSystemInterface for WebAudioSystem {
type AudioPlayer = WebAudioPlayer;
}
impl WebAudioSystem {
pub async fn new() -> Result<Self, Error> {
async fn new() -> Result<Self, Error> {
// Create MediaStreams to playback decoded audio
// The audio context is used to reproduce audio.
let webctx = configure_audio_context();
@@ -191,14 +189,11 @@ impl WebAudioSystem {
Ok(WebAudioSystem { webctx, processors })
}
pub fn set_processor(&self, processor: AudioProcessor) {
fn set_processor(&self, processor: AudioProcessor) {
self.processors.store(Some(processor))
}
pub fn start_recording(
&mut self,
each: impl FnMut(Vec<u8>, bool) + 'static,
) -> Result<(), Error> {
fn start_recording(&mut self, each: impl FnMut(Vec<u8>, bool) + 'static) -> Result<(), Error> {
let audio_context_worklet = self.webctx.clone();
let processors = self.processors.clone();
spawn(async move {
@@ -210,7 +205,7 @@ impl WebAudioSystem {
Ok(())
}
pub fn create_player(&mut self) -> Result<WebAudioPlayer, Error> {
fn create_player(&mut self) -> Result<WebAudioPlayer, Error> {
let sink_node = AudioWorkletNode::new(&self.webctx, "rust_speaker_worklet").ey()?;
// Connect worklet to destination
@@ -269,10 +264,8 @@ impl WebAudioSystem {
pub struct WebAudioPlayer(AudioDecoder);
impl super::AudioPlayerInterface for WebAudioPlayer {}
impl WebAudioPlayer {
pub fn play_opus(&mut self, payload: &[u8]) {
impl super::AudioPlayerInterface for WebAudioPlayer {
fn play_opus(&mut self, payload: &[u8]) {
let js_audio_payload = Uint8Array::from(payload);
let _ = self.0.decode(
&EncodedAudioChunk::new(&EncodedAudioChunkInit::new(
+2 -1
View File
@@ -29,7 +29,8 @@ use tracing::info;
use crate::effects::AudioProcessor;
use crate::imp::{
AudioPlayer, AudioSystem, AudioSystemInterface as _, Platform, PlatformInterface as _,
AudioPlayer, AudioPlayerInterface as _, AudioSystem, AudioSystemInterface as _, Platform,
PlatformInterface as _,
};
pub mod app;