Split into gui and client crates (#30)
Reviewed-on: #30 Reviewed-by: restitux <restitux@ohea.xyz> Co-authored-by: Sam Sartor <me@samsartor.com> Co-committed-by: Sam Sartor <me@samsartor.com>
This commit was merged in pull request #30.
This commit is contained in:
@@ -0,0 +1,91 @@
|
||||
use crate::app::Command;
|
||||
use color_eyre::eyre::Error;
|
||||
use dioxus::hooks::UnboundedReceiver;
|
||||
use mumble_web2_common::{ClientConfig, ServerStatus};
|
||||
use std::future::Future;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Mobile platform implementation using Tokio, native audio, and Android permissions.
|
||||
pub struct MobilePlatform;
|
||||
|
||||
impl super::PlatformInterface for MobilePlatform {
|
||||
type AudioSystem = super::native_audio::NativeAudioSystem;
|
||||
|
||||
async fn load_config() -> color_eyre::Result<ClientConfig> {
|
||||
Ok(ClientConfig {
|
||||
proxy_url: None,
|
||||
cert_hash: None,
|
||||
any_server: true,
|
||||
})
|
||||
}
|
||||
|
||||
fn load_username() -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
fn load_server_url() -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
fn set_default_username(_username: &str) -> Option<()> {
|
||||
None
|
||||
}
|
||||
|
||||
fn set_default_server(server: &str) -> Option<()> {
|
||||
None
|
||||
}
|
||||
|
||||
async fn network_connect(
|
||||
address: String,
|
||||
username: String,
|
||||
event_rx: &mut UnboundedReceiver<Command>,
|
||||
gui_config: &ClientConfig,
|
||||
) -> Result<(), Error> {
|
||||
super::connect::network_connect(address, username, event_rx, gui_config).await
|
||||
}
|
||||
|
||||
async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
|
||||
super::connect::get_status(client).await
|
||||
}
|
||||
|
||||
fn init_logging() {
|
||||
use tracing::level_filters::LevelFilter;
|
||||
use tracing_subscriber::filter::EnvFilter;
|
||||
|
||||
let env_filter = EnvFilter::builder()
|
||||
.with_default_directive(LevelFilter::INFO.into())
|
||||
.from_env_lossy();
|
||||
|
||||
tracing_subscriber::fmt()
|
||||
.with_target(true)
|
||||
.with_level(true)
|
||||
.with_env_filter(env_filter)
|
||||
.init();
|
||||
}
|
||||
|
||||
fn request_permissions() {
|
||||
request_recording_permission();
|
||||
}
|
||||
|
||||
async fn sleep(duration: Duration) {
|
||||
tokio::time::sleep(duration).await;
|
||||
}
|
||||
}
|
||||
|
||||
/// On non-Android targets there is no runtime permission model, so this is a no-op.
#[cfg(not(target_os = "android"))]
pub fn request_recording_permission() {}
|
||||
|
||||
/// Request the Android `RECORD_AUDIO` runtime permission if it has not
/// already been granted.
#[cfg(target_os = "android")]
pub fn request_recording_permission() {
    use android_permissions::{PermissionManager, RECORD_AUDIO};
    use jni::{objects::JObject, JavaVM};

    // Raw JVM/activity pointers supplied by the `ndk_context` glue crate.
    let ctx = ndk_context::android_context();
    // SAFETY: assumes ctx.vm() is a valid *mut JavaVM for the process
    // lifetime — TODO confirm against ndk_context's guarantees.
    let vm = unsafe { JavaVM::from_raw(ctx.vm().cast()).unwrap() };
    // SAFETY: assumes ctx.context() is a valid Context/Activity jobject;
    // NOTE(review): JObject::from_raw takes ownership of the local ref —
    // confirm that is intended here.
    let activity = unsafe { JObject::from_raw(ctx.context().cast()) };

    let manager = PermissionManager::create(vm, activity).unwrap();
    // Only prompt the user when the permission is not yet granted.
    if !manager.check(&RECORD_AUDIO).unwrap() {
        manager.request(&[&RECORD_AUDIO]).unwrap();
    }
}
|
||||
@@ -0,0 +1,118 @@
|
||||
use crate::app::{Command, SharedState};
|
||||
use color_eyre::eyre::{bail, Error};
|
||||
use futures_channel::mpsc::UnboundedReceiver;
|
||||
use mumble_protocol::control::ClientControlCodec;
|
||||
use std::net::ToSocketAddrs;
|
||||
use std::sync::Arc;
|
||||
use tokio::net::TcpStream;
|
||||
use tokio_rustls::rustls;
|
||||
use tokio_rustls::rustls::client::danger::{HandshakeSignatureValid, ServerCertVerifier};
|
||||
use tokio_rustls::rustls::pki_types::{CertificateDer, ServerName, UnixTime};
|
||||
use tokio_rustls::rustls::ClientConfig;
|
||||
use tokio_rustls::rustls::DigitallySignedStruct;
|
||||
use tokio_rustls::TlsConnector;
|
||||
use tokio_util::compat::{TokioAsyncReadCompatExt as _, TokioAsyncWriteCompatExt as _};
|
||||
use tracing::{info, instrument};
|
||||
|
||||
use mumble_web2_common::{ProxyOverrides, ServerStatus};
|
||||
|
||||
/// Certificate verifier that accepts ANY server certificate.
///
/// WARNING: this disables TLS authentication entirely — the connection is
/// still encrypted but is vulnerable to man-in-the-middle attacks. Used
/// because Mumble servers commonly run with self-signed certificates.
#[derive(Debug)]
struct NoCertificateVerification;
|
||||
|
||||
impl ServerCertVerifier for NoCertificateVerification {
    // WARNING: every method below unconditionally reports success, disabling
    // all certificate and handshake-signature verification.
    fn verify_server_cert(
        &self,
        _end_entity: &CertificateDer<'_>,
        _intermediates: &[CertificateDer<'_>],
        _server_name: &ServerName<'_>,
        _ocsp: &[u8],
        _now: UnixTime,
    ) -> Result<rustls::client::danger::ServerCertVerified, rustls::Error> {
        Ok(rustls::client::danger::ServerCertVerified::assertion())
    }

    // Accept any TLS 1.2 handshake signature without checking it.
    fn verify_tls12_signature(
        &self,
        _message: &[u8],
        _cert: &CertificateDer<'_>,
        _dss: &DigitallySignedStruct,
    ) -> Result<HandshakeSignatureValid, rustls::Error> {
        Ok(HandshakeSignatureValid::assertion())
    }

    // Accept any TLS 1.3 handshake signature without checking it.
    fn verify_tls13_signature(
        &self,
        _message: &[u8],
        _cert: &CertificateDer<'_>,
        _dss: &DigitallySignedStruct,
    ) -> Result<HandshakeSignatureValid, rustls::Error> {
        Ok(HandshakeSignatureValid::assertion())
    }

    // Advertise a broad list of signature schemes so no handshake is rejected
    // on scheme-selection grounds.
    fn supported_verify_schemes(&self) -> Vec<rustls::SignatureScheme> {
        vec![
            rustls::SignatureScheme::RSA_PKCS1_SHA1,
            rustls::SignatureScheme::ECDSA_SHA1_Legacy,
            rustls::SignatureScheme::RSA_PKCS1_SHA256,
            rustls::SignatureScheme::ECDSA_NISTP256_SHA256,
            rustls::SignatureScheme::RSA_PKCS1_SHA384,
            rustls::SignatureScheme::ECDSA_NISTP384_SHA384,
            rustls::SignatureScheme::RSA_PKCS1_SHA512,
            rustls::SignatureScheme::ECDSA_NISTP521_SHA512,
            rustls::SignatureScheme::RSA_PSS_SHA256,
            rustls::SignatureScheme::RSA_PSS_SHA384,
            rustls::SignatureScheme::RSA_PSS_SHA512,
            rustls::SignatureScheme::ED25519,
            rustls::SignatureScheme::ED448,
        ]
    }
}
|
||||
|
||||
#[instrument]
|
||||
pub async fn network_connect(
|
||||
address: String,
|
||||
username: String,
|
||||
event_rx: &mut UnboundedReceiver<Command>,
|
||||
overrides: &ProxyOverrides,
|
||||
state: SharedState,
|
||||
) -> Result<(), Error> {
|
||||
info!("connecting");
|
||||
|
||||
let config = ClientConfig::builder()
|
||||
.dangerous()
|
||||
.with_custom_certificate_verifier(Arc::new(NoCertificateVerification))
|
||||
.with_no_client_auth();
|
||||
|
||||
let connector = TlsConnector::from(Arc::new(config));
|
||||
|
||||
let addr = format!("{}:{}", address, 64738)
|
||||
.to_socket_addrs()?
|
||||
.next()
|
||||
.unwrap();
|
||||
|
||||
let server_tcp = TcpStream::connect(addr).await?;
|
||||
let server_stream = connector
|
||||
//.connect("127.0.0.1".try_into()?, server_tcp)
|
||||
.connect(address.try_into()?, server_tcp)
|
||||
.await?;
|
||||
let (read_server, write_server) = tokio::io::split(server_stream);
|
||||
|
||||
let read_codec = ClientControlCodec::new();
|
||||
let write_codec = ClientControlCodec::new();
|
||||
|
||||
let reader = asynchronous_codec::FramedRead::new(read_server.compat(), read_codec);
|
||||
let writer = asynchronous_codec::FramedWrite::new(write_server.compat_write(), write_codec);
|
||||
|
||||
let (outgoing_send, outgoing_recv) = futures_channel::mpsc::unbounded();
|
||||
spawn(crate::sender_loop(outgoing_recv, writer));
|
||||
crate::network_loop(username, state, event_rx, outgoing_send, reader).await
|
||||
}
|
||||
|
||||
pub async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
|
||||
bail!("status not supported on desktop yet")
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub use tokio::spawn;
|
||||
#[allow(unused)]
|
||||
pub type SpawnHandle = tokio::runtime::Handle;
|
||||
@@ -0,0 +1,58 @@
|
||||
use crate::app::{Command, SharedState};
|
||||
use color_eyre::eyre::Error;
|
||||
use futures_channel::mpsc::UnboundedReceiver;
|
||||
use mumble_web2_common::{ProxyOverrides, ServerStatus};
|
||||
use std::time::Duration;
|
||||
|
||||
/// Desktop platform implementation using Tokio and native audio.
|
||||
pub struct DesktopPlatform;
|
||||
|
||||
impl super::PlatformInterface for DesktopPlatform {
|
||||
type AudioSystem = super::native_audio::NativeAudioSystem;
|
||||
type ConfigSystem = super::native_config::NativeConfigSystem;
|
||||
|
||||
async fn sleep(duration: Duration) {
|
||||
tokio::time::sleep(duration).await;
|
||||
}
|
||||
|
||||
async fn load_proxy_overrides() -> color_eyre::Result<ProxyOverrides> {
|
||||
Ok(ProxyOverrides {
|
||||
proxy_url: None,
|
||||
cert_hash: None,
|
||||
any_server: true,
|
||||
})
|
||||
}
|
||||
|
||||
async fn network_connect(
|
||||
address: String,
|
||||
username: String,
|
||||
event_rx: &mut UnboundedReceiver<Command>,
|
||||
overrides: &ProxyOverrides,
|
||||
state: SharedState,
|
||||
) -> Result<(), Error> {
|
||||
super::connect::network_connect(address, username, event_rx, overrides, state).await
|
||||
}
|
||||
|
||||
async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
|
||||
super::connect::get_status(client).await
|
||||
}
|
||||
|
||||
fn init_logging() {
|
||||
use tracing::level_filters::LevelFilter;
|
||||
use tracing_subscriber::filter::EnvFilter;
|
||||
|
||||
let env_filter = EnvFilter::builder()
|
||||
.with_default_directive(LevelFilter::INFO.into())
|
||||
.from_env_lossy();
|
||||
|
||||
tracing_subscriber::fmt()
|
||||
.with_target(true)
|
||||
.with_level(true)
|
||||
.with_env_filter(env_filter)
|
||||
.init();
|
||||
}
|
||||
|
||||
fn request_permissions() {
|
||||
// No-op on desktop
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
use crate::app::{Command, SharedState};
|
||||
use color_eyre::eyre::Error;
|
||||
use futures_channel::mpsc::UnboundedReceiver;
|
||||
use mumble_web2_common::{ProxyOverrides, ServerStatus};
|
||||
use std::time::Duration;
|
||||
|
||||
/// Mobile platform implementation using Tokio, native audio, and Android permissions.
|
||||
pub struct MobilePlatform;
|
||||
|
||||
impl super::PlatformInterface for MobilePlatform {
|
||||
type AudioSystem = super::native_audio::NativeAudioSystem;
|
||||
type ConfigSystem = super::native_config::NativeConfigSystem;
|
||||
|
||||
async fn load_proxy_overrides() -> color_eyre::Result<ProxyOverrides> {
|
||||
Ok(ProxyOverrides {
|
||||
proxy_url: None,
|
||||
cert_hash: None,
|
||||
any_server: true,
|
||||
})
|
||||
}
|
||||
|
||||
async fn network_connect(
|
||||
address: String,
|
||||
username: String,
|
||||
event_rx: &mut UnboundedReceiver<Command>,
|
||||
overrides: &ProxyOverrides,
|
||||
state: SharedState,
|
||||
) -> Result<(), Error> {
|
||||
super::connect::network_connect(address, username, event_rx, overrides, state).await
|
||||
}
|
||||
|
||||
async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
|
||||
super::connect::get_status(client).await
|
||||
}
|
||||
|
||||
fn init_logging() {
|
||||
use tracing::level_filters::LevelFilter;
|
||||
use tracing_subscriber::filter::EnvFilter;
|
||||
|
||||
let env_filter = EnvFilter::builder()
|
||||
.with_default_directive(LevelFilter::INFO.into())
|
||||
.from_env_lossy();
|
||||
|
||||
tracing_subscriber::fmt()
|
||||
.with_target(true)
|
||||
.with_level(true)
|
||||
.with_env_filter(env_filter)
|
||||
.init();
|
||||
}
|
||||
|
||||
fn request_permissions() {
|
||||
request_recording_permission();
|
||||
}
|
||||
|
||||
async fn sleep(duration: Duration) {
|
||||
tokio::time::sleep(duration).await;
|
||||
}
|
||||
}
|
||||
|
||||
/// On non-Android targets there is no runtime permission model, so this is a no-op.
#[cfg(not(target_os = "android"))]
pub fn request_recording_permission() {}
|
||||
|
||||
/// Request the Android `RECORD_AUDIO` runtime permission if it has not
/// already been granted.
#[cfg(target_os = "android")]
pub fn request_recording_permission() {
    use android_permissions::{PermissionManager, RECORD_AUDIO};
    use jni::{objects::JObject, JavaVM};

    // Raw JVM/activity pointers supplied by the `ndk_context` glue crate.
    let ctx = ndk_context::android_context();
    // SAFETY: assumes ctx.vm() is a valid *mut JavaVM for the process
    // lifetime — TODO confirm against ndk_context's guarantees.
    let vm = unsafe { JavaVM::from_raw(ctx.vm().cast()).unwrap() };
    // SAFETY: assumes ctx.context() is a valid Context/Activity jobject;
    // NOTE(review): JObject::from_raw takes ownership of the local ref —
    // confirm that is intended here.
    let activity = unsafe { JObject::from_raw(ctx.context().cast()) };

    let manager = PermissionManager::create(vm, activity).unwrap();
    // Only prompt the user when the permission is not yet granted.
    if !manager.check(&RECORD_AUDIO).unwrap() {
        manager.request(&[&RECORD_AUDIO]).unwrap();
    }
}
|
||||
@@ -0,0 +1,187 @@
|
||||
//! Platform abstraction layer
|
||||
//!
|
||||
//! This module defines traits that each platform (web, desktop, mobile) must implement.
|
||||
//! The traits make the platform boundary explicit and provide compile-time verification.
|
||||
#![allow(async_fn_in_trait)]
|
||||
|
||||
use crate::app::{Command, SharedState};
|
||||
use crate::effects::AudioProcessor;
|
||||
use color_eyre::eyre::Error;
|
||||
use futures_channel::mpsc::UnboundedReceiver;
|
||||
use mumble_web2_common::{ProxyOverrides, ServerStatus};
|
||||
use std::collections::HashMap;
|
||||
use std::future::Future;
|
||||
use std::time::Duration;
|
||||
|
||||
// ============================================================================
|
||||
// Trait Definitions
|
||||
// ============================================================================
|
||||
|
||||
/// Platform-specific audio subsystem for capturing microphone input and creating playback streams.
///
/// The audio system handles Opus encoding internally - callers receive encoded frames
/// ready for network transmission.
pub trait AudioSystemInterface: Sized {
    /// The player type returned by [`create_player`](Self::create_player).
    type AudioPlayer: AudioPlayerInterface;

    /// Initialize the audio system.
    async fn new() -> Result<Self, Error>;

    /// Set the processor for the microphone input, mainly noise cancellation settings.
    fn set_processor(&self, processor: AudioProcessor);

    /// Begin listening to microphone input, calling the `each` function with
    /// encoded opus frames.
    ///
    /// The second (`bool`) argument to `each` marks the final frame of a
    /// transmission (the terminator packet).
    fn start_recording(
        &mut self,
        each: impl FnMut(Vec<u8>, bool) + Send + 'static,
    ) -> Result<(), Error>;

    /// Begin playback of an audio stream, returning an object that can be passed opus frames.
    fn create_player(&mut self) -> Result<Self::AudioPlayer, Error>;
}
|
||||
|
||||
/// A handle to an active audio playback stream for a single remote user.
///
/// Each connected user gets their own `AudioPlayer` instance, which decodes
/// incoming Opus frames and outputs PCM audio to the platform's audio device.
/// The player manages its own decoder state and output buffer.
pub trait AudioPlayerInterface {
    /// Decode and play an Opus-encoded audio frame.
    ///
    /// Implementations are expected to handle decode failures internally
    /// (this method cannot report errors to the caller).
    fn play_opus(&mut self, payload: &[u8]);
}
|
||||
|
||||
/// Platform-specific persistent key/value configuration store.
///
/// Values are serialized via `serde`; `Clone` is required so a handle can be
/// shared by multiple owners.
pub trait ConfigSystemInterface: Sized + Clone {
    /// Open (or create) the configuration store.
    fn new() -> Result<Self, Error>;

    /// Read and deserialize the value stored under `key`.
    /// Returns `None` when no value (or default) is available.
    fn config_get<T>(&self, key: &str) -> Option<T>
    where
        T: serde::de::DeserializeOwned;

    /// Serialize `value` and persist it under `key`.
    fn config_set<T>(&self, key: &str, value: &T)
    where
        T: serde::Serialize;
}
|
||||
|
||||
/// This is the main trait that each platform must implement. It combines all
/// platform-specific functionality into a single interface, providing compile-time
/// verification that all platforms implement the required functionality.
pub trait PlatformInterface {
    /// Audio capture/playback backend for this platform.
    type AudioSystem: AudioSystemInterface;
    /// Persistent configuration backend for this platform.
    type ConfigSystem: ConfigSystemInterface;

    /// Initialize logging for the platform.
    fn init_logging();

    /// Request runtime permissions (Android audio recording, etc.).
    fn request_permissions();

    /// Establish a connection to the Mumble server and run the network loop.
    fn network_connect(
        address: String,
        username: String,
        event_rx: &mut UnboundedReceiver<Command>,
        proxy_overrides: &ProxyOverrides,
        state: SharedState,
    ) -> impl Future<Output = Result<(), Error>>;

    /// Get server status (user count, version, etc.).
    fn get_status(
        client: &reqwest::Client,
    ) -> impl Future<Output = color_eyre::Result<ServerStatus>>;

    /// Load the proxy overrides (proxy URL, cert hash, etc.).
    fn load_proxy_overrides() -> impl Future<Output = color_eyre::Result<ProxyOverrides>>;

    /// Async sleep for the given duration.
    fn sleep(duration: Duration) -> impl Future<Output = ()>;
}
|
||||
|
||||
// ============================================================================
|
||||
// Platform Modules
|
||||
// ============================================================================
|
||||
|
||||
mod stub;
|
||||
|
||||
#[cfg(any(feature = "desktop", feature = "mobile"))]
|
||||
mod connect;
|
||||
#[cfg(any(feature = "desktop", feature = "mobile"))]
|
||||
mod native_audio;
|
||||
#[cfg(any(feature = "desktop", feature = "mobile"))]
|
||||
mod native_config;
|
||||
|
||||
#[cfg(feature = "desktop")]
|
||||
mod desktop;
|
||||
|
||||
#[cfg(feature = "mobile")]
|
||||
mod mobile;
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
mod web;
|
||||
|
||||
// ============================================================================
|
||||
// Platform Type Alias
|
||||
// ============================================================================
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
pub type Platform = web::WebPlatform;
|
||||
|
||||
#[cfg(all(feature = "desktop", not(feature = "web")))]
|
||||
pub type Platform = desktop::DesktopPlatform;
|
||||
|
||||
#[cfg(all(feature = "mobile", not(feature = "web"), not(feature = "desktop")))]
|
||||
pub type Platform = mobile::MobilePlatform;
|
||||
|
||||
#[cfg(all(
|
||||
not(feature = "mobile"),
|
||||
not(feature = "web"),
|
||||
not(feature = "desktop")
|
||||
))]
|
||||
pub type Platform = stub::StubPlatform;
|
||||
|
||||
pub type AudioSystem = <Platform as PlatformInterface>::AudioSystem;
|
||||
pub type AudioPlayer = <AudioSystem as AudioSystemInterface>::AudioPlayer;
|
||||
|
||||
pub type ConfigSystem = <Platform as PlatformInterface>::ConfigSystem;
|
||||
|
||||
// ========================
|
||||
// Platform Async Runtime
|
||||
// ========================
|
||||
|
||||
// Note: these can not be part of the Platform because they differ in Send requiremets
|
||||
#[cfg(all(any(feature = "desktop", feature = "mobile"), not(feature = "web")))]
|
||||
pub use connect::{spawn, SpawnHandle};
|
||||
#[cfg(all(
|
||||
not(feature = "desktop"),
|
||||
not(feature = "mobile"),
|
||||
not(feature = "web")
|
||||
))]
|
||||
pub use stub::{spawn, SpawnHandle};
|
||||
#[cfg(feature = "web")]
|
||||
pub use web::{spawn, SpawnHandle};
|
||||
|
||||
// =======================
|
||||
// Compile-time Assertions
|
||||
// =======================
|
||||
const _: () = {
    fn assert_platform<T: PlatformInterface>() {}

    // Check each implementation, and prevent warnings that the implementations are unused.
    // Naming `assert_platform::<T>` forces T's `PlatformInterface` bound to be
    // checked at compile time without executing anything.
    #[cfg(feature = "web")]
    let _ = assert_platform::<web::WebPlatform>;
    #[cfg(feature = "desktop")]
    let _ = assert_platform::<desktop::DesktopPlatform>;
    #[cfg(feature = "mobile")]
    let _ = assert_platform::<mobile::MobilePlatform>;
    let _ = assert_platform::<stub::StubPlatform>;
};
|
||||
|
||||
/// Crate-wide configuration defaults shared by every platform.
///
/// Currently empty; add entries to the `json!` object literal as defaults are
/// introduced.
fn global_default_config() -> HashMap<String, serde_json::Value> {
    // Take ownership of the object map directly instead of cloning the whole
    // map out of a borrowed `Value`.
    match serde_json::json!({}) {
        serde_json::Value::Object(map) => map.into_iter().collect(),
        // `json!` with a `{...}` literal always yields `Value::Object`.
        _ => unreachable!("object literal"),
    }
}
|
||||
@@ -0,0 +1,219 @@
|
||||
use crate::effects::{AudioProcessor, AudioProcessorSender, TransmitState};
|
||||
use color_eyre::eyre::{eyre, Error};
|
||||
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
|
||||
use std::mem::replace;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use tracing::{error, info, warn};
|
||||
|
||||
/// Audio backend built on `cpal`, shared by desktop and mobile builds.
pub struct NativeAudioSystem {
    // Default output device, used to build playback streams.
    output: cpal::Device,
    // Default input device, used for the microphone capture stream.
    input: cpal::Device,
    // Hand-off cell for the active AudioProcessor: `store` on update,
    // `take` inside the capture callback.
    processors: AudioProcessorSender,
    // Keeps the capture stream alive; dropping it stops recording.
    recording_stream: Option<cpal::Stream>,
}
|
||||
|
||||
// Opus/Mumble audio runs at 48 kHz.
const SAMPLE_RATE: u32 = 48_000;
// 960 samples = 20 ms per packet at 48 kHz.
const PACKET_SAMPLES: u32 = 960;
// Divide by 1000 to get samples per ms, then multiply by 60ms for max Opus frame size.
const MAX_DECODE_SAMPLES: usize = SAMPLE_RATE as usize / 1000 * 60;
|
||||
|
||||
/// Pull one packet's worth of samples from `output_buffer`, Opus-encode it,
/// and forward the encoded bytes to `each`.
///
/// `each`'s second argument is `true` for the terminator packet that ends a
/// transmission.
fn encode_and_send(
    state: TransmitState,
    output_buffer: &mut Vec<f32>,
    encoder: &mut opus::Encoder,
    each: &mut impl FnMut(Vec<u8>, bool),
) {
    let (is_terminator, should_encode) = match state {
        // Not transmitting: leave any buffered samples untouched.
        TransmitState::Silent => return,
        // Encode only once a full packet of samples has accumulated.
        TransmitState::Transmitting => (false, output_buffer.len() >= PACKET_SAMPLES as usize),
        TransmitState::Terminator => {
            // Pad (or truncate) to exactly one packet so the final frame can
            // always be flushed.
            output_buffer.resize(PACKET_SAMPLES as usize, 0.0);
            (true, true)
        }
    };

    if should_encode {
        // Split off everything beyond one packet; `frame` becomes the first
        // PACKET_SAMPLES samples and the remainder stays buffered.
        let remainder = output_buffer.split_off(PACKET_SAMPLES as usize);
        let frame = replace(output_buffer, remainder);
        // NOTE(review): max output size of 2 bytes per input sample —
        // presumably ample for compressed Opus; confirm against opus docs.
        match encoder.encode_vec_float(&frame, frame.len() * 2) {
            Ok(encoded) => each(encoded, is_terminator),
            Err(e) => error!("error encoding {} samples: {e:?}", frame.len()),
        }
    }
}
|
||||
|
||||
/// Shared ring buffer of decoded i16 samples: `play_opus` pushes, the cpal
/// output callback pops.
type Buffer = Arc<Mutex<dasp_ring_buffer::Bounded<Vec<i16>>>>;
|
||||
|
||||
impl NativeAudioSystem {
|
||||
fn choose_config(
|
||||
&self,
|
||||
configs: impl Iterator<Item = cpal::SupportedStreamConfigRange>,
|
||||
) -> Result<cpal::StreamConfig, Error> {
|
||||
let mut supported_configs: Vec<_> = configs
|
||||
.filter_map(|cfg| cfg.try_with_sample_rate(cpal::SampleRate(SAMPLE_RATE)))
|
||||
.filter(|cfg| cfg.sample_format() == cpal::SampleFormat::I16)
|
||||
.map(|cfg| cpal::StreamConfig {
|
||||
buffer_size: cpal::BufferSize::Fixed(match *cfg.buffer_size() {
|
||||
cpal::SupportedBufferSize::Range { min, max } => 480.clamp(min, max),
|
||||
cpal::SupportedBufferSize::Unknown => 480,
|
||||
}),
|
||||
..cfg.config()
|
||||
})
|
||||
.collect();
|
||||
supported_configs.sort_by(|a, b| {
|
||||
let cpal::BufferSize::Fixed(a_buf) = a.buffer_size else {
|
||||
unreachable!()
|
||||
};
|
||||
let cpal::BufferSize::Fixed(b_buf) = b.buffer_size else {
|
||||
unreachable!()
|
||||
};
|
||||
Ord::cmp(&a.channels, &b.channels).then(Ord::cmp(&a_buf, &b_buf))
|
||||
});
|
||||
supported_configs
|
||||
.get(0)
|
||||
.cloned()
|
||||
.ok_or(eyre!("no supported stream configs"))
|
||||
}
|
||||
}
|
||||
|
||||
impl super::AudioSystemInterface for NativeAudioSystem {
    type AudioPlayer = NativeAudioPlayer;

    /// Grab the host's default input and output devices; fails when either
    /// is missing.
    async fn new() -> Result<Self, Error> {
        let host = cpal::default_host();
        let name = host.id();
        let processors = AudioProcessorSender::default();
        Ok(NativeAudioSystem {
            output: host
                .default_output_device()
                .ok_or(eyre!("no output devices from {name:?}"))?,
            input: host
                .default_input_device()
                .ok_or(eyre!("no input devices from {name:?}"))?,
            processors,
            recording_stream: None,
        })
    }

    /// Queue a new processor; the capture callback swaps it in on its next run.
    fn set_processor(&self, processor: AudioProcessor) {
        self.processors.store(Some(processor))
    }

    /// Build and start the microphone capture stream; captured samples are
    /// processed, Opus-encoded, and handed to `each`.
    fn start_recording(
        &mut self,
        mut each: impl FnMut(Vec<u8>, bool) + Send + 'static,
    ) -> Result<(), Error> {
        let config = self.choose_config(self.input.supported_input_configs()?)?;
        info!(
            "creating recording on {:?} with {:#?}",
            self.input.name()?,
            config
        );
        let mut encoder =
            opus::Encoder::new(SAMPLE_RATE, opus::Channels::Mono, opus::Application::Voip)?;
        let mut current_processor = AudioProcessor::new(false);
        let mut output_buffer = Vec::new();
        let processors = self.processors.clone();
        let error_callback = move |e: cpal::StreamError| error!("error recording: {e:?}");
        // Runs on the audio thread for every captured buffer.
        let data_callback = move |frame: &[f32], _: &cpal::InputCallbackInfo| {
            // Pick up a newly configured processor if one was queued.
            if let Some(new_processor) = processors.take() {
                current_processor = new_processor;
            }
            let state =
                current_processor.process(frame, config.channels as usize, &mut output_buffer);
            encode_and_send(state, &mut output_buffer, &mut encoder, &mut each);
        };

        match self
            .input
            .build_input_stream(&config, data_callback, error_callback, None)
        {
            Ok(stream) => {
                stream.play()?;
                // Keep the stream handle alive; dropping it would stop capture.
                self.recording_stream = Some(stream);
                Ok(())
            }
            Err(err) => {
                self.recording_stream = None;
                Err(err.into())
            }
        }
    }

    /// Build and start an output stream fed from a shared ring buffer of
    /// decoded samples.
    fn create_player(&mut self) -> Result<NativeAudioPlayer, Error> {
        let config = self.choose_config(self.output.supported_output_configs()?)?;
        info!(
            "creating player on {:?} with {:#?}",
            self.output.name().ok(),
            &config
        );
        let buffer = Arc::new(Mutex::new(dasp_ring_buffer::Bounded::from_raw_parts(
            0,
            0,
            vec![
                0;
                SAMPLE_RATE as usize/4 // 250ms of buffer
            ],
        )));
        let decoder = opus::Decoder::new(SAMPLE_RATE, opus::Channels::Mono)?;
        let stream = {
            let buffer = buffer.clone();
            self.output.build_output_stream(
                &config,
                // Output callback: pop one mono sample per output frame and
                // duplicate it across all channels.
                move |frame, _info| {
                    let mut buffer = buffer.lock().unwrap();
                    for x in frame.chunks_mut(config.channels as usize) {
                        match buffer.pop() {
                            Some(y) => {
                                x.fill(y);
                            }
                            None => {
                                // Ring buffer ran dry; emit silence.
                                x.fill(0);
                            }
                        }
                    }
                },
                move |err| error!("could not create output stream {err:?}"),
                None,
            )?
        };
        stream.play()?;
        Ok(NativeAudioPlayer {
            decoder,
            stream,
            buffer,
            tmp: vec![0; MAX_DECODE_SAMPLES],
        })
    }
}
|
||||
|
||||
/// Playback handle for one remote user: an Opus decoder plus a cpal output
/// stream reading from a shared ring buffer.
pub struct NativeAudioPlayer {
    // Per-user Opus decoder state.
    decoder: opus::Decoder,
    // Keeps the output stream alive; playback stops when this is dropped.
    stream: cpal::Stream,
    // Shared with the output callback: play_opus pushes, the callback pops.
    buffer: Buffer,
    // Scratch space for decoded samples (sized for the max Opus frame).
    tmp: Vec<i16>,
}
|
||||
|
||||
impl super::AudioPlayerInterface for NativeAudioPlayer {
|
||||
fn play_opus(&mut self, payload: &[u8]) {
|
||||
let len = match self.decoder.decode(payload, &mut self.tmp, false) {
|
||||
Ok(l) => l,
|
||||
Err(e) => {
|
||||
error!("opus decode error {e:?}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut buffer = self.buffer.lock().unwrap();
|
||||
let mut overrun = 0;
|
||||
for x in &self.tmp[..len] {
|
||||
if let Some(_) = buffer.push(*x) {
|
||||
overrun += 1;
|
||||
}
|
||||
}
|
||||
if overrun > 0 {
|
||||
warn!("playback overrun by {overrun} samples");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,117 @@
|
||||
use color_eyre::eyre::Error;
|
||||
use std::collections::HashMap;
|
||||
use tracing::info;
|
||||
|
||||
/// JSON-file-backed configuration store for desktop and Android builds.
#[derive(Clone, PartialEq)]
pub struct NativeConfigSystem {
    // Absolute path of the config.json file (platform-specific location).
    config_path: std::path::PathBuf,
}
|
||||
|
||||
impl super::ConfigSystemInterface for NativeConfigSystem {
|
||||
fn new() -> color_eyre::Result<Self, Error> {
|
||||
return Ok(NativeConfigSystem {
|
||||
config_path: get_config_path()?,
|
||||
});
|
||||
}
|
||||
|
||||
fn config_get<T>(&self, key: &str) -> Option<T>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
{
|
||||
let config = load_config_map(&self.config_path);
|
||||
|
||||
let Some(value_untyped) = config.get(key).cloned().or_else(|| config_get_default(key))
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
|
||||
match serde_json::from_value::<T>(value_untyped) {
|
||||
Ok(v) => Some(v),
|
||||
Err(_) => {
|
||||
let default_value = config_get_default(key)
|
||||
.expect("Default value required after config parse failure");
|
||||
Some(
|
||||
serde_json::from_value::<T>(default_value)
|
||||
.expect("Default value could not be parsed"),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn config_set<T>(&self, key: &str, value: &T)
|
||||
where
|
||||
T: serde::Serialize,
|
||||
{
|
||||
let mut config = load_config_map(&self.config_path);
|
||||
let json_value = serde_json::to_value(value).expect("failed to serialize config value");
|
||||
config.insert(key.to_string(), json_value);
|
||||
save_config_map(&config).expect("failed to set config")
|
||||
}
|
||||
}
|
||||
|
||||
/// Locate the per-user config file (e.g. `~/.config/.../config.json`) using
/// the platform's conventional app-directory strategy.
// `cfg(feature = "desktop")` — `cfg(any(...))` with a single predicate was
// redundant.
#[cfg(feature = "desktop")]
fn get_config_path() -> color_eyre::Result<std::path::PathBuf> {
    use etcetera::{choose_app_strategy, AppStrategy, AppStrategyArgs};

    // Propagate strategy failure instead of panicking — the function already
    // returns a Result.
    let strategy = choose_app_strategy(AppStrategyArgs {
        top_level_domain: "xyz".to_string(),
        author: "ohea".to_string(),
        app_name: "Mumble Web2".to_string(),
    })?;
    Ok(strategy.config_dir().join("config.json"))
}
|
||||
|
||||
/// Resolve the app-private files directory via JNI (`Context.getFilesDir()`)
/// and place `config.json` inside it.
#[cfg(target_os = "android")]
fn get_config_path() -> color_eyre::Result<std::path::PathBuf> {
    let ctx = ndk_context::android_context();
    // SAFETY: assumes ctx.vm() is a valid *mut JavaVM supplied by the
    // ndk_context glue — TODO confirm.
    let vm = unsafe { jni::JavaVM::from_raw(ctx.vm().cast()) }?;
    let mut env = vm.attach_current_thread()?;
    // SAFETY: assumes ctx.context() is a valid Context jobject — TODO confirm
    // reference ownership.
    let ctx = unsafe { jni::objects::JObject::from_raw(ctx.context().cast()) };
    // Java: File dir = context.getFilesDir();
    let cache_dir = env
        .call_method(ctx, "getFilesDir", "()Ljava/io/File;", &[])?
        .l()?;
    // Java: String path = dir.toString();
    let cache_dir: jni::objects::JString = env
        .call_method(&cache_dir, "toString", "()Ljava/lang/String;", &[])?
        .l()?
        .try_into()?;
    let cache_dir = env.get_string(&cache_dir)?;
    let cache_dir = cache_dir.to_str()?;
    Ok(std::path::PathBuf::from(cache_dir).join("config.json"))
}
|
||||
|
||||
/// Read and parse the JSON config file; any I/O or parse failure yields an
/// empty map (the file may simply not exist yet).
// Takes `&Path` instead of `&PathBuf` (clippy `ptr_arg`); callers passing
// `&PathBuf` coerce automatically.
fn load_config_map(config_path: &std::path::Path) -> HashMap<String, serde_json::Value> {
    match std::fs::read_to_string(config_path) {
        Ok(contents) => serde_json::from_str(&contents).unwrap_or_default(),
        Err(_) => HashMap::new(),
    }
}
|
||||
|
||||
fn save_config_map(config: &HashMap<String, serde_json::Value>) -> color_eyre::Result<()> {
|
||||
let config_path = get_config_path().expect("Could not get config file path.");
|
||||
if let Some(parent) = config_path.parent() {
|
||||
info!("Creating config directory: {}", parent.display());
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
let contents = serde_json::to_string_pretty(config)?;
|
||||
info!("Writing config to {}", config_path.display());
|
||||
std::fs::write(&config_path, contents)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn config_get_default(key: &str) -> Option<serde_json::Value> {
|
||||
let default_config = platform_default_config();
|
||||
default_config
|
||||
.get(key)
|
||||
.cloned()
|
||||
.or(super::global_default_config().get(key).cloned())
|
||||
}
|
||||
|
||||
/// Configuration defaults specific to native (desktop/Android) builds.
///
/// Currently empty; add entries to the `json!` object literal as defaults are
/// introduced.
fn platform_default_config() -> HashMap<String, serde_json::Value> {
    // Take ownership of the object map directly instead of cloning the whole
    // map out of a borrowed `Value`.
    match serde_json::json!({}) {
        serde_json::Value::Object(map) => map.into_iter().collect(),
        // `json!` with a `{...}` literal always yields `Value::Object`.
        _ => unreachable!("object literal"),
    }
}
|
||||
@@ -0,0 +1,128 @@
|
||||
/// Stub implementation of the platform interface, so that we can
|
||||
/// `cargo check` without any --feature flags.
|
||||
use crate::{app::SharedState, effects::AudioProcessor};
|
||||
use color_eyre::eyre::Error;
|
||||
use futures_channel::mpsc::UnboundedReceiver;
|
||||
use mumble_web2_common::{ProxyOverrides, ServerStatus};
|
||||
use std::future::Future;
|
||||
|
||||
/// Marker type for the no-op platform used only in feature-less `cargo check` builds.
pub struct StubPlatform;
|
||||
|
||||
impl super::PlatformInterface for StubPlatform {
|
||||
type AudioSystem = StubAudioSystem;
|
||||
type ConfigSystem = StubConfigSystem;
|
||||
|
||||
fn init_logging() {
|
||||
panic!("stubbed platform")
|
||||
}
|
||||
|
||||
fn request_permissions() {
|
||||
panic!("stubbed platform")
|
||||
}
|
||||
|
||||
fn network_connect(
|
||||
_address: String,
|
||||
_username: String,
|
||||
_event_rx: &mut UnboundedReceiver<crate::app::Command>,
|
||||
_overrides: &ProxyOverrides,
|
||||
_state: SharedState,
|
||||
) -> impl Future<Output = Result<(), Error>> {
|
||||
async { panic!("stubbed platform") }
|
||||
}
|
||||
|
||||
fn get_status(
|
||||
_client: &reqwest::Client,
|
||||
) -> impl Future<Output = color_eyre::Result<ServerStatus>> {
|
||||
async { panic!("stubbed platform") }
|
||||
}
|
||||
|
||||
fn load_proxy_overrides() -> impl Future<Output = color_eyre::Result<ProxyOverrides>> {
|
||||
async { panic!("stubbed platform") }
|
||||
}
|
||||
|
||||
fn sleep(_duration: std::time::Duration) -> impl Future<Output = ()> {
|
||||
async { panic!("stubbed platform") }
|
||||
}
|
||||
}
|
||||
|
||||
/// Stub audio backend; every operation panics. See `StubPlatform`.
pub struct StubAudioSystem;

impl super::AudioSystemInterface for StubAudioSystem {
    type AudioPlayer = StubAudioPlayer;

    async fn new() -> Result<Self, Error> {
        panic!("stubbed platform")
    }

    fn set_processor(&self, _processor: AudioProcessor) {
        panic!("stubbed platform")
    }

    // NOTE(review): this stub requires `Send` on the callback while the
    // web implementation's `start_recording` does not — presumably each
    // platform implements a cfg-selected variant of the trait; confirm.
    fn start_recording(
        &mut self,
        _each: impl FnMut(Vec<u8>, bool) + Send + 'static,
    ) -> Result<(), Error> {
        panic!("stubbed platform")
    }

    fn create_player(&mut self) -> Result<Self::AudioPlayer, Error> {
        panic!("stubbed platform")
    }
}
|
||||
|
||||
/// Stub playback sink; any attempt to play audio panics.
pub struct StubAudioPlayer;

impl super::AudioPlayerInterface for StubAudioPlayer {
    fn play_opus(&mut self, _payload: &[u8]) {
        panic!("stubbed platform")
    }
}
|
||||
|
||||
/// Stub configuration backend; constructing or using it panics.
#[derive(Clone)]
pub struct StubConfigSystem;

impl super::ConfigSystemInterface for StubConfigSystem {
    fn new() -> Result<Self, Error> {
        panic!("stubbed platform")
    }

    fn config_get<T>(&self, key: &str) -> Option<T>
    where
        T: serde::de::DeserializeOwned,
    {
        panic!("stubbed platform")
    }

    fn config_set<T>(&self, key: &str, value: &T)
    where
        T: serde::Serialize,
    {
        panic!("stubbed platform")
    }
}
|
||||
|
||||
/// Stub task-spawn handle mirroring the API of the real platforms'
/// `SpawnHandle`.
#[allow(unused)]
pub struct SpawnHandle;

impl SpawnHandle {
    #[allow(unused)]
    pub fn spawn<F>(&self, _future: F)
    where
        F: Future<Output = ()> + 'static,
    {
        panic!("stubbed platform")
    }

    // Unlike the other stub methods this does not panic — presumably so
    // callers can obtain and hold a handle without crashing; actually
    // spawning on it still panics. TODO confirm against real callers.
    #[allow(unused)]
    pub fn current() -> Self {
        SpawnHandle
    }
}
|
||||
|
||||
/// Free-function variant of task spawning for the stub platform.
/// Unconditionally panics; the future is never polled.
#[allow(unused)]
pub fn spawn<F: Future<Output = ()> + 'static>(_future: F) {
    panic!("stubbed platform")
}
|
||||
@@ -0,0 +1,571 @@
|
||||
use crate::app::{Command, SharedState};
|
||||
use crate::effects::{AudioProcessor, AudioProcessorSender, TransmitState};
|
||||
use color_eyre::eyre::{bail, eyre, Error};
|
||||
use crossbeam::atomic::AtomicCell;
|
||||
use futures_channel::mpsc::UnboundedReceiver;
|
||||
use gloo_timers::future::TimeoutFuture;
|
||||
use js_sys::Float32Array;
|
||||
use manganis::asset;
|
||||
use mumble_protocol::control::ClientControlCodec;
|
||||
use mumble_web2_common::{ProxyOverrides, ServerStatus};
|
||||
use reqwest::Url;
|
||||
use std::collections::HashMap;
|
||||
use std::future::Future;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tracing::level_filters::LevelFilter;
|
||||
use tracing::{debug, error, info, instrument};
|
||||
use wasm_bindgen::prelude::*;
|
||||
use wasm_bindgen_futures::JsFuture;
|
||||
use web_sys::js_sys::{Promise, Reflect, Uint8Array};
|
||||
use web_sys::AudioContextOptions;
|
||||
use web_sys::AudioData;
|
||||
use web_sys::AudioDecoder;
|
||||
use web_sys::AudioDecoderConfig;
|
||||
use web_sys::AudioDecoderInit;
|
||||
use web_sys::AudioEncoder;
|
||||
use web_sys::AudioEncoderConfig;
|
||||
use web_sys::AudioEncoderInit;
|
||||
use web_sys::AudioWorkletNode;
|
||||
use web_sys::EncodedAudioChunk;
|
||||
use web_sys::EncodedAudioChunkInit;
|
||||
use web_sys::EncodedAudioChunkType;
|
||||
use web_sys::MediaStreamConstraints;
|
||||
use web_sys::MessageEvent;
|
||||
use web_sys::WebTransport;
|
||||
use web_sys::WebTransportBidirectionalStream;
|
||||
use web_sys::WebTransportOptions;
|
||||
use web_sys::WorkletOptions;
|
||||
use web_sys::{console, window};
|
||||
use web_sys::{AudioContext, AudioDataCopyToOptions};
|
||||
|
||||
#[allow(unused)]
|
||||
pub use wasm_bindgen_futures::spawn_local as spawn;
|
||||
|
||||
#[allow(unused)]
|
||||
#[derive(Clone)]
|
||||
pub struct SpawnHandle;
|
||||
|
||||
impl SpawnHandle {
|
||||
pub fn spawn<F>(&self, future: F)
|
||||
where
|
||||
F: Future<Output = ()> + 'static,
|
||||
{
|
||||
wasm_bindgen_futures::spawn_local(future);
|
||||
}
|
||||
|
||||
pub fn current() -> Self {
|
||||
SpawnHandle
|
||||
}
|
||||
}
|
||||
|
||||
/// Web platform implementation using WebTransport and Web Audio API.
pub struct WebPlatform;

impl super::PlatformInterface for WebPlatform {
    type AudioSystem = WebAudioSystem;
    type ConfigSystem = WebConfigSystem;

    /// Route `tracing` output to the browser console (DEBUG and up),
    /// with a performance layer recording span timings.
    fn init_logging() {
        // copied from tracing_web example usage

        use tracing_subscriber::fmt::format::Pretty;
        use tracing_subscriber::prelude::*;
        use tracing_web::{performance_layer, MakeWebConsoleWriter};

        let fmt_layer = tracing_subscriber::fmt::layer()
            .with_ansi(false) // Only partially supported across browsers
            .without_time() // std::time is not available in browsers
            .with_writer(MakeWebConsoleWriter::new()) // write events to the console
            .with_filter(LevelFilter::DEBUG);
        let perf_layer = performance_layer().with_details_from_fields(Pretty::default());

        tracing_subscriber::registry()
            .with(fmt_layer)
            .with(perf_layer)
            .init();

        info!("logging initiated");
    }

    /// Nothing to do up front on web; the browser prompts for mic
    /// access when `getUserMedia` is called in `run_encoder_worklet`.
    fn request_permissions() {
        // No-op on web
    }

    /// Fetch `ProxyOverrides` as JSON, either from a URL baked in at
    /// compile time (`MUMBLE_WEB2_PROXY_OVERRIDES_URL`) or from the
    /// relative `overrides` endpoint on the serving origin.
    async fn load_proxy_overrides() -> color_eyre::Result<ProxyOverrides> {
        let overrides = match option_env!("MUMBLE_WEB2_PROXY_OVERRIDES_URL") {
            Some(url) => Url::parse(url)?,
            None => absolute_url("overrides")?,
        };
        info!("loading config from {}", overrides);

        let config = reqwest::get(overrides)
            .await?
            .json::<ProxyOverrides>()
            .await?;

        Ok(config)
    }

    /// Delegates to the free `network_connect` (WebTransport) below.
    async fn network_connect(
        address: String,
        username: String,
        event_rx: &mut UnboundedReceiver<Command>,
        overrides: &ProxyOverrides,
        state: SharedState,
    ) -> Result<(), Error> {
        network_connect(address, username, event_rx, overrides, state).await
    }

    /// GET the relative `status` endpoint and decode it as JSON.
    async fn get_status(client: &reqwest::Client) -> color_eyre::Result<ServerStatus> {
        Ok(client
            .get(absolute_url("status")?)
            .send()
            .await?
            .json::<ServerStatus>()
            .await?)
    }

    /// Browser-friendly sleep via a JS timeout; sub-millisecond
    /// durations truncate to zero.
    async fn sleep(duration: Duration) {
        TimeoutFuture::new(duration.as_millis() as u32).await;
    }
}
|
||||
|
||||
trait ResultExt<T> {
|
||||
fn ey(self) -> Result<T, Error>;
|
||||
}
|
||||
|
||||
impl<T> ResultExt<T> for Result<T, JsValue> {
|
||||
fn ey(self) -> Result<T, Error> {
|
||||
match self {
|
||||
Ok(x) => Ok(x),
|
||||
Err(e) => match e.dyn_into::<js_sys::Error>() {
|
||||
Ok(e) => Err(eyre!("{}: {}", e.name(), e.message())),
|
||||
Err(e) => Err(eyre!("{:?}", e)),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ResultExt<T> for Result<T, JsError> {
|
||||
fn ey(self) -> Result<T, Error> {
|
||||
self.map_err(|e| JsValue::from(e)).ey()
|
||||
}
|
||||
}
|
||||
|
||||
/// Web Audio backend: one shared `AudioContext` plus a handoff slot
/// (`AudioProcessorSender`) used to deliver updated `AudioProcessor`s
/// to the mic-encoding loop (see `set_processor` / `run_encoder_worklet`).
pub struct WebAudioSystem {
    webctx: AudioContext,
    processors: AudioProcessorSender,
}
|
||||
|
||||
/// Load the compiled audio-worklet JS module at `worklet_url` into
/// `audio_context`, waiting for the browser to finish adding it.
///
/// The current wasm module is passed through `processorOptions` —
/// presumably so the worklet can instantiate the same wasm binary on
/// the audio thread; confirm against the worklet JS.
async fn attach_worklet(audio_context: &AudioContext, worklet_url: &str) -> Result<(), Error> {
    // Create worklets to process mic and speaker audio
    // Speaker audio processing worklet only required on
    // browsers that don't support MediaStreamTrackGenerator

    let options = WorkletOptions::new();
    Reflect::set(
        &options,
        &"processorOptions".into(),
        &wasm_bindgen::module(),
    )
    .ey()?;

    info!("loading mic worklet from {worklet_url:?}");
    audio_context
        .audio_worklet()
        .ey()?
        .add_module_with_options(worklet_url, &options)
        .ey()?
        .into_future()
        .await
        .ey()?;
    Ok(())
}
|
||||
|
||||
impl super::AudioSystemInterface for WebAudioSystem {
    type AudioPlayer = WebAudioPlayer;

    /// Build the 48 kHz audio context and load the Rust audio worklet
    /// module into it before any recorder or player exists.
    async fn new() -> Result<Self, Error> {
        // Create MediaStreams to playback decoded audio
        // The audio context is used to reproduce audio.
        let webctx = configure_audio_context();
        attach_worklet(
            &webctx,
            &asset!("/assets/rust_audio_worklet.js").to_string(),
        )
        .await?;

        let processors = AudioProcessorSender::default();

        Ok(WebAudioSystem { webctx, processors })
    }

    /// Queue a new processor; the mic loop in `run_encoder_worklet`
    /// swaps it in on its next frame.
    fn set_processor(&self, processor: AudioProcessor) {
        self.processors.store(Some(processor))
    }

    /// Start microphone capture. `each` receives every encoded Opus
    /// packet plus a flag marking end-of-transmission packets. Setup
    /// runs in a spawned task, so setup failures are only logged —
    /// this function itself always returns `Ok`.
    fn start_recording(&mut self, each: impl FnMut(Vec<u8>, bool) + 'static) -> Result<(), Error> {
        let audio_context_worklet = self.webctx.clone();
        let processors = self.processors.clone();
        spawn(async move {
            match run_encoder_worklet(&audio_context_worklet, each, processors).await {
                Ok(node) => info!("created encoder worklet: {:?}", &node),
                Err(err) => error!("could not create encoder worklet: {err}"),
            }
        });
        Ok(())
    }

    /// Create a playback pipeline for one audio source: an Opus
    /// `AudioDecoder` (mono, 48 kHz) whose decoded PCM frames are
    /// posted to a dedicated speaker worklet node.
    fn create_player(&mut self) -> Result<WebAudioPlayer, Error> {
        let sink_node = AudioWorkletNode::new(&self.webctx, "rust_speaker_worklet").ey()?;

        // Connect worklet to destination
        sink_node
            .connect_with_audio_node(&self.webctx.destination())
            .ey()?;

        // Create callback functions for AudioDecoder
        let decoder_error = Closure::wrap(Box::new(move |e: JsValue| {
            error!("error decoding audio {:?}", e);
        }) as Box<dyn FnMut(JsValue)>);

        let sink_port = sink_node.port().ey()?;

        // Output callback: copy each decoded frame's samples out and
        // forward them to the speaker worklet over its message port.
        let output = Closure::wrap(Box::new(move |audio_data: AudioData| {
            // Extract planar PCM from AudioData into an ArrayBuffer or Float32Array
            // Here we assume f32 samples, 1 channel for brevity.
            let number_of_frames = audio_data.number_of_frames();

            let js_buffer = Float32Array::new_with_length(number_of_frames);

            let audio_data_copy_to_options = &AudioDataCopyToOptions::new(0);
            audio_data_copy_to_options.set_format(web_sys::AudioSampleFormat::F32);

            if let Err(e) = audio_data
                .copy_to_with_buffer_source(&js_buffer.buffer(), &audio_data_copy_to_options)
            {
                error!("could not copy audio data to array {:?}", e);
            }

            // Post to the worklet; include sampleRate and channel count if needed.
            let msg = js_sys::Object::new();
            js_sys::Reflect::set(&msg, &"samples".into(), &js_buffer).unwrap();

            sink_port.post_message(&msg).unwrap();

            // Release the frame's media resource promptly instead of
            // waiting for GC.
            audio_data.close();
        }) as Box<dyn FnMut(AudioData)>);

        let audio_decoder = AudioDecoder::new(&AudioDecoderInit::new(
            decoder_error.as_ref().unchecked_ref(),
            output.as_ref().unchecked_ref(),
        ))
        .ey()?;

        audio_decoder.configure(&AudioDecoderConfig::new("opus", 1, 48000));
        info!("created audio decoder");

        // This is required to prevent these from being deallocated —
        // the JS decoder keeps invoking them for its whole lifetime, so
        // the Rust closures are intentionally leaked.
        decoder_error.forget();
        output.forget();

        Ok(WebAudioPlayer(audio_decoder))
    }
}
|
||||
|
||||
/// Plays back one audio source by feeding Opus packets into a browser
/// `AudioDecoder`; decoded PCM is forwarded to the speaker worklet by
/// the output callback wired up in `create_player`.
pub struct WebAudioPlayer(AudioDecoder);

impl super::AudioPlayerInterface for WebAudioPlayer {
    fn play_opus(&mut self, payload: &[u8]) {
        let js_audio_payload = Uint8Array::from(payload);
        // Every chunk is submitted as a Key chunk at timestamp 0 —
        // presumably fine because the output path never reads the
        // timestamp; confirm. Decode errors surface through the
        // decoder's error callback, so the result here is discarded.
        let _ = self.0.decode(
            &EncodedAudioChunk::new(&EncodedAudioChunkInit::new(
                &js_audio_payload.into(),
                0.0,
                EncodedAudioChunkType::Key,
            ))
            .unwrap(),
        );
    }
}
|
||||
|
||||
// Borrowed from
|
||||
// https://github.com/security-union/videocall-rs/blob/main/videocall-client/src/decode/config.rs#L6
|
||||
fn configure_audio_context() -> AudioContext {
|
||||
let audio_context_options = AudioContextOptions::new();
|
||||
audio_context_options.set_sample_rate(48000 as f32);
|
||||
let audio_context = AudioContext::new_with_context_options(&audio_context_options).unwrap();
|
||||
audio_context
|
||||
}
|
||||
|
||||
trait PromiseExt {
|
||||
fn into_future(self) -> JsFuture;
|
||||
}
|
||||
|
||||
impl PromiseExt for Promise {
|
||||
fn into_future(self) -> JsFuture {
|
||||
self.into()
|
||||
}
|
||||
}
|
||||
|
||||
fn process_audio(frame: &JsValue, processor: &mut AudioProcessor) -> TransmitState {
|
||||
let Ok(samples) = Reflect::get(&frame, &"data".into()) else {
|
||||
return TransmitState::Silent;
|
||||
};
|
||||
let Ok(samples) = samples.dyn_into::<Float32Array>() else {
|
||||
return TransmitState::Silent;
|
||||
};
|
||||
let input = samples.to_vec();
|
||||
let mut output = Vec::with_capacity(input.len());
|
||||
let state = processor.process(&input, 1, &mut output);
|
||||
if !output.is_empty() {
|
||||
samples.copy_from(&output);
|
||||
}
|
||||
|
||||
state
|
||||
}
|
||||
|
||||
/// Full microphone capture pipeline:
/// `getUserMedia` -> mic worklet -> `AudioProcessor` DSP ->
/// `AudioEncoder` (Opus) -> `each(packet, is_terminator)`.
///
/// Returns the mic worklet node on success so the caller can keep/log it.
async fn run_encoder_worklet(
    audio_context: &AudioContext,
    mut each: impl FnMut(Vec<u8>, bool) + 'static,
    processors: AudioProcessorSender,
) -> Result<AudioWorkletNode, Error> {
    // Request an audio-only MediaStream; this is what triggers the
    // browser's mic-permission prompt.
    let constraints = MediaStreamConstraints::new();
    constraints.set_audio(&JsValue::TRUE);
    let stream = window()
        .unwrap()
        .navigator()
        .media_devices()
        .ey()?
        .get_user_media_with_constraints(&constraints)
        .ey()?
        .into_future()
        .await
        .ey()?
        .dyn_into()
        .map_err(|e| JsError::new(&format!("not a stream: {e:?}")))
        .ey()?;

    let source = audio_context.create_media_stream_source(&stream).ey()?;
    let worklet_node = AudioWorkletNode::new(audio_context, "rust_mic_worklet").ey()?;

    let encoder_error: Closure<dyn FnMut(JsValue)> =
        Closure::new(|e| error!("error encoding audio {:?}", e));

    // Shared state to signal terminator between onmessage and output closures
    // The output closure runs asynchronously after encoding completes
    let pending_terminator = Arc::new(AtomicCell::new(false));
    let pending_terminator_output = pending_terminator.clone();

    // This knows what MediaStreamTrackGenerator to use as it closes around it
    let output: Closure<dyn FnMut(EncodedAudioChunk)> =
        Closure::new(move |audio_data: EncodedAudioChunk| {
            let mut array = vec![0u8; audio_data.byte_length() as usize];
            audio_data.copy_to_with_u8_slice(&mut array);
            // Check if this frame was marked as a terminator
            // (swap resets the flag so it applies to exactly one packet)
            let is_terminator = pending_terminator_output.swap(false);
            each(array, is_terminator);
        });

    let audio_encoder = AudioEncoder::new(&AudioEncoderInit::new(
        encoder_error.as_ref().unchecked_ref(),
        output.as_ref().unchecked_ref(),
    ))
    .unwrap();

    // This is required to prevent these from being deallocated
    encoder_error.forget();
    output.forget();
    // Opus, mono, 48 kHz at 72 kbit/s — matches the decoder config
    // used in `create_player`.
    let encoder_config = AudioEncoderConfig::new("opus");
    encoder_config.set_number_of_channels(1);
    encoder_config.set_sample_rate(48000);
    encoder_config.set_bitrate(72_000.0);

    audio_encoder.configure(&encoder_config);
    info!("created audio encoder");

    // Per-frame handler: run the DSP chain, then encode anything that
    // should be transmitted.
    let mut current_processor = AudioProcessor::new(false);
    let onmessage: Closure<dyn FnMut(MessageEvent)> = Closure::new(move |event: MessageEvent| {
        // Swap in a newer processor if one was queued via `set_processor`.
        if let Some(new_processor) = processors.take() {
            current_processor = new_processor;
        }

        let frame = event.data();
        let state = process_audio(&frame, &mut current_processor);

        match state {
            TransmitState::Silent => {
                // Don't encode or send anything
                return;
            }
            TransmitState::Transmitting => (), // Normal transmission
            TransmitState::Terminator => {
                // Mark this as a terminator before encoding
                pending_terminator.store(true);
            }
        }
        match AudioData::new(frame.unchecked_ref()) {
            Ok(data) => {
                let _ = audio_encoder.encode(&data);
            }
            Err(err) => {
                error!(
                    "error creating AudioData object {:?} during event {:?}",
                    err, event,
                );
            }
        }
    });
    // Install the handler as `worklet_node.port.onmessage`.
    Reflect::set(
        &Reflect::get(&worklet_node, &"port".into()).ey()?,
        &"onmessage".into(),
        onmessage.as_ref(),
    )
    .ey()?;
    onmessage.forget();

    // Wire mic source -> worklet -> destination; the worklet forwards
    // frames to us over its message port.
    source.connect_with_audio_node(&worklet_node).ey()?;
    worklet_node
        .connect_with_audio_node(&audio_context.destination())
        .ey()?;

    Ok(worklet_node)
}
|
||||
|
||||
/// Establish the WebTransport control connection to `address` and run
/// the client's sender + network loops until the connection ends.
///
/// When `overrides.cert_hash` is set it is installed as an allowed
/// SHA-256 server-certificate hash (for self-signed proxy certs).
#[instrument]
pub async fn network_connect(
    address: String,
    username: String,
    event_rx: &mut UnboundedReceiver<Command>,
    overrides: &ProxyOverrides,
    state: SharedState,
) -> Result<(), Error> {
    info!("connecting");

    // Build `{ algorithm: "sha-256", value: <hash bytes> }` for
    // WebTransportOptions.serverCertificateHashes.
    let object = web_sys::js_sys::Object::new();

    Reflect::set(
        &object,
        &JsValue::from_str("algorithm"),
        &JsValue::from_str("sha-256"),
    )
    .ey()?;

    if let Some(server_hash) = &overrides.cert_hash {
        let hash = web_sys::js_sys::Uint8Array::from(server_hash.as_slice());
        web_sys::js_sys::Reflect::set(&object, &"value".into(), &hash).ey()?;
    }
    // NOTE(review): when `cert_hash` is None the entry is still pushed
    // with only `algorithm` set — confirm browsers tolerate that.

    let array = web_sys::js_sys::Array::new();
    array.push(&object);

    debug!("created option object: {:?}", &object);

    let mut options = WebTransportOptions::new();
    options.set_server_certificate_hashes(&array);

    debug!("created WebTransportOptions");
    console::log_1(&options.clone().into());

    let transport = WebTransport::new_with_options(&address, &options).ey()?;
    debug!("created WebTransport connection object");
    console::log_1(&transport.clone().into());

    // `ready()` resolves once the handshake completes.
    if let Err(e) = wasm_bindgen_futures::JsFuture::from(transport.ready())
        .await
        .ey()
    {
        bail!("could not connect to transport: {e}");
    }

    info!("transport is ready");

    // One bidirectional stream carries the control protocol.
    let stream: WebTransportBidirectionalStream =
        wasm_bindgen_futures::JsFuture::from(transport.create_bidirectional_stream())
            .await
            .ey()?
            .into();

    let wasm_stream_readable = wasm_streams::ReadableStream::from_raw(stream.readable().into());
    let wasm_stream_writable = wasm_streams::WritableStream::from_raw(stream.writable().into());

    // Frame the raw byte streams with the Mumble control codec.
    let read_codec = ClientControlCodec::new();
    let write_codec = ClientControlCodec::new();

    let reader =
        asynchronous_codec::FramedRead::new(wasm_stream_readable.into_async_read(), read_codec);
    let writer =
        asynchronous_codec::FramedWrite::new(wasm_stream_writable.into_async_write(), write_codec);

    // Writes go through a channel to a background sender task; this
    // call then blocks on the main network loop until disconnect.
    let (outgoing_send, outgoing_recv) = futures_channel::mpsc::unbounded();
    spawn(crate::sender_loop(outgoing_recv, writer));
    crate::network_loop(username, state, event_rx, outgoing_send, reader).await
}
|
||||
|
||||
pub fn absolute_url(path: &str) -> Result<Url, Error> {
|
||||
let window: web_sys::Window = web_sys::window().expect("no global `window` exists");
|
||||
let location = window.location();
|
||||
Ok(Url::parse(&location.href().ey()?)?.join(path)?)
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub struct WebConfigSystem {}
|
||||
|
||||
impl super::ConfigSystemInterface for WebConfigSystem {
|
||||
fn new() -> Result<Self, Error> {
|
||||
return Ok(WebConfigSystem {});
|
||||
}
|
||||
|
||||
fn config_get<T>(&self, key: &str) -> Option<T>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
{
|
||||
// Get Storage
|
||||
let storage = web_sys::window()?.local_storage().ok()??;
|
||||
|
||||
// Try localStorage first
|
||||
if let Ok(Some(raw)) = storage.get_item(key) {
|
||||
if let Ok(parsed) = serde_json::from_str::<T>(&raw) {
|
||||
return Some(parsed);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to default if deserialization fails or key missing
|
||||
let default_value = config_get_default(key)?;
|
||||
serde_json::from_value::<T>(default_value).ok()
|
||||
}
|
||||
|
||||
fn config_set<T>(&self, key: &str, value: &T)
|
||||
where
|
||||
T: serde::Serialize,
|
||||
{
|
||||
let storage = window()
|
||||
.and_then(|w| w.local_storage().ok().flatten())
|
||||
.expect("localStorage not available");
|
||||
|
||||
let json_value =
|
||||
serde_json::to_string(value).expect("failed to serialize config value to JSON string");
|
||||
|
||||
storage
|
||||
.set_item(key, &json_value)
|
||||
.expect("failed to write to localStorage");
|
||||
}
|
||||
}
|
||||
|
||||
fn config_get_default(key: &str) -> Option<serde_json::Value> {
|
||||
let default_config = platform_default_config();
|
||||
default_config
|
||||
.get(key)
|
||||
.cloned()
|
||||
.or(super::global_default_config().get(key).cloned())
|
||||
}
|
||||
|
||||
fn platform_default_config() -> HashMap<String, serde_json::Value> {
|
||||
serde_json::json!({})
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.clone()
|
||||
.into_iter()
|
||||
.collect()
|
||||
}
|
||||
Reference in New Issue
Block a user